
Commit 36c1263

Merge pull request #3914 from jtratner/fix-network-using-tests

TST: Move explicit connectivity checks to decorator.

2 parents 78a71b1 + 7ddb586

File tree: 7 files changed (+252, −197 lines)


doc/source/release.rst (+5)

@@ -91,6 +91,11 @@ pandas 0.11.1
     integers or floats that are in an epoch unit of ``D, s, ms, us, ns``, thanks @mtkini (:issue:`3969`)
     (e.g. unix timestamps or epoch ``s``, with fracional seconds allowed) (:issue:`3540`)
   - DataFrame corr method (spearman) is now cythonized.
+  - Improved ``network`` test decorator to catch ``IOError`` (and therefore
+    ``URLError`` as well). Added ``with_connectivity_check`` decorator to allow
+    explicitly checking a website as a proxy for seeing if there is network
+    connectivity. Plus, new ``optional_args`` decorator factory for decorators.
+    (:issue:`3910`, :issue:`3914`)
 
 **API Changes**
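Note: the three additions named above fit together. As a rough illustration, here is a minimal sketch, assuming Python 2 and nose — it mirrors the inline try/except blocks that the test diffs below delete, and is not the actual pandas.util.testing implementation:

import functools
import urllib2

import nose


def with_connectivity_check(url="http://www.google.com"):
    # Sketch: run the test; on IOError (URLError subclasses it in Python 2),
    # probe a known-good URL to decide between "no network" (skip) and
    # "the site under test is broken" (fail).
    def decorate(test_func):
        @functools.wraps(test_func)  # keep the test_* name for nose discovery
        def wrapper(*args, **kwargs):
            try:
                return test_func(*args, **kwargs)
            except IOError:
                try:
                    urllib2.urlopen(url)
                except IOError:
                    raise nose.SkipTest  # check URL unreachable: offline
                raise  # check URL reachable: genuine test failure
        return wrapper
    return decorate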
doc/source/v0.11.1.txt (+5)

@@ -386,6 +386,11 @@ Bug Fixes
   - ``read_html`` now correctly skips tests (:issue:`3741`)
   - Fixed a bug where ``DataFrame.replace`` with a compiled regular expression
     in the ``to_replace`` argument wasn't working (:issue:`3907`)
+  - Improved ``network`` test decorator to catch ``IOError`` (and therefore
+    ``URLError`` as well). Added ``with_connectivity_check`` decorator to allow
+    explicitly checking a website as a proxy for seeing if there is network
+    connectivity. Plus, new ``optional_args`` decorator factory for decorators.
+    (:issue:`3910`, :issue:`3914`)
 
 See the :ref:`full release notes
 <release>` or issue tracker
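Note: the notes also mention an ``optional_args`` decorator factory, whose body is not shown on this page. The following is only a guess at the intent the name suggests — letting a decorator be applied either bare (``@network``) or with arguments (``@network(url=...)``). The helper name comes from the release note; this implementation is hypothetical:

import functools


def optional_args(decorator):
    @functools.wraps(decorator)
    def wrapper(*args, **kwargs):
        # Bare form: @deco -- the sole argument is the decorated function.
        if len(args) == 1 and not kwargs and callable(args[0]):
            return decorator(args[0])
        # Parameterized form: @deco(...) -- defer to a second call.
        return lambda f: decorator(f, *args, **kwargs)
    return wrapper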

pandas/io/tests/test_fred.py (+9, −17)

@@ -8,7 +8,7 @@
 import pandas.io.data as web
 from pandas.util.testing import (network, assert_frame_equal,
                                  assert_series_equal,
-                                 assert_almost_equal)
+                                 assert_almost_equal, with_connectivity_check)
 from numpy.testing.decorators import slow
 
 import urllib2

@@ -17,7 +17,7 @@
 class TestFred(unittest.TestCase):
 
     @slow
-    @network
+    @with_connectivity_check("http://www.google.com")
     def test_fred(self):
         """
         Throws an exception when DataReader can't get a 200 response from

@@ -26,22 +26,14 @@ def test_fred(self):
         start = datetime(2010, 1, 1)
         end = datetime(2013, 01, 27)
 
-        try:
-            self.assertEquals(
-                web.DataReader("GDP", "fred", start, end)['GDP'].tail(1),
-                16004.5)
+        self.assertEquals(
+            web.DataReader("GDP", "fred", start, end)['GDP'].tail(1),
+            16004.5)
 
-            self.assertRaises(
-                Exception,
-                lambda: web.DataReader("NON EXISTENT SERIES", 'fred',
-                                       start, end))
-        except urllib2.URLError:
-            try:
-                urllib2.urlopen('http://google.com')
-            except urllib2.URLError:
-                raise nose.SkipTest
-            else:
-                raise
+        self.assertRaises(
+            Exception,
+            lambda: web.DataReader("NON EXISTENT SERIES", 'fred',
+                                   start, end))
 
     @slow
     @network
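Note: to make the before/after concrete, a hedged usage sketch (the endpoint is hypothetical) of how a decorated test reads once the connectivity handling lives in the decorator:

import urllib2

from pandas.util.testing import with_connectivity_check


@with_connectivity_check("http://www.google.com")
def test_remote_fetch():
    # Hypothetical endpoint, for illustration only. If urlopen raises
    # IOError, the decorator probes http://www.google.com: unreachable
    # means nose.SkipTest (the machine is offline); reachable means the
    # original error propagates as a genuine test failure.
    assert urllib2.urlopen("http://example.com/data.csv").read()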

pandas/io/tests/test_ga.py (+12, −56)

@@ -1,26 +1,26 @@
+import os
 import unittest
-import nose
 from datetime import datetime
 
+import nose
 import pandas as pd
-import pandas.core.common as com
 from pandas import DataFrame
-from pandas.util.testing import network, assert_frame_equal
+from pandas.util.testing import network, assert_frame_equal, with_connectivity_check
 from numpy.testing.decorators import slow
 
+try:
+    import httplib2
+    from pandas.io.ga import GAnalytics, read_ga
+    from pandas.io.auth import AuthenticationConfigError, reset_token_store
+    from pandas.io import auth
+except ImportError:
+    raise nose.SkipTest
 
 class TestGoogle(unittest.TestCase):
 
     _multiprocess_can_split_ = True
 
     def test_remove_token_store(self):
-        import os
-        try:
-            import pandas.io.auth as auth
-            from pandas.io.ga import reset_token_store
-        except ImportError:
-            raise nose.SkipTest
-
         auth.DEFAULT_TOKEN_FILE = 'test.dat'
         with open(auth.DEFAULT_TOKEN_FILE, 'w') as fh:
             fh.write('test')

@@ -31,13 +31,6 @@ def test_remove_token_store(self):
     @slow
     @network
     def test_getdata(self):
-        try:
-            import httplib2
-            from pandas.io.ga import GAnalytics, read_ga
-            from pandas.io.auth import AuthenticationConfigError
-        except ImportError:
-            raise nose.SkipTest
-
         try:
             end_date = datetime.now()
             start_date = end_date - pd.offsets.Day() * 5

@@ -76,24 +69,10 @@ def test_getdata(self):
 
         except AuthenticationConfigError:
             raise nose.SkipTest
-        except httplib2.ServerNotFoundError:
-            try:
-                h = httplib2.Http()
-                response, content = h.request("http://www.google.com")
-                raise
-            except httplib2.ServerNotFoundError:
-                raise nose.SkipTest
 
     @slow
-    @network
+    @with_connectivity_check("http://www.google.com")
     def test_iterator(self):
-        try:
-            import httplib2
-            from pandas.io.ga import GAnalytics, read_ga
-            from pandas.io.auth import AuthenticationConfigError
-        except ImportError:
-            raise nose.SkipTest
-
         try:
             reader = GAnalytics()
 

@@ -118,24 +97,10 @@ def test_iterator(self):
 
         except AuthenticationConfigError:
             raise nose.SkipTest
-        except httplib2.ServerNotFoundError:
-            try:
-                h = httplib2.Http()
-                response, content = h.request("http://www.google.com")
-                raise
-            except httplib2.ServerNotFoundError:
-                raise nose.SkipTest
 
     @slow
-    @network
+    @with_connectivity_check("http://www.google.com")
     def test_segment(self):
-        try:
-            import httplib2
-            from pandas.io.ga import GAnalytics, read_ga
-            from pandas.io.auth import AuthenticationConfigError
-        except ImportError:
-            raise nose.SkipTest
-
         try:
             end_date = datetime.now()
             start_date = end_date - pd.offsets.Day() * 5

@@ -186,16 +151,7 @@ def test_segment(self):
 
         except AuthenticationConfigError:
             raise nose.SkipTest
-        except httplib2.ServerNotFoundError:
-            try:
-                h = httplib2.Http()
-                response, content = h.request("http://www.google.com")
-                raise
-            except httplib2.ServerNotFoundError:
-                raise nose.SkipTest
-
 
 if __name__ == '__main__':
-    import nose
     nose.runmodule(argv=[__file__, '-vvs', '-x', '--pdb', '--pdb-failure'],
                    exit=False)
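Note: besides the decorator switch, test_ga.py hoists its optional-dependency imports to module level. Raising SkipTest at import time is a standard nose idiom — the whole module is reported as skipped instead of every test repeating its own guard. Minimal form of the pattern:

import nose

try:
    import httplib2  # optional dependency used by these tests
except ImportError:
    # Raised during import, so nose skips the entire module rather than
    # erroring at collection time.
    raise nose.SkipTest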

pandas/io/tests/test_google.py (+40, −91)

@@ -2,122 +2,71 @@
 import nose
 from datetime import datetime
 
+import numpy as np
 import pandas as pd
 import pandas.io.data as web
-from pandas.util.testing import (network, assert_series_equal)
-from numpy.testing.decorators import slow
-import numpy as np
-
-import urllib2
+from pandas.util.testing import network, with_connectivity_check
 
 
 class TestGoogle(unittest.TestCase):
 
-    @network
+    @with_connectivity_check("http://www.google.com")
     def test_google(self):
         # asserts that google is minimally working and that it throws
-        # an excecption when DataReader can't get a 200 response from
+        # an exception when DataReader can't get a 200 response from
        # google
         start = datetime(2010, 1, 1)
         end = datetime(2013, 01, 27)
 
-        try:
-            self.assertEquals(
-                web.DataReader("F", 'google', start, end)['Close'][-1],
-                13.68)
-        except urllib2.URLError:
-            try:
-                urllib2.urlopen('http://www.google.com')
-            except urllib2.URLError:
-                raise nose.SkipTest
-            else:
-                raise
-
-    @network
-    def test_google_non_existent(self):
-        # asserts that google is minimally working and that it throws
-        # an excecption when DataReader can't get a 200 response from
-        # google
-        start = datetime(2010, 1, 1)
-        end = datetime(2013, 01, 27)
+        self.assertEquals(
+            web.DataReader("F", 'google', start, end)['Close'][-1],
+            13.68)
 
-        try:
-            self.assertRaises(
-                Exception,
-                lambda: web.DataReader("NON EXISTENT TICKER", 'google',
-                                       start, end))
-        except urllib2.URLError:
-            try:
-                urllib2.urlopen('http://www.google.com')
-            except urllib2.URLError:
-                raise nose.SkipTest
-            else:
-                raise
+        self.assertRaises(
+            Exception,
+            lambda: web.DataReader("NON EXISTENT TICKER", 'google',
+                                   start, end))
 
 
     @network
     def test_get_quote(self):
         self.assertRaises(NotImplementedError,
                           lambda: web.get_quote_google(pd.Series(['GOOG', 'AAPL', 'GOOG'])))
 
-    @network
+    @with_connectivity_check('http://www.google.com')
     def test_get_goog_volume(self):
-        try:
-            df = web.get_data_google('GOOG')
-            assert df.Volume.ix['OCT-08-2010'] == 2863473
-        except IOError:
-            try:
-                urllib2.urlopen('http://www.google.com')
-            except IOError:
-                raise nose.SkipTest
-            else:
-                raise
+        df = web.get_data_google('GOOG')
+        assert df.Volume.ix['OCT-08-2010'] == 2863473
 
-    @network
+    @with_connectivity_check('http://www.google.com')
     def test_get_multi1(self):
-        try:
-            sl = ['AAPL', 'AMZN', 'GOOG']
-            pan = web.get_data_google(sl, '2012')
-            ts = pan.Close.GOOG.index[pan.Close.AAPL > pan.Close.GOOG]
-            assert ts[0].dayofyear == 96
-        except IOError:
-            try:
-                urllib2.urlopen('http://www.google.com')
-            except IOError:
-                raise nose.SkipTest
-            else:
-                raise
+        sl = ['AAPL', 'AMZN', 'GOOG']
+        pan = web.get_data_google(sl, '2012')
+        ts = pan.Close.GOOG.index[pan.Close.AAPL > pan.Close.GOOG]
+        assert ts[0].dayofyear == 96
 
-    @network
+    @with_connectivity_check('http://www.google.com')
     def test_get_multi2(self):
-        try:
-            pan = web.get_data_google(['GE', 'MSFT', 'INTC'], 'JAN-01-12', 'JAN-31-12')
-            expected = [19.02, 28.23, 25.39]
-            result = pan.Close.ix['01-18-12'][['GE', 'MSFT', 'INTC']].tolist()
-            assert result == expected
-
-            # sanity checking
-            t= np.array(result)
-            assert np.issubdtype(t.dtype, np.floating)
-            assert t.shape == (3,)
-
-            expected = [[ 18.99, 28.4 , 25.18],
-                        [ 18.58, 28.31, 25.13],
-                        [ 19.03, 28.16, 25.52],
-                        [ 18.81, 28.82, 25.87]]
-            result = pan.Open.ix['Jan-15-12':'Jan-20-12'][['GE', 'MSFT', 'INTC']].values
-            assert (result == expected).all()
-
-            # sanity checking
-            t= np.array(pan)
-            assert np.issubdtype(t.dtype, np.floating)
-        except IOError:
-            try:
-                urllib2.urlopen('http://www.google.com')
-            except IOError:
-                raise nose.SkipTest
-            else:
-                raise
+        pan = web.get_data_google(['GE', 'MSFT', 'INTC'], 'JAN-01-12', 'JAN-31-12')
+        expected = [19.02, 28.23, 25.39]
+        result = pan.Close.ix['01-18-12'][['GE', 'MSFT', 'INTC']].tolist()
+        assert result == expected
+
+        # sanity checking
+        t= np.array(result)
+        assert np.issubdtype(t.dtype, np.floating)
+        assert t.shape == (3,)
+
+        expected = [[ 18.99, 28.4 , 25.18],
+                    [ 18.58, 28.31, 25.13],
+                    [ 19.03, 28.16, 25.52],
+                    [ 18.81, 28.82, 25.87]]
+        result = pan.Open.ix['Jan-15-12':'Jan-20-12'][['GE', 'MSFT', 'INTC']].values
+        assert (result == expected).all()
+
+        # sanity checking
+        t= np.array(pan)
+        assert np.issubdtype(t.dtype, np.floating)
 
 if __name__ == '__main__':
     nose.runmodule(argv=[__file__, '-vvs', '-x', '--pdb', '--pdb-failure'],
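Note: the other half of the release note — ``network`` itself now catching ``IOError`` — lives in the two changed files not shown on this page (presumably pandas/util/testing.py and its tests). The following is an assumption-labeled sketch of the described behavior only, not the committed code:

import functools

import nose


def network(test_func):
    # Sketch of the documented behavior: any IOError raised by a network
    # test is converted into a skip. The real decorator has more machinery.
    @functools.wraps(test_func)
    def wrapper(*args, **kwargs):
        try:
            return test_func(*args, **kwargs)
        except IOError:
            raise nose.SkipTest
    return wrapper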
