diff --git a/pandas/io/tests/test_date_converters.py b/pandas/io/tests/test_date_converters.py
index 6aa1f7e1786a1..e1e6286aabcc1 100644
--- a/pandas/io/tests/test_date_converters.py
+++ b/pandas/io/tests/test_date_converters.py
@@ -49,7 +49,7 @@ def test_parse_date_time(self):
datecols = {'date_time': [0, 1]}
df = read_table(StringIO(data), sep=',', header=0,
parse_dates=datecols, date_parser=conv.parse_date_time)
- self.assert_('date_time' in df)
+ self.assertIn('date_time', df)
self.assertEqual(df.date_time.ix[0], datetime(2001, 1, 5, 10, 0, 0))

data = ("KORD,19990127, 19:00:00, 18:56:00, 0.8100\n"
@@ -73,7 +73,7 @@ def test_parse_date_fields(self):
df = read_table(StringIO(data), sep=',', header=0,
parse_dates=datecols,
date_parser=conv.parse_date_fields)
- self.assert_('ymd' in df)
+ self.assertIn('ymd', df)
self.assertEqual(df.ymd.ix[0], datetime(2001, 1, 10))

def test_datetime_six_col(self):
@@ -90,7 +90,7 @@ def test_datetime_six_col(self):
df = read_table(StringIO(data), sep=',', header=0,
parse_dates=datecols,
date_parser=conv.parse_all_fields)
- self.assert_('ymdHMS' in df)
+ self.assertIn('ymdHMS', df)
self.assertEqual(df.ymdHMS.ix[0], datetime(2001, 1, 5, 10, 0, 0))

def test_datetime_fractional_seconds(self):
@@ -103,7 +103,7 @@ def test_datetime_fractional_seconds(self):
df = read_table(StringIO(data), sep=',', header=0,
parse_dates=datecols,
date_parser=conv.parse_all_fields)
- self.assert_('ymdHMS' in df)
+ self.assertIn('ymdHMS', df)
self.assertEqual(df.ymdHMS.ix[0], datetime(2001, 1, 5, 10, 0, 0,
microsecond=123456))
self.assertEqual(df.ymdHMS.ix[1], datetime(2001, 1, 5, 10, 0, 0,
@@ -116,7 +116,7 @@ def test_generic(self):
df = read_table(StringIO(data), sep=',', header=0,
parse_dates=datecols,
date_parser=dateconverter)
- self.assert_('ym' in df)
+ self.assertIn('ym', df)
self.assertEqual(df.ym.ix[0], date(2001, 1, 1))
diff --git a/pandas/io/tests/test_html.py b/pandas/io/tests/test_html.py
index 893b1768b00c3..77c15a6c58657 100644
--- a/pandas/io/tests/test_html.py
+++ b/pandas/io/tests/test_html.py
@@ -499,10 +499,10 @@ def test_gold_canyon(self):
with open(self.banklist_data, 'r') as f:
raw_text = f.read()

- self.assert_(gc in raw_text)
+ self.assertIn(gc, raw_text)
df = self.read_html(self.banklist_data, 'Gold Canyon',
attrs={'id': 'table'})[0]
- self.assert_(gc in df.to_string())
+ self.assertIn(gc, df.to_string())

def test_different_number_of_rows(self):
expected = """
diff --git a/pandas/io/tests/test_parsers.py b/pandas/io/tests/test_parsers.py
index efbd35bf4fe80..35cbb8089cbe7 100644
--- a/pandas/io/tests/test_parsers.py
+++ b/pandas/io/tests/test_parsers.py
@@ -302,11 +302,11 @@ def func(*date_cols):
prefix='X',
parse_dates={'nominal': [1, 2],
'actual': [1, 3]})
- self.assert_('nominal' in df)
- self.assert_('actual' in df)
- self.assert_('X1' not in df)
- self.assert_('X2' not in df)
- self.assert_('X3' not in df)
+ self.assertIn('nominal', df)
+ self.assertIn('actual', df)
+ self.assertNotIn('X1', df)
+ self.assertNotIn('X2', df)
+ self.assertNotIn('X3', df)

d = datetime(1999, 1, 27, 19, 0)
self.assertEqual(df.ix[0, 'nominal'], d)
@@ -316,12 +316,12 @@ def func(*date_cols):
parse_dates={'nominal': [1, 2],
'actual': [1, 3]},
keep_date_col=True)
- self.assert_('nominal' in df)
- self.assert_('actual' in df)
+ self.assertIn('nominal', df)
+ self.assertIn('actual', df)

- self.assert_(1 in df)
- self.assert_(2 in df)
- self.assert_(3 in df)
+ self.assertIn(1, df)
+ self.assertIn(2, df)
+ self.assertIn(3, df)

data = """\
KORD,19990127, 19:00:00, 18:56:00, 0.8100, 2.8100, 7.2000, 0.0000, 280.0000
@@ -335,11 +335,11 @@ def func(*date_cols):
prefix='X',
parse_dates=[[1, 2], [1, 3]])
- self.assert_('X1_X2' in df)
- self.assert_('X1_X3' in df)
- self.assert_('X1' not in df)
- self.assert_('X2' not in df)
- self.assert_('X3' not in df)
+ self.assertIn('X1_X2', df)
+ self.assertIn('X1_X3', df)
+ self.assertNotIn('X1', df)
+ self.assertNotIn('X2', df)
+ self.assertNotIn('X3', df)

d = datetime(1999, 1, 27, 19, 0)
self.assertEqual(df.ix[0, 'X1_X2'], d)
@@ -347,11 +347,11 @@ def func(*date_cols):
df = read_csv(StringIO(data), header=None,
parse_dates=[[1, 2], [1, 3]], keep_date_col=True)
- self.assert_('1_2' in df)
- self.assert_('1_3' in df)
- self.assert_(1 in df)
- self.assert_(2 in df)
- self.assert_(3 in df)
+ self.assertIn('1_2', df)
+ self.assertIn('1_3', df)
+ self.assertIn(1, df)
+ self.assertIn(2, df)
+ self.assertIn(3, df)

data = '''\
KORD,19990127 19:00:00, 18:56:00, 0.8100, 2.8100, 7.2000, 0.0000, 280.0000
@@ -378,7 +378,7 @@ def test_multiple_date_cols_int_cast(self):
# it works!
df = self.read_csv(StringIO(data), header=None, parse_dates=date_spec,
date_parser=conv.parse_date_time)
- self.assert_('nominal' in df)
+ self.assertIn('nominal', df)

def test_multiple_date_col_timestamp_parse(self):
data = """05/31/2012,15:30:00.029,1306.25,1,E,0,,1306.25
@@ -523,7 +523,7 @@ def test_malformed(self):
StringIO(data), sep=',', header=1, comment='#')
self.assert_(False)
except Exception as inst:
- self.assert_('Expected 3 fields in line 4, saw 5' in str(inst))
+ self.assertIn('Expected 3 fields in line 4, saw 5', str(inst))

# skip_footer
data = """ignore
@@ -540,7 +540,7 @@ def test_malformed(self):
skip_footer=1)
self.assert_(False)
except Exception as inst:
- self.assert_('Expected 3 fields in line 4, saw 5' in str(inst))
+ self.assertIn('Expected 3 fields in line 4, saw 5', str(inst))

# first chunk
data = """ignore
@@ -558,7 +558,7 @@ def test_malformed(self):
df = it.read(5)
self.assert_(False)
except Exception as inst:
- self.assert_('Expected 3 fields in line 6, saw 5' in str(inst))
+ self.assertIn('Expected 3 fields in line 6, saw 5', str(inst))

# middle chunk
data = """ignore
@@ -577,7 +577,7 @@ def test_malformed(self):
it.read(2)
self.assert_(False)
except Exception as inst:
- self.assert_('Expected 3 fields in line 6, saw 5' in str(inst))
+ self.assertIn('Expected 3 fields in line 6, saw 5', str(inst))

# last chunk
data = """ignore
@@ -596,7 +596,7 @@ def test_malformed(self):
it.read()
self.assert_(False)
except Exception as inst:
- self.assert_('Expected 3 fields in line 6, saw 5' in str(inst))
+ self.assertIn('Expected 3 fields in line 6, saw 5', str(inst))

def test_passing_dtype(self):
@@ -1698,7 +1698,7 @@ def test_multiple_date_cols_chunked(self):
chunks = list(reader)
- self.assert_('nominalTime' not in df)
+ self.assertNotIn('nominalTime', df)
tm.assert_frame_equal(chunks[0], df[:2])
tm.assert_frame_equal(chunks[1], df[2:4])
diff --git a/pandas/io/tests/test_pytables.py b/pandas/io/tests/test_pytables.py
index dcdd5408c3376..75ae124c7e3e9 100644
--- a/pandas/io/tests/test_pytables.py
+++ b/pandas/io/tests/test_pytables.py
@@ -363,18 +363,18 @@ def test_contains(self):
store['a'] = tm.makeTimeSeries()
store['b'] = tm.makeDataFrame()
store['foo/bar'] = tm.makeDataFrame()

- self.assert_('a' in store)
- self.assert_('b' in store)
- self.assert_('c' not in store)
- self.assert_('foo/bar' in store)
- self.assert_('/foo/bar' in store)
- self.assert_('/foo/b' not in store)
- self.assert_('bar' not in store)
+ self.assertIn('a', store)
+ self.assertIn('b', store)
+ self.assertNotIn('c', store)
+ self.assertIn('foo/bar', store)
+ self.assertIn('/foo/bar', store)
+ self.assertNotIn('/foo/b', store)
+ self.assertNotIn('bar', store)

# GH 2694
warnings.filterwarnings('ignore', category=tables.NaturalNameWarning)
store['node())'] = tm.makeDataFrame()
- self.assert_('node())' in store)
+ self.assertIn('node())', store)

def test_versioning(self):
@@ -3873,10 +3873,10 @@ def test_multiple_open_close(self):
# single
store = HDFStore(path)
- self.assert_('CLOSED' not in str(store))
+ self.assertNotIn('CLOSED', str(store))
self.assert_(store.is_open)
store.close()
- self.assert_('CLOSED' in str(store))
+ self.assertIn('CLOSED', str(store))
self.assert_(not store.is_open)

with ensure_clean_path(self.path) as path:
@@ -3896,20 +3896,20 @@ def f():
store1 = HDFStore(path)
store2 = HDFStore(path)

- self.assert_('CLOSED' not in str(store1))
- self.assert_('CLOSED' not in str(store2))
+ self.assertNotIn('CLOSED', str(store1))
+ self.assertNotIn('CLOSED', str(store2))
self.assert_(store1.is_open)
self.assert_(store2.is_open)

store1.close()
- self.assert_('CLOSED' in str(store1))
+ self.assertIn('CLOSED', str(store1))
self.assert_(not store1.is_open)
- self.assert_('CLOSED' not in str(store2))
+ self.assertNotIn('CLOSED', str(store2))
self.assert_(store2.is_open)

store2.close()
- self.assert_('CLOSED' in str(store1))
- self.assert_('CLOSED' in str(store2))
+ self.assertIn('CLOSED', str(store1))
+ self.assertIn('CLOSED', str(store2))
self.assert_(not store1.is_open)
self.assert_(not store2.is_open)
@@ -3920,11 +3920,11 @@ def f():
store2 = HDFStore(path)
store2.append('df2',df)
store2.close()
- self.assert_('CLOSED' in str(store2))
+ self.assertIn('CLOSED', str(store2))
self.assert_(not store2.is_open)

store.close()
- self.assert_('CLOSED' in str(store))
+ self.assertIn('CLOSED', str(store))
self.assert_(not store.is_open)

# double closing
@@ -3933,11 +3933,11 @@ def f():
store2 = HDFStore(path)
store.close()
- self.assert_('CLOSED' in str(store))
+ self.assertIn('CLOSED', str(store))
self.assert_(not store.is_open)
store2.close()
- self.assert_('CLOSED' in str(store2))
+ self.assertIn('CLOSED', str(store2))
self.assert_(not store2.is_open)

# ops on a closed store
diff --git a/pandas/sparse/tests/test_sparse.py b/pandas/sparse/tests/test_sparse.py
index 1da151fbdb5d6..603edbf2de0a1 100644
--- a/pandas/sparse/tests/test_sparse.py
+++ b/pandas/sparse/tests/test_sparse.py
@@ -1200,15 +1200,15 @@ def test_delitem(self):
C = self.frame['C']
del self.frame['B']
- self.assert_('B' not in self.frame)
+ self.assertNotIn('B', self.frame)
assert_sp_series_equal(self.frame['A'], A)
assert_sp_series_equal(self.frame['C'], C)

del self.frame['D']
- self.assert_('D' not in self.frame)
+ self.assertNotIn('D', self.frame)

del self.frame['A']
- self.assert_('A' not in self.frame)
+ self.assertNotIn('A', self.frame)

def test_set_columns(self):
self.frame.columns = self.frame.columns
@@ -1372,11 +1372,11 @@ def _check_frame(frame):
# with copy=False
reindexed = self.frame.reindex(self.frame.index, copy=False)
reindexed['F'] = reindexed['A']
- self.assert_('F' in self.frame)
+ self.assertIn('F', self.frame)

reindexed = self.frame.reindex(self.frame.index)
reindexed['G'] = reindexed['A']
- self.assert_('G' not in self.frame)
+ self.assertNotIn('G', self.frame)

def test_reindex_fill_value(self):
rng = bdate_range('20110110', periods=20)
@@ -1702,7 +1702,7 @@ def _compare_with_dense(swp, items, major, minor):
# test copying
cp = self.panel.reindex(self.panel.major_axis, copy=True)
cp['ItemA']['E'] = cp['ItemA']['A']
- self.assert_('E' not in self.panel['ItemA'])
+ self.assertNotIn('E', self.panel['ItemA'])

def test_operators(self):
def _check_ops(panel):
diff --git a/pandas/tools/tests/test_merge.py b/pandas/tools/tests/test_merge.py
index 3dee4a671e1f9..286488d704b70 100644
--- a/pandas/tools/tests/test_merge.py
+++ b/pandas/tools/tests/test_merge.py
@@ -179,15 +179,15 @@ def test_handle_overlap(self):
joined = merge(self.df, self.df2, on='key2',
suffixes=['.foo', '.bar'])

- self.assert_('key1.foo' in joined)
- self.assert_('key1.bar' in joined)
+ self.assertIn('key1.foo', joined)
+ self.assertIn('key1.bar', joined)

def test_handle_overlap_arbitrary_key(self):
joined = merge(self.df, self.df2,
left_on='key2', right_on='key1',
suffixes=['.foo', '.bar'])
- self.assert_('key1.foo' in joined)
- self.assert_('key2.bar' in joined)
+ self.assertIn('key1.foo', joined)
+ self.assertIn('key2.bar', joined)

def test_merge_common(self):
joined = merge(self.df, self.df2)
@@ -269,7 +269,7 @@ def test_join_with_len0(self):
# nothing to merge
merged = self.target.join(self.source.reindex([]), on='C')
for col in self.source:
- self.assert_(col in merged)
+ self.assertIn(col, merged)
self.assert_(merged[col].isnull().all())

merged2 = self.target.join(self.source.reindex([]), on='C',
@@ -565,8 +565,8 @@ def test_merge_overlap(self):
merged = merge(self.left, self.left, on='key')
exp_len = (self.left['key'].value_counts() ** 2).sum()
self.assertEqual(len(merged), exp_len)
- self.assert_('v1_x' in merged)
- self.assert_('v1_y' in merged)
+ self.assertIn('v1_x', merged)
+ self.assertIn('v1_y', merged)

def test_merge_different_column_key_names(self):
left = DataFrame({'lkey': ['foo', 'bar', 'baz', 'foo'],
@@ -1222,10 +1222,10 @@ def test_append(self):
del end_frame['A']
partial_appended = begin_frame.append(end_frame)
- self.assert_('A' in partial_appended)
+ self.assertIn('A', partial_appended)

partial_appended = end_frame.append(begin_frame)
- self.assert_('A' in partial_appended)
+ self.assertIn('A', partial_appended)

# mixed type handling
appended = self.mixed_frame[:5].append(self.mixed_frame[5:])
diff --git a/pandas/tseries/tests/test_period.py b/pandas/tseries/tests/test_period.py
index f008e0f8e22a2..4a4fbb146861d 100644
--- a/pandas/tseries/tests/test_period.py
+++ b/pandas/tseries/tests/test_period.py
@@ -53,7 +53,7 @@ def test_period_cons_quarterly(self):
for month in MONTHS:
freq = 'Q-%s' % month
exp = Period('1989Q3', freq=freq)
- self.assert_('1989Q3' in str(exp))
+ self.assertIn('1989Q3', str(exp))
stamp = exp.to_timestamp('D', how='end')
p = Period(stamp, freq=freq)
self.assertEquals(p, exp)
@@ -203,10 +203,10 @@ def test_freq_str(self):
def test_repr(self):
p = Period('Jan-2000')
- self.assert_('2000-01' in repr(p))
+ self.assertIn('2000-01', repr(p))

p = Period('2000-12-15')
- self.assert_('2000-12-15' in repr(p))
+ self.assertIn('2000-12-15', repr(p))

def test_millisecond_repr(self):
p = Period('2000-01-01 12:15:02.123')
diff --git a/pandas/tseries/tests/test_timeseries.py b/pandas/tseries/tests/test_timeseries.py
index e6c33ae94e289..d01548ee79e32 100644
--- a/pandas/tseries/tests/test_timeseries.py
+++ b/pandas/tseries/tests/test_timeseries.py
@@ -170,7 +170,7 @@ def test_indexing_over_size_cutoff(self):
pos = n * 3
timestamp = df.index[pos]
- self.assert_(timestamp in df.index)
+ self.assertIn(timestamp, df.index)

# it works!
df.ix[timestamp]
@@ -1034,7 +1034,7 @@ def test_reasonable_keyerror(self):
try:
index.get_loc('1/1/2000')
except KeyError as e:
- self.assert_('2000' in str(e))
+ self.assertIn('2000', str(e))

def test_reindex_with_datetimes(self):
rng = date_range('1/1/2000', periods=20)
@@ -1521,7 +1521,7 @@ def test_timestamp_repr(self):
iso8601 = '1850-01-01 01:23:45.012345'
stamp = Timestamp(iso8601, tz='US/Eastern')
result = repr(stamp)
- self.assert_(iso8601 in result)
+ self.assertIn(iso8601, result)

def test_timestamp_from_ordinal(self):
@@ -1742,7 +1742,7 @@ def test_to_html_timestamp(self):
df = DataFrame(np.random.randn(10, 4), index=rng)
result = df.to_html()
- self.assert_('2000-01-01' in result)
+ self.assertIn('2000-01-01', result)

def test_to_csv_numpy_16_bug(self):
frame = DataFrame({'a': date_range('1/1/2000', periods=10)})
@@ -1751,7 +1751,7 @@ def test_to_csv_numpy_16_bug(self):
frame.to_csv(buf)
result = buf.getvalue()
- self.assert_('2000-01-01' in result)
+ self.assertIn('2000-01-01', result)

def test_series_map_box_timestamps(self):
# #2689, #2627
diff --git a/pandas/tseries/tests/test_timezones.py b/pandas/tseries/tests/test_timezones.py
index 00d5cf2cab754..dda722366e53e 100644
--- a/pandas/tseries/tests/test_timezones.py
+++ b/pandas/tseries/tests/test_timezones.py
@@ -269,7 +269,7 @@ def test_utc_box_timestamp_and_localize(self):
# right tzinfo
rng = date_range('3/13/2012', '3/14/2012', freq='H', tz='utc')
rng_eastern = rng.tz_convert('US/Eastern')
- self.assert_('EDT' in repr(rng_eastern[0].tzinfo))
+ self.assertIn('EDT', repr(rng_eastern[0].tzinfo))

def test_timestamp_tz_convert(self):
strdates = ['1/1/2012', '3/1/2012', '4/1/2012']
@@ -426,7 +426,7 @@ def test_index_with_timezone_repr(self):
rng_eastern = rng.tz_localize('US/Eastern')
rng_repr = repr(rng_eastern)
- self.assert_('2010-04-13 00:00:00' in rng_repr)
+ self.assertIn('2010-04-13 00:00:00', rng_repr)

def test_index_astype_asobject_tzinfos(self):
# #1345