From 9fb41da7249d5e6c122d4119ac9390d29dadc747 Mon Sep 17 00:00:00 2001
From: gfyoung
Date: Thu, 27 Apr 2017 11:37:37 -0400
Subject: [PATCH] MAINT: Remove self.assertEqual from testing

---
 pandas/compat/__init__.py | 2 +
 pandas/tests/computation/test_eval.py | 99 +-
 pandas/tests/dtypes/test_cast.py | 45 +-
 pandas/tests/dtypes/test_common.py | 16 +-
 pandas/tests/dtypes/test_concat.py | 10 +-
 pandas/tests/dtypes/test_dtypes.py | 38 +-
 pandas/tests/dtypes/test_inference.py | 144 +--
 pandas/tests/frame/test_alter_axes.py | 42 +-
 pandas/tests/frame/test_analytics.py | 60 +-
 pandas/tests/frame/test_api.py | 72 +-
 pandas/tests/frame/test_apply.py | 33 +-
 .../tests/frame/test_axis_select_reindex.py | 42 +-
 pandas/tests/frame/test_block_internals.py | 46 +-
 pandas/tests/frame/test_combine_concat.py | 38 +-
 pandas/tests/frame/test_constructors.py | 191 ++--
 pandas/tests/frame/test_convert_to.py | 22 +-
 pandas/tests/frame/test_dtypes.py | 17 +-
 pandas/tests/frame/test_indexing.py | 152 ++--
 pandas/tests/frame/test_missing.py | 2 +-
 pandas/tests/frame/test_mutate_columns.py | 12 +-
 pandas/tests/frame/test_nonunique_indexes.py | 4 +-
 pandas/tests/frame/test_operators.py | 16 +-
 pandas/tests/frame/test_period.py | 8 +-
 pandas/tests/frame/test_quantile.py | 40 +-
 pandas/tests/frame/test_query_eval.py | 2 +-
 pandas/tests/frame/test_replace.py | 2 +-
 pandas/tests/frame/test_repr_info.py | 29 +-
 pandas/tests/frame/test_reshape.py | 45 +-
 pandas/tests/frame/test_subclass.py | 22 +-
 pandas/tests/frame/test_timeseries.py | 18 +-
 pandas/tests/frame/test_to_csv.py | 44 +-
 pandas/tests/groupby/test_aggregate.py | 6 +-
 pandas/tests/groupby/test_categorical.py | 2 +-
 pandas/tests/groupby/test_groupby.py | 153 ++--
 pandas/tests/groupby/test_nth.py | 10 +-
 pandas/tests/groupby/test_timegrouper.py | 6 +-
 pandas/tests/groupby/test_transform.py | 4 +-
 pandas/tests/indexes/common.py | 34 +-
 pandas/tests/indexes/datetimes/test_astype.py | 32 +-
 .../indexes/datetimes/test_construction.py | 32 +-
 .../indexes/datetimes/test_date_range.py | 66 +-
 .../tests/indexes/datetimes/test_datetime.py | 102 +--
 .../tests/indexes/datetimes/test_indexing.py | 36 +-
 pandas/tests/indexes/datetimes/test_misc.py | 157 ++--
 pandas/tests/indexes/datetimes/test_ops.py | 120 ++-
 .../indexes/datetimes/test_partial_slicing.py | 24 +-
 pandas/tests/indexes/datetimes/test_setops.py | 22 +-
 pandas/tests/indexes/datetimes/test_tools.py | 143 ++-
 pandas/tests/indexes/period/test_asfreq.py | 116 +--
 .../tests/indexes/period/test_construction.py | 56 +-
 pandas/tests/indexes/period/test_indexing.py | 76 +-
 pandas/tests/indexes/period/test_ops.py | 88 +-
 .../indexes/period/test_partial_slicing.py | 2 +-
 pandas/tests/indexes/period/test_period.py | 150 ++-
 pandas/tests/indexes/period/test_setops.py | 14 +-
 pandas/tests/indexes/period/test_tools.py | 184 ++--
 pandas/tests/indexes/test_base.py | 274 +++---
 pandas/tests/indexes/test_category.py | 68 +-
 pandas/tests/indexes/test_interval.py | 96 +-
 pandas/tests/indexes/test_multi.py | 223 +++--
 pandas/tests/indexes/test_numeric.py | 40 +-
 pandas/tests/indexes/test_range.py | 94 +-
 .../indexes/timedeltas/test_construction.py | 4 +-
 .../tests/indexes/timedeltas/test_indexing.py | 12 +-
 pandas/tests/indexes/timedeltas/test_ops.py | 170 ++--
 .../timedeltas/test_partial_slicing.py | 4 +-
 .../tests/indexes/timedeltas/test_setops.py | 4 +-
 .../indexes/timedeltas/test_timedelta.py | 51 +-
 pandas/tests/indexes/timedeltas/test_tools.py | 36 +-
 pandas/tests/indexing/common.py | 2 +-
 pandas/tests/indexing/test_callable.py | 4 +-
 .../indexing/test_chaining_and_caching.py | 8 +-
 pandas/tests/indexing/test_coercion.py | 90 +-
 pandas/tests/indexing/test_datetime.py | 8 +-
 pandas/tests/indexing/test_floats.py | 37 +-
 pandas/tests/indexing/test_iloc.py | 14 +-
 pandas/tests/indexing/test_indexing.py | 16 +-
 pandas/tests/indexing/test_ix.py | 18 +-
 pandas/tests/indexing/test_loc.py | 14 +-
 pandas/tests/indexing/test_multiindex.py | 29 +-
 pandas/tests/indexing/test_panel.py | 18 +-
 pandas/tests/indexing/test_partial.py | 2 +-
 pandas/tests/indexing/test_scalar.py | 20 +-
 .../tests/io/formats/test_eng_formatting.py | 16 +-
 pandas/tests/io/formats/test_format.py | 494 +++++-----
 pandas/tests/io/formats/test_printing.py | 70 +-
 pandas/tests/io/formats/test_style.py | 150 +--
 pandas/tests/io/formats/test_to_csv.py | 82 +-
 pandas/tests/io/formats/test_to_html.py | 58 +-
 .../tests/io/json/test_json_table_schema.py | 88 +-
 pandas/tests/io/json/test_normalize.py | 8 +-
 pandas/tests/io/json/test_pandas.py | 85 +-
 pandas/tests/io/json/test_ujson.py | 320 +++----
 pandas/tests/io/parser/c_parser_only.py | 8 +-
 pandas/tests/io/parser/common.py | 52 +-
 pandas/tests/io/parser/converters.py | 12 +-
 pandas/tests/io/parser/dtypes.py | 4 +-
 pandas/tests/io/parser/header.py | 2 +-
 pandas/tests/io/parser/index_col.py | 4 +-
 pandas/tests/io/parser/na_values.py | 4 +-
 pandas/tests/io/parser/parse_dates.py | 24 +-
 pandas/tests/io/parser/python_parser_only.py | 2 +-
 pandas/tests/io/parser/test_network.py | 4 +-
 pandas/tests/io/parser/test_textreader.py | 34 +-
 pandas/tests/io/parser/usecols.py | 2 +-
 pandas/tests/io/test_clipboard.py | 2 +-
 pandas/tests/io/test_common.py | 18 +-
 pandas/tests/io/test_excel.py | 131 ++-
 pandas/tests/io/test_gbq.py | 2 +-
 pandas/tests/io/test_html.py | 20 +-
 pandas/tests/io/test_packers.py | 30 +-
 pandas/tests/io/test_pytables.py | 122 +--
 pandas/tests/io/test_sql.py | 160 ++--
 pandas/tests/io/test_stata.py | 22 +-
 pandas/tests/plotting/common.py | 38 +-
 pandas/tests/plotting/test_boxplot_method.py | 16 +-
 pandas/tests/plotting/test_converter.py | 50 +-
 pandas/tests/plotting/test_datetimelike.py | 242 +++--
 pandas/tests/plotting/test_frame.py | 234 +++--
 pandas/tests/plotting/test_groupby.py | 4 +-
 pandas/tests/plotting/test_hist_method.py | 10 +-
 pandas/tests/plotting/test_misc.py | 4 +-
 pandas/tests/plotting/test_series.py | 80 +-
 pandas/tests/reshape/test_concat.py | 81 +-
 pandas/tests/reshape/test_hashing.py | 2 +-
 pandas/tests/reshape/test_join.py | 24 +-
 pandas/tests/reshape/test_merge.py | 20 +-
 pandas/tests/reshape/test_pivot.py | 50 +-
 pandas/tests/reshape/test_reshape.py | 64 +-
 pandas/tests/reshape/test_tile.py | 24 +-
 pandas/tests/scalar/test_interval.py | 44 +-
 pandas/tests/scalar/test_period.py | 708 +++++++--------
 pandas/tests/scalar/test_period_asfreq.py | 519 ++++++-----
 pandas/tests/scalar/test_timedelta.py | 414 ++++-----
 pandas/tests/scalar/test_timestamp.py | 469 +++++-----
 pandas/tests/series/test_alter_axes.py | 18 +-
 pandas/tests/series/test_analytics.py | 126 +--
 pandas/tests/series/test_api.py | 81 +-
 pandas/tests/series/test_apply.py | 34 +-
 pandas/tests/series/test_asof.py | 18 +-
 pandas/tests/series/test_combine_concat.py | 77 +-
 pandas/tests/series/test_constructors.py | 126 +--
 pandas/tests/series/test_datetime_values.py | 21 +-
 pandas/tests/series/test_indexing.py | 194 ++--
 pandas/tests/series/test_internals.py | 4 +-
 pandas/tests/series/test_io.py | 6 +-
 pandas/tests/series/test_missing.py | 24 +-
 pandas/tests/series/test_operators.py | 28 +-
 pandas/tests/series/test_period.py | 12 +-
 pandas/tests/series/test_quantile.py | 39 +-
 pandas/tests/series/test_repr.py | 6 +-
 pandas/tests/series/test_subclass.py | 12 +-
 pandas/tests/series/test_timeseries.py | 100 +-
 pandas/tests/sparse/test_arithmetics.py | 34 +-
 pandas/tests/sparse/test_array.py | 122 +--
 pandas/tests/sparse/test_format.py | 20 +-
 pandas/tests/sparse/test_frame.py | 92 +-
 pandas/tests/sparse/test_indexing.py | 125 +--
 pandas/tests/sparse/test_libsparse.py | 68 +-
 pandas/tests/sparse/test_list.py | 14 +-
 pandas/tests/sparse/test_series.py | 130 +--
 pandas/tests/test_algos.py | 30 +-
 pandas/tests/test_base.py | 80 +-
 pandas/tests/test_categorical.py | 262 +++---
 pandas/tests/test_config.py | 118 +--
 pandas/tests/test_multilevel.py | 89 +-
 pandas/tests/test_nanops.py | 19 +-
 pandas/tests/test_panel.py | 83 +-
 pandas/tests/test_panel4d.py | 36 +-
 pandas/tests/test_resample.py | 142 ++-
 pandas/tests/test_strings.py | 121 ++-
 pandas/tests/test_take.py | 4 +-
 pandas/tests/test_testing.py | 4 +-
 pandas/tests/test_util.py | 20 +-
 pandas/tests/test_window.py | 22 +-
 pandas/tests/tools/test_numeric.py | 10 +-
 pandas/tests/tseries/test_frequencies.py | 243 +++--
 pandas/tests/tseries/test_holiday.py | 123 ++-
 pandas/tests/tseries/test_offsets.py | 855 +++++++++---------
 pandas/tests/tseries/test_timezones.py | 208 ++---
 180 files changed, 6578 insertions(+), 6853 deletions(-)

diff --git a/pandas/compat/__init__.py b/pandas/compat/__init__.py
index 7ebdd9735b967..2fe6359fd1ea6 100644
--- a/pandas/compat/__init__.py
+++ b/pandas/compat/__init__.py
@@ -104,6 +104,7 @@ def signature(f):
     map = map
     zip = zip
     filter = filter
+    intern = sys.intern
     reduce = functools.reduce
     long = int
     unichr = chr
@@ -146,6 +147,7 @@ def signature(f):

     # import iterator versions of these functions
     range = xrange
+    intern = intern
     zip = itertools.izip
     filter = itertools.ifilter
     map = itertools.imap
diff --git a/pandas/tests/computation/test_eval.py b/pandas/tests/computation/test_eval.py
index 827a4668ed0bc..f8f84985142a8 100644
--- a/pandas/tests/computation/test_eval.py
+++ b/pandas/tests/computation/test_eval.py
@@ -8,7 +8,7 @@
 from numpy.random import randn, rand, randint
 import numpy as np

-from pandas.core.dtypes.common import is_list_like, is_scalar
+from pandas.core.dtypes.common import is_bool, is_list_like, is_scalar
 import pandas as pd
 from pandas.core import common as com
 from pandas.errors import PerformanceWarning
@@ -209,7 +209,7 @@ def check_equal(self, result, expected):
         elif isinstance(result, np.ndarray):
             tm.assert_numpy_array_equal(result, expected)
         else:
-            self.assertEqual(result, expected)
+            assert result == expected

     def check_complex_cmp_op(self, lhs, cmp1, rhs, binop, cmp2):
         skip_these = _scalar_skip
@@ -610,30 +610,28 @@ def test_scalar_unary(self):
         with pytest.raises(TypeError):
             pd.eval('~1.0', engine=self.engine, parser=self.parser)

-        self.assertEqual(
-            pd.eval('-1.0', parser=self.parser, engine=self.engine), -1.0)
-        self.assertEqual(
-            pd.eval('+1.0', parser=self.parser, engine=self.engine), +1.0)
-
-        self.assertEqual(
-            pd.eval('~1', parser=self.parser, engine=self.engine), ~1)
-        self.assertEqual(
-            pd.eval('-1', parser=self.parser, engine=self.engine), -1)
-        self.assertEqual(
-            pd.eval('+1', parser=self.parser, engine=self.engine), +1)
-
-        self.assertEqual(
-            pd.eval('~True', parser=self.parser, engine=self.engine), ~True)
-        self.assertEqual(
-            pd.eval('~False', parser=self.parser, engine=self.engine), ~False)
-        self.assertEqual(
-            pd.eval('-True', parser=self.parser, engine=self.engine), -True)
-
self.assertEqual( - pd.eval('-False', parser=self.parser, engine=self.engine), -False) - self.assertEqual( - pd.eval('+True', parser=self.parser, engine=self.engine), +True) - self.assertEqual( - pd.eval('+False', parser=self.parser, engine=self.engine), +False) + assert pd.eval('-1.0', parser=self.parser, + engine=self.engine) == -1.0 + assert pd.eval('+1.0', parser=self.parser, + engine=self.engine) == +1.0 + assert pd.eval('~1', parser=self.parser, + engine=self.engine) == ~1 + assert pd.eval('-1', parser=self.parser, + engine=self.engine) == -1 + assert pd.eval('+1', parser=self.parser, + engine=self.engine) == +1 + assert pd.eval('~True', parser=self.parser, + engine=self.engine) == ~True + assert pd.eval('~False', parser=self.parser, + engine=self.engine) == ~False + assert pd.eval('-True', parser=self.parser, + engine=self.engine) == -True + assert pd.eval('-False', parser=self.parser, + engine=self.engine) == -False + assert pd.eval('+True', parser=self.parser, + engine=self.engine) == +True + assert pd.eval('+False', parser=self.parser, + engine=self.engine) == +False def test_unary_in_array(self): # GH 11235 @@ -658,50 +656,51 @@ def test_disallow_scalar_bool_ops(self): pd.eval(ex, engine=self.engine, parser=self.parser) def test_identical(self): - # GH 10546 + # see gh-10546 x = 1 result = pd.eval('x', engine=self.engine, parser=self.parser) - self.assertEqual(result, 1) + assert result == 1 assert is_scalar(result) x = 1.5 result = pd.eval('x', engine=self.engine, parser=self.parser) - self.assertEqual(result, 1.5) + assert result == 1.5 assert is_scalar(result) x = False result = pd.eval('x', engine=self.engine, parser=self.parser) - self.assertEqual(result, False) + assert not result + assert is_bool(result) assert is_scalar(result) x = np.array([1]) result = pd.eval('x', engine=self.engine, parser=self.parser) tm.assert_numpy_array_equal(result, np.array([1])) - self.assertEqual(result.shape, (1, )) + assert result.shape == (1, ) x = np.array([1.5]) result = pd.eval('x', engine=self.engine, parser=self.parser) tm.assert_numpy_array_equal(result, np.array([1.5])) - self.assertEqual(result.shape, (1, )) + assert result.shape == (1, ) x = np.array([False]) # noqa result = pd.eval('x', engine=self.engine, parser=self.parser) tm.assert_numpy_array_equal(result, np.array([False])) - self.assertEqual(result.shape, (1, )) + assert result.shape == (1, ) def test_line_continuation(self): # GH 11149 exp = """1 + 2 * \ 5 - 1 + 2 """ result = pd.eval(exp, engine=self.engine, parser=self.parser) - self.assertEqual(result, 12) + assert result == 12 def test_float_truncation(self): # GH 14241 exp = '1000000000.006' result = pd.eval(exp, engine=self.engine, parser=self.parser) expected = np.float64(exp) - self.assertEqual(result, expected) + assert result == expected df = pd.DataFrame({'A': [1000000000.0009, 1000000000.0011, @@ -1121,7 +1120,7 @@ def test_simple_bool_ops(self): ex = '{0} {1} {2}'.format(lhs, op, rhs) res = self.eval(ex) exp = eval(ex) - self.assertEqual(res, exp) + assert res == exp def test_bool_ops_with_constants(self): for op, lhs, rhs in product(expr._bool_ops_syms, ('True', 'False'), @@ -1129,7 +1128,7 @@ def test_bool_ops_with_constants(self): ex = '{0} {1} {2}'.format(lhs, op, rhs) res = self.eval(ex) exp = eval(ex) - self.assertEqual(res, exp) + assert res == exp def test_panel_fails(self): with catch_warnings(record=True): @@ -1169,19 +1168,19 @@ def test_truediv(self): res = self.eval('1 / 2', truediv=True) expec = 0.5 - self.assertEqual(res, expec) + assert res == 
expec res = self.eval('1 / 2', truediv=False) expec = 0.5 - self.assertEqual(res, expec) + assert res == expec res = self.eval('s / 2', truediv=False) expec = 0.5 - self.assertEqual(res, expec) + assert res == expec res = self.eval('s / 2', truediv=True) expec = 0.5 - self.assertEqual(res, expec) + assert res == expec else: res = self.eval(ex, truediv=False) tm.assert_numpy_array_equal(res, np.array([1])) @@ -1191,19 +1190,19 @@ def test_truediv(self): res = self.eval('1 / 2', truediv=True) expec = 0.5 - self.assertEqual(res, expec) + assert res == expec res = self.eval('1 / 2', truediv=False) expec = 0 - self.assertEqual(res, expec) + assert res == expec res = self.eval('s / 2', truediv=False) expec = 0 - self.assertEqual(res, expec) + assert res == expec res = self.eval('s / 2', truediv=True) expec = 0.5 - self.assertEqual(res, expec) + assert res == expec def test_failing_subscript_with_name_error(self): df = DataFrame(np.random.randn(5, 3)) # noqa @@ -1549,7 +1548,7 @@ def test_bool_ops_with_constants(self): else: res = self.eval(ex) exp = eval(ex) - self.assertEqual(res, exp) + assert res == exp def test_simple_bool_ops(self): for op, lhs, rhs in product(expr._bool_ops_syms, (True, False), @@ -1561,7 +1560,7 @@ def test_simple_bool_ops(self): else: res = pd.eval(ex, engine=self.engine, parser=self.parser) exp = eval(ex) - self.assertEqual(res, exp) + assert res == exp class TestOperationsPythonPython(TestOperationsNumExprPython): @@ -1650,14 +1649,14 @@ def test_df_arithmetic_subexpression(self): def check_result_type(self, dtype, expect_dtype): df = DataFrame({'a': np.random.randn(10).astype(dtype)}) - self.assertEqual(df.a.dtype, dtype) + assert df.a.dtype == dtype df.eval("b = sin(a)", engine=self.engine, parser=self.parser, inplace=True) got = df.b expect = np.sin(df.a) - self.assertEqual(expect.dtype, got.dtype) - self.assertEqual(expect_dtype, got.dtype) + assert expect.dtype == got.dtype + assert expect_dtype == got.dtype tm.assert_series_equal(got, expect, check_names=False) def test_result_types(self): diff --git a/pandas/tests/dtypes/test_cast.py b/pandas/tests/dtypes/test_cast.py index 22640729c262f..cbf049b95b6ef 100644 --- a/pandas/tests/dtypes/test_cast.py +++ b/pandas/tests/dtypes/test_cast.py @@ -164,7 +164,7 @@ def test_maybe_convert_string_to_array(self): assert result.dtype == object result = maybe_convert_string_to_object(1) - self.assertEqual(result, 1) + assert result == 1 arr = np.array(['x', 'y'], dtype=str) result = maybe_convert_string_to_object(arr) @@ -187,31 +187,31 @@ def test_maybe_convert_scalar(self): # pass thru result = maybe_convert_scalar('x') - self.assertEqual(result, 'x') + assert result == 'x' result = maybe_convert_scalar(np.array([1])) - self.assertEqual(result, np.array([1])) + assert result == np.array([1]) # leave scalar dtype result = maybe_convert_scalar(np.int64(1)) - self.assertEqual(result, np.int64(1)) + assert result == np.int64(1) result = maybe_convert_scalar(np.int32(1)) - self.assertEqual(result, np.int32(1)) + assert result == np.int32(1) result = maybe_convert_scalar(np.float32(1)) - self.assertEqual(result, np.float32(1)) + assert result == np.float32(1) result = maybe_convert_scalar(np.int64(1)) - self.assertEqual(result, np.float64(1)) + assert result == np.float64(1) # coerce result = maybe_convert_scalar(1) - self.assertEqual(result, np.int64(1)) + assert result == np.int64(1) result = maybe_convert_scalar(1.0) - self.assertEqual(result, np.float64(1)) + assert result == np.float64(1) result = 
maybe_convert_scalar(Timestamp('20130101')) - self.assertEqual(result, Timestamp('20130101').value) + assert result == Timestamp('20130101').value result = maybe_convert_scalar(datetime(2013, 1, 1)) - self.assertEqual(result, Timestamp('20130101').value) + assert result == Timestamp('20130101').value result = maybe_convert_scalar(Timedelta('1 day 1 min')) - self.assertEqual(result, Timedelta('1 day 1 min').value) + assert result == Timedelta('1 day 1 min').value class TestConvert(tm.TestCase): @@ -291,7 +291,7 @@ def test_numpy_dtypes(self): ((np.dtype('datetime64[ns]'), np.int64), np.object) ) for src, common in testcases: - self.assertEqual(find_common_type(src), common) + assert find_common_type(src) == common with pytest.raises(ValueError): # empty @@ -299,26 +299,25 @@ def test_numpy_dtypes(self): def test_categorical_dtype(self): dtype = CategoricalDtype() - self.assertEqual(find_common_type([dtype]), 'category') - self.assertEqual(find_common_type([dtype, dtype]), 'category') - self.assertEqual(find_common_type([np.object, dtype]), np.object) + assert find_common_type([dtype]) == 'category' + assert find_common_type([dtype, dtype]) == 'category' + assert find_common_type([np.object, dtype]) == np.object def test_datetimetz_dtype(self): dtype = DatetimeTZDtype(unit='ns', tz='US/Eastern') - self.assertEqual(find_common_type([dtype, dtype]), - 'datetime64[ns, US/Eastern]') + assert find_common_type([dtype, dtype]) == 'datetime64[ns, US/Eastern]' for dtype2 in [DatetimeTZDtype(unit='ns', tz='Asia/Tokyo'), np.dtype('datetime64[ns]'), np.object, np.int64]: - self.assertEqual(find_common_type([dtype, dtype2]), np.object) - self.assertEqual(find_common_type([dtype2, dtype]), np.object) + assert find_common_type([dtype, dtype2]) == np.object + assert find_common_type([dtype2, dtype]) == np.object def test_period_dtype(self): dtype = PeriodDtype(freq='D') - self.assertEqual(find_common_type([dtype, dtype]), 'period[D]') + assert find_common_type([dtype, dtype]) == 'period[D]' for dtype2 in [DatetimeTZDtype(unit='ns', tz='Asia/Tokyo'), PeriodDtype(freq='2D'), PeriodDtype(freq='H'), np.dtype('datetime64[ns]'), np.object, np.int64]: - self.assertEqual(find_common_type([dtype, dtype2]), np.object) - self.assertEqual(find_common_type([dtype2, dtype]), np.object) + assert find_common_type([dtype, dtype2]) == np.object + assert find_common_type([dtype2, dtype]) == np.object diff --git a/pandas/tests/dtypes/test_common.py b/pandas/tests/dtypes/test_common.py index 2aad1b6baaac0..0472f0599cd9b 100644 --- a/pandas/tests/dtypes/test_common.py +++ b/pandas/tests/dtypes/test_common.py @@ -30,30 +30,30 @@ def test_invalid_dtype_error(self): def test_numpy_dtype(self): for dtype in ['M8[ns]', 'm8[ns]', 'object', 'float64', 'int64']: - self.assertEqual(pandas_dtype(dtype), np.dtype(dtype)) + assert pandas_dtype(dtype) == np.dtype(dtype) def test_numpy_string_dtype(self): # do not parse freq-like string as period dtype - self.assertEqual(pandas_dtype('U'), np.dtype('U')) - self.assertEqual(pandas_dtype('S'), np.dtype('S')) + assert pandas_dtype('U') == np.dtype('U') + assert pandas_dtype('S') == np.dtype('S') def test_datetimetz_dtype(self): for dtype in ['datetime64[ns, US/Eastern]', 'datetime64[ns, Asia/Tokyo]', 'datetime64[ns, UTC]']: assert pandas_dtype(dtype) is DatetimeTZDtype(dtype) - self.assertEqual(pandas_dtype(dtype), DatetimeTZDtype(dtype)) - self.assertEqual(pandas_dtype(dtype), dtype) + assert pandas_dtype(dtype) == DatetimeTZDtype(dtype) + assert pandas_dtype(dtype) == dtype def 
test_categorical_dtype(self): - self.assertEqual(pandas_dtype('category'), CategoricalDtype()) + assert pandas_dtype('category') == CategoricalDtype() def test_period_dtype(self): for dtype in ['period[D]', 'period[3M]', 'period[U]', 'Period[D]', 'Period[3M]', 'Period[U]']: assert pandas_dtype(dtype) is PeriodDtype(dtype) - self.assertEqual(pandas_dtype(dtype), PeriodDtype(dtype)) - self.assertEqual(pandas_dtype(dtype), dtype) + assert pandas_dtype(dtype) == PeriodDtype(dtype) + assert pandas_dtype(dtype) == dtype dtypes = dict(datetime_tz=pandas_dtype('datetime64[ns, US/Eastern]'), diff --git a/pandas/tests/dtypes/test_concat.py b/pandas/tests/dtypes/test_concat.py index e8eb042d78f30..c0be0dc38d27f 100644 --- a/pandas/tests/dtypes/test_concat.py +++ b/pandas/tests/dtypes/test_concat.py @@ -11,7 +11,7 @@ def check_concat(self, to_concat, exp): for klass in [pd.Index, pd.Series]: to_concat_klass = [klass(c) for c in to_concat] res = _concat.get_dtype_kinds(to_concat_klass) - self.assertEqual(res, set(exp)) + assert res == set(exp) def test_get_dtype_kinds(self): to_concat = [['a'], [1, 2]] @@ -60,19 +60,19 @@ def test_get_dtype_kinds_period(self): to_concat = [pd.PeriodIndex(['2011-01'], freq='M'), pd.PeriodIndex(['2011-01'], freq='M')] res = _concat.get_dtype_kinds(to_concat) - self.assertEqual(res, set(['period[M]'])) + assert res == set(['period[M]']) to_concat = [pd.Series([pd.Period('2011-01', freq='M')]), pd.Series([pd.Period('2011-02', freq='M')])] res = _concat.get_dtype_kinds(to_concat) - self.assertEqual(res, set(['object'])) + assert res == set(['object']) to_concat = [pd.PeriodIndex(['2011-01'], freq='M'), pd.PeriodIndex(['2011-01'], freq='D')] res = _concat.get_dtype_kinds(to_concat) - self.assertEqual(res, set(['period[M]', 'period[D]'])) + assert res == set(['period[M]', 'period[D]']) to_concat = [pd.Series([pd.Period('2011-01', freq='M')]), pd.Series([pd.Period('2011-02', freq='D')])] res = _concat.get_dtype_kinds(to_concat) - self.assertEqual(res, set(['object'])) + assert res == set(['object']) diff --git a/pandas/tests/dtypes/test_dtypes.py b/pandas/tests/dtypes/test_dtypes.py index b02c846d50c89..da3120145fe38 100644 --- a/pandas/tests/dtypes/test_dtypes.py +++ b/pandas/tests/dtypes/test_dtypes.py @@ -124,10 +124,10 @@ def test_subclass(self): assert issubclass(type(a), type(b)) def test_coerce_to_dtype(self): - self.assertEqual(_coerce_to_dtype('datetime64[ns, US/Eastern]'), - DatetimeTZDtype('ns', 'US/Eastern')) - self.assertEqual(_coerce_to_dtype('datetime64[ns, Asia/Tokyo]'), - DatetimeTZDtype('ns', 'Asia/Tokyo')) + assert (_coerce_to_dtype('datetime64[ns, US/Eastern]') == + DatetimeTZDtype('ns', 'US/Eastern')) + assert (_coerce_to_dtype('datetime64[ns, Asia/Tokyo]') == + DatetimeTZDtype('ns', 'Asia/Tokyo')) def test_compat(self): assert is_datetime64tz_dtype(self.dtype) @@ -194,16 +194,14 @@ def test_dst(self): dr2 = date_range('2013-08-01', periods=3, tz='US/Eastern') s2 = Series(dr2, name='A') assert is_datetimetz(s2) - self.assertEqual(s1.dtype, s2.dtype) + assert s1.dtype == s2.dtype def test_parser(self): # pr #11245 for tz, constructor in product(('UTC', 'US/Eastern'), ('M8', 'datetime64')): - self.assertEqual( - DatetimeTZDtype('%s[ns, %s]' % (constructor, tz)), - DatetimeTZDtype('ns', tz), - ) + assert (DatetimeTZDtype('%s[ns, %s]' % (constructor, tz)) == + DatetimeTZDtype('ns', tz)) def test_empty(self): dt = DatetimeTZDtype() @@ -222,18 +220,18 @@ def test_construction(self): for s in ['period[D]', 'Period[D]', 'D']: dt = PeriodDtype(s) - 
self.assertEqual(dt.freq, pd.tseries.offsets.Day()) + assert dt.freq == pd.tseries.offsets.Day() assert is_period_dtype(dt) for s in ['period[3D]', 'Period[3D]', '3D']: dt = PeriodDtype(s) - self.assertEqual(dt.freq, pd.tseries.offsets.Day(3)) + assert dt.freq == pd.tseries.offsets.Day(3) assert is_period_dtype(dt) for s in ['period[26H]', 'Period[26H]', '26H', 'period[1D2H]', 'Period[1D2H]', '1D2H']: dt = PeriodDtype(s) - self.assertEqual(dt.freq, pd.tseries.offsets.Hour(26)) + assert dt.freq == pd.tseries.offsets.Hour(26) assert is_period_dtype(dt) def test_subclass(self): @@ -254,10 +252,8 @@ def test_identity(self): assert PeriodDtype('period[1S1U]') is PeriodDtype('period[1000001U]') def test_coerce_to_dtype(self): - self.assertEqual(_coerce_to_dtype('period[D]'), - PeriodDtype('period[D]')) - self.assertEqual(_coerce_to_dtype('period[3M]'), - PeriodDtype('period[3M]')) + assert _coerce_to_dtype('period[D]') == PeriodDtype('period[D]') + assert _coerce_to_dtype('period[3M]') == PeriodDtype('period[3M]') def test_compat(self): assert not is_datetime64_ns_dtype(self.dtype) @@ -354,7 +350,7 @@ def test_construction(self): for s in ['interval[int64]', 'Interval[int64]', 'int64']: i = IntervalDtype(s) - self.assertEqual(i.subtype, np.dtype('int64')) + assert i.subtype == np.dtype('int64') assert is_interval_dtype(i) def test_construction_generic(self): @@ -393,12 +389,12 @@ def test_is_dtype(self): assert not IntervalDtype.is_dtype(np.float64) def test_identity(self): - self.assertEqual(IntervalDtype('interval[int64]'), - IntervalDtype('interval[int64]')) + assert (IntervalDtype('interval[int64]') == + IntervalDtype('interval[int64]')) def test_coerce_to_dtype(self): - self.assertEqual(_coerce_to_dtype('interval[int64]'), - IntervalDtype('interval[int64]')) + assert (_coerce_to_dtype('interval[int64]') == + IntervalDtype('interval[int64]')) def test_construction_from_string(self): result = IntervalDtype('interval[int64]') diff --git a/pandas/tests/dtypes/test_inference.py b/pandas/tests/dtypes/test_inference.py index 3449d6c56167e..ec02a5a200308 100644 --- a/pandas/tests/dtypes/test_inference.py +++ b/pandas/tests/dtypes/test_inference.py @@ -233,11 +233,11 @@ def test_infer_dtype_bytes(self): # string array of bytes arr = np.array(list('abc'), dtype='S1') - self.assertEqual(lib.infer_dtype(arr), compare) + assert lib.infer_dtype(arr) == compare # object array of bytes arr = arr.astype(object) - self.assertEqual(lib.infer_dtype(arr), compare) + assert lib.infer_dtype(arr) == compare def test_isinf_scalar(self): # GH 11352 @@ -409,58 +409,58 @@ class TestTypeInference(tm.TestCase): def test_length_zero(self): result = lib.infer_dtype(np.array([], dtype='i4')) - self.assertEqual(result, 'integer') + assert result == 'integer' result = lib.infer_dtype([]) - self.assertEqual(result, 'empty') + assert result == 'empty' def test_integers(self): arr = np.array([1, 2, 3, np.int64(4), np.int32(5)], dtype='O') result = lib.infer_dtype(arr) - self.assertEqual(result, 'integer') + assert result == 'integer' arr = np.array([1, 2, 3, np.int64(4), np.int32(5), 'foo'], dtype='O') result = lib.infer_dtype(arr) - self.assertEqual(result, 'mixed-integer') + assert result == 'mixed-integer' arr = np.array([1, 2, 3, 4, 5], dtype='i4') result = lib.infer_dtype(arr) - self.assertEqual(result, 'integer') + assert result == 'integer' def test_bools(self): arr = np.array([True, False, True, True, True], dtype='O') result = lib.infer_dtype(arr) - self.assertEqual(result, 'boolean') + assert result == 'boolean' arr = 
np.array([np.bool_(True), np.bool_(False)], dtype='O') result = lib.infer_dtype(arr) - self.assertEqual(result, 'boolean') + assert result == 'boolean' arr = np.array([True, False, True, 'foo'], dtype='O') result = lib.infer_dtype(arr) - self.assertEqual(result, 'mixed') + assert result == 'mixed' arr = np.array([True, False, True], dtype=bool) result = lib.infer_dtype(arr) - self.assertEqual(result, 'boolean') + assert result == 'boolean' def test_floats(self): arr = np.array([1., 2., 3., np.float64(4), np.float32(5)], dtype='O') result = lib.infer_dtype(arr) - self.assertEqual(result, 'floating') + assert result == 'floating' arr = np.array([1, 2, 3, np.float64(4), np.float32(5), 'foo'], dtype='O') result = lib.infer_dtype(arr) - self.assertEqual(result, 'mixed-integer') + assert result == 'mixed-integer' arr = np.array([1, 2, 3, 4, 5], dtype='f4') result = lib.infer_dtype(arr) - self.assertEqual(result, 'floating') + assert result == 'floating' arr = np.array([1, 2, 3, 4, 5], dtype='f8') result = lib.infer_dtype(arr) - self.assertEqual(result, 'floating') + assert result == 'floating' def test_string(self): pass @@ -472,198 +472,198 @@ def test_datetime(self): dates = [datetime(2012, 1, x) for x in range(1, 20)] index = Index(dates) - self.assertEqual(index.inferred_type, 'datetime64') + assert index.inferred_type == 'datetime64' def test_infer_dtype_datetime(self): arr = np.array([Timestamp('2011-01-01'), Timestamp('2011-01-02')]) - self.assertEqual(lib.infer_dtype(arr), 'datetime') + assert lib.infer_dtype(arr) == 'datetime' arr = np.array([np.datetime64('2011-01-01'), np.datetime64('2011-01-01')], dtype=object) - self.assertEqual(lib.infer_dtype(arr), 'datetime64') + assert lib.infer_dtype(arr) == 'datetime64' arr = np.array([datetime(2011, 1, 1), datetime(2012, 2, 1)]) - self.assertEqual(lib.infer_dtype(arr), 'datetime') + assert lib.infer_dtype(arr) == 'datetime' # starts with nan for n in [pd.NaT, np.nan]: arr = np.array([n, pd.Timestamp('2011-01-02')]) - self.assertEqual(lib.infer_dtype(arr), 'datetime') + assert lib.infer_dtype(arr) == 'datetime' arr = np.array([n, np.datetime64('2011-01-02')]) - self.assertEqual(lib.infer_dtype(arr), 'datetime64') + assert lib.infer_dtype(arr) == 'datetime64' arr = np.array([n, datetime(2011, 1, 1)]) - self.assertEqual(lib.infer_dtype(arr), 'datetime') + assert lib.infer_dtype(arr) == 'datetime' arr = np.array([n, pd.Timestamp('2011-01-02'), n]) - self.assertEqual(lib.infer_dtype(arr), 'datetime') + assert lib.infer_dtype(arr) == 'datetime' arr = np.array([n, np.datetime64('2011-01-02'), n]) - self.assertEqual(lib.infer_dtype(arr), 'datetime64') + assert lib.infer_dtype(arr) == 'datetime64' arr = np.array([n, datetime(2011, 1, 1), n]) - self.assertEqual(lib.infer_dtype(arr), 'datetime') + assert lib.infer_dtype(arr) == 'datetime' # different type of nat arr = np.array([np.timedelta64('nat'), np.datetime64('2011-01-02')], dtype=object) - self.assertEqual(lib.infer_dtype(arr), 'mixed') + assert lib.infer_dtype(arr) == 'mixed' arr = np.array([np.datetime64('2011-01-02'), np.timedelta64('nat')], dtype=object) - self.assertEqual(lib.infer_dtype(arr), 'mixed') + assert lib.infer_dtype(arr) == 'mixed' # mixed datetime arr = np.array([datetime(2011, 1, 1), pd.Timestamp('2011-01-02')]) - self.assertEqual(lib.infer_dtype(arr), 'datetime') + assert lib.infer_dtype(arr) == 'datetime' # should be datetime? 
arr = np.array([np.datetime64('2011-01-01'), pd.Timestamp('2011-01-02')]) - self.assertEqual(lib.infer_dtype(arr), 'mixed') + assert lib.infer_dtype(arr) == 'mixed' arr = np.array([pd.Timestamp('2011-01-02'), np.datetime64('2011-01-01')]) - self.assertEqual(lib.infer_dtype(arr), 'mixed') + assert lib.infer_dtype(arr) == 'mixed' arr = np.array([np.nan, pd.Timestamp('2011-01-02'), 1]) - self.assertEqual(lib.infer_dtype(arr), 'mixed-integer') + assert lib.infer_dtype(arr) == 'mixed-integer' arr = np.array([np.nan, pd.Timestamp('2011-01-02'), 1.1]) - self.assertEqual(lib.infer_dtype(arr), 'mixed') + assert lib.infer_dtype(arr) == 'mixed' arr = np.array([np.nan, '2011-01-01', pd.Timestamp('2011-01-02')]) - self.assertEqual(lib.infer_dtype(arr), 'mixed') + assert lib.infer_dtype(arr) == 'mixed' def test_infer_dtype_timedelta(self): arr = np.array([pd.Timedelta('1 days'), pd.Timedelta('2 days')]) - self.assertEqual(lib.infer_dtype(arr), 'timedelta') + assert lib.infer_dtype(arr) == 'timedelta' arr = np.array([np.timedelta64(1, 'D'), np.timedelta64(2, 'D')], dtype=object) - self.assertEqual(lib.infer_dtype(arr), 'timedelta') + assert lib.infer_dtype(arr) == 'timedelta' arr = np.array([timedelta(1), timedelta(2)]) - self.assertEqual(lib.infer_dtype(arr), 'timedelta') + assert lib.infer_dtype(arr) == 'timedelta' # starts with nan for n in [pd.NaT, np.nan]: arr = np.array([n, Timedelta('1 days')]) - self.assertEqual(lib.infer_dtype(arr), 'timedelta') + assert lib.infer_dtype(arr) == 'timedelta' arr = np.array([n, np.timedelta64(1, 'D')]) - self.assertEqual(lib.infer_dtype(arr), 'timedelta') + assert lib.infer_dtype(arr) == 'timedelta' arr = np.array([n, timedelta(1)]) - self.assertEqual(lib.infer_dtype(arr), 'timedelta') + assert lib.infer_dtype(arr) == 'timedelta' arr = np.array([n, pd.Timedelta('1 days'), n]) - self.assertEqual(lib.infer_dtype(arr), 'timedelta') + assert lib.infer_dtype(arr) == 'timedelta' arr = np.array([n, np.timedelta64(1, 'D'), n]) - self.assertEqual(lib.infer_dtype(arr), 'timedelta') + assert lib.infer_dtype(arr) == 'timedelta' arr = np.array([n, timedelta(1), n]) - self.assertEqual(lib.infer_dtype(arr), 'timedelta') + assert lib.infer_dtype(arr) == 'timedelta' # different type of nat arr = np.array([np.datetime64('nat'), np.timedelta64(1, 'D')], dtype=object) - self.assertEqual(lib.infer_dtype(arr), 'mixed') + assert lib.infer_dtype(arr) == 'mixed' arr = np.array([np.timedelta64(1, 'D'), np.datetime64('nat')], dtype=object) - self.assertEqual(lib.infer_dtype(arr), 'mixed') + assert lib.infer_dtype(arr) == 'mixed' def test_infer_dtype_period(self): # GH 13664 arr = np.array([pd.Period('2011-01', freq='D'), pd.Period('2011-02', freq='D')]) - self.assertEqual(lib.infer_dtype(arr), 'period') + assert lib.infer_dtype(arr) == 'period' arr = np.array([pd.Period('2011-01', freq='D'), pd.Period('2011-02', freq='M')]) - self.assertEqual(lib.infer_dtype(arr), 'period') + assert lib.infer_dtype(arr) == 'period' # starts with nan for n in [pd.NaT, np.nan]: arr = np.array([n, pd.Period('2011-01', freq='D')]) - self.assertEqual(lib.infer_dtype(arr), 'period') + assert lib.infer_dtype(arr) == 'period' arr = np.array([n, pd.Period('2011-01', freq='D'), n]) - self.assertEqual(lib.infer_dtype(arr), 'period') + assert lib.infer_dtype(arr) == 'period' # different type of nat arr = np.array([np.datetime64('nat'), pd.Period('2011-01', freq='M')], dtype=object) - self.assertEqual(lib.infer_dtype(arr), 'mixed') + assert lib.infer_dtype(arr) == 'mixed' arr = np.array([pd.Period('2011-01', freq='M'), 
np.datetime64('nat')], dtype=object) - self.assertEqual(lib.infer_dtype(arr), 'mixed') + assert lib.infer_dtype(arr) == 'mixed' def test_infer_dtype_all_nan_nat_like(self): arr = np.array([np.nan, np.nan]) - self.assertEqual(lib.infer_dtype(arr), 'floating') + assert lib.infer_dtype(arr) == 'floating' # nan and None mix are result in mixed arr = np.array([np.nan, np.nan, None]) - self.assertEqual(lib.infer_dtype(arr), 'mixed') + assert lib.infer_dtype(arr) == 'mixed' arr = np.array([None, np.nan, np.nan]) - self.assertEqual(lib.infer_dtype(arr), 'mixed') + assert lib.infer_dtype(arr) == 'mixed' # pd.NaT arr = np.array([pd.NaT]) - self.assertEqual(lib.infer_dtype(arr), 'datetime') + assert lib.infer_dtype(arr) == 'datetime' arr = np.array([pd.NaT, np.nan]) - self.assertEqual(lib.infer_dtype(arr), 'datetime') + assert lib.infer_dtype(arr) == 'datetime' arr = np.array([np.nan, pd.NaT]) - self.assertEqual(lib.infer_dtype(arr), 'datetime') + assert lib.infer_dtype(arr) == 'datetime' arr = np.array([np.nan, pd.NaT, np.nan]) - self.assertEqual(lib.infer_dtype(arr), 'datetime') + assert lib.infer_dtype(arr) == 'datetime' arr = np.array([None, pd.NaT, None]) - self.assertEqual(lib.infer_dtype(arr), 'datetime') + assert lib.infer_dtype(arr) == 'datetime' # np.datetime64(nat) arr = np.array([np.datetime64('nat')]) - self.assertEqual(lib.infer_dtype(arr), 'datetime64') + assert lib.infer_dtype(arr) == 'datetime64' for n in [np.nan, pd.NaT, None]: arr = np.array([n, np.datetime64('nat'), n]) - self.assertEqual(lib.infer_dtype(arr), 'datetime64') + assert lib.infer_dtype(arr) == 'datetime64' arr = np.array([pd.NaT, n, np.datetime64('nat'), n]) - self.assertEqual(lib.infer_dtype(arr), 'datetime64') + assert lib.infer_dtype(arr) == 'datetime64' arr = np.array([np.timedelta64('nat')], dtype=object) - self.assertEqual(lib.infer_dtype(arr), 'timedelta') + assert lib.infer_dtype(arr) == 'timedelta' for n in [np.nan, pd.NaT, None]: arr = np.array([n, np.timedelta64('nat'), n]) - self.assertEqual(lib.infer_dtype(arr), 'timedelta') + assert lib.infer_dtype(arr) == 'timedelta' arr = np.array([pd.NaT, n, np.timedelta64('nat'), n]) - self.assertEqual(lib.infer_dtype(arr), 'timedelta') + assert lib.infer_dtype(arr) == 'timedelta' # datetime / timedelta mixed arr = np.array([pd.NaT, np.datetime64('nat'), np.timedelta64('nat'), np.nan]) - self.assertEqual(lib.infer_dtype(arr), 'mixed') + assert lib.infer_dtype(arr) == 'mixed' arr = np.array([np.timedelta64('nat'), np.datetime64('nat')], dtype=object) - self.assertEqual(lib.infer_dtype(arr), 'mixed') + assert lib.infer_dtype(arr) == 'mixed' def test_is_datetimelike_array_all_nan_nat_like(self): arr = np.array([np.nan, pd.NaT, np.datetime64('nat')]) @@ -706,7 +706,7 @@ def test_date(self): dates = [date(2012, 1, x) for x in range(1, 20)] index = Index(dates) - self.assertEqual(index.inferred_type, 'date') + assert index.inferred_type == 'date' def test_to_object_array_tuples(self): r = (5, 6) @@ -729,7 +729,7 @@ def test_object(self): # cannot infer more than this as only a single element arr = np.array([None], dtype='O') result = lib.infer_dtype(arr) - self.assertEqual(result, 'mixed') + assert result == 'mixed' def test_to_object_array_width(self): # see gh-13320 @@ -761,17 +761,17 @@ def test_categorical(self): from pandas import Categorical, Series arr = Categorical(list('abc')) result = lib.infer_dtype(arr) - self.assertEqual(result, 'categorical') + assert result == 'categorical' result = lib.infer_dtype(Series(arr)) - self.assertEqual(result, 'categorical') + 
assert result == 'categorical' arr = Categorical(list('abc'), categories=['cegfab'], ordered=True) result = lib.infer_dtype(arr) - self.assertEqual(result, 'categorical') + assert result == 'categorical' result = lib.infer_dtype(Series(arr)) - self.assertEqual(result, 'categorical') + assert result == 'categorical' class TestNumberScalar(tm.TestCase): diff --git a/pandas/tests/frame/test_alter_axes.py b/pandas/tests/frame/test_alter_axes.py index 303c8cb6e858a..34ab0b72f9b9a 100644 --- a/pandas/tests/frame/test_alter_axes.py +++ b/pandas/tests/frame/test_alter_axes.py @@ -69,7 +69,7 @@ def test_set_index2(self): assert_frame_equal(result, expected) assert_frame_equal(result_nodrop, expected_nodrop) - self.assertEqual(result.index.name, index.name) + assert result.index.name == index.name # inplace, single df2 = df.copy() @@ -97,7 +97,7 @@ def test_set_index2(self): assert_frame_equal(result, expected) assert_frame_equal(result_nodrop, expected_nodrop) - self.assertEqual(result.index.names, index.names) + assert result.index.names == index.names # inplace df2 = df.copy() @@ -127,7 +127,7 @@ def test_set_index2(self): # Series result = df.set_index(df.C) - self.assertEqual(result.index.name, 'C') + assert result.index.name == 'C' def test_set_index_nonuniq(self): df = DataFrame({'A': ['foo', 'foo', 'foo', 'bar', 'bar'], @@ -174,7 +174,7 @@ def test_construction_with_categorical_index(self): idf = df.set_index('B') str(idf) tm.assert_index_equal(idf.index, ci, check_names=False) - self.assertEqual(idf.index.name, 'B') + assert idf.index.name == 'B' # from a CategoricalIndex df = DataFrame({'A': np.random.randn(10), @@ -182,17 +182,17 @@ def test_construction_with_categorical_index(self): idf = df.set_index('B') str(idf) tm.assert_index_equal(idf.index, ci, check_names=False) - self.assertEqual(idf.index.name, 'B') + assert idf.index.name == 'B' idf = df.set_index('B').reset_index().set_index('B') str(idf) tm.assert_index_equal(idf.index, ci, check_names=False) - self.assertEqual(idf.index.name, 'B') + assert idf.index.name == 'B' new_df = idf.reset_index() new_df.index = df.B tm.assert_index_equal(new_df.index, ci, check_names=False) - self.assertEqual(idf.index.name, 'B') + assert idf.index.name == 'B' def test_set_index_cast_datetimeindex(self): df = DataFrame({'A': [datetime(2000, 1, 1) + timedelta(i) @@ -224,7 +224,7 @@ def test_set_index_cast_datetimeindex(self): df['B'] = i result = df['B'] assert_series_equal(result, expected, check_names=False) - self.assertEqual(result.name, 'B') + assert result.name == 'B' # keep the timezone result = i.to_series(keep_tz=True) @@ -241,7 +241,7 @@ def test_set_index_cast_datetimeindex(self): df['D'] = i.to_pydatetime() result = df['D'] assert_series_equal(result, expected, check_names=False) - self.assertEqual(result.name, 'D') + assert result.name == 'D' # GH 6785 # set the index manually @@ -279,9 +279,9 @@ def test_set_index_timezone(self): i = pd.to_datetime(["2014-01-01 10:10:10"], utc=True).tz_convert('Europe/Rome') df = DataFrame({'i': i}) - self.assertEqual(df.set_index(i).index[0].hour, 11) - self.assertEqual(pd.DatetimeIndex(pd.Series(df.i))[0].hour, 11) - self.assertEqual(df.set_index(df.i).index[0].hour, 11) + assert df.set_index(i).index[0].hour == 11 + assert pd.DatetimeIndex(pd.Series(df.i))[0].hour == 11 + assert df.set_index(df.i).index[0].hour == 11 def test_set_index_dst(self): di = pd.date_range('2006-10-29 00:00:00', periods=3, @@ -365,7 +365,7 @@ def test_dti_set_index_reindex(self): # TODO: unused? 
result = df.set_index(new_index) # noqa - self.assertEqual(new_index.freq, index.freq) + assert new_index.freq == index.freq # Renaming @@ -416,7 +416,7 @@ def test_rename(self): renamed = renamer.rename(index={'foo': 'bar', 'bar': 'foo'}) tm.assert_index_equal(renamed.index, pd.Index(['bar', 'foo'], name='name')) - self.assertEqual(renamed.index.name, renamer.index.name) + assert renamed.index.name == renamer.index.name def test_rename_multiindex(self): @@ -440,8 +440,8 @@ def test_rename_multiindex(self): names=['fizz', 'buzz']) tm.assert_index_equal(renamed.index, new_index) tm.assert_index_equal(renamed.columns, new_columns) - self.assertEqual(renamed.index.names, df.index.names) - self.assertEqual(renamed.columns.names, df.columns.names) + assert renamed.index.names == df.index.names + assert renamed.columns.names == df.columns.names # # with specifying a level (GH13766) @@ -609,7 +609,7 @@ def test_reset_index(self): # preserve column names self.frame.columns.name = 'columns' resetted = self.frame.reset_index() - self.assertEqual(resetted.columns.name, 'columns') + assert resetted.columns.name == 'columns' # only remove certain columns frame = self.frame.reset_index().set_index(['index', 'A', 'B']) @@ -649,10 +649,10 @@ def test_reset_index_right_dtype(self): df = DataFrame(s1) resetted = s1.reset_index() - self.assertEqual(resetted['time'].dtype, np.float64) + assert resetted['time'].dtype == np.float64 resetted = df.reset_index() - self.assertEqual(resetted['time'].dtype, np.float64) + assert resetted['time'].dtype == np.float64 def test_reset_index_multiindex_col(self): vals = np.random.randn(3, 3).astype(object) @@ -752,7 +752,7 @@ def test_set_index_names(self): df = pd.util.testing.makeDataFrame() df.index.name = 'name' - self.assertEqual(df.set_index(df.index).index.names, ['name']) + assert df.set_index(df.index).index.names == ['name'] mi = MultiIndex.from_arrays(df[['A', 'B']].T.values, names=['A', 'B']) mi2 = MultiIndex.from_arrays(df[['A', 'B', 'A', 'B']].T.values, @@ -760,7 +760,7 @@ def test_set_index_names(self): df = df.set_index(['A', 'B']) - self.assertEqual(df.set_index(df.index).index.names, ['A', 'B']) + assert df.set_index(df.index).index.names == ['A', 'B'] # Check that set_index isn't converting a MultiIndex into an Index assert isinstance(df.set_index(df.index).index, MultiIndex) diff --git a/pandas/tests/frame/test_analytics.py b/pandas/tests/frame/test_analytics.py index 8f46f055343d4..89ee096b4434e 100644 --- a/pandas/tests/frame/test_analytics.py +++ b/pandas/tests/frame/test_analytics.py @@ -83,8 +83,8 @@ def test_corr_nooverlap(self): rs = df.corr(meth) assert isnull(rs.loc['A', 'B']) assert isnull(rs.loc['B', 'A']) - self.assertEqual(rs.loc['A', 'A'], 1) - self.assertEqual(rs.loc['B', 'B'], 1) + assert rs.loc['A', 'A'] == 1 + assert rs.loc['B', 'B'] == 1 assert isnull(rs.loc['C', 'C']) def test_corr_constant(self): @@ -335,8 +335,8 @@ def test_describe_datetime_columns(self): '50%', '75%', 'max']) expected.columns = exp_columns tm.assert_frame_equal(result, expected) - self.assertEqual(result.columns.freq, 'MS') - self.assertEqual(result.columns.tz, expected.columns.tz) + assert result.columns.freq == 'MS' + assert result.columns.tz == expected.columns.tz def test_describe_timedelta_values(self): # GH 6145 @@ -373,7 +373,7 @@ def test_describe_timedelta_values(self): "50% 3 days 00:00:00 0 days 03:00:00\n" "75% 4 days 00:00:00 0 days 04:00:00\n" "max 5 days 00:00:00 0 days 05:00:00") - self.assertEqual(repr(res), exp_repr) + assert repr(res) == 
exp_repr def test_reduce_mixed_frame(self): # GH 6806 @@ -462,7 +462,7 @@ def test_stat_operators_attempt_obj_array(self): for df in [df1, df2]: for meth in methods: - self.assertEqual(df.values.dtype, np.object_) + assert df.values.dtype == np.object_ result = getattr(df, meth)(1) expected = getattr(df.astype('f8'), meth)(1) @@ -508,7 +508,7 @@ def test_cummin(self): # fix issue cummin_xs = self.tsframe.cummin(axis=1) - self.assertEqual(np.shape(cummin_xs), np.shape(self.tsframe)) + assert np.shape(cummin_xs) == np.shape(self.tsframe) def test_cummax(self): self.tsframe.loc[5:10, 0] = nan @@ -531,7 +531,7 @@ def test_cummax(self): # fix issue cummax_xs = self.tsframe.cummax(axis=1) - self.assertEqual(np.shape(cummax_xs), np.shape(self.tsframe)) + assert np.shape(cummax_xs) == np.shape(self.tsframe) def test_max(self): self._check_stat_op('max', np.max, check_dates=True) @@ -629,7 +629,7 @@ def test_cumsum(self): # fix issue cumsum_xs = self.tsframe.cumsum(axis=1) - self.assertEqual(np.shape(cumsum_xs), np.shape(self.tsframe)) + assert np.shape(cumsum_xs) == np.shape(self.tsframe) def test_cumprod(self): self.tsframe.loc[5:10, 0] = nan @@ -648,7 +648,7 @@ def test_cumprod(self): # fix issue cumprod_xs = self.tsframe.cumprod(axis=1) - self.assertEqual(np.shape(cumprod_xs), np.shape(self.tsframe)) + assert np.shape(cumprod_xs) == np.shape(self.tsframe) # ints df = self.tsframe.fillna(0).astype(int) @@ -711,7 +711,7 @@ def alt(x): kurt2 = df.kurt(level=0).xs('bar') tm.assert_series_equal(kurt, kurt2, check_names=False) assert kurt.name is None - self.assertEqual(kurt2.name, 'bar') + assert kurt2.name == 'bar' def _check_stat_op(self, name, alternative, frame=None, has_skipna=True, has_numeric_only=False, check_dtype=True, @@ -771,8 +771,8 @@ def wrapper(x): # check dtypes if check_dtype: lcd_dtype = frame.values.dtype - self.assertEqual(lcd_dtype, result0.dtype) - self.assertEqual(lcd_dtype, result1.dtype) + assert lcd_dtype == result0.dtype + assert lcd_dtype == result1.dtype # result = f(axis=1) # comp = frame.apply(alternative, axis=1).reindex(result.index) @@ -860,16 +860,16 @@ def test_operators_timedelta64(self): # min result = diffs.min() - self.assertEqual(result[0], diffs.loc[0, 'A']) - self.assertEqual(result[1], diffs.loc[0, 'B']) + assert result[0] == diffs.loc[0, 'A'] + assert result[1] == diffs.loc[0, 'B'] result = diffs.min(axis=1) assert (result == diffs.loc[0, 'B']).all() # max result = diffs.max() - self.assertEqual(result[0], diffs.loc[2, 'A']) - self.assertEqual(result[1], diffs.loc[2, 'B']) + assert result[0] == diffs.loc[2, 'A'] + assert result[1] == diffs.loc[2, 'B'] result = diffs.max(axis=1) assert (result == diffs['A']).all() @@ -920,7 +920,7 @@ def test_operators_timedelta64(self): df = DataFrame({'time': date_range('20130102', periods=5), 'time2': date_range('20130105', periods=5)}) df['off1'] = df['time2'] - df['time'] - self.assertEqual(df['off1'].dtype, 'timedelta64[ns]') + assert df['off1'].dtype == 'timedelta64[ns]' df['off2'] = df['time'] - df['time2'] df._consolidate_inplace() @@ -932,8 +932,8 @@ def test_sum_corner(self): axis1 = self.empty.sum(1) assert isinstance(axis0, Series) assert isinstance(axis1, Series) - self.assertEqual(len(axis0), 0) - self.assertEqual(len(axis1), 0) + assert len(axis0) == 0 + assert len(axis1) == 0 def test_sum_object(self): values = self.frame.values.astype(int) @@ -963,7 +963,7 @@ def test_mean_corner(self): # take mean of boolean column self.frame['bool'] = self.frame['A'] > 0 means = self.frame.mean(0) - 
self.assertEqual(means['bool'], self.frame['bool'].values.mean()) + assert means['bool'] == self.frame['bool'].values.mean() def test_stats_mixed_type(self): # don't blow up @@ -999,7 +999,7 @@ def test_cumsum_corner(self): def test_sum_bools(self): df = DataFrame(index=lrange(1), columns=lrange(10)) bools = isnull(df) - self.assertEqual(bools.sum(axis=1)[0], 10) + assert bools.sum(axis=1)[0] == 10 # Index of max / min @@ -1307,7 +1307,7 @@ def test_drop_duplicates(self): result = df.drop_duplicates('AAA', keep=False) expected = df.loc[[]] tm.assert_frame_equal(result, expected) - self.assertEqual(len(result), 0) + assert len(result) == 0 # multi column expected = df.loc[[0, 1, 2, 3]] @@ -1380,7 +1380,7 @@ def test_drop_duplicates(self): df = df.append([[1] + [0] * 8], ignore_index=True) for keep in ['first', 'last', False]: - self.assertEqual(df.duplicated(keep=keep).sum(), 0) + assert df.duplicated(keep=keep).sum() == 0 def test_drop_duplicates_for_take_all(self): df = DataFrame({'AAA': ['foo', 'bar', 'baz', 'bar', @@ -1435,7 +1435,7 @@ def test_drop_duplicates_tuple(self): result = df.drop_duplicates(('AA', 'AB'), keep=False) expected = df.loc[[]] # empty df - self.assertEqual(len(result), 0) + assert len(result) == 0 tm.assert_frame_equal(result, expected) # multi column @@ -1464,7 +1464,7 @@ def test_drop_duplicates_NA(self): result = df.drop_duplicates('A', keep=False) expected = df.loc[[]] # empty df tm.assert_frame_equal(result, expected) - self.assertEqual(len(result), 0) + assert len(result) == 0 # multi column result = df.drop_duplicates(['A', 'B']) @@ -1499,7 +1499,7 @@ def test_drop_duplicates_NA(self): result = df.drop_duplicates('C', keep=False) expected = df.loc[[]] # empty df tm.assert_frame_equal(result, expected) - self.assertEqual(len(result), 0) + assert len(result) == 0 # multi column result = df.drop_duplicates(['C', 'B']) @@ -1574,7 +1574,7 @@ def test_drop_duplicates_inplace(self): expected = orig.loc[[]] result = df tm.assert_frame_equal(result, expected) - self.assertEqual(len(df), 0) + assert len(df) == 0 # multi column df = orig.copy() @@ -1840,11 +1840,11 @@ def test_clip_against_series(self): result = clipped_df.loc[lb_mask, i] tm.assert_series_equal(result, lb[lb_mask], check_names=False) - self.assertEqual(result.name, i) + assert result.name == i result = clipped_df.loc[ub_mask, i] tm.assert_series_equal(result, ub[ub_mask], check_names=False) - self.assertEqual(result.name, i) + assert result.name == i tm.assert_series_equal(clipped_df.loc[mask, i], df.loc[mask, i]) diff --git a/pandas/tests/frame/test_api.py b/pandas/tests/frame/test_api.py index 6b1e9d66d2071..d2a1e32f015b2 100644 --- a/pandas/tests/frame/test_api.py +++ b/pandas/tests/frame/test_api.py @@ -41,16 +41,16 @@ def test_copy_index_name_checking(self): def test_getitem_pop_assign_name(self): s = self.frame['A'] - self.assertEqual(s.name, 'A') + assert s.name == 'A' s = self.frame.pop('A') - self.assertEqual(s.name, 'A') + assert s.name == 'A' s = self.frame.loc[:, 'B'] - self.assertEqual(s.name, 'B') + assert s.name == 'B' s2 = s.loc[:] - self.assertEqual(s2.name, 'B') + assert s2.name == 'B' def test_get_value(self): for idx in self.frame.index: @@ -75,17 +75,17 @@ class TestDataFrameMisc(tm.TestCase, SharedWithSparse, TestData): def test_get_axis(self): f = self.frame - self.assertEqual(f._get_axis_number(0), 0) - self.assertEqual(f._get_axis_number(1), 1) - self.assertEqual(f._get_axis_number('index'), 0) - self.assertEqual(f._get_axis_number('rows'), 0) - 
self.assertEqual(f._get_axis_number('columns'), 1) - - self.assertEqual(f._get_axis_name(0), 'index') - self.assertEqual(f._get_axis_name(1), 'columns') - self.assertEqual(f._get_axis_name('index'), 'index') - self.assertEqual(f._get_axis_name('rows'), 'index') - self.assertEqual(f._get_axis_name('columns'), 'columns') + assert f._get_axis_number(0) == 0 + assert f._get_axis_number(1) == 1 + assert f._get_axis_number('index') == 0 + assert f._get_axis_number('rows') == 0 + assert f._get_axis_number('columns') == 1 + + assert f._get_axis_name(0) == 'index' + assert f._get_axis_name(1) == 'columns' + assert f._get_axis_name('index') == 'index' + assert f._get_axis_name('rows') == 'index' + assert f._get_axis_name('columns') == 'columns' assert f._get_axis(0) is f.index assert f._get_axis(1) is f.columns @@ -154,7 +154,7 @@ def test_nonzero(self): def test_iteritems(self): df = DataFrame([[1, 2, 3], [4, 5, 6]], columns=['a', 'a', 'b']) for k, v in compat.iteritems(df): - self.assertEqual(type(v), Series) + assert type(v) == Series def test_iter(self): assert tm.equalContents(list(self.frame), self.frame.columns) @@ -183,27 +183,25 @@ def test_itertuples(self): df = DataFrame(data={"a": [1, 2, 3], "b": [4, 5, 6]}) dfaa = df[['a', 'a']] - self.assertEqual(list(dfaa.itertuples()), [ - (0, 1, 1), (1, 2, 2), (2, 3, 3)]) - self.assertEqual(repr(list(df.itertuples(name=None))), - '[(0, 1, 4), (1, 2, 5), (2, 3, 6)]') + assert (list(dfaa.itertuples()) == + [(0, 1, 1), (1, 2, 2), (2, 3, 3)]) + assert (repr(list(df.itertuples(name=None))) == + '[(0, 1, 4), (1, 2, 5), (2, 3, 6)]') tup = next(df.itertuples(name='TestName')) - # no support for field renaming in Python 2.6, regular tuples are - # returned if sys.version >= LooseVersion('2.7'): - self.assertEqual(tup._fields, ('Index', 'a', 'b')) - self.assertEqual((tup.Index, tup.a, tup.b), tup) - self.assertEqual(type(tup).__name__, 'TestName') + assert tup._fields == ('Index', 'a', 'b') + assert (tup.Index, tup.a, tup.b) == tup + assert type(tup).__name__ == 'TestName' df.columns = ['def', 'return'] tup2 = next(df.itertuples(name='TestName')) - self.assertEqual(tup2, (0, 1, 4)) + assert tup2 == (0, 1, 4) if sys.version >= LooseVersion('2.7'): - self.assertEqual(tup2._fields, ('Index', '_1', '_2')) + assert tup2._fields == ('Index', '_1', '_2') df3 = DataFrame(dict(('f' + str(i), [i]) for i in range(1024))) # will raise SyntaxError if trying to create namedtuple @@ -212,7 +210,7 @@ def test_itertuples(self): assert isinstance(tup3, tuple) def test_len(self): - self.assertEqual(len(self.frame), len(self.frame.index)) + assert len(self.frame) == len(self.frame.index) def test_as_matrix(self): frame = self.frame @@ -225,15 +223,15 @@ def test_as_matrix(self): if np.isnan(value): assert np.isnan(frame[col][i]) else: - self.assertEqual(value, frame[col][i]) + assert value == frame[col][i] # mixed type mat = self.mixed_frame.as_matrix(['foo', 'A']) - self.assertEqual(mat[0, 0], 'bar') + assert mat[0, 0] == 'bar' df = DataFrame({'real': [1, 2, 3], 'complex': [1j, 2j, 3j]}) mat = df.as_matrix() - self.assertEqual(mat[0, 0], 1j) + assert mat[0, 0] == 1j # single block corner case mat = self.frame.as_matrix(['A', 'B']) @@ -262,7 +260,7 @@ def test_transpose(self): if np.isnan(value): assert np.isnan(frame[col][idx]) else: - self.assertEqual(value, frame[col][idx]) + assert value == frame[col][idx] # mixed type index, data = tm.getMixedTypeDict() @@ -270,7 +268,7 @@ def test_transpose(self): mixed_T = mixed.T for col, s in compat.iteritems(mixed_T): - 
self.assertEqual(s.dtype, np.object_) + assert s.dtype == np.object_ def test_transpose_get_view(self): dft = self.frame.T @@ -299,23 +297,23 @@ def test_axis_aliases(self): def test_more_asMatrix(self): values = self.mixed_frame.as_matrix() - self.assertEqual(values.shape[1], len(self.mixed_frame.columns)) + assert values.shape[1] == len(self.mixed_frame.columns) def test_repr_with_mi_nat(self): df = DataFrame({'X': [1, 2]}, index=[[pd.NaT, pd.Timestamp('20130101')], ['a', 'b']]) res = repr(df) exp = ' X\nNaT a 1\n2013-01-01 b 2' - self.assertEqual(res, exp) + assert res == exp def test_iteritems_names(self): for k, v in compat.iteritems(self.mixed_frame): - self.assertEqual(v.name, k) + assert v.name == k def test_series_put_names(self): series = self.mixed_frame._series for k, v in compat.iteritems(series): - self.assertEqual(v.name, k) + assert v.name == k def test_empty_nonzero(self): df = DataFrame([1, 2, 3]) diff --git a/pandas/tests/frame/test_apply.py b/pandas/tests/frame/test_apply.py index 0bccca5cecb27..5febe8c62abe8 100644 --- a/pandas/tests/frame/test_apply.py +++ b/pandas/tests/frame/test_apply.py @@ -97,7 +97,7 @@ def test_apply_empty(self): [], index=pd.Index([], dtype=object))) # Ensure that x.append hasn't been called - self.assertEqual(x, []) + assert x == [] def test_apply_standard_nonunique(self): df = DataFrame( @@ -150,7 +150,7 @@ def test_apply_raw(self): def test_apply_axis1(self): d = self.frame.index[0] tapplied = self.frame.apply(np.mean, axis=1) - self.assertEqual(tapplied[d], np.mean(self.frame.xs(d))) + assert tapplied[d] == np.mean(self.frame.xs(d)) def test_apply_ignore_failures(self): result = self.mixed_frame._apply_standard(np.mean, 0, @@ -284,12 +284,11 @@ def transform2(row): return row try: - transformed = data.apply(transform, axis=1) # noqa + data.apply(transform, axis=1) except AttributeError as e: - self.assertEqual(len(e.args), 2) - self.assertEqual(e.args[1], 'occurred at index 4') - self.assertEqual( - e.args[0], "'float' object has no attribute 'startswith'") + assert len(e.args) == 2 + assert e.args[1] == 'occurred at index 4' + assert e.args[0] == "'float' object has no attribute 'startswith'" def test_apply_bug(self): @@ -383,23 +382,23 @@ def test_apply_dict(self): def test_applymap(self): applied = self.frame.applymap(lambda x: x * 2) - assert_frame_equal(applied, self.frame * 2) - result = self.frame.applymap(type) + tm.assert_frame_equal(applied, self.frame * 2) + self.frame.applymap(type) - # GH #465, function returning tuples + # gh-465: function returning tuples result = self.frame.applymap(lambda x: (x, x)) assert isinstance(result['A'][0], tuple) - # GH 2909, object conversion to float in constructor? + # gh-2909: object conversion to float in constructor? 
df = DataFrame(data=[1, 'a']) result = df.applymap(lambda x: x) - self.assertEqual(result.dtypes[0], object) + assert result.dtypes[0] == object df = DataFrame(data=[1., 'a']) result = df.applymap(lambda x: x) - self.assertEqual(result.dtypes[0], object) + assert result.dtypes[0] == object - # GH2786 + # see gh-2786 df = DataFrame(np.random.random((3, 4))) df2 = df.copy() cols = ['a', 'a', 'a', 'a'] @@ -408,16 +407,16 @@ def test_applymap(self): expected = df2.applymap(str) expected.columns = cols result = df.applymap(str) - assert_frame_equal(result, expected) + tm.assert_frame_equal(result, expected) # datetime/timedelta df['datetime'] = Timestamp('20130101') df['timedelta'] = pd.Timedelta('1 min') result = df.applymap(str) for f in ['datetime', 'timedelta']: - self.assertEqual(result.loc[0, f], str(df.loc[0, f])) + assert result.loc[0, f] == str(df.loc[0, f]) - # GH 8222 + # see gh-8222 empty_frames = [pd.DataFrame(), pd.DataFrame(columns=list('ABC')), pd.DataFrame(index=list('ABC')), diff --git a/pandas/tests/frame/test_axis_select_reindex.py b/pandas/tests/frame/test_axis_select_reindex.py index 2c285c6261415..a563b678a3786 100644 --- a/pandas/tests/frame/test_axis_select_reindex.py +++ b/pandas/tests/frame/test_axis_select_reindex.py @@ -37,9 +37,9 @@ def test_drop_names(self): df_inplace_b.drop('b', inplace=True) df_inplace_e.drop('e', axis=1, inplace=True) for obj in (df_dropped_b, df_dropped_e, df_inplace_b, df_inplace_e): - self.assertEqual(obj.index.name, 'first') - self.assertEqual(obj.columns.name, 'second') - self.assertEqual(list(df.columns), ['d', 'e', 'f']) + assert obj.index.name == 'first' + assert obj.columns.name == 'second' + assert list(df.columns) == ['d', 'e', 'f'] pytest.raises(ValueError, df.drop, ['g']) pytest.raises(ValueError, df.drop, ['g'], 1) @@ -174,14 +174,14 @@ def test_reindex(self): if np.isnan(val): assert np.isnan(self.frame[col][idx]) else: - self.assertEqual(val, self.frame[col][idx]) + assert val == self.frame[col][idx] else: assert np.isnan(val) for col, series in compat.iteritems(newFrame): assert tm.equalContents(series.index, newFrame.index) emptyFrame = self.frame.reindex(Index([])) - self.assertEqual(len(emptyFrame.index), 0) + assert len(emptyFrame.index) == 0 # Cython code should be unit-tested directly nonContigFrame = self.frame.reindex(self.ts1.index[::2]) @@ -192,7 +192,7 @@ def test_reindex(self): if np.isnan(val): assert np.isnan(self.frame[col][idx]) else: - self.assertEqual(val, self.frame[col][idx]) + assert val == self.frame[col][idx] else: assert np.isnan(val) @@ -208,13 +208,13 @@ def test_reindex(self): # length zero newFrame = self.frame.reindex([]) assert newFrame.empty - self.assertEqual(len(newFrame.columns), len(self.frame.columns)) + assert len(newFrame.columns) == len(self.frame.columns) # length zero with columns reindexed with non-empty index newFrame = self.frame.reindex([]) newFrame = newFrame.reindex(self.frame.index) - self.assertEqual(len(newFrame.index), len(self.frame.index)) - self.assertEqual(len(newFrame.columns), len(self.frame.columns)) + assert len(newFrame.index) == len(self.frame.index) + assert len(newFrame.columns) == len(self.frame.columns) # pass non-Index newFrame = self.frame.reindex(list(self.ts1.index)) @@ -255,27 +255,27 @@ def test_reindex_name_remains(self): i = Series(np.arange(10), name='iname') df = df.reindex(i) - self.assertEqual(df.index.name, 'iname') + assert df.index.name == 'iname' df = df.reindex(Index(np.arange(10), name='tmpname')) - self.assertEqual(df.index.name, 'tmpname') + 
assert df.index.name == 'tmpname' s = Series(random.rand(10)) df = DataFrame(s.T, index=np.arange(len(s))) i = Series(np.arange(10), name='iname') df = df.reindex(columns=i) - self.assertEqual(df.columns.name, 'iname') + assert df.columns.name == 'iname' def test_reindex_int(self): smaller = self.intframe.reindex(self.intframe.index[::2]) - self.assertEqual(smaller['A'].dtype, np.int64) + assert smaller['A'].dtype == np.int64 bigger = smaller.reindex(self.intframe.index) - self.assertEqual(bigger['A'].dtype, np.float64) + assert bigger['A'].dtype == np.float64 smaller = self.intframe.reindex(columns=['A', 'B']) - self.assertEqual(smaller['A'].dtype, np.int64) + assert smaller['A'].dtype == np.int64 def test_reindex_like(self): other = self.frame.reindex(index=self.frame.index[:10], @@ -346,8 +346,8 @@ def test_reindex_axes(self): both_freq = df.reindex(index=time_freq, columns=some_cols).index.freq seq_freq = df.reindex(index=time_freq).reindex( columns=some_cols).index.freq - self.assertEqual(index_freq, both_freq) - self.assertEqual(index_freq, seq_freq) + assert index_freq == both_freq + assert index_freq == seq_freq def test_reindex_fill_value(self): df = DataFrame(np.random.randn(10, 4)) @@ -732,7 +732,7 @@ def test_filter_regex_search(self): # regex filtered = fcopy.filter(regex='[A]+') - self.assertEqual(len(filtered.columns), 2) + assert len(filtered.columns) == 2 assert 'AA' in filtered # doesn't have to be at beginning @@ -845,11 +845,11 @@ def test_reindex_boolean(self): columns=[0, 2]) reindexed = frame.reindex(np.arange(10)) - self.assertEqual(reindexed.values.dtype, np.object_) + assert reindexed.values.dtype == np.object_ assert isnull(reindexed[0][1]) reindexed = frame.reindex(columns=lrange(3)) - self.assertEqual(reindexed.values.dtype, np.object_) + assert reindexed.values.dtype == np.object_ assert isnull(reindexed[1]).all() def test_reindex_objects(self): @@ -867,7 +867,7 @@ def test_reindex_corner(self): # ints are weird smaller = self.intframe.reindex(columns=['A', 'B', 'E']) - self.assertEqual(smaller['E'].dtype, np.float64) + assert smaller['E'].dtype == np.float64 def test_reindex_axis(self): cols = ['A', 'B', 'E'] diff --git a/pandas/tests/frame/test_block_internals.py b/pandas/tests/frame/test_block_internals.py index 2a319348aca3f..44dc6df756f3d 100644 --- a/pandas/tests/frame/test_block_internals.py +++ b/pandas/tests/frame/test_block_internals.py @@ -95,47 +95,47 @@ def test_as_matrix_numeric_cols(self): self.frame['foo'] = 'bar' values = self.frame.as_matrix(['A', 'B', 'C', 'D']) - self.assertEqual(values.dtype, np.float64) + assert values.dtype == np.float64 def test_as_matrix_lcd(self): # mixed lcd values = self.mixed_float.as_matrix(['A', 'B', 'C', 'D']) - self.assertEqual(values.dtype, np.float64) + assert values.dtype == np.float64 values = self.mixed_float.as_matrix(['A', 'B', 'C']) - self.assertEqual(values.dtype, np.float32) + assert values.dtype == np.float32 values = self.mixed_float.as_matrix(['C']) - self.assertEqual(values.dtype, np.float16) + assert values.dtype == np.float16 # GH 10364 # B uint64 forces float because there are other signed int types values = self.mixed_int.as_matrix(['A', 'B', 'C', 'D']) - self.assertEqual(values.dtype, np.float64) + assert values.dtype == np.float64 values = self.mixed_int.as_matrix(['A', 'D']) - self.assertEqual(values.dtype, np.int64) + assert values.dtype == np.int64 # B uint64 forces float because there are other signed int types values = self.mixed_int.as_matrix(['A', 'B', 'C']) - 
self.assertEqual(values.dtype, np.float64) + assert values.dtype == np.float64 # as B and C are both unsigned, no forcing to float is needed values = self.mixed_int.as_matrix(['B', 'C']) - self.assertEqual(values.dtype, np.uint64) + assert values.dtype == np.uint64 values = self.mixed_int.as_matrix(['A', 'C']) - self.assertEqual(values.dtype, np.int32) + assert values.dtype == np.int32 values = self.mixed_int.as_matrix(['C', 'D']) - self.assertEqual(values.dtype, np.int64) + assert values.dtype == np.int64 values = self.mixed_int.as_matrix(['A']) - self.assertEqual(values.dtype, np.int32) + assert values.dtype == np.int32 values = self.mixed_int.as_matrix(['C']) - self.assertEqual(values.dtype, np.uint8) + assert values.dtype == np.uint8 def test_constructor_with_convert(self): # this is actually mostly a test of lib.maybe_convert_objects @@ -220,8 +220,8 @@ def test_construction_with_mixed(self): # mixed-type frames self.mixed_frame['datetime'] = datetime.now() self.mixed_frame['timedelta'] = timedelta(days=1, seconds=1) - self.assertEqual(self.mixed_frame['datetime'].dtype, 'M8[ns]') - self.assertEqual(self.mixed_frame['timedelta'].dtype, 'm8[ns]') + assert self.mixed_frame['datetime'].dtype == 'M8[ns]' + assert self.mixed_frame['timedelta'].dtype == 'm8[ns]' result = self.mixed_frame.get_dtype_counts().sort_values() expected = Series({'float64': 4, 'object': 1, @@ -452,7 +452,7 @@ def test_convert_objects(self): oops = self.mixed_frame.T.T converted = oops._convert(datetime=True) assert_frame_equal(converted, self.mixed_frame) - self.assertEqual(converted['A'].dtype, np.float64) + assert converted['A'].dtype == np.float64 # force numeric conversion self.mixed_frame['H'] = '1.' @@ -464,19 +464,19 @@ def test_convert_objects(self): self.mixed_frame['K'] = '1' self.mixed_frame.loc[0:5, ['J', 'K']] = 'garbled' converted = self.mixed_frame._convert(datetime=True, numeric=True) - self.assertEqual(converted['H'].dtype, 'float64') - self.assertEqual(converted['I'].dtype, 'int64') - self.assertEqual(converted['J'].dtype, 'float64') - self.assertEqual(converted['K'].dtype, 'float64') - self.assertEqual(len(converted['J'].dropna()), l - 5) - self.assertEqual(len(converted['K'].dropna()), l - 5) + assert converted['H'].dtype == 'float64' + assert converted['I'].dtype == 'int64' + assert converted['J'].dtype == 'float64' + assert converted['K'].dtype == 'float64' + assert len(converted['J'].dropna()) == l - 5 + assert len(converted['K'].dropna()) == l - 5 # via astype converted = self.mixed_frame.copy() converted['H'] = converted['H'].astype('float64') converted['I'] = converted['I'].astype('int64') - self.assertEqual(converted['H'].dtype, 'float64') - self.assertEqual(converted['I'].dtype, 'int64') + assert converted['H'].dtype == 'float64' + assert converted['I'].dtype == 'int64' # via astype, but errors converted = self.mixed_frame.copy() diff --git a/pandas/tests/frame/test_combine_concat.py b/pandas/tests/frame/test_combine_concat.py index 5452792def1ac..44f17faabe20d 100644 --- a/pandas/tests/frame/test_combine_concat.py +++ b/pandas/tests/frame/test_combine_concat.py @@ -303,7 +303,7 @@ def test_join_str_datetime(self): tst = A.join(C, on='aa') - self.assertEqual(len(tst.columns), 3) + assert len(tst.columns) == 3 def test_join_multiindex_leftright(self): # GH 10741 @@ -538,7 +538,7 @@ def test_combine_first_mixed_bug(self): "col5": ser3}) combined = frame1.combine_first(frame2) - self.assertEqual(len(combined.columns), 5) + assert len(combined.columns) == 5 # gh 3016 (same as in update) df = 
DataFrame([[1., 2., False, True], [4., 5., True, False]], @@ -603,28 +603,28 @@ def test_combine_first_align_nan(self): dfa = pd.DataFrame([[pd.Timestamp('2011-01-01'), 2]], columns=['a', 'b']) dfb = pd.DataFrame([[4], [5]], columns=['b']) - self.assertEqual(dfa['a'].dtype, 'datetime64[ns]') - self.assertEqual(dfa['b'].dtype, 'int64') + assert dfa['a'].dtype == 'datetime64[ns]' + assert dfa['b'].dtype == 'int64' res = dfa.combine_first(dfb) exp = pd.DataFrame({'a': [pd.Timestamp('2011-01-01'), pd.NaT], 'b': [2., 5.]}, columns=['a', 'b']) tm.assert_frame_equal(res, exp) - self.assertEqual(res['a'].dtype, 'datetime64[ns]') + assert res['a'].dtype == 'datetime64[ns]' # ToDo: this must be int64 - self.assertEqual(res['b'].dtype, 'float64') + assert res['b'].dtype == 'float64' res = dfa.iloc[:0].combine_first(dfb) exp = pd.DataFrame({'a': [np.nan, np.nan], 'b': [4, 5]}, columns=['a', 'b']) tm.assert_frame_equal(res, exp) # ToDo: this must be datetime64 - self.assertEqual(res['a'].dtype, 'float64') + assert res['a'].dtype == 'float64' # ToDo: this must be int64 - self.assertEqual(res['b'].dtype, 'int64') + assert res['b'].dtype == 'int64' def test_combine_first_timezone(self): - # GH 7630 + # see gh-7630 data1 = pd.to_datetime('20100101 01:01').tz_localize('UTC') df1 = pd.DataFrame(columns=['UTCdatetime', 'abc'], data=data1, @@ -644,10 +644,10 @@ def test_combine_first_timezone(self): index=pd.date_range('20140627', periods=2, freq='D')) tm.assert_frame_equal(res, exp) - self.assertEqual(res['UTCdatetime'].dtype, 'datetime64[ns, UTC]') - self.assertEqual(res['abc'].dtype, 'datetime64[ns, UTC]') + assert res['UTCdatetime'].dtype == 'datetime64[ns, UTC]' + assert res['abc'].dtype == 'datetime64[ns, UTC]' - # GH 10567 + # see gh-10567 dts1 = pd.date_range('2015-01-01', '2015-01-05', tz='UTC') df1 = pd.DataFrame({'DATE': dts1}) dts2 = pd.date_range('2015-01-03', '2015-01-05', tz='UTC') @@ -655,7 +655,7 @@ def test_combine_first_timezone(self): res = df1.combine_first(df2) tm.assert_frame_equal(res, df1) - self.assertEqual(res['DATE'].dtype, 'datetime64[ns, UTC]') + assert res['DATE'].dtype == 'datetime64[ns, UTC]' dts1 = pd.DatetimeIndex(['2011-01-01', 'NaT', '2011-01-03', '2011-01-04'], tz='US/Eastern') @@ -680,7 +680,7 @@ def test_combine_first_timezone(self): # if df1 doesn't have NaN, keep its dtype res = df1.combine_first(df2) tm.assert_frame_equal(res, df1) - self.assertEqual(res['DATE'].dtype, 'datetime64[ns, US/Eastern]') + assert res['DATE'].dtype == 'datetime64[ns, US/Eastern]' dts1 = pd.date_range('2015-01-01', '2015-01-02', tz='US/Eastern') df1 = pd.DataFrame({'DATE': dts1}) @@ -693,7 +693,7 @@ def test_combine_first_timezone(self): pd.Timestamp('2015-01-03')] exp = pd.DataFrame({'DATE': exp_dts}) tm.assert_frame_equal(res, exp) - self.assertEqual(res['DATE'].dtype, 'object') + assert res['DATE'].dtype == 'object' def test_combine_first_timedelta(self): data1 = pd.TimedeltaIndex(['1 day', 'NaT', '3 day', '4day']) @@ -706,7 +706,7 @@ def test_combine_first_timedelta(self): '11 day', '3 day', '4 day']) exp = pd.DataFrame({'TD': exp_dts}, index=[1, 2, 3, 4, 5, 7]) tm.assert_frame_equal(res, exp) - self.assertEqual(res['TD'].dtype, 'timedelta64[ns]') + assert res['TD'].dtype == 'timedelta64[ns]' def test_combine_first_period(self): data1 = pd.PeriodIndex(['2011-01', 'NaT', '2011-03', @@ -722,7 +722,7 @@ def test_combine_first_period(self): freq='M') exp = pd.DataFrame({'P': exp_dts}, index=[1, 2, 3, 4, 5, 7]) tm.assert_frame_equal(res, exp) - self.assertEqual(res['P'].dtype, 'object') + 
assert res['P'].dtype == 'object' # different freq dts2 = pd.PeriodIndex(['2012-01-01', '2012-01-02', @@ -738,7 +738,7 @@ def test_combine_first_period(self): pd.Period('2011-04', freq='M')] exp = pd.DataFrame({'P': exp_dts}, index=[1, 2, 3, 4, 5, 7]) tm.assert_frame_equal(res, exp) - self.assertEqual(res['P'].dtype, 'object') + assert res['P'].dtype == 'object' def test_combine_first_int(self): # GH14687 - integer series that do no align exactly @@ -748,7 +748,7 @@ def test_combine_first_int(self): res = df1.combine_first(df2) tm.assert_frame_equal(res, df1) - self.assertEqual(res['a'].dtype, 'int64') + assert res['a'].dtype == 'int64' def test_concat_datetime_datetime64_frame(self): # #2624 diff --git a/pandas/tests/frame/test_constructors.py b/pandas/tests/frame/test_constructors.py index 588182eb30336..5b00ddc51da46 100644 --- a/pandas/tests/frame/test_constructors.py +++ b/pandas/tests/frame/test_constructors.py @@ -36,10 +36,10 @@ class TestDataFrameConstructors(tm.TestCase, TestData): def test_constructor(self): df = DataFrame() - self.assertEqual(len(df.index), 0) + assert len(df.index) == 0 df = DataFrame(data={}) - self.assertEqual(len(df.index), 0) + assert len(df.index) == 0 def test_constructor_mixed(self): index, data = tm.getMixedTypeDict() @@ -48,11 +48,11 @@ def test_constructor_mixed(self): indexed_frame = DataFrame(data, index=index) # noqa unindexed_frame = DataFrame(data) # noqa - self.assertEqual(self.mixed_frame['foo'].dtype, np.object_) + assert self.mixed_frame['foo'].dtype == np.object_ def test_constructor_cast_failure(self): foo = DataFrame({'a': ['a', 'b', 'c']}, dtype=np.float64) - self.assertEqual(foo['a'].dtype, object) + assert foo['a'].dtype == object # GH 3010, constructing with odd arrays df = DataFrame(np.ones((4, 2))) @@ -76,29 +76,28 @@ def test_constructor_dtype_copy(self): new_df = pd.DataFrame(orig_df, dtype=float, copy=True) new_df['col1'] = 200. - self.assertEqual(orig_df['col1'][0], 1.) + assert orig_df['col1'][0] == 1. 
def test_constructor_dtype_nocast_view(self): df = DataFrame([[1, 2]]) should_be_view = DataFrame(df, dtype=df[0].dtype) should_be_view[0][0] = 99 - self.assertEqual(df.values[0, 0], 99) + assert df.values[0, 0] == 99 should_be_view = DataFrame(df.values, dtype=df[0].dtype) should_be_view[0][0] = 97 - self.assertEqual(df.values[0, 0], 97) + assert df.values[0, 0] == 97 def test_constructor_dtype_list_data(self): df = DataFrame([[1, '2'], [None, 'a']], dtype=object) assert df.loc[1, 0] is None - self.assertEqual(df.loc[0, 1], '2') + assert df.loc[0, 1] == '2' def test_constructor_list_frames(self): - - # GH 3243 + # see gh-3243 result = DataFrame([DataFrame([])]) - self.assertEqual(result.shape, (1, 0)) + assert result.shape == (1, 0) result = DataFrame([DataFrame(dict(A=lrange(5)))]) assert isinstance(result.iloc[0, 0], DataFrame) @@ -149,8 +148,8 @@ def test_constructor_complex_dtypes(self): b = np.random.rand(10).astype(np.complex128) df = DataFrame({'a': a, 'b': b}) - self.assertEqual(a.dtype, df.a.dtype) - self.assertEqual(b.dtype, df.b.dtype) + assert a.dtype == df.a.dtype + assert b.dtype == df.b.dtype def test_constructor_rec(self): rec = self.frame.to_records(index=False) @@ -175,7 +174,7 @@ def test_constructor_rec(self): def test_constructor_bool(self): df = DataFrame({0: np.ones(10, dtype=bool), 1: np.zeros(10, dtype=bool)}) - self.assertEqual(df.values.dtype, np.bool_) + assert df.values.dtype == np.bool_ def test_constructor_overflow_int64(self): # see gh-14881 @@ -183,7 +182,7 @@ def test_constructor_overflow_int64(self): dtype=np.uint64) result = DataFrame({'a': values}) - self.assertEqual(result['a'].dtype, np.uint64) + assert result['a'].dtype == np.uint64 # see gh-2355 data_scores = [(6311132704823138710, 273), (2685045978526272070, 23), @@ -194,7 +193,7 @@ def test_constructor_overflow_int64(self): data = np.zeros((len(data_scores),), dtype=dtype) data[:] = data_scores df_crawls = DataFrame(data) - self.assertEqual(df_crawls['uid'].dtype, np.uint64) + assert df_crawls['uid'].dtype == np.uint64 def test_constructor_ordereddict(self): import random @@ -203,7 +202,7 @@ def test_constructor_ordereddict(self): random.shuffle(nums) expected = ['A%d' % i for i in nums] df = DataFrame(OrderedDict(zip(expected, [[0]] * nitems))) - self.assertEqual(expected, list(df.columns)) + assert expected == list(df.columns) def test_constructor_dict(self): frame = DataFrame({'col1': self.ts1, @@ -378,14 +377,14 @@ def test_constructor_dict_cast(self): 'B': {'1': '1', '2': '2', '3': '3'}, } frame = DataFrame(test_data, dtype=float) - self.assertEqual(len(frame), 3) - self.assertEqual(frame['B'].dtype, np.float64) - self.assertEqual(frame['A'].dtype, np.float64) + assert len(frame) == 3 + assert frame['B'].dtype == np.float64 + assert frame['A'].dtype == np.float64 frame = DataFrame(test_data) - self.assertEqual(len(frame), 3) - self.assertEqual(frame['B'].dtype, np.object_) - self.assertEqual(frame['A'].dtype, np.float64) + assert len(frame) == 3 + assert frame['B'].dtype == np.object_ + assert frame['A'].dtype == np.float64 # can't cast to float test_data = { @@ -393,9 +392,9 @@ def test_constructor_dict_cast(self): 'B': dict(zip(range(15), randn(15))) } frame = DataFrame(test_data, dtype=float) - self.assertEqual(len(frame), 20) - self.assertEqual(frame['A'].dtype, np.object_) - self.assertEqual(frame['B'].dtype, np.float64) + assert len(frame) == 20 + assert frame['A'].dtype == np.object_ + assert frame['B'].dtype == np.float64 def test_constructor_dict_dont_upcast(self): d = {'Col1': 
{'Row1': 'A String', 'Row2': np.nan}} @@ -494,14 +493,14 @@ def test_constructor_period(self): a = pd.PeriodIndex(['2012-01', 'NaT', '2012-04'], freq='M') b = pd.PeriodIndex(['2012-02-01', '2012-03-01', 'NaT'], freq='D') df = pd.DataFrame({'a': a, 'b': b}) - self.assertEqual(df['a'].dtype, 'object') - self.assertEqual(df['b'].dtype, 'object') + assert df['a'].dtype == 'object' + assert df['b'].dtype == 'object' # list of periods df = pd.DataFrame({'a': a.asobject.tolist(), 'b': b.asobject.tolist()}) - self.assertEqual(df['a'].dtype, 'object') - self.assertEqual(df['b'].dtype, 'object') + assert df['a'].dtype == 'object' + assert df['b'].dtype == 'object' def test_nested_dict_frame_constructor(self): rng = pd.period_range('1/1/2000', periods=5) @@ -530,18 +529,18 @@ def _check_basic_constructor(self, empty): # 2-D input frame = DataFrame(mat, columns=['A', 'B', 'C'], index=[1, 2]) - self.assertEqual(len(frame.index), 2) - self.assertEqual(len(frame.columns), 3) + assert len(frame.index) == 2 + assert len(frame.columns) == 3 # 1-D input frame = DataFrame(empty((3,)), columns=['A'], index=[1, 2, 3]) - self.assertEqual(len(frame.index), 3) - self.assertEqual(len(frame.columns), 1) + assert len(frame.index) == 3 + assert len(frame.columns) == 1 # cast type frame = DataFrame(mat, columns=['A', 'B', 'C'], index=[1, 2], dtype=np.int64) - self.assertEqual(frame.values.dtype, np.int64) + assert frame.values.dtype == np.int64 # wrong size axis labels msg = r'Shape of passed values is \(3, 2\), indices imply \(3, 1\)' @@ -569,16 +568,16 @@ def _check_basic_constructor(self, empty): # 0-length axis frame = DataFrame(empty((0, 3))) - self.assertEqual(len(frame.index), 0) + assert len(frame.index) == 0 frame = DataFrame(empty((3, 0))) - self.assertEqual(len(frame.columns), 0) + assert len(frame.columns) == 0 def test_constructor_ndarray(self): self._check_basic_constructor(np.ones) frame = DataFrame(['foo', 'bar'], index=[0, 1], columns=['A']) - self.assertEqual(len(frame), 2) + assert len(frame) == 2 def test_constructor_maskedarray(self): self._check_basic_constructor(ma.masked_all) @@ -588,8 +587,8 @@ def test_constructor_maskedarray(self): mat[0, 0] = 1.0 mat[1, 2] = 2.0 frame = DataFrame(mat, columns=['A', 'B', 'C'], index=[1, 2]) - self.assertEqual(1.0, frame['A'][1]) - self.assertEqual(2.0, frame['C'][2]) + assert 1.0 == frame['A'][1] + assert 2.0 == frame['C'][2] # what is this even checking?? 
mat = ma.masked_all((2, 3), dtype=float) @@ -602,66 +601,66 @@ def test_constructor_maskedarray_nonfloat(self): # 2-D input frame = DataFrame(mat, columns=['A', 'B', 'C'], index=[1, 2]) - self.assertEqual(len(frame.index), 2) - self.assertEqual(len(frame.columns), 3) + assert len(frame.index) == 2 + assert len(frame.columns) == 3 assert np.all(~np.asarray(frame == frame)) # cast type frame = DataFrame(mat, columns=['A', 'B', 'C'], index=[1, 2], dtype=np.float64) - self.assertEqual(frame.values.dtype, np.float64) + assert frame.values.dtype == np.float64 # Check non-masked values mat2 = ma.copy(mat) mat2[0, 0] = 1 mat2[1, 2] = 2 frame = DataFrame(mat2, columns=['A', 'B', 'C'], index=[1, 2]) - self.assertEqual(1, frame['A'][1]) - self.assertEqual(2, frame['C'][2]) + assert 1 == frame['A'][1] + assert 2 == frame['C'][2] # masked np.datetime64 stays (use lib.NaT as null) mat = ma.masked_all((2, 3), dtype='M8[ns]') # 2-D input frame = DataFrame(mat, columns=['A', 'B', 'C'], index=[1, 2]) - self.assertEqual(len(frame.index), 2) - self.assertEqual(len(frame.columns), 3) + assert len(frame.index) == 2 + assert len(frame.columns) == 3 assert isnull(frame).values.all() # cast type frame = DataFrame(mat, columns=['A', 'B', 'C'], index=[1, 2], dtype=np.int64) - self.assertEqual(frame.values.dtype, np.int64) + assert frame.values.dtype == np.int64 # Check non-masked values mat2 = ma.copy(mat) mat2[0, 0] = 1 mat2[1, 2] = 2 frame = DataFrame(mat2, columns=['A', 'B', 'C'], index=[1, 2]) - self.assertEqual(1, frame['A'].view('i8')[1]) - self.assertEqual(2, frame['C'].view('i8')[2]) + assert 1 == frame['A'].view('i8')[1] + assert 2 == frame['C'].view('i8')[2] # masked bool promoted to object mat = ma.masked_all((2, 3), dtype=bool) # 2-D input frame = DataFrame(mat, columns=['A', 'B', 'C'], index=[1, 2]) - self.assertEqual(len(frame.index), 2) - self.assertEqual(len(frame.columns), 3) + assert len(frame.index) == 2 + assert len(frame.columns) == 3 assert np.all(~np.asarray(frame == frame)) # cast type frame = DataFrame(mat, columns=['A', 'B', 'C'], index=[1, 2], dtype=object) - self.assertEqual(frame.values.dtype, object) + assert frame.values.dtype == object # Check non-masked values mat2 = ma.copy(mat) mat2[0, 0] = True mat2[1, 2] = False frame = DataFrame(mat2, columns=['A', 'B', 'C'], index=[1, 2]) - self.assertEqual(True, frame['A'][1]) - self.assertEqual(False, frame['C'][2]) + assert frame['A'][1] is True + assert frame['C'][2] is False def test_constructor_mrecarray(self): # Ensure mrecarray produces frame identical to dict of masked arrays @@ -708,34 +707,34 @@ def test_constructor_mrecarray(self): def test_constructor_corner(self): df = DataFrame(index=[]) - self.assertEqual(df.values.shape, (0, 0)) + assert df.values.shape == (0, 0) # empty but with specified dtype df = DataFrame(index=lrange(10), columns=['a', 'b'], dtype=object) - self.assertEqual(df.values.dtype, np.object_) + assert df.values.dtype == np.object_ # does not error but ends up float df = DataFrame(index=lrange(10), columns=['a', 'b'], dtype=int) - self.assertEqual(df.values.dtype, np.object_) + assert df.values.dtype == np.object_ # #1783 empty dtype object df = DataFrame({}, columns=['foo', 'bar']) - self.assertEqual(df.values.dtype, np.object_) + assert df.values.dtype == np.object_ df = DataFrame({'b': 1}, index=lrange(10), columns=list('abc'), dtype=int) - self.assertEqual(df.values.dtype, np.object_) + assert df.values.dtype == np.object_ def test_constructor_scalar_inference(self): data = {'int': 1, 'bool': True, 'float': 
3., 'complex': 4j, 'object': 'foo'} df = DataFrame(data, index=np.arange(10)) - self.assertEqual(df['int'].dtype, np.int64) - self.assertEqual(df['bool'].dtype, np.bool_) - self.assertEqual(df['float'].dtype, np.float64) - self.assertEqual(df['complex'].dtype, np.complex128) - self.assertEqual(df['object'].dtype, np.object_) + assert df['int'].dtype == np.int64 + assert df['bool'].dtype == np.bool_ + assert df['float'].dtype == np.float64 + assert df['complex'].dtype == np.complex128 + assert df['object'].dtype == np.object_ def test_constructor_arrays_and_scalars(self): df = DataFrame({'a': randn(10), 'b': True}) @@ -750,28 +749,28 @@ def test_constructor_DataFrame(self): tm.assert_frame_equal(df, self.frame) df_casted = DataFrame(self.frame, dtype=np.int64) - self.assertEqual(df_casted.values.dtype, np.int64) + assert df_casted.values.dtype == np.int64 def test_constructor_more(self): # used to be in test_matrix.py arr = randn(10) dm = DataFrame(arr, columns=['A'], index=np.arange(10)) - self.assertEqual(dm.values.ndim, 2) + assert dm.values.ndim == 2 arr = randn(0) dm = DataFrame(arr) - self.assertEqual(dm.values.ndim, 2) - self.assertEqual(dm.values.ndim, 2) + assert dm.values.ndim == 2 + assert dm.values.ndim == 2 # no data specified dm = DataFrame(columns=['A', 'B'], index=np.arange(10)) - self.assertEqual(dm.values.shape, (10, 2)) + assert dm.values.shape == (10, 2) dm = DataFrame(columns=['A', 'B']) - self.assertEqual(dm.values.shape, (0, 2)) + assert dm.values.shape == (0, 2) dm = DataFrame(index=np.arange(10)) - self.assertEqual(dm.values.shape, (10, 0)) + assert dm.values.shape == (10, 0) # corner, silly # TODO: Fix this Exception to be better... @@ -792,8 +791,8 @@ def test_constructor_more(self): 'B': np.ones(10, dtype=np.float64)}, index=np.arange(10)) - self.assertEqual(len(dm.columns), 2) - self.assertEqual(dm.values.dtype, np.float64) + assert len(dm.columns) == 2 + assert dm.values.dtype == np.float64 def test_constructor_empty_list(self): df = DataFrame([], index=[]) @@ -818,7 +817,7 @@ def test_constructor_list_of_lists(self): l = [[1, 'a'], [2, 'b']] df = DataFrame(data=l, columns=["num", "str"]) assert is_integer_dtype(df['num']) - self.assertEqual(df['str'].dtype, np.object_) + assert df['str'].dtype == np.object_ # GH 4851 # list of 0-dim ndarrays @@ -1075,7 +1074,7 @@ def test_constructor_orient(self): def test_constructor_Series_named(self): a = Series([1, 2, 3], index=['a', 'b', 'c'], name='x') df = DataFrame(a) - self.assertEqual(df.columns[0], 'x') + assert df.columns[0] == 'x' tm.assert_index_equal(df.index, a.index) # ndarray like @@ -1095,7 +1094,7 @@ def test_constructor_Series_named(self): # #2234 a = Series([], name='x') df = DataFrame(a) - self.assertEqual(df.columns[0], 'x') + assert df.columns[0] == 'x' # series with name and w/o s1 = Series(arr, name='x') @@ -1120,12 +1119,12 @@ def test_constructor_Series_differently_indexed(self): df1 = DataFrame(s1, index=other_index) exp1 = DataFrame(s1.reindex(other_index)) - self.assertEqual(df1.columns[0], 'x') + assert df1.columns[0] == 'x' tm.assert_frame_equal(df1, exp1) df2 = DataFrame(s2, index=other_index) exp2 = DataFrame(s2.reindex(other_index)) - self.assertEqual(df2.columns[0], 0) + assert df2.columns[0] == 0 tm.assert_index_equal(df2.index, other_index) tm.assert_frame_equal(df2, exp2) @@ -1156,7 +1155,7 @@ def test_constructor_from_items(self): columns=self.mixed_frame.columns, orient='index') tm.assert_frame_equal(recons, self.mixed_frame) - self.assertEqual(recons['A'].dtype, np.float64) + assert 
recons['A'].dtype == np.float64 with tm.assert_raises_regex(TypeError, "Must pass columns with " @@ -1305,7 +1304,7 @@ def test_constructor_with_datetimes(self): ind = date_range(start="2000-01-01", freq="D", periods=10) datetimes = [ts.to_pydatetime() for ts in ind] datetime_s = Series(datetimes) - self.assertEqual(datetime_s.dtype, 'M8[ns]') + assert datetime_s.dtype == 'M8[ns]' df = DataFrame({'datetime_s': datetime_s}) result = df.get_dtype_counts() expected = Series({datetime64name: 1}) @@ -1331,12 +1330,12 @@ def test_constructor_with_datetimes(self): dt = tz.localize(datetime(2012, 1, 1)) df = DataFrame({'End Date': dt}, index=[0]) - self.assertEqual(df.iat[0, 0], dt) + assert df.iat[0, 0] == dt tm.assert_series_equal(df.dtypes, Series( {'End Date': 'datetime64[ns, US/Eastern]'})) df = DataFrame([{'End Date': dt}]) - self.assertEqual(df.iat[0, 0], dt) + assert df.iat[0, 0] == dt tm.assert_series_equal(df.dtypes, Series( {'End Date': 'datetime64[ns, US/Eastern]'})) @@ -1511,7 +1510,7 @@ def f(): def test_constructor_lists_to_object_dtype(self): # from #1074 d = DataFrame({'a': [np.nan, False]}) - self.assertEqual(d['a'].dtype, np.object_) + assert d['a'].dtype == np.object_ assert not d['a'][1] def test_from_records_to_records(self): @@ -1616,7 +1615,7 @@ def test_from_records_columns_not_modified(self): df = DataFrame.from_records(tuples, columns=columns, index='a') # noqa - self.assertEqual(columns, original_columns) + assert columns == original_columns def test_from_records_decimal(self): from decimal import Decimal @@ -1624,10 +1623,10 @@ def test_from_records_decimal(self): tuples = [(Decimal('1.5'),), (Decimal('2.5'),), (None,)] df = DataFrame.from_records(tuples, columns=['a']) - self.assertEqual(df['a'].dtype, object) + assert df['a'].dtype == object df = DataFrame.from_records(tuples, columns=['a'], coerce_float=True) - self.assertEqual(df['a'].dtype, np.float64) + assert df['a'].dtype == np.float64 assert np.isnan(df['a'].values[-1]) def test_from_records_duplicates(self): @@ -1648,12 +1647,12 @@ def create_dict(order_id): documents.append({'order_id': 10, 'quantity': 5}) result = DataFrame.from_records(documents, index='order_id') - self.assertEqual(result.index.name, 'order_id') + assert result.index.name == 'order_id' # MultiIndex result = DataFrame.from_records(documents, index=['order_id', 'quantity']) - self.assertEqual(result.index.names, ('order_id', 'quantity')) + assert result.index.names == ('order_id', 'quantity') def test_from_records_misc_brokenness(self): # #2179 @@ -1702,13 +1701,13 @@ def test_from_records_empty_with_nonempty_fields_gh3682(self): a = np.array([(1, 2)], dtype=[('id', np.int64), ('value', np.int64)]) df = DataFrame.from_records(a, index='id') tm.assert_index_equal(df.index, Index([1], name='id')) - self.assertEqual(df.index.name, 'id') + assert df.index.name == 'id' tm.assert_index_equal(df.columns, Index(['value'])) b = np.array([], dtype=[('id', np.int64), ('value', np.int64)]) df = DataFrame.from_records(b, index='id') tm.assert_index_equal(df.index, Index([], name='id')) - self.assertEqual(df.index.name, 'id') + assert df.index.name == 'id' def test_from_records_with_datetimes(self): @@ -1804,13 +1803,13 @@ def test_from_records_sequencelike(self): # empty case result = DataFrame.from_records([], columns=['foo', 'bar', 'baz']) - self.assertEqual(len(result), 0) + assert len(result) == 0 tm.assert_index_equal(result.columns, pd.Index(['foo', 'bar', 'baz'])) result = DataFrame.from_records([]) - self.assertEqual(len(result), 0) - 
self.assertEqual(len(result.columns), 0) + assert len(result) == 0 + assert len(result.columns) == 0 def test_from_records_dictlike(self): @@ -1891,8 +1890,8 @@ def test_from_records_len0_with_columns(self): columns=['foo', 'bar']) assert np.array_equal(result.columns, ['bar']) - self.assertEqual(len(result), 0) - self.assertEqual(result.index.name, 'foo') + assert len(result) == 0 + assert result.index.name == 'foo' def test_to_frame_with_falsey_names(self): # GH 16114 diff --git a/pandas/tests/frame/test_convert_to.py b/pandas/tests/frame/test_convert_to.py index d3a675e3dc1a3..353b4b873332e 100644 --- a/pandas/tests/frame/test_convert_to.py +++ b/pandas/tests/frame/test_convert_to.py @@ -22,19 +22,19 @@ def test_to_dict(self): for k, v in compat.iteritems(test_data): for k2, v2 in compat.iteritems(v): - self.assertEqual(v2, recons_data[k][k2]) + assert v2 == recons_data[k][k2] recons_data = DataFrame(test_data).to_dict("l") for k, v in compat.iteritems(test_data): for k2, v2 in compat.iteritems(v): - self.assertEqual(v2, recons_data[k][int(k2) - 1]) + assert v2 == recons_data[k][int(k2) - 1] recons_data = DataFrame(test_data).to_dict("s") for k, v in compat.iteritems(test_data): for k2, v2 in compat.iteritems(v): - self.assertEqual(v2, recons_data[k][k2]) + assert v2 == recons_data[k][k2] recons_data = DataFrame(test_data).to_dict("sp") expected_split = {'columns': ['A', 'B'], 'index': ['1', '2', '3'], @@ -46,7 +46,7 @@ def test_to_dict(self): {'A': 2.0, 'B': '2'}, {'A': np.nan, 'B': '3'}] assert isinstance(recons_data, list) - self.assertEqual(len(recons_data), 3) + assert len(recons_data) == 3 for l, r in zip(recons_data, expected_records): tm.assert_dict_equal(l, r) @@ -55,7 +55,7 @@ def test_to_dict(self): for k, v in compat.iteritems(test_data): for k2, v2 in compat.iteritems(v): - self.assertEqual(v2, recons_data[k2][k]) + assert v2 == recons_data[k2][k] def test_to_dict_timestamp(self): @@ -72,10 +72,10 @@ def test_to_dict_timestamp(self): expected_records_mixed = [{'A': tsmp, 'B': 1}, {'A': tsmp, 'B': 2}] - self.assertEqual(test_data.to_dict(orient='records'), - expected_records) - self.assertEqual(test_data_mixed.to_dict(orient='records'), - expected_records_mixed) + assert (test_data.to_dict(orient='records') == + expected_records) + assert (test_data_mixed.to_dict(orient='records') == + expected_records_mixed) expected_series = { 'A': Series([tsmp, tsmp], name='A'), @@ -117,10 +117,10 @@ def test_to_records_dt64(self): df = DataFrame([["one", "two", "three"], ["four", "five", "six"]], index=date_range("2012-01-01", "2012-01-02")) - self.assertEqual(df.to_records()['index'][0], df.index[0]) + assert df.to_records()['index'][0] == df.index[0] rs = df.to_records(convert_datetime64=False) - self.assertEqual(rs['index'][0], df.index.values[0]) + assert rs['index'][0] == df.index.values[0] def test_to_records_with_multindex(self): # GH3189 diff --git a/pandas/tests/frame/test_dtypes.py b/pandas/tests/frame/test_dtypes.py index 427834b3dbf38..2d39db16dbd8d 100644 --- a/pandas/tests/frame/test_dtypes.py +++ b/pandas/tests/frame/test_dtypes.py @@ -28,14 +28,14 @@ def test_concat_empty_dataframe_dtypes(self): df['c'] = df['c'].astype(np.float64) result = pd.concat([df, df]) - self.assertEqual(result['a'].dtype, np.bool_) - self.assertEqual(result['b'].dtype, np.int32) - self.assertEqual(result['c'].dtype, np.float64) + assert result['a'].dtype == np.bool_ + assert result['b'].dtype == np.int32 + assert result['c'].dtype == np.float64 result = pd.concat([df, df.astype(np.float64)]) - 
self.assertEqual(result['a'].dtype, np.object_) - self.assertEqual(result['b'].dtype, np.float64) - self.assertEqual(result['c'].dtype, np.float64) + assert result['a'].dtype == np.object_ + assert result['b'].dtype == np.float64 + assert result['c'].dtype == np.float64 def test_empty_frame_dtypes_ftypes(self): empty_df = pd.DataFrame() @@ -326,9 +326,8 @@ def test_astype(self): # mixed casting def _check_cast(df, v): - self.assertEqual( - list(set([s.dtype.name - for _, s in compat.iteritems(df)]))[0], v) + assert (list(set([s.dtype.name for + _, s in compat.iteritems(df)]))[0] == v) mn = self.all_mixed._get_numeric_data().copy() mn['little_float'] = np.array(12345., dtype='float16') diff --git a/pandas/tests/frame/test_indexing.py b/pandas/tests/frame/test_indexing.py index 8f6128ad4e525..cd1529d04c991 100644 --- a/pandas/tests/frame/test_indexing.py +++ b/pandas/tests/frame/test_indexing.py @@ -113,7 +113,7 @@ def test_getitem_list(self): assert_frame_equal(result, expected) assert_frame_equal(result2, expected) - self.assertEqual(result.columns.name, 'foo') + assert result.columns.name == 'foo' with tm.assert_raises_regex(KeyError, 'not in index'): self.frame[['B', 'A', 'food']] @@ -128,7 +128,7 @@ def test_getitem_list(self): result = df[[('foo', 'bar'), ('baz', 'qux')]] expected = df.iloc[:, :2] assert_frame_equal(result, expected) - self.assertEqual(result.columns.names, ['sth', 'sth2']) + assert result.columns.names == ['sth', 'sth2'] def test_getitem_callable(self): # GH 12533 @@ -282,7 +282,7 @@ def test_getitem_boolean(self): assert_frame_equal(bif, bifw, check_dtype=False) for c in df.columns: if bif[c].dtype != bifw[c].dtype: - self.assertEqual(bif[c].dtype, df[c].dtype) + assert bif[c].dtype == df[c].dtype def test_getitem_boolean_casting(self): @@ -404,8 +404,8 @@ def test_getitem_setitem_ix_negative_integers(self): with catch_warnings(record=True): assert_series_equal(a.ix[-1], a.ix[-2], check_names=False) - self.assertEqual(a.ix[-1].name, 'T') - self.assertEqual(a.ix[-2].name, 'S') + assert a.ix[-1].name == 'T' + assert a.ix[-2].name == 'S' def test_getattr(self): assert_series_equal(self.frame.A, self.frame['A']) @@ -424,8 +424,8 @@ def test_setitem(self): self.frame['col5'] = series assert 'col5' in self.frame - self.assertEqual(len(series), 15) - self.assertEqual(len(self.frame), 30) + assert len(series) == 15 + assert len(self.frame) == 30 exp = np.ravel(np.column_stack((series.values, [np.nan] * 15))) exp = Series(exp, index=self.frame.index, name='col5') @@ -459,13 +459,13 @@ def test_setitem(self): def f(): smaller['col10'] = ['1', '2'] pytest.raises(com.SettingWithCopyError, f) - self.assertEqual(smaller['col10'].dtype, np.object_) + assert smaller['col10'].dtype == np.object_ assert (smaller['col10'] == ['1', '2']).all() # with a dtype for dtype in ['int32', 'int64', 'float32', 'float64']: self.frame[dtype] = np.array(arr, dtype=dtype) - self.assertEqual(self.frame[dtype].dtype.name, dtype) + assert self.frame[dtype].dtype.name == dtype # dtype changing GH4204 df = DataFrame([[0, 0]]) @@ -542,13 +542,13 @@ def test_setitem_boolean(self): def test_setitem_cast(self): self.frame['D'] = self.frame['D'].astype('i8') - self.assertEqual(self.frame['D'].dtype, np.int64) + assert self.frame['D'].dtype == np.int64 # #669, should not cast? 
# this is now set to int64, which means a replacement of the column to # the value dtype (and nothing to do with the existing dtype) self.frame['B'] = 0 - self.assertEqual(self.frame['B'].dtype, np.int64) + assert self.frame['B'].dtype == np.int64 # cast if pass array of course self.frame['B'] = np.arange(len(self.frame)) @@ -556,18 +556,18 @@ def test_setitem_cast(self): self.frame['foo'] = 'bar' self.frame['foo'] = 0 - self.assertEqual(self.frame['foo'].dtype, np.int64) + assert self.frame['foo'].dtype == np.int64 self.frame['foo'] = 'bar' self.frame['foo'] = 2.5 - self.assertEqual(self.frame['foo'].dtype, np.float64) + assert self.frame['foo'].dtype == np.float64 self.frame['something'] = 0 - self.assertEqual(self.frame['something'].dtype, np.int64) + assert self.frame['something'].dtype == np.int64 self.frame['something'] = 2 - self.assertEqual(self.frame['something'].dtype, np.int64) + assert self.frame['something'].dtype == np.int64 self.frame['something'] = 2.5 - self.assertEqual(self.frame['something'].dtype, np.float64) + assert self.frame['something'].dtype == np.float64 # GH 7704 # dtype conversion on setting @@ -581,9 +581,9 @@ def test_setitem_cast(self): # Test that data type is preserved . #5782 df = DataFrame({'one': np.arange(6, dtype=np.int8)}) df.loc[1, 'one'] = 6 - self.assertEqual(df.dtypes.one, np.dtype(np.int8)) + assert df.dtypes.one == np.dtype(np.int8) df.one = np.int8(7) - self.assertEqual(df.dtypes.one, np.dtype(np.int8)) + assert df.dtypes.one == np.dtype(np.int8) def test_setitem_boolean_column(self): expected = self.frame.copy() @@ -602,7 +602,7 @@ def test_setitem_corner(self): del df['B'] df['B'] = [1., 2., 3.] assert 'B' in df - self.assertEqual(len(df.columns), 2) + assert len(df.columns) == 2 df['A'] = 'beginning' df['E'] = 'foo' @@ -614,29 +614,29 @@ def test_setitem_corner(self): dm = DataFrame(index=self.frame.index) dm['A'] = 'foo' dm['B'] = 'bar' - self.assertEqual(len(dm.columns), 2) - self.assertEqual(dm.values.dtype, np.object_) + assert len(dm.columns) == 2 + assert dm.values.dtype == np.object_ # upcast dm['C'] = 1 - self.assertEqual(dm['C'].dtype, np.int64) + assert dm['C'].dtype == np.int64 dm['E'] = 1. 
- self.assertEqual(dm['E'].dtype, np.float64) + assert dm['E'].dtype == np.float64 # set existing column dm['A'] = 'bar' - self.assertEqual('bar', dm['A'][0]) + assert 'bar' == dm['A'][0] dm = DataFrame(index=np.arange(3)) dm['A'] = 1 dm['foo'] = 'bar' del dm['foo'] dm['foo'] = 'bar' - self.assertEqual(dm['foo'].dtype, np.object_) + assert dm['foo'].dtype == np.object_ dm['coercable'] = ['1', '2', '3'] - self.assertEqual(dm['coercable'].dtype, np.object_) + assert dm['coercable'].dtype == np.object_ def test_setitem_corner2(self): data = {"title": ['foobar', 'bar', 'foobar'] + ['foobar'] * 17, @@ -648,8 +648,8 @@ def test_setitem_corner2(self): df.loc[ix, ['title']] = 'foobar' df.loc[ix, ['cruft']] = 0 - self.assertEqual(df.loc[1, 'title'], 'foobar') - self.assertEqual(df.loc[1, 'cruft'], 0) + assert df.loc[1, 'title'] == 'foobar' + assert df.loc[1, 'cruft'] == 0 def test_setitem_ambig(self): # Difficulties with mixed-type data @@ -731,10 +731,10 @@ def test_getitem_empty_frame_with_boolean(self): def test_delitem_corner(self): f = self.frame.copy() del f['D'] - self.assertEqual(len(f.columns), 3) + assert len(f.columns) == 3 pytest.raises(KeyError, f.__delitem__, 'D') del f['B'] - self.assertEqual(len(f.columns), 2) + assert len(f.columns) == 2 def test_getitem_fancy_2d(self): f = self.frame @@ -781,13 +781,13 @@ def test_slice_floats(self): df = DataFrame(np.random.rand(3, 2), index=index) s1 = df.loc[52195.1:52196.5] - self.assertEqual(len(s1), 2) + assert len(s1) == 2 s1 = df.loc[52195.1:52196.6] - self.assertEqual(len(s1), 2) + assert len(s1) == 2 s1 = df.loc[52195.1:52198.9] - self.assertEqual(len(s1), 3) + assert len(s1) == 3 def test_getitem_fancy_slice_integers_step(self): df = DataFrame(np.random.randn(10, 5)) @@ -930,7 +930,7 @@ def test_setitem_fancy_2d(self): def test_fancy_getitem_slice_mixed(self): sliced = self.mixed_frame.iloc[:, -3:] - self.assertEqual(sliced['D'].dtype, np.float64) + assert sliced['D'].dtype == np.float64 # get view with single block # setting it triggers setting with copy @@ -1282,7 +1282,7 @@ def test_getitem_fancy_scalar(self): for col in f.columns: ts = f[col] for idx in f.index[::5]: - self.assertEqual(ix[idx, col], ts[idx]) + assert ix[idx, col] == ts[idx] def test_setitem_fancy_scalar(self): f = self.frame @@ -1394,17 +1394,17 @@ def test_getitem_setitem_float_labels(self): result = df.loc[1.5:4] expected = df.reindex([1.5, 2, 3, 4]) assert_frame_equal(result, expected) - self.assertEqual(len(result), 4) + assert len(result) == 4 result = df.loc[4:5] expected = df.reindex([4, 5]) # reindex with int assert_frame_equal(result, expected, check_index_type=False) - self.assertEqual(len(result), 2) + assert len(result) == 2 result = df.loc[4:5] expected = df.reindex([4.0, 5.0]) # reindex with float assert_frame_equal(result, expected) - self.assertEqual(len(result), 2) + assert len(result) == 2 # loc_float changes this to work properly result = df.loc[1:2] @@ -1425,7 +1425,7 @@ def test_getitem_setitem_float_labels(self): result = df.iloc[4:5] expected = df.reindex([5.0]) assert_frame_equal(result, expected) - self.assertEqual(len(result), 1) + assert len(result) == 1 cp = df.copy() @@ -1449,22 +1449,22 @@ def f(): result = df.loc[1.0:5] expected = df assert_frame_equal(result, expected) - self.assertEqual(len(result), 5) + assert len(result) == 5 result = df.loc[1.1:5] expected = df.reindex([2.5, 3.5, 4.5, 5.0]) assert_frame_equal(result, expected) - self.assertEqual(len(result), 4) + assert len(result) == 4 result = df.loc[4.51:5] expected = 
df.reindex([5.0]) assert_frame_equal(result, expected) - self.assertEqual(len(result), 1) + assert len(result) == 1 result = df.loc[1.0:5.0] expected = df.reindex([1.0, 2.5, 3.5, 4.5, 5.0]) assert_frame_equal(result, expected) - self.assertEqual(len(result), 5) + assert len(result) == 5 cp = df.copy() cp.loc[1.0:5.0] = 0 @@ -1621,7 +1621,7 @@ def test_getitem_list_duplicates(self): df.columns.name = 'foo' result = df[['B', 'C']] - self.assertEqual(result.columns.name, 'foo') + assert result.columns.name == 'foo' expected = df.iloc[:, 2:] assert_frame_equal(result, expected) @@ -1631,7 +1631,7 @@ def test_get_value(self): for col in self.frame.columns: result = self.frame.get_value(idx, col) expected = self.frame[col][idx] - self.assertEqual(result, expected) + assert result == expected def test_lookup(self): def alt(df, rows, cols, dtype): @@ -1657,7 +1657,7 @@ def testit(df): df['mask'] = df.lookup(df.index, 'mask_' + df['label']) exp_mask = alt(df, df.index, 'mask_' + df['label'], dtype=np.bool_) tm.assert_series_equal(df['mask'], pd.Series(exp_mask, name='mask')) - self.assertEqual(df['mask'].dtype, np.bool_) + assert df['mask'].dtype == np.bool_ with pytest.raises(KeyError): self.frame.lookup(['xyz'], ['A']) @@ -1672,25 +1672,25 @@ def test_set_value(self): for idx in self.frame.index: for col in self.frame.columns: self.frame.set_value(idx, col, 1) - self.assertEqual(self.frame[col][idx], 1) + assert self.frame[col][idx] == 1 def test_set_value_resize(self): res = self.frame.set_value('foobar', 'B', 0) assert res is self.frame - self.assertEqual(res.index[-1], 'foobar') - self.assertEqual(res.get_value('foobar', 'B'), 0) + assert res.index[-1] == 'foobar' + assert res.get_value('foobar', 'B') == 0 self.frame.loc['foobar', 'qux'] = 0 - self.assertEqual(self.frame.get_value('foobar', 'qux'), 0) + assert self.frame.get_value('foobar', 'qux') == 0 res = self.frame.copy() res3 = res.set_value('foobar', 'baz', 'sam') - self.assertEqual(res3['baz'].dtype, np.object_) + assert res3['baz'].dtype == np.object_ res = self.frame.copy() res3 = res.set_value('foobar', 'baz', True) - self.assertEqual(res3['baz'].dtype, np.object_) + assert res3['baz'].dtype == np.object_ res = self.frame.copy() res3 = res.set_value('foobar', 'baz', 5) @@ -1705,24 +1705,24 @@ def test_set_value_with_index_dtype_change(self): # so column is not created df = df_orig.copy() df.set_value('C', 2, 1.0) - self.assertEqual(list(df.index), list(df_orig.index) + ['C']) - # self.assertEqual(list(df.columns), list(df_orig.columns) + [2]) + assert list(df.index) == list(df_orig.index) + ['C'] + # assert list(df.columns) == list(df_orig.columns) + [2] df = df_orig.copy() df.loc['C', 2] = 1.0 - self.assertEqual(list(df.index), list(df_orig.index) + ['C']) - # self.assertEqual(list(df.columns), list(df_orig.columns) + [2]) + assert list(df.index) == list(df_orig.index) + ['C'] + # assert list(df.columns) == list(df_orig.columns) + [2] # create both new df = df_orig.copy() df.set_value('C', 'D', 1.0) - self.assertEqual(list(df.index), list(df_orig.index) + ['C']) - self.assertEqual(list(df.columns), list(df_orig.columns) + ['D']) + assert list(df.index) == list(df_orig.index) + ['C'] + assert list(df.columns) == list(df_orig.columns) + ['D'] df = df_orig.copy() df.loc['C', 'D'] = 1.0 - self.assertEqual(list(df.index), list(df_orig.index) + ['C']) - self.assertEqual(list(df.columns), list(df_orig.columns) + ['D']) + assert list(df.index) == list(df_orig.index) + ['C'] + assert list(df.columns) == list(df_orig.columns) + ['D'] def 
test_get_set_value_no_partial_indexing(self): # partial w/ MultiIndex raise exception @@ -1874,7 +1874,7 @@ def test_iat(self): for j, col in enumerate(self.frame.columns): result = self.frame.iat[i, j] expected = self.frame.at[row, col] - self.assertEqual(result, expected) + assert result == expected def test_nested_exception(self): # Ignore the strange way of triggering the problem @@ -1941,7 +1941,7 @@ def test_reindex_frame_add_nat(self): def test_set_dataframe_column_ns_dtype(self): x = DataFrame([datetime.now(), datetime.now()]) - self.assertEqual(x[0].dtype, np.dtype('M8[ns]')) + assert x[0].dtype == np.dtype('M8[ns]') def test_non_monotonic_reindex_methods(self): dr = pd.date_range('2013-08-01', periods=6, freq='B') @@ -2095,13 +2095,13 @@ def test_setitem_with_unaligned_tz_aware_datetime_column(self): assert_series_equal(df['dates'], column) def test_setitem_datetime_coercion(self): - # GH 1048 + # gh-1048 df = pd.DataFrame({'c': [pd.Timestamp('2010-10-01')] * 3}) df.loc[0:1, 'c'] = np.datetime64('2008-08-08') - self.assertEqual(pd.Timestamp('2008-08-08'), df.loc[0, 'c']) - self.assertEqual(pd.Timestamp('2008-08-08'), df.loc[1, 'c']) + assert pd.Timestamp('2008-08-08') == df.loc[0, 'c'] + assert pd.Timestamp('2008-08-08') == df.loc[1, 'c'] df.loc[2, 'c'] = date(2005, 5, 5) - self.assertEqual(pd.Timestamp('2005-05-05'), df.loc[2, 'c']) + assert pd.Timestamp('2005-05-05') == df.loc[2, 'c'] def test_setitem_datetimelike_with_inference(self): # GH 7592 @@ -2139,14 +2139,14 @@ def test_at_time_between_time_datetimeindex(self): expected2 = df.iloc[ainds] assert_frame_equal(result, expected) assert_frame_equal(result, expected2) - self.assertEqual(len(result), 4) + assert len(result) == 4 result = df.between_time(bkey.start, bkey.stop) expected = df.loc[bkey] expected2 = df.iloc[binds] assert_frame_equal(result, expected) assert_frame_equal(result, expected2) - self.assertEqual(len(result), 12) + assert len(result) == 12 result = df.copy() result.loc[akey] = 0 @@ -2179,7 +2179,7 @@ def test_xs(self): if np.isnan(value): assert np.isnan(self.frame[item][idx]) else: - self.assertEqual(value, self.frame[item][idx]) + assert value == self.frame[item][idx] # mixed-type xs test_data = { @@ -2188,9 +2188,9 @@ def test_xs(self): } frame = DataFrame(test_data) xs = frame.xs('1') - self.assertEqual(xs.dtype, np.object_) - self.assertEqual(xs['A'], 1) - self.assertEqual(xs['B'], '1') + assert xs.dtype == np.object_ + assert xs['A'] == 1 + assert xs['B'] == '1' with pytest.raises(KeyError): self.tsframe.xs(self.tsframe.index[0] - BDay()) @@ -2266,10 +2266,10 @@ def test_index_namedtuple(self): with catch_warnings(record=True): result = df.ix[IndexType("foo", "bar")]["A"] - self.assertEqual(result, 1) + assert result == 1 result = df.loc[IndexType("foo", "bar")]["A"] - self.assertEqual(result, 1) + assert result == 1 def test_boolean_indexing(self): idx = lrange(3) @@ -2442,7 +2442,7 @@ def _check_set(df, cond, check_dtypes=True): for k, v in compat.iteritems(df.dtypes): if issubclass(v.type, np.integer) and not cond[k].all(): v = np.dtype('float64') - self.assertEqual(dfi[k].dtype, v) + assert dfi[k].dtype == v for df in [default_frame, self.mixed_frame, self.mixed_float, self.mixed_int]: @@ -3011,7 +3011,7 @@ def test_set_reset(self): # set/reset df = DataFrame({'A': [0, 1, 2]}, index=idx) result = df.reset_index() - self.assertEqual(result['foo'].dtype, np.dtype('uint64')) + assert result['foo'].dtype == np.dtype('uint64') df = result.set_index('foo') tm.assert_index_equal(df.index, idx) diff 
--git a/pandas/tests/frame/test_missing.py b/pandas/tests/frame/test_missing.py index 17f12679ae92e..ffba141ddc15d 100644 --- a/pandas/tests/frame/test_missing.py +++ b/pandas/tests/frame/test_missing.py @@ -493,7 +493,7 @@ def test_fillna_col_reordering(self): data = np.random.rand(20, 5) df = DataFrame(index=lrange(20), columns=cols, data=data) filled = df.fillna(method='ffill') - self.assertEqual(df.columns.tolist(), filled.columns.tolist()) + assert df.columns.tolist() == filled.columns.tolist() def test_fill_corner(self): mf = self.mixed_frame diff --git a/pandas/tests/frame/test_mutate_columns.py b/pandas/tests/frame/test_mutate_columns.py index fbd1b7be3e431..ac76970aaa901 100644 --- a/pandas/tests/frame/test_mutate_columns.py +++ b/pandas/tests/frame/test_mutate_columns.py @@ -150,7 +150,7 @@ def test_insert(self): df.columns.name = 'some_name' # preserve columns name field df.insert(0, 'baz', df['c']) - self.assertEqual(df.columns.name, 'some_name') + assert df.columns.name == 'some_name' # GH 13522 df = DataFrame(index=['A', 'B', 'C']) @@ -197,7 +197,7 @@ def test_pop(self): self.frame['foo'] = 'bar' self.frame.pop('foo') assert 'foo' not in self.frame - # TODO self.assertEqual(self.frame.columns.name, 'baz') + # TODO assert self.frame.columns.name == 'baz' # gh-10912: inplace ops cause caching issue a = DataFrame([[1, 2, 3], [4, 5, 6]], columns=[ @@ -219,12 +219,12 @@ def test_pop_non_unique_cols(self): df.columns = ["a", "b", "a"] res = df.pop("a") - self.assertEqual(type(res), DataFrame) - self.assertEqual(len(res), 2) - self.assertEqual(len(df.columns), 1) + assert type(res) == DataFrame + assert len(res) == 2 + assert len(df.columns) == 1 assert "b" in df.columns assert "a" not in df.columns - self.assertEqual(len(df.index), 2) + assert len(df.index) == 2 def test_insert_column_bug_4032(self): diff --git a/pandas/tests/frame/test_nonunique_indexes.py b/pandas/tests/frame/test_nonunique_indexes.py index 61dd92fcd1fab..4bc0176b570e3 100644 --- a/pandas/tests/frame/test_nonunique_indexes.py +++ b/pandas/tests/frame/test_nonunique_indexes.py @@ -425,8 +425,8 @@ def test_columns_with_dups(self): columns=df_float.columns) df = pd.concat([df_float, df_int, df_bool, df_object, df_dt], axis=1) - self.assertEqual(len(df._data._blknos), len(df.columns)) - self.assertEqual(len(df._data._blklocs), len(df.columns)) + assert len(df._data._blknos) == len(df.columns) + assert len(df._data._blklocs) == len(df.columns) # testing iloc for i in range(len(df.columns)): diff --git a/pandas/tests/frame/test_operators.py b/pandas/tests/frame/test_operators.py index efe167297627a..9083b7952909e 100644 --- a/pandas/tests/frame/test_operators.py +++ b/pandas/tests/frame/test_operators.py @@ -41,7 +41,7 @@ def test_operators(self): for idx, val in compat.iteritems(series): origVal = self.frame[col][idx] * 2 if not np.isnan(val): - self.assertEqual(val, origVal) + assert val == origVal else: assert np.isnan(origVal) @@ -49,7 +49,7 @@ def test_operators(self): for idx, val in compat.iteritems(series): origVal = self.frame[col][idx] + colSeries[col] if not np.isnan(val): - self.assertEqual(val, origVal) + assert val == origVal else: assert np.isnan(origVal) @@ -278,14 +278,14 @@ def _check_bin_op(op): result = op(df1, df2) expected = DataFrame(op(df1.values, df2.values), index=df1.index, columns=df1.columns) - self.assertEqual(result.values.dtype, np.bool_) + assert result.values.dtype == np.bool_ assert_frame_equal(result, expected) def _check_unary_op(op): result = op(df1) expected = 
DataFrame(op(df1.values), index=df1.index, columns=df1.columns) - self.assertEqual(result.values.dtype, np.bool_) + assert result.values.dtype == np.bool_ assert_frame_equal(result, expected) df1 = {'a': {'a': True, 'b': False, 'c': False, 'd': True, 'e': True}, @@ -861,9 +861,9 @@ def test_combineSeries(self): for key, col in compat.iteritems(self.tsframe): result = col + ts assert_series_equal(added[key], result, check_names=False) - self.assertEqual(added[key].name, key) + assert added[key].name == key if col.name == ts.name: - self.assertEqual(result.name, 'A') + assert result.name == 'A' else: assert result.name is None @@ -891,7 +891,7 @@ def test_combineSeries(self): # empty but with non-empty index frame = self.tsframe[:1].reindex(columns=[]) result = frame.mul(ts, axis='index') - self.assertEqual(len(result), len(ts)) + assert len(result) == len(ts) def test_combineFunc(self): result = self.frame * 2 @@ -906,7 +906,7 @@ def test_combineFunc(self): result = self.empty * 2 assert result.index is self.empty.index - self.assertEqual(len(result.columns), 0) + assert len(result.columns) == 0 def test_comparisons(self): df1 = tm.makeTimeDataFrame() diff --git a/pandas/tests/frame/test_period.py b/pandas/tests/frame/test_period.py index 0ca37de6bf2d4..826ece2ed2c9b 100644 --- a/pandas/tests/frame/test_period.py +++ b/pandas/tests/frame/test_period.py @@ -37,8 +37,8 @@ def test_frame_setitem(self): df['Index'] = rng rs = Index(df['Index']) tm.assert_index_equal(rs, rng, check_names=False) - self.assertEqual(rs.name, 'Index') - self.assertEqual(rng.name, 'index') + assert rs.name == 'Index' + assert rng.name == 'index' rs = df.reset_index().set_index('index') assert isinstance(rs.index, PeriodIndex) @@ -117,8 +117,8 @@ def _get_with_delta(delta, freq='A-DEC'): tm.assert_numpy_array_equal(result1.columns.asi8, expected.asi8) tm.assert_numpy_array_equal(result2.columns.asi8, expected.asi8) # PeriodIndex.to_timestamp always use 'infer' - self.assertEqual(result1.columns.freqstr, 'AS-JAN') - self.assertEqual(result2.columns.freqstr, 'AS-JAN') + assert result1.columns.freqstr == 'AS-JAN' + assert result2.columns.freqstr == 'AS-JAN' def test_frame_index_to_string(self): index = PeriodIndex(['2011-1', '2011-2', '2011-3'], freq='M') diff --git a/pandas/tests/frame/test_quantile.py b/pandas/tests/frame/test_quantile.py index 406f8107952ef..33f72cde1b9a3 100644 --- a/pandas/tests/frame/test_quantile.py +++ b/pandas/tests/frame/test_quantile.py @@ -23,12 +23,12 @@ def test_quantile(self): from numpy import percentile q = self.tsframe.quantile(0.1, axis=0) - self.assertEqual(q['A'], percentile(self.tsframe['A'], 10)) + assert q['A'] == percentile(self.tsframe['A'], 10) tm.assert_index_equal(q.index, self.tsframe.columns) q = self.tsframe.quantile(0.9, axis=1) - self.assertEqual(q['2000-01-17'], - percentile(self.tsframe.loc['2000-01-17'], 90)) + assert (q['2000-01-17'] == + percentile(self.tsframe.loc['2000-01-17'], 90)) tm.assert_index_equal(q.index, self.tsframe.index) # test degenerate case @@ -102,7 +102,7 @@ def test_quantile_axis_parameter(self): pytest.raises(ValueError, df.quantile, 0.1, axis="column") def test_quantile_interpolation(self): - # GH #10174 + # see gh-10174 if _np_version_under1p9: pytest.skip("Numpy version under 1.9") @@ -110,32 +110,32 @@ def test_quantile_interpolation(self): # interpolation = linear (default case) q = self.tsframe.quantile(0.1, axis=0, interpolation='linear') - self.assertEqual(q['A'], percentile(self.tsframe['A'], 10)) + assert q['A'] == 
percentile(self.tsframe['A'], 10) q = self.intframe.quantile(0.1) - self.assertEqual(q['A'], percentile(self.intframe['A'], 10)) + assert q['A'] == percentile(self.intframe['A'], 10) # test with and without interpolation keyword q1 = self.intframe.quantile(0.1) - self.assertEqual(q1['A'], np.percentile(self.intframe['A'], 10)) - assert_series_equal(q, q1) + assert q1['A'] == np.percentile(self.intframe['A'], 10) + tm.assert_series_equal(q, q1) # interpolation method other than default linear df = DataFrame({"A": [1, 2, 3], "B": [2, 3, 4]}, index=[1, 2, 3]) result = df.quantile(.5, axis=1, interpolation='nearest') expected = Series([1, 2, 3], index=[1, 2, 3], name=0.5) - assert_series_equal(result, expected) + tm.assert_series_equal(result, expected) # cross-check interpolation=nearest results in original dtype exp = np.percentile(np.array([[1, 2, 3], [2, 3, 4]]), .5, axis=0, interpolation='nearest') expected = Series(exp, index=[1, 2, 3], name=0.5, dtype='int64') - assert_series_equal(result, expected) + tm.assert_series_equal(result, expected) # float df = DataFrame({"A": [1., 2., 3.], "B": [2., 3., 4.]}, index=[1, 2, 3]) result = df.quantile(.5, axis=1, interpolation='nearest') expected = Series([1., 2., 3.], index=[1, 2, 3], name=0.5) - assert_series_equal(result, expected) + tm.assert_series_equal(result, expected) exp = np.percentile(np.array([[1., 2., 3.], [2., 3., 4.]]), .5, axis=0, interpolation='nearest') expected = Series(exp, index=[1, 2, 3], name=0.5, dtype='float64') @@ -167,7 +167,7 @@ def test_quantile_interpolation(self): assert_frame_equal(result, expected) def test_quantile_interpolation_np_lt_1p9(self): - # GH #10174 + # see gh-10174 if not _np_version_under1p9: pytest.skip("Numpy version is greater than 1.9") @@ -175,33 +175,33 @@ def test_quantile_interpolation_np_lt_1p9(self): # interpolation = linear (default case) q = self.tsframe.quantile(0.1, axis=0, interpolation='linear') - self.assertEqual(q['A'], percentile(self.tsframe['A'], 10)) + assert q['A'] == percentile(self.tsframe['A'], 10) q = self.intframe.quantile(0.1) - self.assertEqual(q['A'], percentile(self.intframe['A'], 10)) + assert q['A'] == percentile(self.intframe['A'], 10) # test with and without interpolation keyword q1 = self.intframe.quantile(0.1) - self.assertEqual(q1['A'], np.percentile(self.intframe['A'], 10)) + assert q1['A'] == np.percentile(self.intframe['A'], 10) assert_series_equal(q, q1) # interpolation method other than default linear - expErrMsg = "Interpolation methods other than linear" + msg = "Interpolation methods other than linear" df = DataFrame({"A": [1, 2, 3], "B": [2, 3, 4]}, index=[1, 2, 3]) - with tm.assert_raises_regex(ValueError, expErrMsg): + with tm.assert_raises_regex(ValueError, msg): df.quantile(.5, axis=1, interpolation='nearest') - with tm.assert_raises_regex(ValueError, expErrMsg): + with tm.assert_raises_regex(ValueError, msg): df.quantile([.5, .75], axis=1, interpolation='lower') # test degenerate case df = DataFrame({'x': [], 'y': []}) - with tm.assert_raises_regex(ValueError, expErrMsg): + with tm.assert_raises_regex(ValueError, msg): q = df.quantile(0.1, axis=0, interpolation='higher') # multi df = DataFrame([[1, 1, 1], [2, 2, 2], [3, 3, 3]], columns=['a', 'b', 'c']) - with tm.assert_raises_regex(ValueError, expErrMsg): + with tm.assert_raises_regex(ValueError, msg): df.quantile([.25, .5], interpolation='midpoint') def test_quantile_multi(self): diff --git a/pandas/tests/frame/test_query_eval.py b/pandas/tests/frame/test_query_eval.py index 
575906fb5c8b2..80db2c50c3eb6 100644 --- a/pandas/tests/frame/test_query_eval.py +++ b/pandas/tests/frame/test_query_eval.py @@ -808,7 +808,7 @@ def test_nested_scope(self): # smoke test x = 1 # noqa result = pd.eval('x + 1', engine=engine, parser=parser) - self.assertEqual(result, 2) + assert result == 2 df = DataFrame(np.random.randn(5, 3)) df2 = DataFrame(np.random.randn(5, 3)) diff --git a/pandas/tests/frame/test_replace.py b/pandas/tests/frame/test_replace.py index 87075e6d6e631..3f160012cb446 100644 --- a/pandas/tests/frame/test_replace.py +++ b/pandas/tests/frame/test_replace.py @@ -548,7 +548,7 @@ def test_regex_replace_numeric_to_object_conversion(self): expec = DataFrame({'a': ['a', 1, 2, 3], 'b': mix['b'], 'c': mix['c']}) res = df.replace(0, 'a') assert_frame_equal(res, expec) - self.assertEqual(res.a.dtype, np.object_) + assert res.a.dtype == np.object_ def test_replace_regex_metachar(self): metachars = '[]', '()', r'\d', r'\w', r'\s' diff --git a/pandas/tests/frame/test_repr_info.py b/pandas/tests/frame/test_repr_info.py index dbdbebddcc0b5..74301b918bd02 100644 --- a/pandas/tests/frame/test_repr_info.py +++ b/pandas/tests/frame/test_repr_info.py @@ -132,11 +132,11 @@ def test_repr_unicode(self): result = repr(df) ex_top = ' A' - self.assertEqual(result.split('\n')[0].rstrip(), ex_top) + assert result.split('\n')[0].rstrip() == ex_top df = DataFrame({'A': [uval, uval]}) result = repr(df) - self.assertEqual(result.split('\n')[0].rstrip(), ex_top) + assert result.split('\n')[0].rstrip() == ex_top def test_unicode_string_with_unicode(self): df = DataFrame({'A': [u("\u05d0")]}) @@ -186,7 +186,7 @@ def test_latex_repr(self): with option_context("display.latex.escape", False, 'display.latex.repr', True): df = DataFrame([[r'$\alpha$', 'b', 'c'], [1, 2, 3]]) - self.assertEqual(result, df._repr_latex_()) + assert result == df._repr_latex_() # GH 12182 assert df._repr_latex_() is None @@ -217,7 +217,7 @@ def test_info_wide(self): set_option('display.max_info_columns', 101) io = StringIO() df.info(buf=io) - self.assertEqual(rs, xp) + assert rs == xp reset_option('display.max_info_columns') def test_info_duplicate_columns(self): @@ -237,8 +237,8 @@ def test_info_duplicate_columns_shows_correct_dtypes(self): frame.info(buf=io) io.seek(0) lines = io.readlines() - self.assertEqual('a 1 non-null int64\n', lines[3]) - self.assertEqual('a 1 non-null float64\n', lines[4]) + assert 'a 1 non-null int64\n' == lines[3] + assert 'a 1 non-null float64\n' == lines[4] def test_info_shows_column_dtypes(self): dtypes = ['int64', 'float64', 'datetime64[ns]', 'timedelta64[ns]', @@ -263,7 +263,7 @@ def test_info_max_cols(self): buf = StringIO() df.info(buf=buf, verbose=verbose) res = buf.getvalue() - self.assertEqual(len(res.strip().split('\n')), len_) + assert len(res.strip().split('\n')) == len_ for len_, verbose in [(10, None), (5, False), (10, True)]: @@ -272,7 +272,7 @@ def test_info_max_cols(self): buf = StringIO() df.info(buf=buf, verbose=verbose) res = buf.getvalue() - self.assertEqual(len(res.strip().split('\n')), len_) + assert len(res.strip().split('\n')) == len_ for len_, max_cols in [(10, 5), (5, 4)]: # setting truncates @@ -280,14 +280,14 @@ def test_info_max_cols(self): buf = StringIO() df.info(buf=buf, max_cols=max_cols) res = buf.getvalue() - self.assertEqual(len(res.strip().split('\n')), len_) + assert len(res.strip().split('\n')) == len_ # setting wouldn't truncate with option_context('max_info_columns', 5): buf = StringIO() df.info(buf=buf, max_cols=max_cols) res = buf.getvalue() - 
self.assertEqual(len(res.strip().split('\n')), len_) + assert len(res.strip().split('\n')) == len_ def test_info_memory_usage(self): # Ensure memory usage is displayed, when asserted, on the last line @@ -352,15 +352,14 @@ def test_info_memory_usage(self): # (cols * rows * bytes) + index size df_size = df.memory_usage().sum() exp_size = len(dtypes) * n * 8 + df.index.nbytes - self.assertEqual(df_size, exp_size) + assert df_size == exp_size # Ensure number of cols in memory_usage is the same as df size_df = np.size(df.columns.values) + 1 # index=True; default - self.assertEqual(size_df, np.size(df.memory_usage())) + assert size_df == np.size(df.memory_usage()) # assert deep works only on object - self.assertEqual(df.memory_usage().sum(), - df.memory_usage(deep=True).sum()) + assert df.memory_usage().sum() == df.memory_usage(deep=True).sum() # test for validity DataFrame(1, index=['a'], columns=['A'] @@ -428,7 +427,7 @@ def memory_usage(f): df = DataFrame({'value': np.random.randn(N * M)}, index=index) unstacked = df.unstack('id') - self.assertEqual(df.values.nbytes, unstacked.values.nbytes) + assert df.values.nbytes == unstacked.values.nbytes assert memory_usage(df) > memory_usage(unstacked) # high upper bound diff --git a/pandas/tests/frame/test_reshape.py b/pandas/tests/frame/test_reshape.py index 9c48233ff29cd..79ee76ee362c3 100644 --- a/pandas/tests/frame/test_reshape.py +++ b/pandas/tests/frame/test_reshape.py @@ -41,25 +41,25 @@ def test_pivot(self): 'One': {'A': 1., 'B': 2., 'C': 3.}, 'Two': {'A': 1., 'B': 2., 'C': 3.} }) - expected.index.name, expected.columns.name = 'index', 'columns' - assert_frame_equal(pivoted, expected) + expected.index.name, expected.columns.name = 'index', 'columns' + tm.assert_frame_equal(pivoted, expected) # name tracking - self.assertEqual(pivoted.index.name, 'index') - self.assertEqual(pivoted.columns.name, 'columns') + assert pivoted.index.name == 'index' + assert pivoted.columns.name == 'columns' # don't specify values pivoted = frame.pivot(index='index', columns='columns') - self.assertEqual(pivoted.index.name, 'index') - self.assertEqual(pivoted.columns.names, (None, 'columns')) + assert pivoted.index.name == 'index' + assert pivoted.columns.names == (None, 'columns') with catch_warnings(record=True): # pivot multiple columns wp = tm.makePanel() lp = wp.to_frame() df = lp.reset_index() - assert_frame_equal(df.pivot('major', 'minor'), lp.unstack()) + tm.assert_frame_equal(df.pivot('major', 'minor'), lp.unstack()) def test_pivot_duplicates(self): data = DataFrame({'a': ['bar', 'bar', 'foo', 'foo', 'foo'], @@ -72,7 +72,7 @@ def test_pivot_empty(self): df = DataFrame({}, columns=['a', 'b', 'c']) result = df.pivot('a', 'b', 'c') expected = DataFrame({}) - assert_frame_equal(result, expected, check_names=False) + tm.assert_frame_equal(result, expected, check_names=False) def test_pivot_integer_bug(self): df = DataFrame(data=[("A", "1", "A1"), ("B", "2", "B2")]) @@ -106,21 +106,14 @@ def test_pivot_index_none(self): ('values', 'Two')], names=[None, 'columns']) expected.index.name = 'index' - assert_frame_equal(result, expected, check_names=False) - self.assertEqual(result.index.name, 'index',) - self.assertEqual(result.columns.names, (None, 'columns')) + tm.assert_frame_equal(result, expected, check_names=False) + assert result.index.name == 'index' + assert result.columns.names == (None, 'columns') expected.columns = expected.columns.droplevel(0) - - data = { - 'index': range(7), - 'columns': ['One', 'One', 'One', 'Two', 'Two', 'Two'], - 'values': [1., 2., 
3., 3., 2., 1.] - } - result = frame.pivot(columns='columns', values='values') expected.columns.name = 'columns' - assert_frame_equal(result, expected) + tm.assert_frame_equal(result, expected) def test_stack_unstack(self): f = self.frame.copy() @@ -516,8 +509,8 @@ def test_unstack_dtypes(self): right = right.set_index(['A', 'B']).unstack(0) right[('D', 'a')] = right[('D', 'a')].astype('int64') - self.assertEqual(left.shape, (3, 2)) - assert_frame_equal(left, right) + assert left.shape == (3, 2) + tm.assert_frame_equal(left, right) def test_unstack_non_unique_index_names(self): idx = MultiIndex.from_tuples([('a', 'b'), ('c', 'd')], @@ -540,7 +533,7 @@ def verify(df): left = sorted(df.iloc[i, j].split('.')) right = mk_list(df.index[i]) + mk_list(df.columns[j]) right = sorted(list(map(cast, right))) - self.assertEqual(left, right) + assert left == right df = DataFrame({'jim': ['a', 'b', nan, 'd'], 'joe': ['w', 'x', 'y', 'z'], @@ -554,7 +547,7 @@ def verify(df): mi = df.set_index(list(idx)) for lev in range(2): udf = mi.unstack(level=lev) - self.assertEqual(udf.notnull().values.sum(), len(df)) + assert udf.notnull().values.sum() == len(df) verify(udf['jolie']) df = DataFrame({'1st': ['d'] * 3 + [nan] * 5 + ['a'] * 2 + @@ -572,7 +565,7 @@ def verify(df): mi = df.set_index(list(idx)) for lev in range(3): udf = mi.unstack(level=lev) - self.assertEqual(udf.notnull().values.sum(), 2 * len(df)) + assert udf.notnull().values.sum() == 2 * len(df) for col in ['4th', '5th']: verify(udf[col]) @@ -677,12 +670,12 @@ def verify(df): df.loc[1, '3rd'] = df.loc[4, '3rd'] = nan left = df.set_index(['1st', '2nd', '3rd']).unstack(['2nd', '3rd']) - self.assertEqual(left.notnull().values.sum(), 2 * len(df)) + assert left.notnull().values.sum() == 2 * len(df) for col in ['jim', 'joe']: for _, r in df.iterrows(): key = r['1st'], (col, r['2nd'], r['3rd']) - self.assertEqual(r[col], left.loc[key]) + assert r[col] == left.loc[key] def test_stack_datetime_column_multiIndex(self): # GH 8039 diff --git a/pandas/tests/frame/test_subclass.py b/pandas/tests/frame/test_subclass.py index ade696885c2e0..40a8ece852623 100644 --- a/pandas/tests/frame/test_subclass.py +++ b/pandas/tests/frame/test_subclass.py @@ -55,12 +55,12 @@ def custom_frame_function(self): # Do we get back our own Series class after selecting a column? cdf_series = cdf.col1 assert isinstance(cdf_series, CustomSeries) - self.assertEqual(cdf_series.custom_series_function(), 'OK') + assert cdf_series.custom_series_function() == 'OK' # Do we get back our own DF class after slicing row-wise? 
cdf_rows = cdf[1:5] assert isinstance(cdf_rows, CustomDataFrame) - self.assertEqual(cdf_rows.custom_frame_function(), 'OK') + assert cdf_rows.custom_frame_function() == 'OK' # Make sure sliced part of multi-index frame is custom class mcol = pd.MultiIndex.from_tuples([('A', 'A'), ('A', 'B')]) @@ -76,19 +76,19 @@ def test_dataframe_metadata(self): index=['a', 'b', 'c']) df.testattr = 'XXX' - self.assertEqual(df.testattr, 'XXX') - self.assertEqual(df[['X']].testattr, 'XXX') - self.assertEqual(df.loc[['a', 'b'], :].testattr, 'XXX') - self.assertEqual(df.iloc[[0, 1], :].testattr, 'XXX') + assert df.testattr == 'XXX' + assert df[['X']].testattr == 'XXX' + assert df.loc[['a', 'b'], :].testattr == 'XXX' + assert df.iloc[[0, 1], :].testattr == 'XXX' - # GH9776 - self.assertEqual(df.iloc[0:1, :].testattr, 'XXX') + # see gh-9776 + assert df.iloc[0:1, :].testattr == 'XXX' - # GH10553 + # see gh-10553 unpickled = tm.round_trip_pickle(df) tm.assert_frame_equal(df, unpickled) - self.assertEqual(df._metadata, unpickled._metadata) - self.assertEqual(df.testattr, unpickled.testattr) + assert df._metadata == unpickled._metadata + assert df.testattr == unpickled.testattr def test_indexing_sliced(self): # GH 11559 diff --git a/pandas/tests/frame/test_timeseries.py b/pandas/tests/frame/test_timeseries.py index 910f04f0d63c6..f52f4697b1b08 100644 --- a/pandas/tests/frame/test_timeseries.py +++ b/pandas/tests/frame/test_timeseries.py @@ -38,7 +38,7 @@ def test_diff(self): s = Series([a, b]) rs = DataFrame({'s': s}).diff() - self.assertEqual(rs.s[1], 1) + assert rs.s[1] == 1 # mixed numeric tf = self.tsframe.astype('float32') @@ -71,7 +71,7 @@ def test_diff_mixed_dtype(self): df['A'] = np.array([1, 2, 3, 4, 5], dtype=object) result = df.diff() - self.assertEqual(result[0].dtype, np.float64) + assert result[0].dtype == np.float64 def test_diff_neg_n(self): rs = self.tsframe.diff(-1) @@ -153,7 +153,7 @@ def test_frame_add_datetime64_col_other_units(self): ex_vals = to_datetime(vals.astype('O')).values - self.assertEqual(df[unit].dtype, ns_dtype) + assert df[unit].dtype == ns_dtype assert (df[unit].values == ex_vals).all() # Test insertion into existing datetime64 column @@ -191,7 +191,7 @@ def test_shift(self): # shift by DateOffset shiftedFrame = self.tsframe.shift(5, freq=offsets.BDay()) - self.assertEqual(len(shiftedFrame), len(self.tsframe)) + assert len(shiftedFrame) == len(self.tsframe) shiftedFrame2 = self.tsframe.shift(5, freq='B') assert_frame_equal(shiftedFrame, shiftedFrame2) @@ -408,10 +408,10 @@ def test_first_last_valid(self): frame = DataFrame({'foo': mat}, index=self.frame.index) index = frame.first_valid_index() - self.assertEqual(index, frame.index[5]) + assert index == frame.index[5] index = frame.last_valid_index() - self.assertEqual(index, frame.index[-6]) + assert index == frame.index[-6] # GH12800 empty = DataFrame() @@ -446,7 +446,7 @@ def test_at_time_frame(self): rng = date_range('1/1/2012', freq='23Min', periods=384) ts = DataFrame(np.random.randn(len(rng), 2), rng) rs = ts.at_time('16:00') - self.assertEqual(len(rs), 0) + assert len(rs) == 0 def test_between_time_frame(self): rng = date_range('1/1/2000', '1/5/2000', freq='5min') @@ -463,7 +463,7 @@ def test_between_time_frame(self): if not inc_end: exp_len -= 4 - self.assertEqual(len(filtered), exp_len) + assert len(filtered) == exp_len for rs in filtered.index: t = rs.time() if inc_start: @@ -495,7 +495,7 @@ def test_between_time_frame(self): if not inc_end: exp_len -= 4 - self.assertEqual(len(filtered), exp_len) + assert len(filtered) 
== exp_len for rs in filtered.index: t = rs.time() if inc_start: diff --git a/pandas/tests/frame/test_to_csv.py b/pandas/tests/frame/test_to_csv.py index 11c10f1982558..3e38f2a71d99d 100644 --- a/pandas/tests/frame/test_to_csv.py +++ b/pandas/tests/frame/test_to_csv.py @@ -433,13 +433,13 @@ def test_to_csv_no_index(self): assert_frame_equal(df, result) def test_to_csv_with_mix_columns(self): - # GH11637, incorrect output when a mix of integer and string column + # gh-11637: incorrect output when a mix of integer and string column # names passed as columns parameter in to_csv df = DataFrame({0: ['a', 'b', 'c'], 1: ['aa', 'bb', 'cc']}) df['test'] = 'txt' - self.assertEqual(df.to_csv(), df.to_csv(columns=[0, 1, 'test'])) + assert df.to_csv() == df.to_csv(columns=[0, 1, 'test']) def test_to_csv_headers(self): # GH6186, the presence or absence of `index` incorrectly @@ -475,7 +475,7 @@ def test_to_csv_multiindex(self): # TODO to_csv drops column name assert_frame_equal(frame, df, check_names=False) - self.assertEqual(frame.index.names, df.index.names) + assert frame.index.names == df.index.names # needed if setUP becomes a classmethod self.frame.index = old_index @@ -494,7 +494,7 @@ def test_to_csv_multiindex(self): # do not load index tsframe.to_csv(path) recons = DataFrame.from_csv(path, index_col=None) - self.assertEqual(len(recons.columns), len(tsframe.columns) + 2) + assert len(recons.columns) == len(tsframe.columns) + 2 # no index tsframe.to_csv(path, index=False) @@ -604,7 +604,7 @@ def _make_frame(names=None): exp.index = [] tm.assert_index_equal(recons.columns, exp.columns) - self.assertEqual(len(recons), 0) + assert len(recons) == 0 def test_to_csv_float32_nanrep(self): df = DataFrame(np.random.randn(1, 4).astype(np.float32)) @@ -615,7 +615,7 @@ def test_to_csv_float32_nanrep(self): with open(path) as f: lines = f.readlines() - self.assertEqual(lines[1].split(',')[2], '999') + assert lines[1].split(',')[2] == '999' def test_to_csv_withcommas(self): @@ -813,7 +813,7 @@ def test_to_csv_unicodewriter_quoting(self): '2,"bar"\n' '3,"baz"\n') - self.assertEqual(result, expected) + assert result == expected def test_to_csv_quote_none(self): # GH4328 @@ -824,7 +824,7 @@ def test_to_csv_quote_none(self): encoding=encoding, index=False) result = buf.getvalue() expected = 'A\nhello\n{"hello"}\n' - self.assertEqual(result, expected) + assert result == expected def test_to_csv_index_no_leading_comma(self): df = DataFrame({'A': [1, 2, 3], 'B': [4, 5, 6]}, @@ -836,7 +836,7 @@ def test_to_csv_index_no_leading_comma(self): 'one,1,4\n' 'two,2,5\n' 'three,3,6\n') - self.assertEqual(buf.getvalue(), expected) + assert buf.getvalue() == expected def test_to_csv_line_terminators(self): df = DataFrame({'A': [1, 2, 3], 'B': [4, 5, 6]}, @@ -848,7 +848,7 @@ def test_to_csv_line_terminators(self): 'one,1,4\r\n' 'two,2,5\r\n' 'three,3,6\r\n') - self.assertEqual(buf.getvalue(), expected) + assert buf.getvalue() == expected buf = StringIO() df.to_csv(buf) # The default line terminator remains \n @@ -856,7 +856,7 @@ def test_to_csv_line_terminators(self): 'one,1,4\n' 'two,2,5\n' 'three,3,6\n') - self.assertEqual(buf.getvalue(), expected) + assert buf.getvalue() == expected def test_to_csv_from_csv_categorical(self): @@ -868,7 +868,7 @@ def test_to_csv_from_csv_categorical(self): s.to_csv(res) exp = StringIO() s2.to_csv(exp) - self.assertEqual(res.getvalue(), exp.getvalue()) + assert res.getvalue() == exp.getvalue() df = DataFrame({"s": s}) df2 = DataFrame({"s": s2}) @@ -876,7 +876,7 @@ def 
test_to_csv_from_csv_categorical(self): df.to_csv(res) exp = StringIO() df2.to_csv(exp) - self.assertEqual(res.getvalue(), exp.getvalue()) + assert res.getvalue() == exp.getvalue() def test_to_csv_path_is_none(self): # GH 8215 @@ -1078,13 +1078,13 @@ def test_to_csv_quoting(self): 1,False,3.2,,"b,c" """ result = df.to_csv() - self.assertEqual(result, expected) + assert result == expected result = df.to_csv(quoting=None) - self.assertEqual(result, expected) + assert result == expected result = df.to_csv(quoting=csv.QUOTE_MINIMAL) - self.assertEqual(result, expected) + assert result == expected expected = """\ "","c_bool","c_float","c_int","c_string" @@ -1092,7 +1092,7 @@ def test_to_csv_quoting(self): "1","False","3.2","","b,c" """ result = df.to_csv(quoting=csv.QUOTE_ALL) - self.assertEqual(result, expected) + assert result == expected # see gh-12922, gh-13259: make sure changes to # the formatters do not break this behaviour @@ -1102,7 +1102,7 @@ def test_to_csv_quoting(self): 1,False,3.2,"","b,c" """ result = df.to_csv(quoting=csv.QUOTE_NONNUMERIC) - self.assertEqual(result, expected) + assert result == expected msg = "need to escape, but no escapechar set" tm.assert_raises_regex(csv.Error, msg, df.to_csv, @@ -1118,7 +1118,7 @@ def test_to_csv_quoting(self): """ result = df.to_csv(quoting=csv.QUOTE_NONE, escapechar='!') - self.assertEqual(result, expected) + assert result == expected expected = """\ ,c_bool,c_ffloat,c_int,c_string @@ -1127,7 +1127,7 @@ def test_to_csv_quoting(self): """ result = df.to_csv(quoting=csv.QUOTE_NONE, escapechar='f') - self.assertEqual(result, expected) + assert result == expected # see gh-3503: quoting Windows line terminators # presents with encoding? @@ -1135,14 +1135,14 @@ def test_to_csv_quoting(self): df = pd.read_csv(StringIO(text)) buf = StringIO() df.to_csv(buf, encoding='utf-8', index=False) - self.assertEqual(buf.getvalue(), text) + assert buf.getvalue() == text # xref gh-7791: make sure the quoting parameter is passed through # with multi-indexes df = pd.DataFrame({'a': [1, 2], 'b': [3, 4], 'c': [5, 6]}) df = df.set_index(['a', 'b']) expected = '"a","b","c"\n"1","3","5"\n"2","4","6"\n' - self.assertEqual(df.to_csv(quoting=csv.QUOTE_ALL), expected) + assert df.to_csv(quoting=csv.QUOTE_ALL) == expected def test_period_index_date_overflow(self): # see gh-15982 diff --git a/pandas/tests/groupby/test_aggregate.py b/pandas/tests/groupby/test_aggregate.py index e3f166d2294e2..310a5aca77b77 100644 --- a/pandas/tests/groupby/test_aggregate.py +++ b/pandas/tests/groupby/test_aggregate.py @@ -197,7 +197,7 @@ def test_agg_ser_multi_key(self): def test_agg_apply_corner(self): # nothing to group, all NA grouped = self.ts.groupby(self.ts * np.nan) - self.assertEqual(self.ts.dtype, np.float64) + assert self.ts.dtype == np.float64 # groupby float64 values results in Float64Index exp = Series([], dtype=np.float64, index=pd.Index( @@ -445,7 +445,7 @@ def test_aggregate_item_by_item(self): # def aggfun(ser): # return len(ser + 'a') # result = grouped.agg(aggfun) - # self.assertEqual(len(result.columns), 1) + # assert len(result.columns) == 1 aggfun = lambda ser: ser.size result = grouped.agg(aggfun) @@ -468,7 +468,7 @@ def aggfun(ser): result = DataFrame().groupby(self.df.A).agg(aggfun) assert isinstance(result, DataFrame) - self.assertEqual(len(result), 0) + assert len(result) == 0 def test_agg_item_by_item_raise_typeerror(self): from numpy.random import randint diff --git a/pandas/tests/groupby/test_categorical.py b/pandas/tests/groupby/test_categorical.py index 
b9a731f2204da..9d2134927389d 100644 --- a/pandas/tests/groupby/test_categorical.py +++ b/pandas/tests/groupby/test_categorical.py @@ -48,7 +48,7 @@ def get_stats(group): 'mean': group.mean()} result = self.df.groupby(cats).D.apply(get_stats) - self.assertEqual(result.index.names[0], 'C') + assert result.index.names[0] == 'C' def test_apply_categorical_data(self): # GH 10138 diff --git a/pandas/tests/groupby/test_groupby.py b/pandas/tests/groupby/test_groupby.py index 278682ccb8d45..09643e918af31 100644 --- a/pandas/tests/groupby/test_groupby.py +++ b/pandas/tests/groupby/test_groupby.py @@ -41,10 +41,10 @@ def checkit(dtype): grouped = data.groupby(lambda x: x // 3) for k, v in grouped: - self.assertEqual(len(v), 3) + assert len(v) == 3 agged = grouped.aggregate(np.mean) - self.assertEqual(agged[1], 1) + assert agged[1] == 1 assert_series_equal(agged, grouped.agg(np.mean)) # shorthand assert_series_equal(agged, grouped.mean()) @@ -52,7 +52,7 @@ def checkit(dtype): expected = grouped.apply(lambda x: x * x.sum()) transformed = grouped.transform(lambda x: x * x.sum()) - self.assertEqual(transformed[7], 12) + assert transformed[7] == 12 assert_series_equal(transformed, expected) value_grouped = data.groupby(data) @@ -68,7 +68,7 @@ def checkit(dtype): group_constants = {0: 10, 1: 20, 2: 30} agged = grouped.agg(lambda x: group_constants[x.name] + x.mean()) - self.assertEqual(agged[1], 21) + assert agged[1] == 21 # corner cases pytest.raises(Exception, grouped.aggregate, lambda x: x * 2) @@ -423,10 +423,10 @@ def test_grouper_getting_correct_binner(self): assert_frame_equal(result, expected) def test_grouper_iter(self): - self.assertEqual(sorted(self.df.groupby('A').grouper), ['bar', 'foo']) + assert sorted(self.df.groupby('A').grouper) == ['bar', 'foo'] def test_empty_groups(self): - # GH # 1048 + # see gh-1048 pytest.raises(ValueError, self.df.groupby, []) def test_groupby_grouper(self): @@ -434,7 +434,7 @@ def test_groupby_grouper(self): result = self.df.groupby(grouped.grouper).mean() expected = grouped.mean() - assert_frame_equal(result, expected) + tm.assert_frame_equal(result, expected) def test_groupby_duplicated_column_errormsg(self): # GH7511 @@ -744,17 +744,17 @@ def test_len(self): df = tm.makeTimeDataFrame() grouped = df.groupby([lambda x: x.year, lambda x: x.month, lambda x: x.day]) - self.assertEqual(len(grouped), len(df)) + assert len(grouped) == len(df) grouped = df.groupby([lambda x: x.year, lambda x: x.month]) expected = len(set([(x.year, x.month) for x in df.index])) - self.assertEqual(len(grouped), expected) + assert len(grouped) == expected # issue 11016 df = pd.DataFrame(dict(a=[np.nan] * 3, b=[1, 2, 3])) - self.assertEqual(len(df.groupby(('a'))), 0) - self.assertEqual(len(df.groupby(('b'))), 3) - self.assertEqual(len(df.groupby(('a', 'b'))), 3) + assert len(df.groupby(('a'))) == 0 + assert len(df.groupby(('b'))) == 3 + assert len(df.groupby(('a', 'b'))) == 3 def test_groups(self): grouped = self.df.groupby(['A']) @@ -900,7 +900,7 @@ def test_series_describe_single(self): def test_series_index_name(self): grouped = self.df.loc[:, ['C']].groupby(self.df['A']) result = grouped.agg(lambda x: x.mean()) - self.assertEqual(result.index.name, 'A') + assert result.index.name == 'A' def test_frame_describe_multikey(self): grouped = self.tsframe.groupby([lambda x: x.year, lambda x: x.month]) @@ -962,8 +962,8 @@ def test_frame_groupby(self): # aggregate aggregated = grouped.aggregate(np.mean) - self.assertEqual(len(aggregated), 5) - self.assertEqual(len(aggregated.columns), 4) + 
assert len(aggregated) == 5 + assert len(aggregated.columns) == 4 # by string tscopy = self.tsframe.copy() @@ -974,8 +974,8 @@ def test_frame_groupby(self): # transform grouped = self.tsframe.head(30).groupby(lambda x: x.weekday()) transformed = grouped.transform(lambda x: x - x.mean()) - self.assertEqual(len(transformed), 30) - self.assertEqual(len(transformed.columns), 4) + assert len(transformed) == 30 + assert len(transformed.columns) == 4 # transform propagate transformed = grouped.transform(lambda x: x.mean()) @@ -987,7 +987,7 @@ def test_frame_groupby(self): # iterate for weekday, group in grouped: - self.assertEqual(group.index[0].weekday(), weekday) + assert group.index[0].weekday() == weekday # groups / group_indices groups = grouped.groups @@ -1013,8 +1013,8 @@ def test_frame_groupby_columns(self): # aggregate aggregated = grouped.aggregate(np.mean) - self.assertEqual(len(aggregated), len(self.tsframe)) - self.assertEqual(len(aggregated.columns), 2) + assert len(aggregated) == len(self.tsframe) + assert len(aggregated.columns) == 2 # transform tf = lambda x: x - x.mean() @@ -1023,34 +1023,34 @@ def test_frame_groupby_columns(self): # iterate for k, v in grouped: - self.assertEqual(len(v.columns), 2) + assert len(v.columns) == 2 def test_frame_set_name_single(self): grouped = self.df.groupby('A') result = grouped.mean() - self.assertEqual(result.index.name, 'A') + assert result.index.name == 'A' result = self.df.groupby('A', as_index=False).mean() self.assertNotEqual(result.index.name, 'A') result = grouped.agg(np.mean) - self.assertEqual(result.index.name, 'A') + assert result.index.name == 'A' result = grouped.agg({'C': np.mean, 'D': np.std}) - self.assertEqual(result.index.name, 'A') + assert result.index.name == 'A' result = grouped['C'].mean() - self.assertEqual(result.index.name, 'A') + assert result.index.name == 'A' result = grouped['C'].agg(np.mean) - self.assertEqual(result.index.name, 'A') + assert result.index.name == 'A' result = grouped['C'].agg([np.mean, np.std]) - self.assertEqual(result.index.name, 'A') + assert result.index.name == 'A' with tm.assert_produces_warning(FutureWarning, check_stacklevel=False): result = grouped['C'].agg({'foo': np.mean, 'bar': np.std}) - self.assertEqual(result.index.name, 'A') + assert result.index.name == 'A' def test_multi_iter(self): s = Series(np.arange(6)) @@ -1064,8 +1064,8 @@ def test_multi_iter(self): ('b', '1', s[[4]]), ('b', '2', s[[3, 5]])] for i, ((one, two), three) in enumerate(iterated): e1, e2, e3 = expected[i] - self.assertEqual(e1, one) - self.assertEqual(e2, two) + assert e1 == one + assert e2 == two assert_series_equal(three, e3) def test_multi_iter_frame(self): @@ -1087,8 +1087,8 @@ def test_multi_iter_frame(self): ('b', '2', df.loc[idx[[1]]])] for i, ((one, two), three) in enumerate(iterated): e1, e2, e3 = expected[i] - self.assertEqual(e1, one) - self.assertEqual(e2, two) + assert e1 == one + assert e2 == two assert_frame_equal(three, e3) # don't iterate through groups with no data @@ -1098,7 +1098,7 @@ def test_multi_iter_frame(self): groups = {} for key, gp in grouped: groups[key] = gp - self.assertEqual(len(groups), 2) + assert len(groups) == 2 # axis = 1 three_levels = self.three_group.groupby(['A', 'B', 'C']).mean() @@ -1563,7 +1563,7 @@ def test_empty_groups_corner(self): agged = grouped.apply(lambda x: x.mean()) agged_A = grouped['A'].apply(np.mean) assert_series_equal(agged['A'], agged_A) - self.assertEqual(agged.index.name, 'first') + assert agged.index.name == 'first' def 
test_apply_concat_preserve_names(self): grouped = self.three_group.groupby(['A', 'B']) @@ -1591,13 +1591,13 @@ def desc3(group): return result result = grouped.apply(desc) - self.assertEqual(result.index.names, ('A', 'B', 'stat')) + assert result.index.names == ('A', 'B', 'stat') result2 = grouped.apply(desc2) - self.assertEqual(result2.index.names, ('A', 'B', 'stat')) + assert result2.index.names == ('A', 'B', 'stat') result3 = grouped.apply(desc3) - self.assertEqual(result3.index.names, ('A', 'B', None)) + assert result3.index.names == ('A', 'B', None) def test_nonsense_func(self): df = DataFrame([0]) @@ -1789,7 +1789,7 @@ def aggfun(ser): return ser.sum() agged2 = df.groupby(keys).aggregate(aggfun) - self.assertEqual(len(agged2.columns) + 1, len(df.columns)) + assert len(agged2.columns) + 1 == len(df.columns) def test_groupby_level(self): frame = self.mframe @@ -1804,13 +1804,13 @@ def test_groupby_level(self): expected0 = expected0.reindex(frame.index.levels[0]) expected1 = expected1.reindex(frame.index.levels[1]) - self.assertEqual(result0.index.name, 'first') - self.assertEqual(result1.index.name, 'second') + assert result0.index.name == 'first' + assert result1.index.name == 'second' assert_frame_equal(result0, expected0) assert_frame_equal(result1, expected1) - self.assertEqual(result0.index.name, frame.index.names[0]) - self.assertEqual(result1.index.name, frame.index.names[1]) + assert result0.index.name == frame.index.names[0] + assert result1.index.name == frame.index.names[1] # groupby level name result0 = frame.groupby(level='first').sum() @@ -1860,12 +1860,12 @@ def test_groupby_level_apply(self): frame = self.mframe result = frame.groupby(level=0).count() - self.assertEqual(result.index.name, 'first') + assert result.index.name == 'first' result = frame.groupby(level=1).count() - self.assertEqual(result.index.name, 'second') + assert result.index.name == 'second' result = frame['A'].groupby(level=0).count() - self.assertEqual(result.index.name, 'first') + assert result.index.name == 'first' def test_groupby_args(self): # PR8618 and issue 8015 @@ -1965,7 +1965,7 @@ def f(piece): def test_apply_series_yield_constant(self): result = self.df.groupby(['A', 'B'])['C'].apply(len) - self.assertEqual(result.index.names[:2], ('A', 'B')) + assert result.index.names[:2] == ('A', 'B') def test_apply_frame_yield_constant(self): # GH13568 @@ -1999,7 +1999,7 @@ def trans2(group): result = df.groupby('A').apply(trans) exp = df.groupby('A')['C'].apply(trans2) assert_series_equal(result, exp, check_names=False) - self.assertEqual(result.name, 'C') + assert result.name == 'C' def test_apply_transform(self): grouped = self.ts.groupby(lambda x: x.month) @@ -2161,17 +2161,17 @@ def test_size(self): grouped = self.df.groupby(['A', 'B']) result = grouped.size() for key, group in grouped: - self.assertEqual(result[key], len(group)) + assert result[key] == len(group) grouped = self.df.groupby('A') result = grouped.size() for key, group in grouped: - self.assertEqual(result[key], len(group)) + assert result[key] == len(group) grouped = self.df.groupby('B') result = grouped.size() for key, group in grouped: - self.assertEqual(result[key], len(group)) + assert result[key] == len(group) df = DataFrame(np.random.choice(20, (1000, 3)), columns=list('abc')) for sort, key in cart_product((False, True), ('a', 'b', ['a', 'b'])): @@ -2481,24 +2481,24 @@ def test_groupby_wrong_multi_labels(self): def test_groupby_series_with_name(self): result = self.df.groupby(self.df['A']).mean() result2 = 
self.df.groupby(self.df['A'], as_index=False).mean() - self.assertEqual(result.index.name, 'A') + assert result.index.name == 'A' assert 'A' in result2 result = self.df.groupby([self.df['A'], self.df['B']]).mean() result2 = self.df.groupby([self.df['A'], self.df['B']], as_index=False).mean() - self.assertEqual(result.index.names, ('A', 'B')) + assert result.index.names == ('A', 'B') assert 'A' in result2 assert 'B' in result2 def test_seriesgroupby_name_attr(self): # GH 6265 result = self.df.groupby('A')['C'] - self.assertEqual(result.count().name, 'C') - self.assertEqual(result.mean().name, 'C') + assert result.count().name == 'C' + assert result.mean().name == 'C' testFunc = lambda x: np.sum(x) * 2 - self.assertEqual(result.agg(testFunc).name, 'C') + assert result.agg(testFunc).name == 'C' def test_consistency_name(self): # GH 12363 @@ -2530,11 +2530,11 @@ def summarize_random_name(df): }, name=df.iloc[0]['A']) metrics = self.df.groupby('A').apply(summarize) - self.assertEqual(metrics.columns.name, None) + assert metrics.columns.name is None metrics = self.df.groupby('A').apply(summarize, 'metrics') - self.assertEqual(metrics.columns.name, 'metrics') + assert metrics.columns.name == 'metrics' metrics = self.df.groupby('A').apply(summarize_random_name) - self.assertEqual(metrics.columns.name, None) + assert metrics.columns.name is None def test_groupby_nonstring_columns(self): df = DataFrame([np.arange(10) for x in range(10)]) @@ -2595,11 +2595,11 @@ def convert_force_pure(x): grouped = s.groupby(labels) result = grouped.agg(convert_fast) - self.assertEqual(result.dtype, np.object_) + assert result.dtype == np.object_ assert isinstance(result[0], Decimal) result = grouped.agg(convert_force_pure) - self.assertEqual(result.dtype, np.object_) + assert result.dtype == np.object_ assert isinstance(result[0], Decimal) def test_fast_apply(self): @@ -2670,7 +2670,7 @@ def test_groupby_aggregation_mixed_dtype(self): def test_groupby_dtype_inference_empty(self): # GH 6733 df = DataFrame({'x': [], 'range': np.arange(0, dtype='int64')}) - self.assertEqual(df['x'].dtype, np.float64) + assert df['x'].dtype == np.float64 result = df.groupby('x').first() exp_index = Index([], name='x', dtype=np.float64) @@ -2725,7 +2725,7 @@ def test_groupby_nat_exclude(self): expected = [pd.Index([1, 7]), pd.Index([3, 5])] keys = sorted(grouped.groups.keys()) - self.assertEqual(len(keys), 2) + assert len(keys) == 2 for k, e in zip(keys, expected): # grouped.groups keys are np.datetime64 with system tz # not to be affected by tz, only compare values @@ -2733,7 +2733,7 @@ def test_groupby_nat_exclude(self): # confirm obj is not filtered tm.assert_frame_equal(grouped.grouper.groupings[0].obj, df) - self.assertEqual(grouped.ngroups, 2) + assert grouped.ngroups == 2 expected = { Timestamp('2013-01-01 00:00:00'): np.array([1, 7], dtype=np.int64), @@ -2752,14 +2752,14 @@ def test_groupby_nat_exclude(self): nan_df = DataFrame({'nan': [np.nan, np.nan, np.nan], 'nat': [pd.NaT, pd.NaT, pd.NaT]}) - self.assertEqual(nan_df['nan'].dtype, 'float64') - self.assertEqual(nan_df['nat'].dtype, 'datetime64[ns]') + assert nan_df['nan'].dtype == 'float64' + assert nan_df['nat'].dtype == 'datetime64[ns]' for key in ['nan', 'nat']: grouped = nan_df.groupby(key) - self.assertEqual(grouped.groups, {}) - self.assertEqual(grouped.ngroups, 0) - self.assertEqual(grouped.indices, {}) + assert grouped.groups == {} + assert grouped.ngroups == 0 + assert grouped.indices == {} pytest.raises(KeyError, grouped.get_group, np.nan) pytest.raises(KeyError, 
grouped.get_group, pd.NaT) @@ -2837,7 +2837,7 @@ def test_int32_overflow(self): left = df.groupby(['A', 'B', 'C', 'D']).sum() right = df.groupby(['D', 'C', 'B', 'A']).sum() - self.assertEqual(len(left), len(right)) + assert len(left) == len(right) def test_groupby_sort_multi(self): df = DataFrame({'a': ['foo', 'bar', 'baz'], @@ -2963,7 +2963,7 @@ def test_multifunc_sum_bug(self): grouped = x.groupby('test') result = grouped.agg({'fl': 'sum', 2: 'size'}) - self.assertEqual(result['fl'].dtype, np.float64) + assert result['fl'].dtype == np.float64 def test_handle_dict_return_value(self): def f(group): @@ -3056,14 +3056,13 @@ def f(group): assert names == expected_names def test_no_dummy_key_names(self): - # GH #1291 - + # see gh-1291 result = self.df.groupby(self.df['A'].values).sum() assert result.index.name is None result = self.df.groupby([self.df['A'].values, self.df['B'].values ]).sum() - self.assertEqual(result.index.names, (None, None)) + assert result.index.names == (None, None) def test_groupby_sort_multiindex_series(self): # series multiindex groupby sort argument was not being passed through @@ -3121,16 +3120,16 @@ def test_multiindex_columns_empty_level(self): df = DataFrame([[long(1), 'A']], columns=midx) grouped = df.groupby('to filter').groups - self.assertEqual(grouped['A'], [0]) + assert grouped['A'] == [0] grouped = df.groupby([('to filter', '')]).groups - self.assertEqual(grouped['A'], [0]) + assert grouped['A'] == [0] df = DataFrame([[long(1), 'A'], [long(2), 'B']], columns=midx) expected = df.groupby('to filter').groups result = df.groupby([('to filter', '')]).groups - self.assertEqual(result, expected) + assert result == expected df = DataFrame([[long(1), 'A'], [long(2), 'A']], columns=midx) @@ -3230,7 +3229,7 @@ def test_groupby_non_arithmetic_agg_intlike_precision(self): grpd = df.groupby('a') res = getattr(grpd, method)(*data['args']) - self.assertEqual(res.iloc[0].b, data['expected']) + assert res.iloc[0].b == data['expected'] def test_groupby_multiindex_missing_pair(self): # GH9049 diff --git a/pandas/tests/groupby/test_nth.py b/pandas/tests/groupby/test_nth.py index f583fa7aa7e86..0b6aeaf155f86 100644 --- a/pandas/tests/groupby/test_nth.py +++ b/pandas/tests/groupby/test_nth.py @@ -87,9 +87,9 @@ def test_first_last_nth_dtypes(self): idx = lrange(10) idx.append(9) s = Series(data=lrange(11), index=idx, name='IntCol') - self.assertEqual(s.dtype, 'int64') + assert s.dtype == 'int64' f = s.groupby(level=0).first() - self.assertEqual(f.dtype, 'int64') + assert f.dtype == 'int64' def test_nth(self): df = DataFrame([[1, np.nan], [1, 4], [5, 6]], columns=['A', 'B']) @@ -155,12 +155,12 @@ def test_nth(self): expected2 = s.groupby(g).apply(lambda x: x.iloc[0]) assert_series_equal(expected2, expected, check_names=False) assert expected.name, 0 - self.assertEqual(expected.name, 1) + assert expected.name == 1 # validate first v = s[g == 1].iloc[0] - self.assertEqual(expected.iloc[0], v) - self.assertEqual(expected2.iloc[0], v) + assert expected.iloc[0] == v + assert expected2.iloc[0] == v # this is NOT the same as .first (as sorted is default!) 
# as it keeps the order in the series (and not the group order) diff --git a/pandas/tests/groupby/test_timegrouper.py b/pandas/tests/groupby/test_timegrouper.py index db3fdfa605b5b..42caecbdb700e 100644 --- a/pandas/tests/groupby/test_timegrouper.py +++ b/pandas/tests/groupby/test_timegrouper.py @@ -444,7 +444,7 @@ def test_frame_datetime64_handling_groupby(self): (3, np.datetime64('2012-07-04'))], columns=['a', 'date']) result = df.groupby('a').first() - self.assertEqual(result['date'][3], Timestamp('2012-07-03')) + assert result['date'][3] == Timestamp('2012-07-03') def test_groupby_multi_timezone(self): @@ -575,10 +575,10 @@ def test_timezone_info(self): import pytz df = pd.DataFrame({'a': [1], 'b': [datetime.now(pytz.utc)]}) - self.assertEqual(df['b'][0].tzinfo, pytz.utc) + assert df['b'][0].tzinfo == pytz.utc df = pd.DataFrame({'a': [1, 2, 3]}) df['b'] = datetime.now(pytz.utc) - self.assertEqual(df['b'][0].tzinfo, pytz.utc) + assert df['b'][0].tzinfo == pytz.utc def test_datetime_count(self): df = DataFrame({'a': [1, 2, 3] * 2, diff --git a/pandas/tests/groupby/test_transform.py b/pandas/tests/groupby/test_transform.py index e0d81003e325f..0b81235ef2117 100644 --- a/pandas/tests/groupby/test_transform.py +++ b/pandas/tests/groupby/test_transform.py @@ -29,7 +29,7 @@ def test_transform(self): grouped = data.groupby(lambda x: x // 3) transformed = grouped.transform(lambda x: x * x.sum()) - self.assertEqual(transformed[7], 12) + assert transformed[7] == 12 # GH 8046 # make sure that we preserve the input order @@ -408,7 +408,7 @@ def f(group): grouped = df.groupby('c') result = grouped.apply(f) - self.assertEqual(result['d'].dtype, np.float64) + assert result['d'].dtype == np.float64 # this is by definition a mutating operation! with option_context('mode.chained_assignment', None): diff --git a/pandas/tests/indexes/common.py b/pandas/tests/indexes/common.py index d9dccc39f469f..bbde902fb87bf 100644 --- a/pandas/tests/indexes/common.py +++ b/pandas/tests/indexes/common.py @@ -139,7 +139,7 @@ def test_ndarray_compat_properties(self): values = idx.values for prop in self._compat_props: - self.assertEqual(getattr(idx, prop), getattr(values, prop)) + assert getattr(idx, prop) == getattr(values, prop) # test for validity idx.nbytes @@ -162,7 +162,7 @@ def test_dtype_str(self): for idx in self.indices.values(): dtype = idx.dtype_str assert isinstance(dtype, compat.string_types) - self.assertEqual(dtype, str(idx.dtype)) + assert dtype == str(idx.dtype) def test_repr_max_seq_item_setting(self): # GH10182 @@ -189,14 +189,14 @@ def test_set_name_methods(self): original_name = ind.name new_ind = ind.set_names([new_name]) - self.assertEqual(new_ind.name, new_name) - self.assertEqual(ind.name, original_name) + assert new_ind.name == new_name + assert ind.name == original_name res = ind.rename(new_name, inplace=True) # should return None assert res is None - self.assertEqual(ind.name, new_name) - self.assertEqual(ind.names, [new_name]) + assert ind.name == new_name + assert ind.names == [new_name] # with tm.assert_raises_regex(TypeError, "list-like"): # # should still fail even if it would be the right length # ind.set_names("a") @@ -206,8 +206,8 @@ def test_set_name_methods(self): # rename in place just leaves tuples and other containers alone name = ('A', 'B') ind.rename(name, inplace=True) - self.assertEqual(ind.name, name) - self.assertEqual(ind.names, [name]) + assert ind.name == name + assert ind.names == [name] def test_hash_error(self): for ind in self.indices.values(): @@ -310,7 +310,7 @@ def 
test_duplicates(self): # preserve names idx.name = 'foo' result = idx.drop_duplicates() - self.assertEqual(result.name, 'foo') + assert result.name == 'foo' tm.assert_index_equal(result, Index([ind[0]], name='foo')) def test_get_unique_index(self): @@ -351,8 +351,8 @@ def test_get_unique_index(self): idx_unique_nan = ind._shallow_copy(vals_unique) assert idx_unique_nan.is_unique - self.assertEqual(idx_nan.dtype, ind.dtype) - self.assertEqual(idx_unique_nan.dtype, ind.dtype) + assert idx_nan.dtype == ind.dtype + assert idx_unique_nan.dtype == ind.dtype for dropna, expected in zip([False, True], [idx_unique_nan, idx_unique]): @@ -373,11 +373,11 @@ def test_mutability(self): def test_view(self): for ind in self.indices.values(): i_view = ind.view() - self.assertEqual(i_view.name, ind.name) + assert i_view.name == ind.name def test_compat(self): for ind in self.indices.values(): - self.assertEqual(ind.tolist(), list(ind)) + assert ind.tolist() == list(ind) def test_memory_usage(self): for name, index in compat.iteritems(self.indices): @@ -398,7 +398,7 @@ def test_memory_usage(self): else: # we report 0 for no-length - self.assertEqual(result, 0) + assert result == 0 def test_argsort(self): for k, ind in self.indices.items(): @@ -617,7 +617,7 @@ def test_difference_base(self): elif isinstance(idx, CategoricalIndex): pass elif isinstance(idx, (DatetimeIndex, TimedeltaIndex)): - self.assertEqual(result.__class__, answer.__class__) + assert result.__class__ == answer.__class__ tm.assert_numpy_array_equal(result.asi8, answer.asi8) else: result = first.difference(case) @@ -687,12 +687,12 @@ def test_delete_base(self): expected = idx[1:] result = idx.delete(0) assert result.equals(expected) - self.assertEqual(result.name, expected.name) + assert result.name == expected.name expected = idx[:-1] result = idx.delete(-1) assert result.equals(expected) - self.assertEqual(result.name, expected.name) + assert result.name == expected.name with pytest.raises((IndexError, ValueError)): # either depending on numpy version diff --git a/pandas/tests/indexes/datetimes/test_astype.py b/pandas/tests/indexes/datetimes/test_astype.py index 35031746efebe..1c8189d0c75ac 100644 --- a/pandas/tests/indexes/datetimes/test_astype.py +++ b/pandas/tests/indexes/datetimes/test_astype.py @@ -131,8 +131,8 @@ def _check_rng(rng): assert isinstance(converted, np.ndarray) for x, stamp in zip(converted, rng): assert isinstance(x, datetime) - self.assertEqual(x, stamp.to_pydatetime()) - self.assertEqual(x.tzinfo, stamp.tzinfo) + assert x == stamp.to_pydatetime() + assert x.tzinfo == stamp.tzinfo rng = date_range('20090415', '20090519') rng_eastern = date_range('20090415', '20090519', tz='US/Eastern') @@ -151,8 +151,8 @@ def _check_rng(rng): assert isinstance(converted, np.ndarray) for x, stamp in zip(converted, rng): assert isinstance(x, datetime) - self.assertEqual(x, stamp.to_pydatetime()) - self.assertEqual(x.tzinfo, stamp.tzinfo) + assert x == stamp.to_pydatetime() + assert x.tzinfo == stamp.tzinfo rng = date_range('20090415', '20090519') rng_eastern = date_range('20090415', '20090519', @@ -172,8 +172,8 @@ def _check_rng(rng): assert isinstance(converted, np.ndarray) for x, stamp in zip(converted, rng): assert isinstance(x, datetime) - self.assertEqual(x, stamp.to_pydatetime()) - self.assertEqual(x.tzinfo, stamp.tzinfo) + assert x == stamp.to_pydatetime() + assert x.tzinfo == stamp.tzinfo rng = date_range('20090415', '20090519') rng_eastern = date_range('20090415', '20090519', @@ -196,17 +196,17 @@ def 
test_to_period_millisecond(self): index = self.index period = index.to_period(freq='L') - self.assertEqual(2, len(period)) - self.assertEqual(period[0], Period('2007-01-01 10:11:12.123Z', 'L')) - self.assertEqual(period[1], Period('2007-01-01 10:11:13.789Z', 'L')) + assert 2 == len(period) + assert period[0] == Period('2007-01-01 10:11:12.123Z', 'L') + assert period[1] == Period('2007-01-01 10:11:13.789Z', 'L') def test_to_period_microsecond(self): index = self.index period = index.to_period(freq='U') - self.assertEqual(2, len(period)) - self.assertEqual(period[0], Period('2007-01-01 10:11:12.123456Z', 'U')) - self.assertEqual(period[1], Period('2007-01-01 10:11:13.789123Z', 'U')) + assert 2 == len(period) + assert period[0] == Period('2007-01-01 10:11:12.123456Z', 'U') + assert period[1] == Period('2007-01-01 10:11:13.789123Z', 'U') def test_to_period_tz_pytz(self): tm._skip_if_no_pytz() @@ -220,7 +220,7 @@ def test_to_period_tz_pytz(self): result = ts.to_period()[0] expected = ts[0].to_period() - self.assertEqual(result, expected) + assert result == expected tm.assert_index_equal(ts.to_period(), xp) ts = date_range('1/1/2000', '4/1/2000', tz=UTC) @@ -228,7 +228,7 @@ def test_to_period_tz_pytz(self): result = ts.to_period()[0] expected = ts[0].to_period() - self.assertEqual(result, expected) + assert result == expected tm.assert_index_equal(ts.to_period(), xp) ts = date_range('1/1/2000', '4/1/2000', tz=tzlocal()) @@ -236,7 +236,7 @@ def test_to_period_tz_pytz(self): result = ts.to_period()[0] expected = ts[0].to_period() - self.assertEqual(result, expected) + assert result == expected tm.assert_index_equal(ts.to_period(), xp) def test_to_period_tz_explicit_pytz(self): @@ -309,4 +309,4 @@ def test_astype_object(self): exp_values = list(rng) tm.assert_index_equal(casted, Index(exp_values, dtype=np.object_)) - self.assertEqual(casted.tolist(), exp_values) + assert casted.tolist() == exp_values diff --git a/pandas/tests/indexes/datetimes/test_construction.py b/pandas/tests/indexes/datetimes/test_construction.py index 098d4755b385c..9af4136afd025 100644 --- a/pandas/tests/indexes/datetimes/test_construction.py +++ b/pandas/tests/indexes/datetimes/test_construction.py @@ -436,14 +436,14 @@ def test_constructor_dtype(self): def test_constructor_name(self): idx = DatetimeIndex(start='2000-01-01', periods=1, freq='A', name='TEST') - self.assertEqual(idx.name, 'TEST') + assert idx.name == 'TEST' def test_000constructor_resolution(self): # 2252 t1 = Timestamp((1352934390 * 1000000000) + 1000000 + 1000 + 1) idx = DatetimeIndex([t1]) - self.assertEqual(idx.nanosecond[0], t1.nanosecond) + assert idx.nanosecond[0] == t1.nanosecond class TestTimeSeries(tm.TestCase): @@ -452,7 +452,7 @@ def test_dti_constructor_preserve_dti_freq(self): rng = date_range('1/1/2000', '1/2/2000', freq='5min') rng2 = DatetimeIndex(rng) - self.assertEqual(rng.freq, rng2.freq) + assert rng.freq == rng2.freq def test_dti_constructor_years_only(self): # GH 6961 @@ -487,7 +487,7 @@ def test_dti_constructor_small_int(self): def test_ctor_str_intraday(self): rng = DatetimeIndex(['1-1-2000 00:00:01']) - self.assertEqual(rng[0].second, 1) + assert rng[0].second == 1 def test_is_(self): dti = DatetimeIndex(start='1/1/2005', end='12/1/2005', freq='M') @@ -565,29 +565,29 @@ def test_datetimeindex_constructor_misc(self): sdate = datetime(1999, 12, 25) edate = datetime(2000, 1, 1) idx = DatetimeIndex(start=sdate, freq='1B', periods=20) - self.assertEqual(len(idx), 20) - self.assertEqual(idx[0], sdate + 0 * offsets.BDay()) - 
self.assertEqual(idx.freq, 'B') + assert len(idx) == 20 + assert idx[0] == sdate + 0 * offsets.BDay() + assert idx.freq == 'B' idx = DatetimeIndex(end=edate, freq=('D', 5), periods=20) - self.assertEqual(len(idx), 20) - self.assertEqual(idx[-1], edate) - self.assertEqual(idx.freq, '5D') + assert len(idx) == 20 + assert idx[-1] == edate + assert idx.freq == '5D' idx1 = DatetimeIndex(start=sdate, end=edate, freq='W-SUN') idx2 = DatetimeIndex(start=sdate, end=edate, freq=offsets.Week(weekday=6)) - self.assertEqual(len(idx1), len(idx2)) - self.assertEqual(idx1.offset, idx2.offset) + assert len(idx1) == len(idx2) + assert idx1.offset == idx2.offset idx1 = DatetimeIndex(start=sdate, end=edate, freq='QS') idx2 = DatetimeIndex(start=sdate, end=edate, freq=offsets.QuarterBegin(startingMonth=1)) - self.assertEqual(len(idx1), len(idx2)) - self.assertEqual(idx1.offset, idx2.offset) + assert len(idx1) == len(idx2) + assert idx1.offset == idx2.offset idx1 = DatetimeIndex(start=sdate, end=edate, freq='BQ') idx2 = DatetimeIndex(start=sdate, end=edate, freq=offsets.BQuarterEnd(startingMonth=12)) - self.assertEqual(len(idx1), len(idx2)) - self.assertEqual(idx1.offset, idx2.offset) + assert len(idx1) == len(idx2) + assert idx1.offset == idx2.offset diff --git a/pandas/tests/indexes/datetimes/test_date_range.py b/pandas/tests/indexes/datetimes/test_date_range.py index 6b011ad6db98e..a9fdd40406770 100644 --- a/pandas/tests/indexes/datetimes/test_date_range.py +++ b/pandas/tests/indexes/datetimes/test_date_range.py @@ -30,7 +30,7 @@ class TestDateRanges(TestData, tm.TestCase): def test_date_range_gen_error(self): rng = date_range('1/1/2000 00:00', '1/1/2000 00:18', freq='5min') - self.assertEqual(len(rng), 4) + assert len(rng) == 4 def test_date_range_negative_freq(self): # GH 11018 @@ -38,20 +38,20 @@ def test_date_range_negative_freq(self): exp = pd.DatetimeIndex(['2011-12-31', '2009-12-31', '2007-12-31'], freq='-2A') tm.assert_index_equal(rng, exp) - self.assertEqual(rng.freq, '-2A') + assert rng.freq == '-2A' rng = date_range('2011-01-31', freq='-2M', periods=3) exp = pd.DatetimeIndex(['2011-01-31', '2010-11-30', '2010-09-30'], freq='-2M') tm.assert_index_equal(rng, exp) - self.assertEqual(rng.freq, '-2M') + assert rng.freq == '-2M' def test_date_range_bms_bug(self): # #1645 rng = date_range('1/1/2000', periods=10, freq='BMS') ex_first = Timestamp('2000-01-03') - self.assertEqual(rng[0], ex_first) + assert rng[0] == ex_first def test_date_range_normalize(self): snap = datetime.today() @@ -68,13 +68,13 @@ def test_date_range_normalize(self): freq='B') the_time = time(8, 15) for val in rng: - self.assertEqual(val.time(), the_time) + assert val.time() == the_time def test_date_range_fy5252(self): dr = date_range(start="2013-01-01", periods=2, freq=offsets.FY5253( startingMonth=1, weekday=3, variation="nearest")) - self.assertEqual(dr[0], Timestamp('2013-01-31')) - self.assertEqual(dr[1], Timestamp('2014-01-30')) + assert dr[0] == Timestamp('2013-01-31') + assert dr[1] == Timestamp('2014-01-30') def test_date_range_ambiguous_arguments(self): # #2538 @@ -138,7 +138,7 @@ def test_compat_replace(self): freq='QS-JAN'), periods=f(76), freq='QS-JAN') - self.assertEqual(len(result), 76) + assert len(result) == 76 def test_catch_infinite_loop(self): offset = offsets.DateOffset(minute=5) @@ -152,12 +152,12 @@ class TestGenRangeGeneration(tm.TestCase): def test_generate(self): rng1 = list(generate_range(START, END, offset=BDay())) rng2 = list(generate_range(START, END, time_rule='B')) - self.assertEqual(rng1, rng2) + 
assert rng1 == rng2 def test_generate_cday(self): rng1 = list(generate_range(START, END, offset=CDay())) rng2 = list(generate_range(START, END, time_rule='C')) - self.assertEqual(rng1, rng2) + assert rng1 == rng2 def test_1(self): eq_gen_range(dict(start=datetime(2009, 3, 25), periods=2), @@ -241,14 +241,14 @@ def test_cached_range(self): def test_cached_range_bug(self): rng = date_range('2010-09-01 05:00:00', periods=50, freq=DateOffset(hours=6)) - self.assertEqual(len(rng), 50) - self.assertEqual(rng[0], datetime(2010, 9, 1, 5)) + assert len(rng) == 50 + assert rng[0] == datetime(2010, 9, 1, 5) def test_timezone_comparaison_bug(self): # smoke test start = Timestamp('20130220 10:00', tz='US/Eastern') result = date_range(start, periods=2, tz='US/Eastern') - self.assertEqual(len(result), 2) + assert len(result) == 2 def test_timezone_comparaison_assert(self): start = Timestamp('20130220 10:00', tz='US/Eastern') @@ -308,19 +308,19 @@ def test_range_tz_pytz(self): end = tz.localize(datetime(2011, 1, 3)) dr = date_range(start=start, periods=3) - self.assertEqual(dr.tz.zone, tz.zone) - self.assertEqual(dr[0], start) - self.assertEqual(dr[2], end) + assert dr.tz.zone == tz.zone + assert dr[0] == start + assert dr[2] == end dr = date_range(end=end, periods=3) - self.assertEqual(dr.tz.zone, tz.zone) - self.assertEqual(dr[0], start) - self.assertEqual(dr[2], end) + assert dr.tz.zone == tz.zone + assert dr[0] == start + assert dr[2] == end dr = date_range(start=start, end=end) - self.assertEqual(dr.tz.zone, tz.zone) - self.assertEqual(dr[0], start) - self.assertEqual(dr[2], end) + assert dr.tz.zone == tz.zone + assert dr[0] == start + assert dr[2] == end def test_range_tz_dst_straddle_pytz(self): @@ -333,20 +333,20 @@ def test_range_tz_dst_straddle_pytz(self): tz.localize(datetime(2013, 11, 6)))] for (start, end) in dates: dr = date_range(start, end, freq='D') - self.assertEqual(dr[0], start) - self.assertEqual(dr[-1], end) - self.assertEqual(np.all(dr.hour == 0), True) + assert dr[0] == start + assert dr[-1] == end + assert np.all(dr.hour == 0) dr = date_range(start, end, freq='D', tz='US/Eastern') - self.assertEqual(dr[0], start) - self.assertEqual(dr[-1], end) - self.assertEqual(np.all(dr.hour == 0), True) + assert dr[0] == start + assert dr[-1] == end + assert np.all(dr.hour == 0) dr = date_range(start.replace(tzinfo=None), end.replace( tzinfo=None), freq='D', tz='US/Eastern') - self.assertEqual(dr[0], start) - self.assertEqual(dr[-1], end) - self.assertEqual(np.all(dr.hour == 0), True) + assert dr[0] == start + assert dr[-1] == end + assert np.all(dr.hour == 0) def test_range_tz_dateutil(self): # GH 2906 @@ -461,8 +461,8 @@ def test_range_closed_boundary(self): def test_years_only(self): # GH 6961 dr = date_range('2014', '2015', freq='M') - self.assertEqual(dr[0], datetime(2014, 1, 31)) - self.assertEqual(dr[-1], datetime(2014, 12, 31)) + assert dr[0] == datetime(2014, 1, 31) + assert dr[-1] == datetime(2014, 12, 31) def test_freq_divides_end_in_nanos(self): # GH 10885 diff --git a/pandas/tests/indexes/datetimes/test_datetime.py b/pandas/tests/indexes/datetimes/test_datetime.py index 83f9119377b19..7b22d1615fbeb 100644 --- a/pandas/tests/indexes/datetimes/test_datetime.py +++ b/pandas/tests/indexes/datetimes/test_datetime.py @@ -21,35 +21,35 @@ def test_get_loc(self): idx = pd.date_range('2000-01-01', periods=3) for method in [None, 'pad', 'backfill', 'nearest']: - self.assertEqual(idx.get_loc(idx[1], method), 1) - self.assertEqual(idx.get_loc(idx[1].to_pydatetime(), method), 1) - 
self.assertEqual(idx.get_loc(str(idx[1]), method), 1) + assert idx.get_loc(idx[1], method) == 1 + assert idx.get_loc(idx[1].to_pydatetime(), method) == 1 + assert idx.get_loc(str(idx[1]), method) == 1 + if method is not None: - self.assertEqual(idx.get_loc(idx[1], method, - tolerance=pd.Timedelta('0 days')), - 1) - - self.assertEqual(idx.get_loc('2000-01-01', method='nearest'), 0) - self.assertEqual(idx.get_loc('2000-01-01T12', method='nearest'), 1) - - self.assertEqual(idx.get_loc('2000-01-01T12', method='nearest', - tolerance='1 day'), 1) - self.assertEqual(idx.get_loc('2000-01-01T12', method='nearest', - tolerance=pd.Timedelta('1D')), 1) - self.assertEqual(idx.get_loc('2000-01-01T12', method='nearest', - tolerance=np.timedelta64(1, 'D')), 1) - self.assertEqual(idx.get_loc('2000-01-01T12', method='nearest', - tolerance=timedelta(1)), 1) + assert idx.get_loc(idx[1], method, + tolerance=pd.Timedelta('0 days')) == 1 + + assert idx.get_loc('2000-01-01', method='nearest') == 0 + assert idx.get_loc('2000-01-01T12', method='nearest') == 1 + + assert idx.get_loc('2000-01-01T12', method='nearest', + tolerance='1 day') == 1 + assert idx.get_loc('2000-01-01T12', method='nearest', + tolerance=pd.Timedelta('1D')) == 1 + assert idx.get_loc('2000-01-01T12', method='nearest', + tolerance=np.timedelta64(1, 'D')) == 1 + assert idx.get_loc('2000-01-01T12', method='nearest', + tolerance=timedelta(1)) == 1 with tm.assert_raises_regex(ValueError, 'must be convertible'): idx.get_loc('2000-01-01T12', method='nearest', tolerance='foo') with pytest.raises(KeyError): idx.get_loc('2000-01-01T03', method='nearest', tolerance='2 hours') - self.assertEqual(idx.get_loc('2000', method='nearest'), slice(0, 3)) - self.assertEqual(idx.get_loc('2000-01', method='nearest'), slice(0, 3)) + assert idx.get_loc('2000', method='nearest') == slice(0, 3) + assert idx.get_loc('2000-01', method='nearest') == slice(0, 3) - self.assertEqual(idx.get_loc('1999', method='nearest'), 0) - self.assertEqual(idx.get_loc('2001', method='nearest'), 2) + assert idx.get_loc('1999', method='nearest') == 0 + assert idx.get_loc('2001', method='nearest') == 2 with pytest.raises(KeyError): idx.get_loc('1999', method='pad') @@ -62,9 +62,9 @@ def test_get_loc(self): idx.get_loc(slice(2)) idx = pd.to_datetime(['2000-01-01', '2000-01-04']) - self.assertEqual(idx.get_loc('2000-01-02', method='nearest'), 0) - self.assertEqual(idx.get_loc('2000-01-03', method='nearest'), 1) - self.assertEqual(idx.get_loc('2000-01', method='nearest'), slice(0, 2)) + assert idx.get_loc('2000-01-02', method='nearest') == 0 + assert idx.get_loc('2000-01-03', method='nearest') == 1 + assert idx.get_loc('2000-01', method='nearest') == slice(0, 2) # time indexing idx = pd.date_range('2000-01-01', periods=24, freq='H') @@ -114,8 +114,8 @@ def test_roundtrip_pickle_with_tz(self): def test_reindex_preserves_tz_if_target_is_empty_list_or_array(self): # GH7774 index = date_range('20130101', periods=3, tz='US/Eastern') - self.assertEqual(str(index.reindex([])[0].tz), 'US/Eastern') - self.assertEqual(str(index.reindex(np.array([]))[0].tz), 'US/Eastern') + assert str(index.reindex([])[0].tz) == 'US/Eastern' + assert str(index.reindex(np.array([]))[0].tz) == 'US/Eastern' def test_time_loc(self): # GH8667 from datetime import time @@ -150,10 +150,10 @@ def test_time_overflow_for_32bit_machines(self): periods = np.int_(1000) idx1 = pd.date_range(start='2000', periods=periods, freq='S') - self.assertEqual(len(idx1), periods) + assert len(idx1) == periods idx2 = pd.date_range(end='2000', 
periods=periods, freq='S') - self.assertEqual(len(idx2), periods) + assert len(idx2) == periods def test_nat(self): assert DatetimeIndex([np.nan])[0] is pd.NaT @@ -166,13 +166,13 @@ def test_ufunc_coercions(self): assert isinstance(result, DatetimeIndex) exp = date_range('2011-01-02', periods=3, freq='2D', name='x') tm.assert_index_equal(result, exp) - self.assertEqual(result.freq, '2D') + assert result.freq == '2D' for result in [idx - delta, np.subtract(idx, delta)]: assert isinstance(result, DatetimeIndex) exp = date_range('2010-12-31', periods=3, freq='2D', name='x') tm.assert_index_equal(result, exp) - self.assertEqual(result.freq, '2D') + assert result.freq == '2D' delta = np.array([np.timedelta64(1, 'D'), np.timedelta64(2, 'D'), np.timedelta64(3, 'D')]) @@ -181,14 +181,14 @@ def test_ufunc_coercions(self): exp = DatetimeIndex(['2011-01-02', '2011-01-05', '2011-01-08'], freq='3D', name='x') tm.assert_index_equal(result, exp) - self.assertEqual(result.freq, '3D') + assert result.freq == '3D' for result in [idx - delta, np.subtract(idx, delta)]: assert isinstance(result, DatetimeIndex) exp = DatetimeIndex(['2010-12-31', '2011-01-01', '2011-01-02'], freq='D', name='x') tm.assert_index_equal(result, exp) - self.assertEqual(result.freq, 'D') + assert result.freq == 'D' def test_week_of_month_frequency(self): # GH 5348: "ValueError: Could not evaluate WOM-1SUN" shouldn't raise @@ -240,14 +240,14 @@ def test_to_period_nofreq(self): idx = DatetimeIndex(['2000-01-01', '2000-01-02', '2000-01-03'], freq='infer') - self.assertEqual(idx.freqstr, 'D') + assert idx.freqstr == 'D' expected = pd.PeriodIndex(['2000-01-01', '2000-01-02', '2000-01-03'], freq='D') tm.assert_index_equal(idx.to_period(), expected) # GH 7606 idx = DatetimeIndex(['2000-01-01', '2000-01-02', '2000-01-03']) - self.assertEqual(idx.freqstr, None) + assert idx.freqstr is None tm.assert_index_equal(idx.to_period(), expected) def test_comparisons_coverage(self): @@ -373,7 +373,7 @@ def test_iteration_preserves_tz(self): for i, ts in enumerate(index): result = ts expected = index[i] - self.assertEqual(result, expected) + assert result == expected index = date_range("2012-01-01", periods=3, freq='H', tz=dateutil.tz.tzoffset(None, -28800)) @@ -381,8 +381,8 @@ def test_iteration_preserves_tz(self): for i, ts in enumerate(index): result = ts expected = index[i] - self.assertEqual(result._repr_base, expected._repr_base) - self.assertEqual(result, expected) + assert result._repr_base == expected._repr_base + assert result == expected # 9100 index = pd.DatetimeIndex(['2014-12-01 03:32:39.987000-08:00', @@ -390,8 +390,8 @@ def test_iteration_preserves_tz(self): for i, ts in enumerate(index): result = ts expected = index[i] - self.assertEqual(result._repr_base, expected._repr_base) - self.assertEqual(result, expected) + assert result._repr_base == expected._repr_base + assert result == expected def test_misc_coverage(self): rng = date_range('1/1/2000', periods=5) @@ -410,10 +410,10 @@ def test_string_index_series_name_converted(self): index=date_range('1/1/2000', periods=10)) result = df.loc['1/3/2000'] - self.assertEqual(result.name, df.index[2]) + assert result.name == df.index[2] result = df.T['1/3/2000'] - self.assertEqual(result.name, df.index[2]) + assert result.name == df.index[2] def test_overflow_offset(self): # xref https://github.com/statsmodels/statsmodels/issues/3374 @@ -444,8 +444,8 @@ def test_get_duplicates(self): def test_argmin_argmax(self): idx = DatetimeIndex(['2000-01-04', '2000-01-01', '2000-01-02']) - 
self.assertEqual(idx.argmin(), 1) - self.assertEqual(idx.argmax(), 0) + assert idx.argmin() == 1 + assert idx.argmax() == 0 def test_sort_values(self): idx = DatetimeIndex(['2000-01-04', '2000-01-01', '2000-01-02']) @@ -481,8 +481,8 @@ def test_take(self): tm.assert_index_equal(taken, expected) assert isinstance(taken, DatetimeIndex) assert taken.freq is None - self.assertEqual(taken.tz, expected.tz) - self.assertEqual(taken.name, expected.name) + assert taken.tz == expected.tz + assert taken.name == expected.name def test_take_fill_value(self): # GH 12631 @@ -601,8 +601,8 @@ def test_does_not_convert_mixed_integer(self): r_idx_type='i', c_idx_type='dt') cols = df.columns.join(df.index, how='outer') joined = cols.join(df.columns) - self.assertEqual(cols.dtype, np.dtype('O')) - self.assertEqual(cols.dtype, joined.dtype) + assert cols.dtype == np.dtype('O') + assert cols.dtype == joined.dtype tm.assert_numpy_array_equal(cols.values, joined.values) def test_slice_keeps_name(self): @@ -610,7 +610,7 @@ def test_slice_keeps_name(self): st = pd.Timestamp('2013-07-01 00:00:00', tz='America/Los_Angeles') et = pd.Timestamp('2013-07-02 00:00:00', tz='America/Los_Angeles') dr = pd.date_range(st, et, freq='H', name='timebucket') - self.assertEqual(dr[1:].name, dr.name) + assert dr[1:].name == dr.name def test_join_self(self): index = date_range('1/1/2000', periods=10) @@ -769,8 +769,8 @@ def test_slice_bounds_empty(self): right = empty_idx._maybe_cast_slice_bound('2015-01-02', 'right', 'loc') exp = Timestamp('2015-01-02 23:59:59.999999999') - self.assertEqual(right, exp) + assert right == exp left = empty_idx._maybe_cast_slice_bound('2015-01-02', 'left', 'loc') exp = Timestamp('2015-01-02 00:00:00') - self.assertEqual(left, exp) + assert left == exp diff --git a/pandas/tests/indexes/datetimes/test_indexing.py b/pandas/tests/indexes/datetimes/test_indexing.py index 568e045d9f5e7..92134a296b08f 100644 --- a/pandas/tests/indexes/datetimes/test_indexing.py +++ b/pandas/tests/indexes/datetimes/test_indexing.py @@ -164,8 +164,8 @@ def test_delete(self): for n, expected in compat.iteritems(cases): result = idx.delete(n) tm.assert_index_equal(result, expected) - self.assertEqual(result.name, expected.name) - self.assertEqual(result.freq, expected.freq) + assert result.name == expected.name + assert result.freq == expected.freq with pytest.raises((IndexError, ValueError)): # either depeidnig on numpy version @@ -179,17 +179,17 @@ def test_delete(self): freq='H', name='idx', tz=tz) result = idx.delete(0) tm.assert_index_equal(result, expected) - self.assertEqual(result.name, expected.name) - self.assertEqual(result.freqstr, 'H') - self.assertEqual(result.tz, expected.tz) + assert result.name == expected.name + assert result.freqstr == 'H' + assert result.tz == expected.tz expected = date_range(start='2000-01-01 09:00', periods=9, freq='H', name='idx', tz=tz) result = idx.delete(-1) tm.assert_index_equal(result, expected) - self.assertEqual(result.name, expected.name) - self.assertEqual(result.freqstr, 'H') - self.assertEqual(result.tz, expected.tz) + assert result.name == expected.name + assert result.freqstr == 'H' + assert result.tz == expected.tz def test_delete_slice(self): idx = date_range(start='2000-01-01', periods=10, freq='D', name='idx') @@ -211,13 +211,13 @@ def test_delete_slice(self): for n, expected in compat.iteritems(cases): result = idx.delete(n) tm.assert_index_equal(result, expected) - self.assertEqual(result.name, expected.name) - self.assertEqual(result.freq, expected.freq) + assert 
result.name == expected.name + assert result.freq == expected.freq result = idx.delete(slice(n[0], n[-1] + 1)) tm.assert_index_equal(result, expected) - self.assertEqual(result.name, expected.name) - self.assertEqual(result.freq, expected.freq) + assert result.name == expected.name + assert result.freq == expected.freq for tz in [None, 'Asia/Tokyo', 'US/Pacific']: ts = pd.Series(1, index=pd.date_range( @@ -227,9 +227,9 @@ def test_delete_slice(self): expected = pd.date_range('2000-01-01 14:00', periods=5, freq='H', name='idx', tz=tz) tm.assert_index_equal(result, expected) - self.assertEqual(result.name, expected.name) - self.assertEqual(result.freq, expected.freq) - self.assertEqual(result.tz, expected.tz) + assert result.name == expected.name + assert result.freq == expected.freq + assert result.tz == expected.tz # reset freq to None result = ts.drop(ts.index[[1, 3, 5, 7, 9]]).index @@ -238,6 +238,6 @@ def test_delete_slice(self): '2000-01-01 15:00', '2000-01-01 17:00'], freq=None, name='idx', tz=tz) tm.assert_index_equal(result, expected) - self.assertEqual(result.name, expected.name) - self.assertEqual(result.freq, expected.freq) - self.assertEqual(result.tz, expected.tz) + assert result.name == expected.name + assert result.freq == expected.freq + assert result.tz == expected.tz diff --git a/pandas/tests/indexes/datetimes/test_misc.py b/pandas/tests/indexes/datetimes/test_misc.py index 55165aa39a1a4..ae5d29ca426b4 100644 --- a/pandas/tests/indexes/datetimes/test_misc.py +++ b/pandas/tests/indexes/datetimes/test_misc.py @@ -181,81 +181,80 @@ def test_datetimeindex_accessors(self): periods=365, tz='US/Eastern') for dti in [dti_naive, dti_tz]: - self.assertEqual(dti.year[0], 1998) - self.assertEqual(dti.month[0], 1) - self.assertEqual(dti.day[0], 1) - self.assertEqual(dti.hour[0], 0) - self.assertEqual(dti.minute[0], 0) - self.assertEqual(dti.second[0], 0) - self.assertEqual(dti.microsecond[0], 0) - self.assertEqual(dti.dayofweek[0], 3) - - self.assertEqual(dti.dayofyear[0], 1) - self.assertEqual(dti.dayofyear[120], 121) - - self.assertEqual(dti.weekofyear[0], 1) - self.assertEqual(dti.weekofyear[120], 18) - - self.assertEqual(dti.quarter[0], 1) - self.assertEqual(dti.quarter[120], 2) - - self.assertEqual(dti.days_in_month[0], 31) - self.assertEqual(dti.days_in_month[90], 30) - - self.assertEqual(dti.is_month_start[0], True) - self.assertEqual(dti.is_month_start[1], False) - self.assertEqual(dti.is_month_start[31], True) - self.assertEqual(dti.is_quarter_start[0], True) - self.assertEqual(dti.is_quarter_start[90], True) - self.assertEqual(dti.is_year_start[0], True) - self.assertEqual(dti.is_year_start[364], False) - self.assertEqual(dti.is_month_end[0], False) - self.assertEqual(dti.is_month_end[30], True) - self.assertEqual(dti.is_month_end[31], False) - self.assertEqual(dti.is_month_end[364], True) - self.assertEqual(dti.is_quarter_end[0], False) - self.assertEqual(dti.is_quarter_end[30], False) - self.assertEqual(dti.is_quarter_end[89], True) - self.assertEqual(dti.is_quarter_end[364], True) - self.assertEqual(dti.is_year_end[0], False) - self.assertEqual(dti.is_year_end[364], True) + assert dti.year[0] == 1998 + assert dti.month[0] == 1 + assert dti.day[0] == 1 + assert dti.hour[0] == 0 + assert dti.minute[0] == 0 + assert dti.second[0] == 0 + assert dti.microsecond[0] == 0 + assert dti.dayofweek[0] == 3 + + assert dti.dayofyear[0] == 1 + assert dti.dayofyear[120] == 121 + + assert dti.weekofyear[0] == 1 + assert dti.weekofyear[120] == 18 + + assert dti.quarter[0] == 1 + assert 
dti.quarter[120] == 2 + + assert dti.days_in_month[0] == 31 + assert dti.days_in_month[90] == 30 + + assert dti.is_month_start[0] + assert not dti.is_month_start[1] + assert dti.is_month_start[31] + assert dti.is_quarter_start[0] + assert dti.is_quarter_start[90] + assert dti.is_year_start[0] + assert not dti.is_year_start[364] + assert not dti.is_month_end[0] + assert dti.is_month_end[30] + assert not dti.is_month_end[31] + assert dti.is_month_end[364] + assert not dti.is_quarter_end[0] + assert not dti.is_quarter_end[30] + assert dti.is_quarter_end[89] + assert dti.is_quarter_end[364] + assert not dti.is_year_end[0] + assert dti.is_year_end[364] # GH 11128 - self.assertEqual(dti.weekday_name[4], u'Monday') - self.assertEqual(dti.weekday_name[5], u'Tuesday') - self.assertEqual(dti.weekday_name[6], u'Wednesday') - self.assertEqual(dti.weekday_name[7], u'Thursday') - self.assertEqual(dti.weekday_name[8], u'Friday') - self.assertEqual(dti.weekday_name[9], u'Saturday') - self.assertEqual(dti.weekday_name[10], u'Sunday') - - self.assertEqual(Timestamp('2016-04-04').weekday_name, u'Monday') - self.assertEqual(Timestamp('2016-04-05').weekday_name, u'Tuesday') - self.assertEqual(Timestamp('2016-04-06').weekday_name, - u'Wednesday') - self.assertEqual(Timestamp('2016-04-07').weekday_name, u'Thursday') - self.assertEqual(Timestamp('2016-04-08').weekday_name, u'Friday') - self.assertEqual(Timestamp('2016-04-09').weekday_name, u'Saturday') - self.assertEqual(Timestamp('2016-04-10').weekday_name, u'Sunday') - - self.assertEqual(len(dti.year), 365) - self.assertEqual(len(dti.month), 365) - self.assertEqual(len(dti.day), 365) - self.assertEqual(len(dti.hour), 365) - self.assertEqual(len(dti.minute), 365) - self.assertEqual(len(dti.second), 365) - self.assertEqual(len(dti.microsecond), 365) - self.assertEqual(len(dti.dayofweek), 365) - self.assertEqual(len(dti.dayofyear), 365) - self.assertEqual(len(dti.weekofyear), 365) - self.assertEqual(len(dti.quarter), 365) - self.assertEqual(len(dti.is_month_start), 365) - self.assertEqual(len(dti.is_month_end), 365) - self.assertEqual(len(dti.is_quarter_start), 365) - self.assertEqual(len(dti.is_quarter_end), 365) - self.assertEqual(len(dti.is_year_start), 365) - self.assertEqual(len(dti.is_year_end), 365) - self.assertEqual(len(dti.weekday_name), 365) + assert dti.weekday_name[4] == u'Monday' + assert dti.weekday_name[5] == u'Tuesday' + assert dti.weekday_name[6] == u'Wednesday' + assert dti.weekday_name[7] == u'Thursday' + assert dti.weekday_name[8] == u'Friday' + assert dti.weekday_name[9] == u'Saturday' + assert dti.weekday_name[10] == u'Sunday' + + assert Timestamp('2016-04-04').weekday_name == u'Monday' + assert Timestamp('2016-04-05').weekday_name == u'Tuesday' + assert Timestamp('2016-04-06').weekday_name == u'Wednesday' + assert Timestamp('2016-04-07').weekday_name == u'Thursday' + assert Timestamp('2016-04-08').weekday_name == u'Friday' + assert Timestamp('2016-04-09').weekday_name == u'Saturday' + assert Timestamp('2016-04-10').weekday_name == u'Sunday' + + assert len(dti.year) == 365 + assert len(dti.month) == 365 + assert len(dti.day) == 365 + assert len(dti.hour) == 365 + assert len(dti.minute) == 365 + assert len(dti.second) == 365 + assert len(dti.microsecond) == 365 + assert len(dti.dayofweek) == 365 + assert len(dti.dayofyear) == 365 + assert len(dti.weekofyear) == 365 + assert len(dti.quarter) == 365 + assert len(dti.is_month_start) == 365 + assert len(dti.is_month_end) == 365 + assert len(dti.is_quarter_start) == 365 + assert 
len(dti.is_quarter_end) == 365 + assert len(dti.is_year_start) == 365 + assert len(dti.is_year_end) == 365 + assert len(dti.weekday_name) == 365 dti.name = 'name' @@ -283,10 +282,10 @@ def test_datetimeindex_accessors(self): dti = DatetimeIndex(freq='BQ-FEB', start=datetime(1998, 1, 1), periods=4) - self.assertEqual(sum(dti.is_quarter_start), 0) - self.assertEqual(sum(dti.is_quarter_end), 4) - self.assertEqual(sum(dti.is_year_start), 0) - self.assertEqual(sum(dti.is_year_end), 1) + assert sum(dti.is_quarter_start) == 0 + assert sum(dti.is_quarter_end) == 4 + assert sum(dti.is_year_start) == 0 + assert sum(dti.is_year_end) == 1 # Ensure is_start/end accessors throw ValueError for CustomBusinessDay, # CBD requires np >= 1.7 @@ -296,7 +295,7 @@ def test_datetimeindex_accessors(self): dti = DatetimeIndex(['2000-01-01', '2000-01-02', '2000-01-03']) - self.assertEqual(dti.is_month_start[0], 1) + assert dti.is_month_start[0] == 1 tests = [ (Timestamp('2013-06-01', freq='M').is_month_start, 1), @@ -333,7 +332,7 @@ def test_datetimeindex_accessors(self): (Timestamp('2013-02-01').days_in_month, 28)] for ts, value in tests: - self.assertEqual(ts, value) + assert ts == value def test_nanosecond_field(self): dti = DatetimeIndex(np.arange(10)) diff --git a/pandas/tests/indexes/datetimes/test_ops.py b/pandas/tests/indexes/datetimes/test_ops.py index fa1b2c0d7c68d..e25e3d448190e 100644 --- a/pandas/tests/indexes/datetimes/test_ops.py +++ b/pandas/tests/indexes/datetimes/test_ops.py @@ -45,9 +45,9 @@ def test_ops_properties_basic(self): # attribute access should still work! s = Series(dict(year=2000, month=1, day=10)) - self.assertEqual(s.year, 2000) - self.assertEqual(s.month, 1) - self.assertEqual(s.day, 10) + assert s.year == 2000 + assert s.month == 1 + assert s.day == 10 pytest.raises(AttributeError, lambda: s.weekday) def test_asobject_tolist(self): @@ -61,10 +61,10 @@ def test_asobject_tolist(self): result = idx.asobject assert isinstance(result, Index) - self.assertEqual(result.dtype, object) + assert result.dtype == object tm.assert_index_equal(result, expected) - self.assertEqual(result.name, expected.name) - self.assertEqual(idx.tolist(), expected_list) + assert result.name == expected.name + assert idx.tolist() == expected_list idx = pd.date_range(start='2013-01-01', periods=4, freq='M', name='idx', tz='Asia/Tokyo') @@ -75,10 +75,10 @@ def test_asobject_tolist(self): expected = pd.Index(expected_list, dtype=object, name='idx') result = idx.asobject assert isinstance(result, Index) - self.assertEqual(result.dtype, object) + assert result.dtype == object tm.assert_index_equal(result, expected) - self.assertEqual(result.name, expected.name) - self.assertEqual(idx.tolist(), expected_list) + assert result.name == expected.name + assert idx.tolist() == expected_list idx = DatetimeIndex([datetime(2013, 1, 1), datetime(2013, 1, 2), pd.NaT, datetime(2013, 1, 4)], name='idx') @@ -88,10 +88,10 @@ def test_asobject_tolist(self): expected = pd.Index(expected_list, dtype=object, name='idx') result = idx.asobject assert isinstance(result, Index) - self.assertEqual(result.dtype, object) + assert result.dtype == object tm.assert_index_equal(result, expected) - self.assertEqual(result.name, expected.name) - self.assertEqual(idx.tolist(), expected_list) + assert result.name == expected.name + assert idx.tolist() == expected_list def test_minmax(self): for tz in self.tz: @@ -106,10 +106,10 @@ def test_minmax(self): assert not idx2.is_monotonic for idx in [idx1, idx2]: - self.assertEqual(idx.min(), 
Timestamp('2011-01-01', tz=tz)) - self.assertEqual(idx.max(), Timestamp('2011-01-03', tz=tz)) - self.assertEqual(idx.argmin(), 0) - self.assertEqual(idx.argmax(), 2) + assert idx.min() == Timestamp('2011-01-01', tz=tz) + assert idx.max() == Timestamp('2011-01-03', tz=tz) + assert idx.argmin() == 0 + assert idx.argmax() == 2 for op in ['min', 'max']: # Return NaT @@ -125,17 +125,15 @@ def test_minmax(self): def test_numpy_minmax(self): dr = pd.date_range(start='2016-01-15', end='2016-01-20') - self.assertEqual(np.min(dr), - Timestamp('2016-01-15 00:00:00', freq='D')) - self.assertEqual(np.max(dr), - Timestamp('2016-01-20 00:00:00', freq='D')) + assert np.min(dr) == Timestamp('2016-01-15 00:00:00', freq='D') + assert np.max(dr) == Timestamp('2016-01-20 00:00:00', freq='D') errmsg = "the 'out' parameter is not supported" tm.assert_raises_regex(ValueError, errmsg, np.min, dr, out=0) tm.assert_raises_regex(ValueError, errmsg, np.max, dr, out=0) - self.assertEqual(np.argmin(dr), 0) - self.assertEqual(np.argmax(dr), 5) + assert np.argmin(dr) == 0 + assert np.argmax(dr) == 5 if not _np_version_under1p10: errmsg = "the 'out' parameter is not supported" @@ -160,7 +158,7 @@ def test_round(self): expected_elt = expected_rng[1] tm.assert_index_equal(rng.round(freq='H'), expected_rng) - self.assertEqual(elt.round(freq='H'), expected_elt) + assert elt.round(freq='H') == expected_elt msg = pd.tseries.frequencies._INVALID_FREQ_ERROR with tm.assert_raises_regex(ValueError, msg): @@ -200,7 +198,7 @@ def test_repeat_range(self): result = rng.repeat(5) assert result.freq is None - self.assertEqual(len(result), 5 * len(rng)) + assert len(result) == 5 * len(rng) for tz in self.tz: index = pd.date_range('2001-01-01', periods=2, freq='D', tz=tz) @@ -288,7 +286,7 @@ def test_representation(self): for indx, expected in zip(idx, exp): for func in ['__repr__', '__unicode__', '__str__']: result = getattr(indx, func)() - self.assertEqual(result, expected) + assert result == expected def test_representation_to_series(self): idx1 = DatetimeIndex([], freq='D') @@ -336,7 +334,7 @@ def test_representation_to_series(self): [exp1, exp2, exp3, exp4, exp5, exp6, exp7]): result = repr(Series(idx)) - self.assertEqual(result, expected) + assert result == expected def test_summary(self): # GH9116 @@ -372,7 +370,7 @@ def test_summary(self): for idx, expected in zip([idx1, idx2, idx3, idx4, idx5, idx6], [exp1, exp2, exp3, exp4, exp5, exp6]): result = idx.summary() - self.assertEqual(result, expected) + assert result == expected def test_resolution(self): for freq, expected in zip(['A', 'Q', 'M', 'D', 'H', 'T', @@ -383,7 +381,7 @@ def test_resolution(self): for tz in self.tz: idx = pd.date_range(start='2013-04-01', periods=30, freq=freq, tz=tz) - self.assertEqual(idx.resolution, expected) + assert idx.resolution == expected def test_union(self): for tz in self.tz: @@ -724,39 +722,39 @@ def test_getitem(self): for idx in [idx1, idx2]: result = idx[0] - self.assertEqual(result, Timestamp('2011-01-01', tz=idx.tz)) + assert result == Timestamp('2011-01-01', tz=idx.tz) result = idx[0:5] expected = pd.date_range('2011-01-01', '2011-01-05', freq='D', tz=idx.tz, name='idx') tm.assert_index_equal(result, expected) - self.assertEqual(result.freq, expected.freq) + assert result.freq == expected.freq result = idx[0:10:2] expected = pd.date_range('2011-01-01', '2011-01-09', freq='2D', tz=idx.tz, name='idx') tm.assert_index_equal(result, expected) - self.assertEqual(result.freq, expected.freq) + assert result.freq == expected.freq result = 
idx[-20:-5:3] expected = pd.date_range('2011-01-12', '2011-01-24', freq='3D', tz=idx.tz, name='idx') tm.assert_index_equal(result, expected) - self.assertEqual(result.freq, expected.freq) + assert result.freq == expected.freq result = idx[4::-1] expected = DatetimeIndex(['2011-01-05', '2011-01-04', '2011-01-03', '2011-01-02', '2011-01-01'], freq='-1D', tz=idx.tz, name='idx') tm.assert_index_equal(result, expected) - self.assertEqual(result.freq, expected.freq) + assert result.freq == expected.freq def test_drop_duplicates_metadata(self): # GH 10115 idx = pd.date_range('2011-01-01', '2011-01-31', freq='D', name='idx') result = idx.drop_duplicates() tm.assert_index_equal(idx, result) - self.assertEqual(idx.freq, result.freq) + assert idx.freq == result.freq idx_dup = idx.append(idx) assert idx_dup.freq is None # freq is reset @@ -793,25 +791,25 @@ def test_take(self): for idx in [idx1, idx2]: result = idx.take([0]) - self.assertEqual(result, Timestamp('2011-01-01', tz=idx.tz)) + assert result == Timestamp('2011-01-01', tz=idx.tz) result = idx.take([0, 1, 2]) expected = pd.date_range('2011-01-01', '2011-01-03', freq='D', tz=idx.tz, name='idx') tm.assert_index_equal(result, expected) - self.assertEqual(result.freq, expected.freq) + assert result.freq == expected.freq result = idx.take([0, 2, 4]) expected = pd.date_range('2011-01-01', '2011-01-05', freq='2D', tz=idx.tz, name='idx') tm.assert_index_equal(result, expected) - self.assertEqual(result.freq, expected.freq) + assert result.freq == expected.freq result = idx.take([7, 4, 1]) expected = pd.date_range('2011-01-08', '2011-01-02', freq='-3D', tz=idx.tz, name='idx') tm.assert_index_equal(result, expected) - self.assertEqual(result.freq, expected.freq) + assert result.freq == expected.freq result = idx.take([3, 2, 5]) expected = DatetimeIndex(['2011-01-04', '2011-01-03', @@ -851,7 +849,7 @@ def test_infer_freq(self): idx = pd.date_range('2011-01-01 09:00:00', freq=freq, periods=10) result = pd.DatetimeIndex(idx.asi8, freq='infer') tm.assert_index_equal(idx, result) - self.assertEqual(result.freq, freq) + assert result.freq == freq def test_nat_new(self): idx = pd.date_range('2011-01-01', freq='D', periods=5, name='x') @@ -1139,18 +1137,18 @@ def test_getitem(self): exp = DatetimeIndex(self.rng.view(np.ndarray)[:5]) tm.assert_index_equal(smaller, exp) - self.assertEqual(smaller.offset, self.rng.offset) + assert smaller.offset == self.rng.offset sliced = self.rng[::5] - self.assertEqual(sliced.offset, BDay() * 5) + assert sliced.offset == BDay() * 5 fancy_indexed = self.rng[[4, 3, 2, 1, 0]] - self.assertEqual(len(fancy_indexed), 5) + assert len(fancy_indexed) == 5 assert isinstance(fancy_indexed, DatetimeIndex) assert fancy_indexed.freq is None # 32-bit vs. 
64-bit platforms - self.assertEqual(self.rng[4], self.rng[np.int_(4)]) + assert self.rng[4] == self.rng[np.int_(4)] def test_getitem_matplotlib_hackaround(self): values = self.rng[:, None] @@ -1159,20 +1157,20 @@ def test_getitem_matplotlib_hackaround(self): def test_shift(self): shifted = self.rng.shift(5) - self.assertEqual(shifted[0], self.rng[5]) - self.assertEqual(shifted.offset, self.rng.offset) + assert shifted[0] == self.rng[5] + assert shifted.offset == self.rng.offset shifted = self.rng.shift(-5) - self.assertEqual(shifted[5], self.rng[0]) - self.assertEqual(shifted.offset, self.rng.offset) + assert shifted[5] == self.rng[0] + assert shifted.offset == self.rng.offset shifted = self.rng.shift(0) - self.assertEqual(shifted[0], self.rng[0]) - self.assertEqual(shifted.offset, self.rng.offset) + assert shifted[0] == self.rng[0] + assert shifted.offset == self.rng.offset rng = date_range(START, END, freq=BMonthEnd()) shifted = rng.shift(1, freq=BDay()) - self.assertEqual(shifted[0], rng[0] + BDay()) + assert shifted[0] == rng[0] + BDay() def test_summary(self): self.rng.summary() @@ -1234,18 +1232,18 @@ def test_getitem(self): smaller = self.rng[:5] exp = DatetimeIndex(self.rng.view(np.ndarray)[:5]) tm.assert_index_equal(smaller, exp) - self.assertEqual(smaller.offset, self.rng.offset) + assert smaller.offset == self.rng.offset sliced = self.rng[::5] - self.assertEqual(sliced.offset, CDay() * 5) + assert sliced.offset == CDay() * 5 fancy_indexed = self.rng[[4, 3, 2, 1, 0]] - self.assertEqual(len(fancy_indexed), 5) + assert len(fancy_indexed) == 5 assert isinstance(fancy_indexed, DatetimeIndex) assert fancy_indexed.freq is None # 32-bit vs. 64-bit platforms - self.assertEqual(self.rng[4], self.rng[np.int_(4)]) + assert self.rng[4] == self.rng[np.int_(4)] def test_getitem_matplotlib_hackaround(self): values = self.rng[:, None] @@ -1255,22 +1253,22 @@ def test_getitem_matplotlib_hackaround(self): def test_shift(self): shifted = self.rng.shift(5) - self.assertEqual(shifted[0], self.rng[5]) - self.assertEqual(shifted.offset, self.rng.offset) + assert shifted[0] == self.rng[5] + assert shifted.offset == self.rng.offset shifted = self.rng.shift(-5) - self.assertEqual(shifted[5], self.rng[0]) - self.assertEqual(shifted.offset, self.rng.offset) + assert shifted[5] == self.rng[0] + assert shifted.offset == self.rng.offset shifted = self.rng.shift(0) - self.assertEqual(shifted[0], self.rng[0]) - self.assertEqual(shifted.offset, self.rng.offset) + assert shifted[0] == self.rng[0] + assert shifted.offset == self.rng.offset # PerformanceWarning with warnings.catch_warnings(record=True): rng = date_range(START, END, freq=BMonthEnd()) shifted = rng.shift(1, freq=CDay()) - self.assertEqual(shifted[0], rng[0] + CDay()) + assert shifted[0] == rng[0] + CDay() def test_pickle_unpickle(self): unpickled = tm.round_trip_pickle(self.rng) diff --git a/pandas/tests/indexes/datetimes/test_partial_slicing.py b/pandas/tests/indexes/datetimes/test_partial_slicing.py index c3eda8b378c96..b3661ae0e7a97 100644 --- a/pandas/tests/indexes/datetimes/test_partial_slicing.py +++ b/pandas/tests/indexes/datetimes/test_partial_slicing.py @@ -30,24 +30,24 @@ def test_slice_year(self): result = rng.get_loc('2009') expected = slice(3288, 3653) - self.assertEqual(result, expected) + assert result == expected def test_slice_quarter(self): dti = DatetimeIndex(freq='D', start=datetime(2000, 6, 1), periods=500) s = Series(np.arange(len(dti)), index=dti) - self.assertEqual(len(s['2001Q1']), 90) + assert len(s['2001Q1']) == 90 df = 
DataFrame(np.random.rand(len(dti), 5), index=dti) - self.assertEqual(len(df.loc['1Q01']), 90) + assert len(df.loc['1Q01']) == 90 def test_slice_month(self): dti = DatetimeIndex(freq='D', start=datetime(2005, 1, 1), periods=500) s = Series(np.arange(len(dti)), index=dti) - self.assertEqual(len(s['2005-11']), 30) + assert len(s['2005-11']) == 30 df = DataFrame(np.random.rand(len(dti), 5), index=dti) - self.assertEqual(len(df.loc['2005-11']), 30) + assert len(df.loc['2005-11']) == 30 tm.assert_series_equal(s['2005-11'], s['11-2005']) @@ -68,7 +68,7 @@ def test_partial_slice(self): tm.assert_series_equal(result, expected) result = s['2005-1-1'] - self.assertEqual(result, s.iloc[0]) + assert result == s.iloc[0] pytest.raises(Exception, s.__getitem__, '2004-12-31') @@ -92,7 +92,7 @@ def test_partial_slice_hourly(self): result = s['2005-1-1 20'] tm.assert_series_equal(result, s.iloc[:60]) - self.assertEqual(s['2005-1-1 20:00'], s.iloc[0]) + assert s['2005-1-1 20:00'] == s.iloc[0] pytest.raises(Exception, s.__getitem__, '2004-12-31 00:15') def test_partial_slice_minutely(self): @@ -106,7 +106,7 @@ def test_partial_slice_minutely(self): result = s['2005-1-1'] tm.assert_series_equal(result, s.iloc[:60]) - self.assertEqual(s[Timestamp('2005-1-1 23:59:00')], s.iloc[0]) + assert s[Timestamp('2005-1-1 23:59:00')] == s.iloc[0] pytest.raises(Exception, s.__getitem__, '2004-12-31 00:00:00') def test_partial_slice_second_precision(self): @@ -121,7 +121,7 @@ def test_partial_slice_second_precision(self): tm.assert_series_equal(s['2005-1-1 00:01'], s.iloc[10:]) tm.assert_series_equal(s['2005-1-1 00:01:00'], s.iloc[10:]) - self.assertEqual(s[Timestamp('2005-1-1 00:00:59.999990')], s.iloc[0]) + assert s[Timestamp('2005-1-1 00:00:59.999990')] == s.iloc[0] tm.assert_raises_regex(KeyError, '2005-1-1 00:00:00', lambda: s['2005-1-1 00:00:00']) @@ -144,7 +144,7 @@ def test_partial_slicing_dataframe(self): middate, middate + unit]) values = [1, 2, 3] df = DataFrame({'a': values}, index, dtype=np.int64) - self.assertEqual(df.index.resolution, resolution) + assert df.index.resolution == resolution # Timestamp with the same resolution as index # Should be exact match for Series (return scalar) @@ -154,7 +154,7 @@ def test_partial_slicing_dataframe(self): # make ts_string as precise as index result = df['a'][ts_string] assert isinstance(result, np.int64) - self.assertEqual(result, expected) + assert result == expected pytest.raises(KeyError, df.__getitem__, ts_string) # Timestamp with resolution less precise than index @@ -181,7 +181,7 @@ def test_partial_slicing_dataframe(self): ts_string = index[1].strftime(fmt) result = df['a'][ts_string] assert isinstance(result, np.int64) - self.assertEqual(result, 2) + assert result == 2 pytest.raises(KeyError, df.__getitem__, ts_string) # Not compatible with existing key diff --git a/pandas/tests/indexes/datetimes/test_setops.py b/pandas/tests/indexes/datetimes/test_setops.py index 6612ab844b849..b25fdaf6be3b0 100644 --- a/pandas/tests/indexes/datetimes/test_setops.py +++ b/pandas/tests/indexes/datetimes/test_setops.py @@ -29,7 +29,7 @@ def test_union_coverage(self): result = ordered[:0].union(ordered) tm.assert_index_equal(result, ordered) - self.assertEqual(result.freq, ordered.freq) + assert result.freq == ordered.freq def test_union_bug_1730(self): rng_a = date_range('1/1/2012', periods=4, freq='3H') @@ -106,9 +106,9 @@ def test_intersection(self): (rng4, expected4)]: result = base.intersection(rng) tm.assert_index_equal(result, expected) - self.assertEqual(result.name, 
expected.name) - self.assertEqual(result.freq, expected.freq) - self.assertEqual(result.tz, expected.tz) + assert result.name == expected.name + assert result.freq == expected.freq + assert result.tz == expected.tz # non-monotonic base = DatetimeIndex(['2011-01-05', '2011-01-04', @@ -136,17 +136,17 @@ def test_intersection(self): (rng4, expected4)]: result = base.intersection(rng) tm.assert_index_equal(result, expected) - self.assertEqual(result.name, expected.name) + assert result.name == expected.name assert result.freq is None - self.assertEqual(result.tz, expected.tz) + assert result.tz == expected.tz # empty same freq GH2129 rng = date_range('6/1/2000', '6/15/2000', freq='T') result = rng[0:0].intersection(rng) - self.assertEqual(len(result), 0) + assert len(result) == 0 result = rng.intersection(rng[0:0]) - self.assertEqual(len(result), 0) + assert len(result) == 0 def test_intersection_bug_1708(self): from pandas import DateOffset @@ -154,7 +154,7 @@ def test_intersection_bug_1708(self): index_2 = index_1 + DateOffset(hours=1) result = index_1 & index_2 - self.assertEqual(len(result), 0) + assert len(result) == 0 def test_difference_freq(self): # GH14323: difference of DatetimeIndex should not preserve frequency @@ -177,7 +177,7 @@ def test_datetimeindex_diff(self): periods=100) dti2 = DatetimeIndex(freq='Q-JAN', start=datetime(1997, 12, 31), periods=98) - self.assertEqual(len(dti1.difference(dti2)), 2) + assert len(dti1.difference(dti2)) == 2 def test_datetimeindex_union_join_empty(self): dti = DatetimeIndex(start='1/1/2001', end='2/1/2001', freq='D') @@ -288,7 +288,7 @@ def test_intersection(self): expected = rng[10:25] tm.assert_index_equal(the_int, expected) assert isinstance(the_int, DatetimeIndex) - self.assertEqual(the_int.offset, rng.offset) + assert the_int.offset == rng.offset the_int = rng1.intersection(rng2.view(DatetimeIndex)) tm.assert_index_equal(the_int, expected) diff --git a/pandas/tests/indexes/datetimes/test_tools.py b/pandas/tests/indexes/datetimes/test_tools.py index 4c32f41db207c..3c7f2e424f779 100644 --- a/pandas/tests/indexes/datetimes/test_tools.py +++ b/pandas/tests/indexes/datetimes/test_tools.py @@ -45,7 +45,7 @@ def test_to_datetime_format(self): if isinstance(expected, Series): assert_series_equal(result, Series(expected)) elif isinstance(expected, Timestamp): - self.assertEqual(result, expected) + assert result == expected else: tm.assert_index_equal(result, expected) @@ -112,7 +112,7 @@ def test_to_datetime_format_microsecond(self): format = '%d-%b-%Y %H:%M:%S.%f' result = to_datetime(val, format=format) exp = datetime.strptime(val, format) - self.assertEqual(result, exp) + assert result == exp def test_to_datetime_format_time(self): data = [ @@ -130,7 +130,7 @@ def test_to_datetime_format_time(self): # Timestamp('2010-01-10 09:12:56')] ] for s, format, dt in data: - self.assertEqual(to_datetime(s, format=format), dt) + assert to_datetime(s, format=format) == dt def test_to_datetime_with_non_exact(self): # GH 10834 @@ -159,7 +159,7 @@ def test_parse_nanoseconds_with_formula(self): "2012-01-01 09:00:00.001000000", ]: expected = pd.to_datetime(v) result = pd.to_datetime(v, format="%Y-%m-%d %H:%M:%S.%f") - self.assertEqual(result, expected) + assert result == expected def test_to_datetime_format_weeks(self): data = [ @@ -167,7 +167,7 @@ def test_to_datetime_format_weeks(self): ['2013020', '%Y%U%w', Timestamp('2013-01-13')] ] for s, format, dt in data: - self.assertEqual(to_datetime(s, format=format), dt) + assert to_datetime(s, format=format) == dt 
class TestToDatetime(tm.TestCase): @@ -312,11 +312,11 @@ def test_datetime_bool(self): with pytest.raises(TypeError): to_datetime(False) assert to_datetime(False, errors="coerce") is NaT - self.assertEqual(to_datetime(False, errors="ignore"), False) + assert to_datetime(False, errors="ignore") is False with pytest.raises(TypeError): to_datetime(True) assert to_datetime(True, errors="coerce") is NaT - self.assertEqual(to_datetime(True, errors="ignore"), True) + assert to_datetime(True, errors="ignore") is True with pytest.raises(TypeError): to_datetime([False, datetime.today()]) with pytest.raises(TypeError): @@ -390,15 +390,15 @@ def test_unit_consistency(self): # consistency of conversions expected = Timestamp('1970-05-09 14:25:11') result = pd.to_datetime(11111111, unit='s', errors='raise') - self.assertEqual(result, expected) + assert result == expected assert isinstance(result, Timestamp) result = pd.to_datetime(11111111, unit='s', errors='coerce') - self.assertEqual(result, expected) + assert result == expected assert isinstance(result, Timestamp) result = pd.to_datetime(11111111, unit='s', errors='ignore') - self.assertEqual(result, expected) + assert result == expected assert isinstance(result, Timestamp) def test_unit_with_numeric(self): @@ -617,11 +617,11 @@ def test_index_to_datetime(self): def test_to_datetime_iso8601(self): result = to_datetime(["2012-01-01 00:00:00"]) exp = Timestamp("2012-01-01 00:00:00") - self.assertEqual(result[0], exp) + assert result[0] == exp result = to_datetime(['20121001']) # bad iso 8601 exp = Timestamp('2012-10-01') - self.assertEqual(result[0], exp) + assert result[0] == exp def test_to_datetime_default(self): rs = to_datetime('2001') @@ -639,7 +639,7 @@ def test_to_datetime_on_datetime64_series(self): s = Series(date_range('1/1/2000', periods=10)) result = to_datetime(s) - self.assertEqual(result[0], s[0]) + assert result[0] == s[0] def test_to_datetime_with_space_in_series(self): # GH 6428 @@ -689,12 +689,12 @@ def test_to_datetime_types(self): # ints result = Timestamp(0) expected = to_datetime(0) - self.assertEqual(result, expected) + assert result == expected # GH 3888 (strings) expected = to_datetime(['2012'])[0] result = to_datetime('2012') - self.assertEqual(result, expected) + assert result == expected # array = ['2012','20120101','20120101 12:01:01'] array = ['20120101', '20120101 12:01:01'] @@ -705,7 +705,7 @@ def test_to_datetime_types(self): # currently fails ### # result = Timestamp('2012') # expected = to_datetime('2012') - # self.assertEqual(result, expected) + # assert result == expected def test_to_datetime_unprocessable_input(self): # GH 4928 @@ -721,10 +721,10 @@ def test_to_datetime_other_datetime64_units(self): as_obj = scalar.astype('O') index = DatetimeIndex([scalar]) - self.assertEqual(index[0], scalar.astype('O')) + assert index[0] == scalar.astype('O') value = Timestamp(scalar) - self.assertEqual(value, as_obj) + assert value == as_obj def test_to_datetime_list_of_integers(self): rng = date_range('1/1/2000', periods=20) @@ -739,8 +739,8 @@ def test_to_datetime_list_of_integers(self): def test_to_datetime_freq(self): xp = bdate_range('2000-1-1', periods=10, tz='UTC') rs = xp.to_datetime() - self.assertEqual(xp.freq, rs.freq) - self.assertEqual(xp.tzinfo, rs.tzinfo) + assert xp.freq == rs.freq + assert xp.tzinfo == rs.tzinfo def test_string_na_nat_conversion(self): # GH #999, #858 @@ -794,10 +794,10 @@ def test_string_na_nat_conversion(self): expected[i] = to_datetime(x) assert_series_equal(result, expected, 
check_names=False) - self.assertEqual(result.name, 'foo') + assert result.name == 'foo' assert_series_equal(dresult, expected, check_names=False) - self.assertEqual(dresult.name, 'foo') + assert dresult.name == 'foo' def test_dti_constructor_numpy_timeunits(self): # GH 9114 @@ -842,21 +842,14 @@ def test_guess_datetime_format_with_parseable_formats(self): '%Y-%m-%d %H:%M:%S.%f'), ) for dt_string, dt_format in dt_string_to_format: - self.assertEqual( - tools._guess_datetime_format(dt_string), - dt_format - ) + assert tools._guess_datetime_format(dt_string) == dt_format def test_guess_datetime_format_with_dayfirst(self): ambiguous_string = '01/01/2011' - self.assertEqual( - tools._guess_datetime_format(ambiguous_string, dayfirst=True), - '%d/%m/%Y' - ) - self.assertEqual( - tools._guess_datetime_format(ambiguous_string, dayfirst=False), - '%m/%d/%Y' - ) + assert tools._guess_datetime_format( + ambiguous_string, dayfirst=True) == '%d/%m/%Y' + assert tools._guess_datetime_format( + ambiguous_string, dayfirst=False) == '%m/%d/%Y' def test_guess_datetime_format_with_locale_specific_formats(self): # The month names will vary depending on the locale, in which @@ -868,10 +861,7 @@ def test_guess_datetime_format_with_locale_specific_formats(self): ('30/Dec/2011 00:00:00', '%d/%b/%Y %H:%M:%S'), ) for dt_string, dt_format in dt_string_to_format: - self.assertEqual( - tools._guess_datetime_format(dt_string), - dt_format - ) + assert tools._guess_datetime_format(dt_string) == dt_format def test_guess_datetime_format_invalid_inputs(self): # A datetime string must include a year, month and a day for it @@ -901,10 +891,7 @@ def test_guess_datetime_format_nopadding(self): ('2011-1-3T00:00:0', '%Y-%m-%dT%H:%M:%S')) for dt_string, dt_format in dt_string_to_format: - self.assertEqual( - tools._guess_datetime_format(dt_string), - dt_format - ) + assert tools._guess_datetime_format(dt_string) == dt_format def test_guess_datetime_format_for_array(self): tm._skip_if_not_us_locale() @@ -918,10 +905,8 @@ def test_guess_datetime_format_for_array(self): ] for test_array in test_arrays: - self.assertEqual( - tools._guess_datetime_format_for_array(test_array), - expected_format - ) + assert tools._guess_datetime_format_for_array( + test_array) == expected_format format_for_string_of_nans = tools._guess_datetime_format_for_array( np.array( @@ -1012,14 +997,13 @@ def test_day_not_in_month_raise(self): errors='raise', format="%Y-%m-%d") def test_day_not_in_month_ignore(self): - self.assertEqual(to_datetime( - '2015-02-29', errors='ignore'), '2015-02-29') - self.assertEqual(to_datetime( - '2015-02-29', errors='ignore', format="%Y-%m-%d"), '2015-02-29') - self.assertEqual(to_datetime( - '2015-02-32', errors='ignore', format="%Y-%m-%d"), '2015-02-32') - self.assertEqual(to_datetime( - '2015-04-31', errors='ignore', format="%Y-%m-%d"), '2015-04-31') + assert to_datetime('2015-02-29', errors='ignore') == '2015-02-29' + assert to_datetime('2015-02-29', errors='ignore', + format="%Y-%m-%d") == '2015-02-29' + assert to_datetime('2015-02-32', errors='ignore', + format="%Y-%m-%d") == '2015-02-32' + assert to_datetime('2015-04-31', errors='ignore', + format="%Y-%m-%d") == '2015-04-31' class TestDatetimeParsingWrappers(tm.TestCase): @@ -1110,7 +1094,7 @@ def test_parsers(self): result9 = DatetimeIndex(Series([date_str]), yearfirst=yearfirst) for res in [result1, result2]: - self.assertEqual(res, expected) + assert res == expected for res in [result3, result4, result6, result8, result9]: exp = DatetimeIndex([pd.Timestamp(expected)]) 
tm.assert_index_equal(res, exp) @@ -1118,10 +1102,10 @@ def test_parsers(self): # these really need to have yearfist, but we don't support if not yearfirst: result5 = Timestamp(date_str) - self.assertEqual(result5, expected) + assert result5 == expected result7 = date_range(date_str, freq='S', periods=1, yearfirst=yearfirst) - self.assertEqual(result7, expected) + assert result7 == expected # NaT result1, _, _ = tools.parse_time_string('NaT') @@ -1215,7 +1199,7 @@ def test_parsers_dayfirst_yearfirst(self): # compare with dateutil result dateutil_result = parse(date_str, dayfirst=dayfirst, yearfirst=yearfirst) - self.assertEqual(dateutil_result, expected) + assert dateutil_result == expected result1, _, _ = tools.parse_time_string(date_str, dayfirst=dayfirst, @@ -1224,7 +1208,7 @@ def test_parsers_dayfirst_yearfirst(self): # we don't support dayfirst/yearfirst here: if not dayfirst and not yearfirst: result2 = Timestamp(date_str) - self.assertEqual(result2, expected) + assert result2 == expected result3 = to_datetime(date_str, dayfirst=dayfirst, yearfirst=yearfirst) @@ -1232,9 +1216,9 @@ def test_parsers_dayfirst_yearfirst(self): result4 = DatetimeIndex([date_str], dayfirst=dayfirst, yearfirst=yearfirst)[0] - self.assertEqual(result1, expected) - self.assertEqual(result3, expected) - self.assertEqual(result4, expected) + assert result1 == expected + assert result3 == expected + assert result4 == expected def test_parsers_timestring(self): tm._skip_if_no_dateutil() @@ -1253,11 +1237,11 @@ def test_parsers_timestring(self): # parse time string return time string based on default date # others are not, and can't be changed because it is used in # time series plot - self.assertEqual(result1, exp_def) - self.assertEqual(result2, exp_now) - self.assertEqual(result3, exp_now) - self.assertEqual(result4, exp_now) - self.assertEqual(result5, exp_now) + assert result1 == exp_def + assert result2 == exp_now + assert result3 == exp_now + assert result4 == exp_now + assert result5 == exp_now def test_parsers_time(self): # GH11818 @@ -1267,20 +1251,19 @@ def test_parsers_time(self): expected = time(14, 15) for time_string in strings: - self.assertEqual(tools.to_time(time_string), expected) + assert tools.to_time(time_string) == expected new_string = "14.15" pytest.raises(ValueError, tools.to_time, new_string) - self.assertEqual(tools.to_time(new_string, format="%H.%M"), expected) + assert tools.to_time(new_string, format="%H.%M") == expected arg = ["14:15", "20:20"] expected_arr = [time(14, 15), time(20, 20)] - self.assertEqual(tools.to_time(arg), expected_arr) - self.assertEqual(tools.to_time(arg, format="%H:%M"), expected_arr) - self.assertEqual(tools.to_time(arg, infer_time_format=True), - expected_arr) - self.assertEqual(tools.to_time(arg, format="%I:%M%p", errors="coerce"), - [None, None]) + assert tools.to_time(arg) == expected_arr + assert tools.to_time(arg, format="%H:%M") == expected_arr + assert tools.to_time(arg, infer_time_format=True) == expected_arr + assert tools.to_time(arg, format="%I:%M%p", + errors="coerce") == [None, None] res = tools.to_time(arg, format="%I:%M%p", errors="ignore") tm.assert_numpy_array_equal(res, np.array(arg, dtype=np.object_)) @@ -1301,7 +1284,7 @@ def test_parsers_monthfreq(self): for date_str, expected in compat.iteritems(cases): result1, _, _ = tools.parse_time_string(date_str, freq='M') - self.assertEqual(result1, expected) + assert result1 == expected def test_parsers_quarterly_with_freq(self): msg = ('Incorrect quarterly string is given, quarter ' @@ -1321,7 
+1304,7 @@ def test_parsers_quarterly_with_freq(self): for (date_str, freq), exp in compat.iteritems(cases): result, _, _ = tools.parse_time_string(date_str, freq=freq) - self.assertEqual(result, exp) + assert result == exp def test_parsers_timezone_minute_offsets_roundtrip(self): # GH11708 @@ -1337,9 +1320,9 @@ def test_parsers_timezone_minute_offsets_roundtrip(self): for dt_string, tz, dt_string_repr in dt_strings: dt_time = to_datetime(dt_string) - self.assertEqual(base, dt_time) + assert base == dt_time converted_time = dt_time.tz_localize('UTC').tz_convert(tz) - self.assertEqual(dt_string_repr, repr(converted_time)) + assert dt_string_repr == repr(converted_time) def test_parsers_iso8601(self): # GH 12060 @@ -1358,7 +1341,7 @@ def test_parsers_iso8601(self): '2013-1-1 5:30:00': datetime(2013, 1, 1, 5, 30)} for date_str, exp in compat.iteritems(cases): actual = tslib._test_parse_iso8601(date_str) - self.assertEqual(actual, exp) + assert actual == exp # seperators must all match - YYYYMM not valid invalid_cases = ['2011-01/02', '2011^11^11', diff --git a/pandas/tests/indexes/period/test_asfreq.py b/pandas/tests/indexes/period/test_asfreq.py index 4d1fe9c46f126..f9effd3d1aea6 100644 --- a/pandas/tests/indexes/period/test_asfreq.py +++ b/pandas/tests/indexes/period/test_asfreq.py @@ -20,64 +20,64 @@ def test_asfreq(self): pi6 = PeriodIndex(freq='Min', start='1/1/2001', end='1/1/2001 00:00') pi7 = PeriodIndex(freq='S', start='1/1/2001', end='1/1/2001 00:00:00') - self.assertEqual(pi1.asfreq('Q', 'S'), pi2) - self.assertEqual(pi1.asfreq('Q', 's'), pi2) - self.assertEqual(pi1.asfreq('M', 'start'), pi3) - self.assertEqual(pi1.asfreq('D', 'StarT'), pi4) - self.assertEqual(pi1.asfreq('H', 'beGIN'), pi5) - self.assertEqual(pi1.asfreq('Min', 'S'), pi6) - self.assertEqual(pi1.asfreq('S', 'S'), pi7) - - self.assertEqual(pi2.asfreq('A', 'S'), pi1) - self.assertEqual(pi2.asfreq('M', 'S'), pi3) - self.assertEqual(pi2.asfreq('D', 'S'), pi4) - self.assertEqual(pi2.asfreq('H', 'S'), pi5) - self.assertEqual(pi2.asfreq('Min', 'S'), pi6) - self.assertEqual(pi2.asfreq('S', 'S'), pi7) - - self.assertEqual(pi3.asfreq('A', 'S'), pi1) - self.assertEqual(pi3.asfreq('Q', 'S'), pi2) - self.assertEqual(pi3.asfreq('D', 'S'), pi4) - self.assertEqual(pi3.asfreq('H', 'S'), pi5) - self.assertEqual(pi3.asfreq('Min', 'S'), pi6) - self.assertEqual(pi3.asfreq('S', 'S'), pi7) - - self.assertEqual(pi4.asfreq('A', 'S'), pi1) - self.assertEqual(pi4.asfreq('Q', 'S'), pi2) - self.assertEqual(pi4.asfreq('M', 'S'), pi3) - self.assertEqual(pi4.asfreq('H', 'S'), pi5) - self.assertEqual(pi4.asfreq('Min', 'S'), pi6) - self.assertEqual(pi4.asfreq('S', 'S'), pi7) - - self.assertEqual(pi5.asfreq('A', 'S'), pi1) - self.assertEqual(pi5.asfreq('Q', 'S'), pi2) - self.assertEqual(pi5.asfreq('M', 'S'), pi3) - self.assertEqual(pi5.asfreq('D', 'S'), pi4) - self.assertEqual(pi5.asfreq('Min', 'S'), pi6) - self.assertEqual(pi5.asfreq('S', 'S'), pi7) - - self.assertEqual(pi6.asfreq('A', 'S'), pi1) - self.assertEqual(pi6.asfreq('Q', 'S'), pi2) - self.assertEqual(pi6.asfreq('M', 'S'), pi3) - self.assertEqual(pi6.asfreq('D', 'S'), pi4) - self.assertEqual(pi6.asfreq('H', 'S'), pi5) - self.assertEqual(pi6.asfreq('S', 'S'), pi7) - - self.assertEqual(pi7.asfreq('A', 'S'), pi1) - self.assertEqual(pi7.asfreq('Q', 'S'), pi2) - self.assertEqual(pi7.asfreq('M', 'S'), pi3) - self.assertEqual(pi7.asfreq('D', 'S'), pi4) - self.assertEqual(pi7.asfreq('H', 'S'), pi5) - self.assertEqual(pi7.asfreq('Min', 'S'), pi6) + assert pi1.asfreq('Q', 'S') == pi2 + assert 
pi1.asfreq('Q', 's') == pi2 + assert pi1.asfreq('M', 'start') == pi3 + assert pi1.asfreq('D', 'StarT') == pi4 + assert pi1.asfreq('H', 'beGIN') == pi5 + assert pi1.asfreq('Min', 'S') == pi6 + assert pi1.asfreq('S', 'S') == pi7 + + assert pi2.asfreq('A', 'S') == pi1 + assert pi2.asfreq('M', 'S') == pi3 + assert pi2.asfreq('D', 'S') == pi4 + assert pi2.asfreq('H', 'S') == pi5 + assert pi2.asfreq('Min', 'S') == pi6 + assert pi2.asfreq('S', 'S') == pi7 + + assert pi3.asfreq('A', 'S') == pi1 + assert pi3.asfreq('Q', 'S') == pi2 + assert pi3.asfreq('D', 'S') == pi4 + assert pi3.asfreq('H', 'S') == pi5 + assert pi3.asfreq('Min', 'S') == pi6 + assert pi3.asfreq('S', 'S') == pi7 + + assert pi4.asfreq('A', 'S') == pi1 + assert pi4.asfreq('Q', 'S') == pi2 + assert pi4.asfreq('M', 'S') == pi3 + assert pi4.asfreq('H', 'S') == pi5 + assert pi4.asfreq('Min', 'S') == pi6 + assert pi4.asfreq('S', 'S') == pi7 + + assert pi5.asfreq('A', 'S') == pi1 + assert pi5.asfreq('Q', 'S') == pi2 + assert pi5.asfreq('M', 'S') == pi3 + assert pi5.asfreq('D', 'S') == pi4 + assert pi5.asfreq('Min', 'S') == pi6 + assert pi5.asfreq('S', 'S') == pi7 + + assert pi6.asfreq('A', 'S') == pi1 + assert pi6.asfreq('Q', 'S') == pi2 + assert pi6.asfreq('M', 'S') == pi3 + assert pi6.asfreq('D', 'S') == pi4 + assert pi6.asfreq('H', 'S') == pi5 + assert pi6.asfreq('S', 'S') == pi7 + + assert pi7.asfreq('A', 'S') == pi1 + assert pi7.asfreq('Q', 'S') == pi2 + assert pi7.asfreq('M', 'S') == pi3 + assert pi7.asfreq('D', 'S') == pi4 + assert pi7.asfreq('H', 'S') == pi5 + assert pi7.asfreq('Min', 'S') == pi6 pytest.raises(ValueError, pi7.asfreq, 'T', 'foo') result1 = pi1.asfreq('3M') result2 = pi1.asfreq('M') expected = PeriodIndex(freq='M', start='2001-12', end='2001-12') tm.assert_numpy_array_equal(result1.asi8, expected.asi8) - self.assertEqual(result1.freqstr, '3M') + assert result1.freqstr == '3M' tm.assert_numpy_array_equal(result2.asi8, expected.asi8) - self.assertEqual(result2.freqstr, 'M') + assert result2.freqstr == 'M' def test_asfreq_nat(self): idx = PeriodIndex(['2011-01', '2011-02', 'NaT', '2011-04'], freq='M') @@ -93,13 +93,13 @@ def test_asfreq_mult_pi(self): exp = PeriodIndex(['2001-02-28', '2001-03-31', 'NaT', '2001-04-30'], freq=freq) tm.assert_index_equal(result, exp) - self.assertEqual(result.freq, exp.freq) + assert result.freq == exp.freq result = pi.asfreq(freq, how='S') exp = PeriodIndex(['2001-01-01', '2001-02-01', 'NaT', '2001-03-01'], freq=freq) tm.assert_index_equal(result, exp) - self.assertEqual(result.freq, exp.freq) + assert result.freq == exp.freq def test_asfreq_combined_pi(self): pi = pd.PeriodIndex(['2001-01-01 00:00', '2001-01-02 02:00', 'NaT'], @@ -109,7 +109,7 @@ def test_asfreq_combined_pi(self): for freq, how in zip(['1D1H', '1H1D'], ['S', 'E']): result = pi.asfreq(freq, how=how) tm.assert_index_equal(result, exp) - self.assertEqual(result.freq, exp.freq) + assert result.freq == exp.freq for freq in ['1D1H', '1H1D']: pi = pd.PeriodIndex(['2001-01-01 00:00', '2001-01-02 02:00', @@ -118,7 +118,7 @@ def test_asfreq_combined_pi(self): exp = PeriodIndex(['2001-01-02 00:00', '2001-01-03 02:00', 'NaT'], freq='H') tm.assert_index_equal(result, exp) - self.assertEqual(result.freq, exp.freq) + assert result.freq == exp.freq pi = pd.PeriodIndex(['2001-01-01 00:00', '2001-01-02 02:00', 'NaT'], freq=freq) @@ -126,7 +126,7 @@ def test_asfreq_combined_pi(self): exp = PeriodIndex(['2001-01-01 00:00', '2001-01-02 02:00', 'NaT'], freq='H') tm.assert_index_equal(result, exp) - self.assertEqual(result.freq, exp.freq) + 
assert result.freq == exp.freq def test_asfreq_ts(self): index = PeriodIndex(freq='A', start='1/1/2001', end='12/31/2010') @@ -136,12 +136,12 @@ def test_asfreq_ts(self): result = ts.asfreq('D', how='end') df_result = df.asfreq('D', how='end') exp_index = index.asfreq('D', how='end') - self.assertEqual(len(result), len(ts)) + assert len(result) == len(ts) tm.assert_index_equal(result.index, exp_index) tm.assert_index_equal(df_result.index, exp_index) result = ts.asfreq('D', how='start') - self.assertEqual(len(result), len(ts)) + assert len(result) == len(ts) tm.assert_index_equal(result.index, index.asfreq('D', how='start')) def test_astype_asfreq(self): diff --git a/pandas/tests/indexes/period/test_construction.py b/pandas/tests/indexes/period/test_construction.py index 6ab42f14efae6..a95ad808cadce 100644 --- a/pandas/tests/indexes/period/test_construction.py +++ b/pandas/tests/indexes/period/test_construction.py @@ -160,12 +160,12 @@ def test_constructor_dtype(self): idx = PeriodIndex(['2013-01', '2013-03'], dtype='period[M]') exp = PeriodIndex(['2013-01', '2013-03'], freq='M') tm.assert_index_equal(idx, exp) - self.assertEqual(idx.dtype, 'period[M]') + assert idx.dtype == 'period[M]' idx = PeriodIndex(['2013-01-05', '2013-03-05'], dtype='period[3D]') exp = PeriodIndex(['2013-01-05', '2013-03-05'], freq='3D') tm.assert_index_equal(idx, exp) - self.assertEqual(idx.dtype, 'period[3D]') + assert idx.dtype == 'period[3D]' # if we already have a freq and its not the same, then asfreq # (not changed) @@ -174,11 +174,11 @@ def test_constructor_dtype(self): res = PeriodIndex(idx, dtype='period[M]') exp = PeriodIndex(['2013-01', '2013-01'], freq='M') tm.assert_index_equal(res, exp) - self.assertEqual(res.dtype, 'period[M]') + assert res.dtype == 'period[M]' res = PeriodIndex(idx, freq='M') tm.assert_index_equal(res, exp) - self.assertEqual(res.dtype, 'period[M]') + assert res.dtype == 'period[M]' msg = 'specified freq and dtype are different' with tm.assert_raises_regex(period.IncompatibleFrequency, msg): @@ -187,8 +187,8 @@ def test_constructor_dtype(self): def test_constructor_empty(self): idx = pd.PeriodIndex([], freq='M') assert isinstance(idx, PeriodIndex) - self.assertEqual(len(idx), 0) - self.assertEqual(idx.freq, 'M') + assert len(idx) == 0 + assert idx.freq == 'M' with tm.assert_raises_regex(ValueError, 'freq not specified'): pd.PeriodIndex([]) @@ -367,64 +367,64 @@ def test_constructor_freq_combined(self): def test_constructor(self): pi = PeriodIndex(freq='A', start='1/1/2001', end='12/1/2009') - self.assertEqual(len(pi), 9) + assert len(pi) == 9 pi = PeriodIndex(freq='Q', start='1/1/2001', end='12/1/2009') - self.assertEqual(len(pi), 4 * 9) + assert len(pi) == 4 * 9 pi = PeriodIndex(freq='M', start='1/1/2001', end='12/1/2009') - self.assertEqual(len(pi), 12 * 9) + assert len(pi) == 12 * 9 pi = PeriodIndex(freq='D', start='1/1/2001', end='12/31/2009') - self.assertEqual(len(pi), 365 * 9 + 2) + assert len(pi) == 365 * 9 + 2 pi = PeriodIndex(freq='B', start='1/1/2001', end='12/31/2009') - self.assertEqual(len(pi), 261 * 9) + assert len(pi) == 261 * 9 pi = PeriodIndex(freq='H', start='1/1/2001', end='12/31/2001 23:00') - self.assertEqual(len(pi), 365 * 24) + assert len(pi) == 365 * 24 pi = PeriodIndex(freq='Min', start='1/1/2001', end='1/1/2001 23:59') - self.assertEqual(len(pi), 24 * 60) + assert len(pi) == 24 * 60 pi = PeriodIndex(freq='S', start='1/1/2001', end='1/1/2001 23:59:59') - self.assertEqual(len(pi), 24 * 60 * 60) + assert len(pi) == 24 * 60 * 60 start = Period('02-Apr-2005', 
'B') i1 = PeriodIndex(start=start, periods=20) - self.assertEqual(len(i1), 20) - self.assertEqual(i1.freq, start.freq) - self.assertEqual(i1[0], start) + assert len(i1) == 20 + assert i1.freq == start.freq + assert i1[0] == start end_intv = Period('2006-12-31', 'W') i1 = PeriodIndex(end=end_intv, periods=10) - self.assertEqual(len(i1), 10) - self.assertEqual(i1.freq, end_intv.freq) - self.assertEqual(i1[-1], end_intv) + assert len(i1) == 10 + assert i1.freq == end_intv.freq + assert i1[-1] == end_intv end_intv = Period('2006-12-31', '1w') i2 = PeriodIndex(end=end_intv, periods=10) - self.assertEqual(len(i1), len(i2)) + assert len(i1) == len(i2) assert (i1 == i2).all() - self.assertEqual(i1.freq, i2.freq) + assert i1.freq == i2.freq end_intv = Period('2006-12-31', ('w', 1)) i2 = PeriodIndex(end=end_intv, periods=10) - self.assertEqual(len(i1), len(i2)) + assert len(i1) == len(i2) assert (i1 == i2).all() - self.assertEqual(i1.freq, i2.freq) + assert i1.freq == i2.freq end_intv = Period('2005-05-01', 'B') i1 = PeriodIndex(start=start, end=end_intv) # infer freq from first element i2 = PeriodIndex([end_intv, Period('2005-05-05', 'B')]) - self.assertEqual(len(i2), 2) - self.assertEqual(i2[0], end_intv) + assert len(i2) == 2 + assert i2[0] == end_intv i2 = PeriodIndex(np.array([end_intv, Period('2005-05-05', 'B')])) - self.assertEqual(len(i2), 2) - self.assertEqual(i2[0], end_intv) + assert len(i2) == 2 + assert i2[0] == end_intv # Mixed freq should fail vals = [end_intv, Period('2006-12-31', 'w')] diff --git a/pandas/tests/indexes/period/test_indexing.py b/pandas/tests/indexes/period/test_indexing.py index cf5f741fb09ed..ebbe05d51598c 100644 --- a/pandas/tests/indexes/period/test_indexing.py +++ b/pandas/tests/indexes/period/test_indexing.py @@ -22,17 +22,17 @@ def test_getitem(self): for idx in [idx1]: result = idx[0] - self.assertEqual(result, pd.Period('2011-01-01', freq='D')) + assert result == pd.Period('2011-01-01', freq='D') result = idx[-1] - self.assertEqual(result, pd.Period('2011-01-31', freq='D')) + assert result == pd.Period('2011-01-31', freq='D') result = idx[0:5] expected = pd.period_range('2011-01-01', '2011-01-05', freq='D', name='idx') tm.assert_index_equal(result, expected) - self.assertEqual(result.freq, expected.freq) - self.assertEqual(result.freq, 'D') + assert result.freq == expected.freq + assert result.freq == 'D' result = idx[0:10:2] expected = pd.PeriodIndex(['2011-01-01', '2011-01-03', @@ -40,8 +40,8 @@ def test_getitem(self): '2011-01-07', '2011-01-09'], freq='D', name='idx') tm.assert_index_equal(result, expected) - self.assertEqual(result.freq, expected.freq) - self.assertEqual(result.freq, 'D') + assert result.freq == expected.freq + assert result.freq == 'D' result = idx[-20:-5:3] expected = pd.PeriodIndex(['2011-01-12', '2011-01-15', @@ -49,16 +49,16 @@ def test_getitem(self): '2011-01-21', '2011-01-24'], freq='D', name='idx') tm.assert_index_equal(result, expected) - self.assertEqual(result.freq, expected.freq) - self.assertEqual(result.freq, 'D') + assert result.freq == expected.freq + assert result.freq == 'D' result = idx[4::-1] expected = PeriodIndex(['2011-01-05', '2011-01-04', '2011-01-03', '2011-01-02', '2011-01-01'], freq='D', name='idx') tm.assert_index_equal(result, expected) - self.assertEqual(result.freq, expected.freq) - self.assertEqual(result.freq, 'D') + assert result.freq == expected.freq + assert result.freq == 'D' def test_getitem_index(self): idx = period_range('2007-01', periods=10, freq='M', name='x') @@ -84,19 +84,19 @@ def 
test_getitem_partial(self): assert (result.index.year == 2008).all() result = ts['2008':'2009'] - self.assertEqual(len(result), 24) + assert len(result) == 24 result = ts['2008-1':'2009-12'] - self.assertEqual(len(result), 24) + assert len(result) == 24 result = ts['2008Q1':'2009Q4'] - self.assertEqual(len(result), 24) + assert len(result) == 24 result = ts[:'2009'] - self.assertEqual(len(result), 36) + assert len(result) == 36 result = ts['2009':] - self.assertEqual(len(result), 50 - 24) + assert len(result) == 50 - 24 exp = result result = ts[24:] @@ -120,15 +120,15 @@ def test_getitem_datetime(self): def test_getitem_nat(self): idx = pd.PeriodIndex(['2011-01', 'NaT', '2011-02'], freq='M') - self.assertEqual(idx[0], pd.Period('2011-01', freq='M')) + assert idx[0] == pd.Period('2011-01', freq='M') assert idx[1] is tslib.NaT s = pd.Series([0, 1, 2], index=idx) - self.assertEqual(s[pd.NaT], 1) + assert s[pd.NaT] == 1 s = pd.Series(idx, index=idx) - self.assertEqual(s[pd.Period('2011-01', freq='M')], - pd.Period('2011-01', freq='M')) + assert (s[pd.Period('2011-01', freq='M')] == + pd.Period('2011-01', freq='M')) assert s[pd.NaT] is tslib.NaT def test_getitem_list_periods(self): @@ -210,7 +210,7 @@ def test_get_loc_msg(self): try: idx.get_loc(bad_period) except KeyError as inst: - self.assertEqual(inst.args[0], bad_period) + assert inst.args[0] == bad_period def test_get_loc_nat(self): didx = DatetimeIndex(['2011-01-01', 'NaT', '2011-01-03']) @@ -218,10 +218,10 @@ def test_get_loc_nat(self): # check DatetimeIndex compat for idx in [didx, pidx]: - self.assertEqual(idx.get_loc(pd.NaT), 1) - self.assertEqual(idx.get_loc(None), 1) - self.assertEqual(idx.get_loc(float('nan')), 1) - self.assertEqual(idx.get_loc(np.nan), 1) + assert idx.get_loc(pd.NaT) == 1 + assert idx.get_loc(None) == 1 + assert idx.get_loc(float('nan')) == 1 + assert idx.get_loc(np.nan) == 1 def test_take(self): # GH 10295 @@ -230,46 +230,46 @@ def test_take(self): for idx in [idx1]: result = idx.take([0]) - self.assertEqual(result, pd.Period('2011-01-01', freq='D')) + assert result == pd.Period('2011-01-01', freq='D') result = idx.take([5]) - self.assertEqual(result, pd.Period('2011-01-06', freq='D')) + assert result == pd.Period('2011-01-06', freq='D') result = idx.take([0, 1, 2]) expected = pd.period_range('2011-01-01', '2011-01-03', freq='D', name='idx') tm.assert_index_equal(result, expected) - self.assertEqual(result.freq, 'D') - self.assertEqual(result.freq, expected.freq) + assert result.freq == 'D' + assert result.freq == expected.freq result = idx.take([0, 2, 4]) expected = pd.PeriodIndex(['2011-01-01', '2011-01-03', '2011-01-05'], freq='D', name='idx') tm.assert_index_equal(result, expected) - self.assertEqual(result.freq, expected.freq) - self.assertEqual(result.freq, 'D') + assert result.freq == expected.freq + assert result.freq == 'D' result = idx.take([7, 4, 1]) expected = pd.PeriodIndex(['2011-01-08', '2011-01-05', '2011-01-02'], freq='D', name='idx') tm.assert_index_equal(result, expected) - self.assertEqual(result.freq, expected.freq) - self.assertEqual(result.freq, 'D') + assert result.freq == expected.freq + assert result.freq == 'D' result = idx.take([3, 2, 5]) expected = PeriodIndex(['2011-01-04', '2011-01-03', '2011-01-06'], freq='D', name='idx') tm.assert_index_equal(result, expected) - self.assertEqual(result.freq, expected.freq) - self.assertEqual(result.freq, 'D') + assert result.freq == expected.freq + assert result.freq == 'D' result = idx.take([-3, 2, 5]) expected = PeriodIndex(['2011-01-29', 
'2011-01-03', '2011-01-06'], freq='D', name='idx') tm.assert_index_equal(result, expected) - self.assertEqual(result.freq, expected.freq) - self.assertEqual(result.freq, 'D') + assert result.freq == expected.freq + assert result.freq == 'D' def test_take_misc(self): index = PeriodIndex(start='1/1/10', end='12/31/12', freq='D', @@ -284,8 +284,8 @@ def test_take_misc(self): for taken in [taken1, taken2]: tm.assert_index_equal(taken, expected) assert isinstance(taken, PeriodIndex) - self.assertEqual(taken.freq, index.freq) - self.assertEqual(taken.name, expected.name) + assert taken.freq == index.freq + assert taken.name == expected.name def test_take_fill_value(self): # GH 12631 diff --git a/pandas/tests/indexes/period/test_ops.py b/pandas/tests/indexes/period/test_ops.py index af377c1b69922..fb688bda58ae8 100644 --- a/pandas/tests/indexes/period/test_ops.py +++ b/pandas/tests/indexes/period/test_ops.py @@ -38,10 +38,10 @@ def test_asobject_tolist(self): expected = pd.Index(expected_list, dtype=object, name='idx') result = idx.asobject assert isinstance(result, Index) - self.assertEqual(result.dtype, object) + assert result.dtype == object tm.assert_index_equal(result, expected) - self.assertEqual(result.name, expected.name) - self.assertEqual(idx.tolist(), expected_list) + assert result.name == expected.name + assert idx.tolist() == expected_list idx = PeriodIndex(['2013-01-01', '2013-01-02', 'NaT', '2013-01-04'], freq='D', name='idx') @@ -52,16 +52,16 @@ def test_asobject_tolist(self): expected = pd.Index(expected_list, dtype=object, name='idx') result = idx.asobject assert isinstance(result, Index) - self.assertEqual(result.dtype, object) + assert result.dtype == object tm.assert_index_equal(result, expected) for i in [0, 1, 3]: - self.assertEqual(result[i], expected[i]) + assert result[i] == expected[i] assert result[2] is pd.NaT - self.assertEqual(result.name, expected.name) + assert result.name == expected.name result_list = idx.tolist() for i in [0, 1, 3]: - self.assertEqual(result_list[i], expected_list[i]) + assert result_list[i] == expected_list[i] assert result_list[2] is pd.NaT def test_minmax(self): @@ -77,12 +77,12 @@ def test_minmax(self): assert not idx2.is_monotonic for idx in [idx1, idx2]: - self.assertEqual(idx.min(), pd.Period('2011-01-01', freq='D')) - self.assertEqual(idx.max(), pd.Period('2011-01-03', freq='D')) - self.assertEqual(idx1.argmin(), 1) - self.assertEqual(idx2.argmin(), 0) - self.assertEqual(idx1.argmax(), 3) - self.assertEqual(idx2.argmax(), 2) + assert idx.min() == pd.Period('2011-01-01', freq='D') + assert idx.max() == pd.Period('2011-01-03', freq='D') + assert idx1.argmin() == 1 + assert idx2.argmin() == 0 + assert idx1.argmax() == 3 + assert idx2.argmax() == 2 for op in ['min', 'max']: # Return NaT @@ -101,15 +101,15 @@ def test_minmax(self): def test_numpy_minmax(self): pr = pd.period_range(start='2016-01-15', end='2016-01-20') - self.assertEqual(np.min(pr), Period('2016-01-15', freq='D')) - self.assertEqual(np.max(pr), Period('2016-01-20', freq='D')) + assert np.min(pr) == Period('2016-01-15', freq='D') + assert np.max(pr) == Period('2016-01-20', freq='D') errmsg = "the 'out' parameter is not supported" tm.assert_raises_regex(ValueError, errmsg, np.min, pr, out=0) tm.assert_raises_regex(ValueError, errmsg, np.max, pr, out=0) - self.assertEqual(np.argmin(pr), 0) - self.assertEqual(np.argmax(pr), 5) + assert np.argmin(pr) == 0 + assert np.argmax(pr) == 5 if not _np_version_under1p10: errmsg = "the 'out' parameter is not supported" @@ -167,7 +167,7 @@ 
def test_representation(self): exp6, exp7, exp8, exp9, exp10]): for func in ['__repr__', '__unicode__', '__str__']: result = getattr(idx, func)() - self.assertEqual(result, expected) + assert result == expected def test_representation_to_series(self): # GH 10971 @@ -225,7 +225,7 @@ def test_representation_to_series(self): [exp1, exp2, exp3, exp4, exp5, exp6, exp7, exp8, exp9]): result = repr(pd.Series(idx)) - self.assertEqual(result, expected) + assert result == expected def test_summary(self): # GH9116 @@ -274,7 +274,7 @@ def test_summary(self): [exp1, exp2, exp3, exp4, exp5, exp6, exp7, exp8, exp9]): result = idx.summary() - self.assertEqual(result, expected) + assert result == expected def test_resolution(self): for freq, expected in zip(['A', 'Q', 'M', 'D', 'H', @@ -284,7 +284,7 @@ def test_resolution(self): 'millisecond', 'microsecond']): idx = pd.period_range(start='2013-04-01', periods=30, freq=freq) - self.assertEqual(idx.resolution, expected) + assert idx.resolution == expected def test_add_iadd(self): rng = pd.period_range('1/1/2000', freq='D', periods=5) @@ -569,12 +569,12 @@ def test_drop_duplicates_metadata(self): idx = pd.period_range('2011-01-01', '2011-01-31', freq='D', name='idx') result = idx.drop_duplicates() tm.assert_index_equal(idx, result) - self.assertEqual(idx.freq, result.freq) + assert idx.freq == result.freq idx_dup = idx.append(idx) # freq will not be reset result = idx_dup.drop_duplicates() tm.assert_index_equal(idx, result) - self.assertEqual(idx.freq, result.freq) + assert idx.freq == result.freq def test_drop_duplicates(self): # to check Index/Series compat @@ -601,7 +601,7 @@ def test_drop_duplicates(self): def test_order_compat(self): def _check_freq(index, expected_index): if isinstance(index, PeriodIndex): - self.assertEqual(index.freq, expected_index.freq) + assert index.freq == expected_index.freq pidx = PeriodIndex(['2011', '2012', '2013'], name='pidx', freq='A') # for compatibility check @@ -666,13 +666,13 @@ def _check_freq(index, expected_index): expected = PeriodIndex(['NaT', '2011', '2011', '2013'], name='pidx', freq='D') tm.assert_index_equal(result, expected) - self.assertEqual(result.freq, 'D') + assert result.freq == 'D' result = pidx.sort_values(ascending=False) expected = PeriodIndex( ['2013', '2011', '2011', 'NaT'], name='pidx', freq='D') tm.assert_index_equal(result, expected) - self.assertEqual(result.freq, 'D') + assert result.freq == 'D' def test_order(self): for freq in ['D', '2D', '4D']: @@ -681,20 +681,20 @@ def test_order(self): ordered = idx.sort_values() tm.assert_index_equal(ordered, idx) - self.assertEqual(ordered.freq, idx.freq) + assert ordered.freq == idx.freq ordered = idx.sort_values(ascending=False) expected = idx[::-1] tm.assert_index_equal(ordered, expected) - self.assertEqual(ordered.freq, expected.freq) - self.assertEqual(ordered.freq, freq) + assert ordered.freq == expected.freq + assert ordered.freq == freq ordered, indexer = idx.sort_values(return_indexer=True) tm.assert_index_equal(ordered, idx) tm.assert_numpy_array_equal(indexer, np.array([0, 1, 2]), check_dtype=False) - self.assertEqual(ordered.freq, idx.freq) - self.assertEqual(ordered.freq, freq) + assert ordered.freq == idx.freq + assert ordered.freq == freq ordered, indexer = idx.sort_values(return_indexer=True, ascending=False) @@ -702,8 +702,8 @@ def test_order(self): tm.assert_index_equal(ordered, expected) tm.assert_numpy_array_equal(indexer, np.array([2, 1, 0]), check_dtype=False) - self.assertEqual(ordered.freq, expected.freq) - 
self.assertEqual(ordered.freq, freq) + assert ordered.freq == expected.freq + assert ordered.freq == freq idx1 = PeriodIndex(['2011-01-01', '2011-01-03', '2011-01-05', '2011-01-02', '2011-01-01'], freq='D', name='idx1') @@ -725,18 +725,18 @@ def test_order(self): for idx, expected in [(idx1, exp1), (idx2, exp2), (idx3, exp3)]: ordered = idx.sort_values() tm.assert_index_equal(ordered, expected) - self.assertEqual(ordered.freq, 'D') + assert ordered.freq == 'D' ordered = idx.sort_values(ascending=False) tm.assert_index_equal(ordered, expected[::-1]) - self.assertEqual(ordered.freq, 'D') + assert ordered.freq == 'D' ordered, indexer = idx.sort_values(return_indexer=True) tm.assert_index_equal(ordered, expected) exp = np.array([0, 4, 3, 1, 2]) tm.assert_numpy_array_equal(indexer, exp, check_dtype=False) - self.assertEqual(ordered.freq, 'D') + assert ordered.freq == 'D' ordered, indexer = idx.sort_values(return_indexer=True, ascending=False) @@ -744,7 +744,7 @@ def test_order(self): exp = np.array([2, 1, 3, 4, 0]) tm.assert_numpy_array_equal(indexer, exp, check_dtype=False) - self.assertEqual(ordered.freq, 'D') + assert ordered.freq == 'D' def test_nat_new(self): @@ -1144,7 +1144,7 @@ def test_ops_series_timedelta(self): # GH 13043 s = pd.Series([pd.Period('2015-01-01', freq='D'), pd.Period('2015-01-02', freq='D')], name='xxx') - self.assertEqual(s.dtype, object) + assert s.dtype == object exp = pd.Series([pd.Period('2015-01-02', freq='D'), pd.Period('2015-01-03', freq='D')], name='xxx') @@ -1158,7 +1158,7 @@ def test_ops_series_period(self): # GH 13043 s = pd.Series([pd.Period('2015-01-01', freq='D'), pd.Period('2015-01-02', freq='D')], name='xxx') - self.assertEqual(s.dtype, object) + assert s.dtype == object p = pd.Period('2015-01-10', freq='D') # dtype will be object because of original dtype @@ -1168,7 +1168,7 @@ def test_ops_series_period(self): s2 = pd.Series([pd.Period('2015-01-05', freq='D'), pd.Period('2015-01-04', freq='D')], name='xxx') - self.assertEqual(s2.dtype, object) + assert s2.dtype == object exp = pd.Series([4, 2], name='xxx', dtype=object) tm.assert_series_equal(s2 - s, exp) @@ -1183,8 +1183,8 @@ def test_ops_frame_period(self): pd.Period('2015-02', freq='M')], 'B': [pd.Period('2014-01', freq='M'), pd.Period('2014-02', freq='M')]}) - self.assertEqual(df['A'].dtype, object) - self.assertEqual(df['B'].dtype, object) + assert df['A'].dtype == object + assert df['B'].dtype == object p = pd.Period('2015-03', freq='M') # dtype will be object because of original dtype @@ -1197,8 +1197,8 @@ def test_ops_frame_period(self): pd.Period('2015-06', freq='M')], 'B': [pd.Period('2015-05', freq='M'), pd.Period('2015-06', freq='M')]}) - self.assertEqual(df2['A'].dtype, object) - self.assertEqual(df2['B'].dtype, object) + assert df2['A'].dtype == object + assert df2['B'].dtype == object exp = pd.DataFrame({'A': np.array([4, 4], dtype=object), 'B': np.array([16, 16], dtype=object)}) diff --git a/pandas/tests/indexes/period/test_partial_slicing.py b/pandas/tests/indexes/period/test_partial_slicing.py index 7c1279a12450c..04b4e6795e770 100644 --- a/pandas/tests/indexes/period/test_partial_slicing.py +++ b/pandas/tests/indexes/period/test_partial_slicing.py @@ -51,7 +51,7 @@ def test_slice_with_zero_step_raises(self): def test_slice_keep_name(self): idx = period_range('20010101', periods=10, freq='D', name='bob') - self.assertEqual(idx.name, idx[1:].name) + assert idx.name == idx[1:].name def test_pindex_slice_index(self): pi = PeriodIndex(start='1/1/10', end='12/31/12', freq='M') diff --git 
a/pandas/tests/indexes/period/test_period.py b/pandas/tests/indexes/period/test_period.py index 8ee3e9d6707b4..6ec567509cd76 100644 --- a/pandas/tests/indexes/period/test_period.py +++ b/pandas/tests/indexes/period/test_period.py @@ -56,8 +56,8 @@ def test_pickle_compat_construction(self): pass def test_pickle_round_trip(self): - for freq in ['D', 'M', 'Y']: - idx = PeriodIndex(['2016-05-16', 'NaT', NaT, np.NaN], freq='D') + for freq in ['D', 'M', 'A']: + idx = PeriodIndex(['2016-05-16', 'NaT', NaT, np.NaN], freq=freq) result = tm.round_trip_pickle(idx) tm.assert_index_equal(result, idx) @@ -65,23 +65,22 @@ def test_get_loc(self): idx = pd.period_range('2000-01-01', periods=3) for method in [None, 'pad', 'backfill', 'nearest']: - self.assertEqual(idx.get_loc(idx[1], method), 1) - self.assertEqual( - idx.get_loc(idx[1].asfreq('H', how='start'), method), 1) - self.assertEqual(idx.get_loc(idx[1].to_timestamp(), method), 1) - self.assertEqual( - idx.get_loc(idx[1].to_timestamp().to_pydatetime(), method), 1) - self.assertEqual(idx.get_loc(str(idx[1]), method), 1) + assert idx.get_loc(idx[1], method) == 1 + assert idx.get_loc(idx[1].asfreq('H', how='start'), method) == 1 + assert idx.get_loc(idx[1].to_timestamp(), method) == 1 + assert idx.get_loc(idx[1].to_timestamp() + .to_pydatetime(), method) == 1 + assert idx.get_loc(str(idx[1]), method) == 1 idx = pd.period_range('2000-01-01', periods=5)[::2] - self.assertEqual(idx.get_loc('2000-01-02T12', method='nearest', - tolerance='1 day'), 1) - self.assertEqual(idx.get_loc('2000-01-02T12', method='nearest', - tolerance=pd.Timedelta('1D')), 1) - self.assertEqual(idx.get_loc('2000-01-02T12', method='nearest', - tolerance=np.timedelta64(1, 'D')), 1) - self.assertEqual(idx.get_loc('2000-01-02T12', method='nearest', - tolerance=timedelta(1)), 1) + assert idx.get_loc('2000-01-02T12', method='nearest', + tolerance='1 day') == 1 + assert idx.get_loc('2000-01-02T12', method='nearest', + tolerance=pd.Timedelta('1D')) == 1 + assert idx.get_loc('2000-01-02T12', method='nearest', + tolerance=np.timedelta64(1, 'D')) == 1 + assert idx.get_loc('2000-01-02T12', method='nearest', + tolerance=timedelta(1)) == 1 with tm.assert_raises_regex(ValueError, 'must be convertible'): idx.get_loc('2000-01-10', method='nearest', tolerance='foo') @@ -164,7 +163,7 @@ def test_repeat(self): res = idx.repeat(3) exp = PeriodIndex(idx.values.repeat(3), freq='D') tm.assert_index_equal(res, exp) - self.assertEqual(res.freqstr, 'D') + assert res.freqstr == 'D' def test_period_index_indexer(self): # GH4125 @@ -243,12 +242,12 @@ def test_shallow_copy_empty(self): def test_dtype_str(self): pi = pd.PeriodIndex([], freq='M') - self.assertEqual(pi.dtype_str, 'period[M]') - self.assertEqual(pi.dtype_str, str(pi.dtype)) + assert pi.dtype_str == 'period[M]' + assert pi.dtype_str == str(pi.dtype) pi = pd.PeriodIndex([], freq='3M') - self.assertEqual(pi.dtype_str, 'period[3M]') - self.assertEqual(pi.dtype_str, str(pi.dtype)) + assert pi.dtype_str == 'period[3M]' + assert pi.dtype_str == str(pi.dtype) def test_view_asi8(self): idx = pd.PeriodIndex([], freq='M') @@ -296,37 +295,37 @@ def test_values(self): def test_period_index_length(self): pi = PeriodIndex(freq='A', start='1/1/2001', end='12/1/2009') - self.assertEqual(len(pi), 9) + assert len(pi) == 9 pi = PeriodIndex(freq='Q', start='1/1/2001', end='12/1/2009') - self.assertEqual(len(pi), 4 * 9) + assert len(pi) == 4 * 9 pi = PeriodIndex(freq='M', start='1/1/2001', end='12/1/2009') - self.assertEqual(len(pi), 12 * 9) + assert len(pi) == 12 * 9 
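# --- illustrative sketch, not part of the patch --------------------------------
# The length checks in this hunk follow the same mechanical rewrite as the rest
# of the change-set: self.assertEqual(a, b) becomes a bare `assert a == b`, and
# pytest's assertion rewriting still reports both operands on failure. A
# stand-alone version of one such check (using the period/frequency aliases in
# use at the time of this patch):
import pandas as pd

pi = pd.period_range(start='1/1/2001', end='12/1/2009', freq='M')
# old style (unittest.TestCase): self.assertEqual(len(pi), 12 * 9)
assert len(pi) == 12 * 9  # Jan 2001 .. Dec 2009 -> 9 years x 12 months
# --------------------------------------------------------------------------------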
start = Period('02-Apr-2005', 'B') i1 = PeriodIndex(start=start, periods=20) - self.assertEqual(len(i1), 20) - self.assertEqual(i1.freq, start.freq) - self.assertEqual(i1[0], start) + assert len(i1) == 20 + assert i1.freq == start.freq + assert i1[0] == start end_intv = Period('2006-12-31', 'W') i1 = PeriodIndex(end=end_intv, periods=10) - self.assertEqual(len(i1), 10) - self.assertEqual(i1.freq, end_intv.freq) - self.assertEqual(i1[-1], end_intv) + assert len(i1) == 10 + assert i1.freq == end_intv.freq + assert i1[-1] == end_intv end_intv = Period('2006-12-31', '1w') i2 = PeriodIndex(end=end_intv, periods=10) - self.assertEqual(len(i1), len(i2)) + assert len(i1) == len(i2) assert (i1 == i2).all() - self.assertEqual(i1.freq, i2.freq) + assert i1.freq == i2.freq end_intv = Period('2006-12-31', ('w', 1)) i2 = PeriodIndex(end=end_intv, periods=10) - self.assertEqual(len(i1), len(i2)) + assert len(i1) == len(i2) assert (i1 == i2).all() - self.assertEqual(i1.freq, i2.freq) + assert i1.freq == i2.freq try: PeriodIndex(start=start, end=end_intv) @@ -346,12 +345,12 @@ def test_period_index_length(self): # infer freq from first element i2 = PeriodIndex([end_intv, Period('2005-05-05', 'B')]) - self.assertEqual(len(i2), 2) - self.assertEqual(i2[0], end_intv) + assert len(i2) == 2 + assert i2[0] == end_intv i2 = PeriodIndex(np.array([end_intv, Period('2005-05-05', 'B')])) - self.assertEqual(len(i2), 2) - self.assertEqual(i2[0], end_intv) + assert len(i2) == 2 + assert i2[0] == end_intv # Mixed freq should fail vals = [end_intv, Period('2006-12-31', 'w')] @@ -402,17 +401,17 @@ def _check_all_fields(self, periodindex): for field in fields: field_idx = getattr(periodindex, field) - self.assertEqual(len(periodindex), len(field_idx)) + assert len(periodindex) == len(field_idx) for x, val in zip(periods, field_idx): - self.assertEqual(getattr(x, field), val) + assert getattr(x, field) == val if len(s) == 0: continue field_s = getattr(s.dt, field) - self.assertEqual(len(periodindex), len(field_s)) + assert len(periodindex) == len(field_s) for x, val in zip(periods, field_s): - self.assertEqual(getattr(x, field), val) + assert getattr(x, field) == val def test_indexing(self): @@ -421,7 +420,7 @@ def test_indexing(self): s = Series(randn(10), index=index) expected = s[index[0]] result = s.iat[0] - self.assertEqual(expected, result) + assert expected == result def test_period_set_index_reindex(self): # GH 6631 @@ -486,20 +485,19 @@ def test_is_(self): create_index = lambda: PeriodIndex(freq='A', start='1/1/2001', end='12/1/2009') index = create_index() - self.assertEqual(index.is_(index), True) - self.assertEqual(index.is_(create_index()), False) - self.assertEqual(index.is_(index.view()), True) - self.assertEqual( - index.is_(index.view().view().view().view().view()), True) - self.assertEqual(index.view().is_(index), True) + assert index.is_(index) + assert not index.is_(create_index()) + assert index.is_(index.view()) + assert index.is_(index.view().view().view().view().view()) + assert index.view().is_(index) ind2 = index.view() index.name = "Apple" - self.assertEqual(ind2.is_(index), True) - self.assertEqual(index.is_(index[:]), False) - self.assertEqual(index.is_(index.asfreq('M')), False) - self.assertEqual(index.is_(index.asfreq('A')), False) - self.assertEqual(index.is_(index - 2), False) - self.assertEqual(index.is_(index - 0), False) + assert ind2.is_(index) + assert not index.is_(index[:]) + assert not index.is_(index.asfreq('M')) + assert not index.is_(index.asfreq('A')) + assert not index.is_(index 
- 2) + assert not index.is_(index - 0) def test_comp_period(self): idx = period_range('2007-01', periods=20, freq='M') @@ -566,14 +564,14 @@ def test_index_unique(self): idx = PeriodIndex([2000, 2007, 2007, 2009, 2009], freq='A-JUN') expected = PeriodIndex([2000, 2007, 2009], freq='A-JUN') tm.assert_index_equal(idx.unique(), expected) - self.assertEqual(idx.nunique(), 3) + assert idx.nunique() == 3 idx = PeriodIndex([2000, 2007, 2007, 2009, 2007], freq='A-JUN', tz='US/Eastern') expected = PeriodIndex([2000, 2007, 2009], freq='A-JUN', tz='US/Eastern') tm.assert_index_equal(idx.unique(), expected) - self.assertEqual(idx.nunique(), 3) + assert idx.nunique() == 3 def test_shift_gh8083(self): @@ -591,32 +589,32 @@ def test_shift(self): tm.assert_index_equal(pi1.shift(0), pi1) - self.assertEqual(len(pi1), len(pi2)) + assert len(pi1) == len(pi2) tm.assert_index_equal(pi1.shift(1), pi2) pi1 = PeriodIndex(freq='A', start='1/1/2001', end='12/1/2009') pi2 = PeriodIndex(freq='A', start='1/1/2000', end='12/1/2008') - self.assertEqual(len(pi1), len(pi2)) + assert len(pi1) == len(pi2) tm.assert_index_equal(pi1.shift(-1), pi2) pi1 = PeriodIndex(freq='M', start='1/1/2001', end='12/1/2009') pi2 = PeriodIndex(freq='M', start='2/1/2001', end='1/1/2010') - self.assertEqual(len(pi1), len(pi2)) + assert len(pi1) == len(pi2) tm.assert_index_equal(pi1.shift(1), pi2) pi1 = PeriodIndex(freq='M', start='1/1/2001', end='12/1/2009') pi2 = PeriodIndex(freq='M', start='12/1/2000', end='11/1/2009') - self.assertEqual(len(pi1), len(pi2)) + assert len(pi1) == len(pi2) tm.assert_index_equal(pi1.shift(-1), pi2) pi1 = PeriodIndex(freq='D', start='1/1/2001', end='12/1/2009') pi2 = PeriodIndex(freq='D', start='1/2/2001', end='12/2/2009') - self.assertEqual(len(pi1), len(pi2)) + assert len(pi1) == len(pi2) tm.assert_index_equal(pi1.shift(1), pi2) pi1 = PeriodIndex(freq='D', start='1/1/2001', end='12/1/2009') pi2 = PeriodIndex(freq='D', start='12/31/2000', end='11/30/2009') - self.assertEqual(len(pi1), len(pi2)) + assert len(pi1) == len(pi2) tm.assert_index_equal(pi1.shift(-1), pi2) def test_shift_nat(self): @@ -626,7 +624,7 @@ def test_shift_nat(self): expected = PeriodIndex(['2011-02', '2011-03', 'NaT', '2011-05'], freq='M', name='idx') tm.assert_index_equal(result, expected) - self.assertEqual(result.name, expected.name) + assert result.name == expected.name def test_ndarray_compat_properties(self): if compat.is_platform_32bit(): @@ -669,7 +667,7 @@ def test_pindex_qaccess(self): pi = PeriodIndex(['2Q05', '3Q05', '4Q05', '1Q06', '2Q06'], freq='Q') s = Series(np.random.rand(len(pi)), index=pi).cumsum() # Todo: fix these accessors! 
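# --- illustrative sketch, not part of the patch --------------------------------
# One pitfall when doing these rewrites by hand: the comparison must be spelled
# with `==`. Writing `assert a, b` instead makes `b` the assertion *message*,
# so the check silently passes whenever `a` is truthy.
a, b = 2, 3
# assert a, b        # wrong rewrite of assertEqual(a, b): never fails while a is truthy
try:
    assert a == b    # correct rewrite: fails, and pytest reports both operands
except AssertionError:
    pass             # swallowed here only so the sketch runs top to bottom
# --------------------------------------------------------------------------------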
- self.assertEqual(s['05Q4'], s[2]) + assert s['05Q4'] == s[2] def test_numpy_repeat(self): index = period_range('20010101', periods=2) @@ -687,25 +685,25 @@ def test_pindex_multiples(self): expected = PeriodIndex(['2011-01', '2011-03', '2011-05', '2011-07', '2011-09', '2011-11'], freq='2M') tm.assert_index_equal(pi, expected) - self.assertEqual(pi.freq, offsets.MonthEnd(2)) - self.assertEqual(pi.freqstr, '2M') + assert pi.freq == offsets.MonthEnd(2) + assert pi.freqstr == '2M' pi = period_range(start='1/1/11', end='12/31/11', freq='2M') tm.assert_index_equal(pi, expected) - self.assertEqual(pi.freq, offsets.MonthEnd(2)) - self.assertEqual(pi.freqstr, '2M') + assert pi.freq == offsets.MonthEnd(2) + assert pi.freqstr == '2M' pi = period_range(start='1/1/11', periods=6, freq='2M') tm.assert_index_equal(pi, expected) - self.assertEqual(pi.freq, offsets.MonthEnd(2)) - self.assertEqual(pi.freqstr, '2M') + assert pi.freq == offsets.MonthEnd(2) + assert pi.freqstr == '2M' def test_iteration(self): index = PeriodIndex(start='1/1/10', periods=4, freq='B') result = list(index) assert isinstance(result[0], Period) - self.assertEqual(result[0].freq, index.freq) + assert result[0].freq == index.freq def test_is_full(self): index = PeriodIndex([2005, 2007, 2009], freq='A') @@ -757,14 +755,14 @@ def test_append_concat(self): # drops index result = pd.concat([s1, s2]) assert isinstance(result.index, PeriodIndex) - self.assertEqual(result.index[0], s1.index[0]) + assert result.index[0] == s1.index[0] def test_pickle_freq(self): # GH2891 prng = period_range('1/1/2011', '1/1/2012', freq='M') new_prng = tm.round_trip_pickle(prng) - self.assertEqual(new_prng.freq, offsets.MonthEnd()) - self.assertEqual(new_prng.freqstr, 'M') + assert new_prng.freq == offsets.MonthEnd() + assert new_prng.freqstr == 'M' def test_map(self): index = PeriodIndex([2005, 2007, 2009], freq='A') diff --git a/pandas/tests/indexes/period/test_setops.py b/pandas/tests/indexes/period/test_setops.py index e1fdc85d670d4..025ee7e732a7c 100644 --- a/pandas/tests/indexes/period/test_setops.py +++ b/pandas/tests/indexes/period/test_setops.py @@ -24,7 +24,7 @@ def test_joins(self): joined = index.join(index[:-5], how=kind) assert isinstance(joined, PeriodIndex) - self.assertEqual(joined.freq, index.freq) + assert joined.freq == index.freq def test_join_self(self): index = period_range('1/1/2000', '1/20/2000', freq='D') @@ -172,8 +172,8 @@ def test_intersection_cases(self): (rng4, expected4)]: result = base.intersection(rng) tm.assert_index_equal(result, expected) - self.assertEqual(result.name, expected.name) - self.assertEqual(result.freq, expected.freq) + assert result.name == expected.name + assert result.freq == expected.freq # non-monotonic base = PeriodIndex(['2011-01-05', '2011-01-04', '2011-01-02', @@ -198,16 +198,16 @@ def test_intersection_cases(self): (rng4, expected4)]: result = base.intersection(rng) tm.assert_index_equal(result, expected) - self.assertEqual(result.name, expected.name) - self.assertEqual(result.freq, 'D') + assert result.name == expected.name + assert result.freq == 'D' # empty same freq rng = date_range('6/1/2000', '6/15/2000', freq='T') result = rng[0:0].intersection(rng) - self.assertEqual(len(result), 0) + assert len(result) == 0 result = rng.intersection(rng[0:0]) - self.assertEqual(len(result), 0) + assert len(result) == 0 def test_difference(self): # diff diff --git a/pandas/tests/indexes/period/test_tools.py b/pandas/tests/indexes/period/test_tools.py index 60ad8fed32399..9e5994dd54f50 100644 --- 
a/pandas/tests/indexes/period/test_tools.py +++ b/pandas/tests/indexes/period/test_tools.py @@ -65,7 +65,7 @@ def test_negone_ordinals(self): for freq in freqs: period = Period(ordinal=-1, freq=freq) repr(period) - self.assertEqual(period.year, 1969) + assert period.year == 1969 period = Period(ordinal=-1, freq='B') repr(period) @@ -75,97 +75,79 @@ def test_negone_ordinals(self): class TestTslib(tm.TestCase): def test_intraday_conversion_factors(self): - self.assertEqual(period_asfreq( - 1, get_freq('D'), get_freq('H'), False), 24) - self.assertEqual(period_asfreq( - 1, get_freq('D'), get_freq('T'), False), 1440) - self.assertEqual(period_asfreq( - 1, get_freq('D'), get_freq('S'), False), 86400) - self.assertEqual(period_asfreq(1, get_freq( - 'D'), get_freq('L'), False), 86400000) - self.assertEqual(period_asfreq(1, get_freq( - 'D'), get_freq('U'), False), 86400000000) - self.assertEqual(period_asfreq(1, get_freq( - 'D'), get_freq('N'), False), 86400000000000) - - self.assertEqual(period_asfreq( - 1, get_freq('H'), get_freq('T'), False), 60) - self.assertEqual(period_asfreq( - 1, get_freq('H'), get_freq('S'), False), 3600) - self.assertEqual(period_asfreq(1, get_freq('H'), - get_freq('L'), False), 3600000) - self.assertEqual(period_asfreq(1, get_freq( - 'H'), get_freq('U'), False), 3600000000) - self.assertEqual(period_asfreq(1, get_freq( - 'H'), get_freq('N'), False), 3600000000000) - - self.assertEqual(period_asfreq( - 1, get_freq('T'), get_freq('S'), False), 60) - self.assertEqual(period_asfreq( - 1, get_freq('T'), get_freq('L'), False), 60000) - self.assertEqual(period_asfreq(1, get_freq( - 'T'), get_freq('U'), False), 60000000) - self.assertEqual(period_asfreq(1, get_freq( - 'T'), get_freq('N'), False), 60000000000) - - self.assertEqual(period_asfreq( - 1, get_freq('S'), get_freq('L'), False), 1000) - self.assertEqual(period_asfreq(1, get_freq('S'), - get_freq('U'), False), 1000000) - self.assertEqual(period_asfreq(1, get_freq( - 'S'), get_freq('N'), False), 1000000000) - - self.assertEqual(period_asfreq( - 1, get_freq('L'), get_freq('U'), False), 1000) - self.assertEqual(period_asfreq(1, get_freq('L'), - get_freq('N'), False), 1000000) - - self.assertEqual(period_asfreq( - 1, get_freq('U'), get_freq('N'), False), 1000) + assert period_asfreq(1, get_freq('D'), get_freq('H'), False) == 24 + assert period_asfreq(1, get_freq('D'), get_freq('T'), False) == 1440 + assert period_asfreq(1, get_freq('D'), get_freq('S'), False) == 86400 + assert period_asfreq(1, get_freq('D'), + get_freq('L'), False) == 86400000 + assert period_asfreq(1, get_freq('D'), + get_freq('U'), False) == 86400000000 + assert period_asfreq(1, get_freq('D'), + get_freq('N'), False) == 86400000000000 + + assert period_asfreq(1, get_freq('H'), get_freq('T'), False) == 60 + assert period_asfreq(1, get_freq('H'), get_freq('S'), False) == 3600 + assert period_asfreq(1, get_freq('H'), + get_freq('L'), False) == 3600000 + assert period_asfreq(1, get_freq('H'), + get_freq('U'), False) == 3600000000 + assert period_asfreq(1, get_freq('H'), + get_freq('N'), False) == 3600000000000 + + assert period_asfreq(1, get_freq('T'), get_freq('S'), False) == 60 + assert period_asfreq(1, get_freq('T'), get_freq('L'), False) == 60000 + assert period_asfreq(1, get_freq('T'), + get_freq('U'), False) == 60000000 + assert period_asfreq(1, get_freq('T'), + get_freq('N'), False) == 60000000000 + + assert period_asfreq(1, get_freq('S'), get_freq('L'), False) == 1000 + assert period_asfreq(1, get_freq('S'), + get_freq('U'), False) == 1000000 + assert 
period_asfreq(1, get_freq('S'), + get_freq('N'), False) == 1000000000 + + assert period_asfreq(1, get_freq('L'), get_freq('U'), False) == 1000 + assert period_asfreq(1, get_freq('L'), + get_freq('N'), False) == 1000000 + + assert period_asfreq(1, get_freq('U'), get_freq('N'), False) == 1000 def test_period_ordinal_start_values(self): # information for 1.1.1970 - self.assertEqual(0, period_ordinal(1970, 1, 1, 0, 0, 0, 0, 0, - get_freq('A'))) - self.assertEqual(0, period_ordinal(1970, 1, 1, 0, 0, 0, 0, 0, - get_freq('M'))) - self.assertEqual(1, period_ordinal(1970, 1, 1, 0, 0, 0, 0, 0, - get_freq('W'))) - self.assertEqual(0, period_ordinal(1970, 1, 1, 0, 0, 0, 0, 0, - get_freq('D'))) - self.assertEqual(0, period_ordinal(1970, 1, 1, 0, 0, 0, 0, 0, - get_freq('B'))) + assert period_ordinal(1970, 1, 1, 0, 0, 0, 0, 0, get_freq('A')) == 0 + assert period_ordinal(1970, 1, 1, 0, 0, 0, 0, 0, get_freq('M')) == 0 + assert period_ordinal(1970, 1, 1, 0, 0, 0, 0, 0, get_freq('W')) == 1 + assert period_ordinal(1970, 1, 1, 0, 0, 0, 0, 0, get_freq('D')) == 0 + assert period_ordinal(1970, 1, 1, 0, 0, 0, 0, 0, get_freq('B')) == 0 def test_period_ordinal_week(self): - self.assertEqual(1, period_ordinal(1970, 1, 4, 0, 0, 0, 0, 0, - get_freq('W'))) - self.assertEqual(2, period_ordinal(1970, 1, 5, 0, 0, 0, 0, 0, - get_freq('W'))) - - self.assertEqual(2284, period_ordinal(2013, 10, 6, 0, 0, 0, 0, 0, - get_freq('W'))) - self.assertEqual(2285, period_ordinal(2013, 10, 7, 0, 0, 0, 0, 0, - get_freq('W'))) + assert period_ordinal(1970, 1, 4, 0, 0, 0, 0, 0, get_freq('W')) == 1 + assert period_ordinal(1970, 1, 5, 0, 0, 0, 0, 0, get_freq('W')) == 2 + assert period_ordinal(2013, 10, 6, 0, + 0, 0, 0, 0, get_freq('W')) == 2284 + assert period_ordinal(2013, 10, 7, 0, + 0, 0, 0, 0, get_freq('W')) == 2285 def test_period_ordinal_business_day(self): # Thursday - self.assertEqual(11415, period_ordinal(2013, 10, 3, 0, 0, 0, 0, 0, - get_freq('B'))) + assert period_ordinal(2013, 10, 3, 0, + 0, 0, 0, 0, get_freq('B')) == 11415 # Friday - self.assertEqual(11416, period_ordinal(2013, 10, 4, 0, 0, 0, 0, 0, - get_freq('B'))) + assert period_ordinal(2013, 10, 4, 0, + 0, 0, 0, 0, get_freq('B')) == 11416 # Saturday - self.assertEqual(11417, period_ordinal(2013, 10, 5, 0, 0, 0, 0, 0, - get_freq('B'))) + assert period_ordinal(2013, 10, 5, 0, + 0, 0, 0, 0, get_freq('B')) == 11417 # Sunday - self.assertEqual(11417, period_ordinal(2013, 10, 6, 0, 0, 0, 0, 0, - get_freq('B'))) + assert period_ordinal(2013, 10, 6, 0, + 0, 0, 0, 0, get_freq('B')) == 11417 # Monday - self.assertEqual(11417, period_ordinal(2013, 10, 7, 0, 0, 0, 0, 0, - get_freq('B'))) + assert period_ordinal(2013, 10, 7, 0, + 0, 0, 0, 0, get_freq('B')) == 11417 # Tuesday - self.assertEqual(11418, period_ordinal(2013, 10, 8, 0, 0, 0, 0, 0, - get_freq('B'))) + assert period_ordinal(2013, 10, 8, 0, + 0, 0, 0, 0, get_freq('B')) == 11418 class TestPeriodIndex(tm.TestCase): @@ -189,7 +171,7 @@ def test_to_timestamp(self): exp_index = date_range('1/1/2001', end='12/31/2009', freq='A-DEC') result = series.to_timestamp(how='end') tm.assert_index_equal(result.index, exp_index) - self.assertEqual(result.name, 'foo') + assert result.name == 'foo' exp_index = date_range('1/1/2001', end='1/1/2009', freq='AS-JAN') result = series.to_timestamp(how='start') @@ -221,7 +203,7 @@ def _get_with_delta(delta, freq='A-DEC'): freq='H') result = series.to_timestamp(how='end') tm.assert_index_equal(result.index, exp_index) - self.assertEqual(result.name, 'foo') + assert result.name == 'foo' def 
test_to_timestamp_quarterly_bug(self): years = np.arange(1960, 2000).repeat(4) @@ -236,10 +218,10 @@ def test_to_timestamp_quarterly_bug(self): def test_to_timestamp_preserve_name(self): index = PeriodIndex(freq='A', start='1/1/2001', end='12/1/2009', name='foo') - self.assertEqual(index.name, 'foo') + assert index.name == 'foo' conv = index.to_timestamp('D') - self.assertEqual(conv.name, 'foo') + assert conv.name == 'foo' def test_to_timestamp_repr_is_code(self): zs = [Timestamp('99-04-17 00:00:00', tz='UTC'), @@ -247,7 +229,7 @@ def test_to_timestamp_repr_is_code(self): Timestamp('2001-04-17 00:00:00', tz='America/Los_Angeles'), Timestamp('2001-04-17 00:00:00', tz=None)] for z in zs: - self.assertEqual(eval(repr(z)), z) + assert eval(repr(z)) == z def test_to_timestamp_pi_nat(self): # GH 7228 @@ -258,16 +240,16 @@ def test_to_timestamp_pi_nat(self): expected = DatetimeIndex([pd.NaT, datetime(2011, 1, 1), datetime(2011, 2, 1)], name='idx') tm.assert_index_equal(result, expected) - self.assertEqual(result.name, 'idx') + assert result.name == 'idx' result2 = result.to_period(freq='M') tm.assert_index_equal(result2, index) - self.assertEqual(result2.name, 'idx') + assert result2.name == 'idx' result3 = result.to_period(freq='3M') exp = PeriodIndex(['NaT', '2011-01', '2011-02'], freq='3M', name='idx') tm.assert_index_equal(result3, exp) - self.assertEqual(result3.freqstr, '3M') + assert result3.freqstr == '3M' msg = ('Frequency must be positive, because it' ' represents span: -2A') @@ -317,13 +299,13 @@ def test_dti_to_period(self): pi2 = dti.to_period(freq='D') pi3 = dti.to_period(freq='3D') - self.assertEqual(pi1[0], Period('Jan 2005', freq='M')) - self.assertEqual(pi2[0], Period('1/31/2005', freq='D')) - self.assertEqual(pi3[0], Period('1/31/2005', freq='3D')) + assert pi1[0] == Period('Jan 2005', freq='M') + assert pi2[0] == Period('1/31/2005', freq='D') + assert pi3[0] == Period('1/31/2005', freq='3D') - self.assertEqual(pi1[-1], Period('Nov 2005', freq='M')) - self.assertEqual(pi2[-1], Period('11/30/2005', freq='D')) - self.assertEqual(pi3[-1], Period('11/30/2005', freq='3D')) + assert pi1[-1] == Period('Nov 2005', freq='M') + assert pi2[-1] == Period('11/30/2005', freq='D') + assert pi3[-1] == Period('11/30/2005', freq='3D') tm.assert_index_equal(pi1, period_range('1/1/2005', '11/1/2005', freq='M')) @@ -365,25 +347,25 @@ def test_to_period_quarterlyish(self): for off in offsets: rng = date_range('01-Jan-2012', periods=8, freq=off) prng = rng.to_period() - self.assertEqual(prng.freq, 'Q-DEC') + assert prng.freq == 'Q-DEC' def test_to_period_annualish(self): offsets = ['BA', 'AS', 'BAS'] for off in offsets: rng = date_range('01-Jan-2012', periods=8, freq=off) prng = rng.to_period() - self.assertEqual(prng.freq, 'A-DEC') + assert prng.freq == 'A-DEC' def test_to_period_monthish(self): offsets = ['MS', 'BM'] for off in offsets: rng = date_range('01-Jan-2012', periods=8, freq=off) prng = rng.to_period() - self.assertEqual(prng.freq, 'M') + assert prng.freq == 'M' rng = date_range('01-Jan-2012', periods=8, freq='M') prng = rng.to_period() - self.assertEqual(prng.freq, 'M') + assert prng.freq == 'M' msg = pd.tseries.frequencies._INVALID_FREQ_ERROR with tm.assert_raises_regex(ValueError, msg): @@ -402,7 +384,7 @@ def test_to_timestamp_1703(self): index = period_range('1/1/2012', periods=4, freq='D') result = index.to_timestamp() - self.assertEqual(result[0], Timestamp('1/1/2012')) + assert result[0] == Timestamp('1/1/2012') def test_to_datetime_depr(self): index = period_range('1/1/2012', 
periods=4, freq='D') @@ -410,7 +392,7 @@ def test_to_datetime_depr(self): with tm.assert_produces_warning(FutureWarning, check_stacklevel=False): result = index.to_datetime() - self.assertEqual(result[0], Timestamp('1/1/2012')) + assert result[0] == Timestamp('1/1/2012') def test_combine_first(self): # GH 3367 @@ -433,10 +415,10 @@ def test_searchsorted(self): '2014-01-04', '2014-01-05'], freq=freq) p1 = pd.Period('2014-01-01', freq=freq) - self.assertEqual(pidx.searchsorted(p1), 0) + assert pidx.searchsorted(p1) == 0 p2 = pd.Period('2014-01-04', freq=freq) - self.assertEqual(pidx.searchsorted(p2), 3) + assert pidx.searchsorted(p2) == 3 msg = "Input has different freq=H from PeriodIndex" with tm.assert_raises_regex( diff --git a/pandas/tests/indexes/test_base.py b/pandas/tests/indexes/test_base.py index 8ac1ef3e1911b..23c72e511d2b3 100644 --- a/pandas/tests/indexes/test_base.py +++ b/pandas/tests/indexes/test_base.py @@ -54,14 +54,14 @@ def create_index(self): def test_new_axis(self): new_index = self.dateIndex[None, :] - self.assertEqual(new_index.ndim, 2) + assert new_index.ndim == 2 assert isinstance(new_index, np.ndarray) def test_copy_and_deepcopy(self): super(TestIndex, self).test_copy_and_deepcopy() new_copy2 = self.intIndex.copy(dtype=int) - self.assertEqual(new_copy2.dtype.kind, 'i') + assert new_copy2.dtype.kind == 'i' def test_constructor(self): # regular instance creation @@ -78,7 +78,7 @@ def test_constructor(self): arr = np.array(self.strIndex) index = Index(arr, copy=True, name='name') assert isinstance(index, Index) - self.assertEqual(index.name, 'name') + assert index.name == 'name' tm.assert_numpy_array_equal(arr, index.values) arr[0] = "SOMEBIGLONGSTRING" self.assertNotEqual(index[0], "SOMEBIGLONGSTRING") @@ -107,11 +107,11 @@ def test_constructor_from_index_datetimetz(self): tz='US/Eastern') result = pd.Index(idx) tm.assert_index_equal(result, idx) - self.assertEqual(result.tz, idx.tz) + assert result.tz == idx.tz result = pd.Index(idx.asobject) tm.assert_index_equal(result, idx) - self.assertEqual(result.tz, idx.tz) + assert result.tz == idx.tz def test_constructor_from_index_timedelta(self): idx = pd.timedelta_range('1 days', freq='D', periods=3) @@ -134,7 +134,7 @@ def test_constructor_from_series_datetimetz(self): tz='US/Eastern') result = pd.Index(pd.Series(idx)) tm.assert_index_equal(result, idx) - self.assertEqual(result.tz, idx.tz) + assert result.tz == idx.tz def test_constructor_from_series_timedelta(self): idx = pd.timedelta_range('1 days', freq='D', periods=3) @@ -172,7 +172,7 @@ def test_constructor_from_series(self): result = DatetimeIndex(df['date'], freq='MS') expected.name = 'date' tm.assert_index_equal(result, expected) - self.assertEqual(df['date'].dtype, object) + assert df['date'].dtype == object exp = pd.Series(['1-1-1990', '2-1-1990', '3-1-1990', '4-1-1990', '5-1-1990'], name='date') @@ -181,7 +181,7 @@ def test_constructor_from_series(self): # GH 6274 # infer freq of same result = pd.infer_freq(df['date']) - self.assertEqual(result, 'MS') + assert result == 'MS' def test_constructor_ndarray_like(self): # GH 5460#issuecomment-44474502 @@ -221,17 +221,17 @@ def test_constructor_int_dtype_nan(self): def test_index_ctor_infer_nan_nat(self): # GH 13467 exp = pd.Float64Index([np.nan, np.nan]) - self.assertEqual(exp.dtype, np.float64) + assert exp.dtype == np.float64 tm.assert_index_equal(Index([np.nan, np.nan]), exp) tm.assert_index_equal(Index(np.array([np.nan, np.nan])), exp) exp = pd.DatetimeIndex([pd.NaT, pd.NaT]) - self.assertEqual(exp.dtype, 
'datetime64[ns]') + assert exp.dtype == 'datetime64[ns]' tm.assert_index_equal(Index([pd.NaT, pd.NaT]), exp) tm.assert_index_equal(Index(np.array([pd.NaT, pd.NaT])), exp) exp = pd.DatetimeIndex([pd.NaT, pd.NaT]) - self.assertEqual(exp.dtype, 'datetime64[ns]') + assert exp.dtype == 'datetime64[ns]' for data in [[pd.NaT, np.nan], [np.nan, pd.NaT], [np.nan, np.datetime64('nat')], @@ -240,7 +240,7 @@ def test_index_ctor_infer_nan_nat(self): tm.assert_index_equal(Index(np.array(data, dtype=object)), exp) exp = pd.TimedeltaIndex([pd.NaT, pd.NaT]) - self.assertEqual(exp.dtype, 'timedelta64[ns]') + assert exp.dtype == 'timedelta64[ns]' for data in [[np.nan, np.timedelta64('nat')], [np.timedelta64('nat'), np.nan], @@ -407,7 +407,7 @@ def test_astype(self): # pass on name self.intIndex.name = 'foobar' casted = self.intIndex.astype('i8') - self.assertEqual(casted.name, 'foobar') + assert casted.name == 'foobar' def test_equals_object(self): # same @@ -449,12 +449,12 @@ def test_delete(self): expected = Index(['b', 'c', 'd'], name='idx') result = idx.delete(0) tm.assert_index_equal(result, expected) - self.assertEqual(result.name, expected.name) + assert result.name == expected.name expected = Index(['a', 'b', 'c'], name='idx') result = idx.delete(-1) tm.assert_index_equal(result, expected) - self.assertEqual(result.name, expected.name) + assert result.name == expected.name with pytest.raises((IndexError, ValueError)): # either depending on numpy version @@ -505,11 +505,11 @@ def test_is_(self): def test_asof(self): d = self.dateIndex[0] - self.assertEqual(self.dateIndex.asof(d), d) + assert self.dateIndex.asof(d) == d assert isnull(self.dateIndex.asof(d - timedelta(1))) d = self.dateIndex[-1] - self.assertEqual(self.dateIndex.asof(d + timedelta(1)), d) + assert self.dateIndex.asof(d + timedelta(1)) == d d = self.dateIndex[0].to_pydatetime() assert isinstance(self.dateIndex.asof(d), Timestamp) @@ -518,7 +518,7 @@ def test_asof_datetime_partial(self): idx = pd.date_range('2010-01-01', periods=2, freq='m') expected = Timestamp('2010-02-28') result = idx.asof('2010-02') - self.assertEqual(result, expected) + assert result == expected assert not isinstance(result, Index) def test_nanosecond_index_access(self): @@ -529,12 +529,11 @@ def test_nanosecond_index_access(self): first_value = x.asof(x.index[0]) # this does not yet work, as parsing strings is done via dateutil - # self.assertEqual(first_value, - # x['2013-01-01 00:00:00.000000050+0000']) + # assert first_value == x['2013-01-01 00:00:00.000000050+0000'] exp_ts = np_datetime64_compat('2013-01-01 00:00:00.000000050+0000', 'ns') - self.assertEqual(first_value, x[Timestamp(exp_ts)]) + assert first_value == x[Timestamp(exp_ts)] def test_comparators(self): index = self.dateIndex @@ -564,16 +563,16 @@ def test_booleanindex(self): subIndex = self.strIndex[boolIdx] for i, val in enumerate(subIndex): - self.assertEqual(subIndex.get_loc(val), i) + assert subIndex.get_loc(val) == i subIndex = self.strIndex[list(boolIdx)] for i, val in enumerate(subIndex): - self.assertEqual(subIndex.get_loc(val), i) + assert subIndex.get_loc(val) == i def test_fancy(self): sl = self.strIndex[[1, 2, 3]] for i in sl: - self.assertEqual(i, sl[sl.get_loc(i)]) + assert i == sl[sl.get_loc(i)] def test_empty_fancy(self): empty_farr = np.array([], dtype=np.float_) @@ -598,7 +597,7 @@ def test_getitem(self): exp = self.dateIndex[5] exp = _to_m8(exp) - self.assertEqual(exp, arr[5]) + assert exp == arr[5] def test_intersection(self): first = self.strIndex[:20] @@ -616,14 +615,14 @@ def 
test_intersection(self): expected2 = Index([3, 4, 5], name='idx') result2 = idx1.intersection(idx2) tm.assert_index_equal(result2, expected2) - self.assertEqual(result2.name, expected2.name) + assert result2.name == expected2.name # if target name is different, it will be reset idx3 = Index([3, 4, 5, 6, 7], name='other') expected3 = Index([3, 4, 5], name=None) result3 = idx1.intersection(idx3) tm.assert_index_equal(result3, expected3) - self.assertEqual(result3.name, expected3.name) + assert result3.name == expected3.name # non monotonic idx1 = Index([5, 3, 2, 4, 1], name='idx') @@ -655,7 +654,7 @@ def test_intersection(self): first.name = 'A' second.name = 'A' intersect = first.intersection(second) - self.assertEqual(intersect.name, 'A') + assert intersect.name == 'A' second.name = 'B' intersect = first.intersection(second) @@ -838,7 +837,7 @@ def test_append_empty_preserve_name(self): right = Index([1, 2, 3], name='foo') result = left.append(right) - self.assertEqual(result.name, 'foo') + assert result.name == 'foo' left = Index([], name='foo') right = Index([1, 2, 3], name='bar') @@ -872,22 +871,22 @@ def test_difference(self): result = first.difference(second) assert tm.equalContents(result, answer) - self.assertEqual(result.name, None) + assert result.name is None # same names second.name = 'name' result = first.difference(second) - self.assertEqual(result.name, 'name') + assert result.name == 'name' # with empty result = first.difference([]) assert tm.equalContents(result, first) - self.assertEqual(result.name, first.name) + assert result.name == first.name - # with everythin + # with everything result = first.difference(first) - self.assertEqual(len(result), 0) - self.assertEqual(result.name, first.name) + assert len(result) == 0 + assert result.name == first.name def test_symmetric_difference(self): # smoke @@ -931,11 +930,11 @@ def test_symmetric_difference(self): expected = Index([1, 5]) result = idx1.symmetric_difference(idx2) assert tm.equalContents(result, expected) - self.assertEqual(result.name, 'idx1') + assert result.name == 'idx1' result = idx1.symmetric_difference(idx2, result_name='new_name') assert tm.equalContents(result, expected) - self.assertEqual(result.name, 'new_name') + assert result.name == 'new_name' def test_is_numeric(self): assert not self.dateIndex.is_numeric() @@ -978,19 +977,19 @@ def test_format(self): index = Index([now]) formatted = index.format() expected = [str(index[0])] - self.assertEqual(formatted, expected) + assert formatted == expected # 2845 index = Index([1, 2.0 + 3.0j, np.nan]) formatted = index.format() expected = [str(index[0]), str(index[1]), u('NaN')] - self.assertEqual(formatted, expected) + assert formatted == expected # is this really allowed? 
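# --- illustrative sketch, not part of the patch --------------------------------
# The slice_locs hunks further down in this file compare a returned tuple
# directly; tuple equality is element-wise, so `assert got == (lo, hi)` is a
# faithful replacement for assertEqual on the pair. Mirroring one of those
# checks stand-alone:
import numpy as np
import pandas as pd

idx = pd.Index(np.array([0, 1, 2, 5, 6, 7, 9, 10], dtype=np.int64))
# old: self.assertEqual(idx.slice_locs(start=3), (3, n))
assert idx.slice_locs(start=3) == (3, len(idx))
# --------------------------------------------------------------------------------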
index = Index([1, 2.0 + 3.0j, None]) formatted = index.format() expected = [str(index[0]), str(index[1]), u('NaN')] - self.assertEqual(formatted, expected) + assert formatted == expected self.strIndex[:0].format() @@ -1000,15 +999,15 @@ def test_format_with_name_time_info(self): dates = Index([dt + inc for dt in self.dateIndex], name='something') formatted = dates.format(name=True) - self.assertEqual(formatted[0], 'something') + assert formatted[0] == 'something' def test_format_datetime_with_time(self): t = Index([datetime(2012, 2, 7), datetime(2012, 2, 7, 23)]) result = t.format() expected = ['2012-02-07 00:00:00', '2012-02-07 23:00:00'] - self.assertEqual(len(result), 2) - self.assertEqual(result, expected) + assert len(result) == 2 + assert result == expected def test_format_none(self): values = ['a', 'b', 'c', None] @@ -1019,8 +1018,8 @@ def test_format_none(self): def test_logical_compat(self): idx = self.create_index() - self.assertEqual(idx.all(), idx.values.all()) - self.assertEqual(idx.any(), idx.values.any()) + assert idx.all() == idx.values.all() + assert idx.any() == idx.values.any() def _check_method_works(self, method): method(self.empty) @@ -1138,17 +1137,17 @@ def test_get_loc(self): idx = pd.Index([0, 1, 2]) all_methods = [None, 'pad', 'backfill', 'nearest'] for method in all_methods: - self.assertEqual(idx.get_loc(1, method=method), 1) + assert idx.get_loc(1, method=method) == 1 if method is not None: - self.assertEqual(idx.get_loc(1, method=method, tolerance=0), 1) + assert idx.get_loc(1, method=method, tolerance=0) == 1 with pytest.raises(TypeError): idx.get_loc([1, 2], method=method) for method, loc in [('pad', 1), ('backfill', 2), ('nearest', 1)]: - self.assertEqual(idx.get_loc(1.1, method), loc) + assert idx.get_loc(1.1, method) == loc for method, loc in [('pad', 1), ('backfill', 2), ('nearest', 1)]: - self.assertEqual(idx.get_loc(1.1, method, tolerance=1), loc) + assert idx.get_loc(1.1, method, tolerance=1) == loc for method in ['pad', 'backfill', 'nearest']: with pytest.raises(KeyError): @@ -1170,26 +1169,26 @@ def test_slice_locs(self): idx = Index(np.array([0, 1, 2, 5, 6, 7, 9, 10], dtype=dtype)) n = len(idx) - self.assertEqual(idx.slice_locs(start=2), (2, n)) - self.assertEqual(idx.slice_locs(start=3), (3, n)) - self.assertEqual(idx.slice_locs(3, 8), (3, 6)) - self.assertEqual(idx.slice_locs(5, 10), (3, n)) - self.assertEqual(idx.slice_locs(end=8), (0, 6)) - self.assertEqual(idx.slice_locs(end=9), (0, 7)) + assert idx.slice_locs(start=2) == (2, n) + assert idx.slice_locs(start=3) == (3, n) + assert idx.slice_locs(3, 8) == (3, 6) + assert idx.slice_locs(5, 10) == (3, n) + assert idx.slice_locs(end=8) == (0, 6) + assert idx.slice_locs(end=9) == (0, 7) # reversed idx2 = idx[::-1] - self.assertEqual(idx2.slice_locs(8, 2), (2, 6)) - self.assertEqual(idx2.slice_locs(7, 3), (2, 5)) + assert idx2.slice_locs(8, 2) == (2, 6) + assert idx2.slice_locs(7, 3) == (2, 5) # float slicing idx = Index(np.array([0, 1, 2, 5, 6, 7, 9, 10], dtype=float)) n = len(idx) - self.assertEqual(idx.slice_locs(5.0, 10.0), (3, n)) - self.assertEqual(idx.slice_locs(4.5, 10.5), (3, 8)) + assert idx.slice_locs(5.0, 10.0) == (3, n) + assert idx.slice_locs(4.5, 10.5) == (3, 8) idx2 = idx[::-1] - self.assertEqual(idx2.slice_locs(8.5, 1.5), (2, 6)) - self.assertEqual(idx2.slice_locs(10.5, -1), (0, n)) + assert idx2.slice_locs(8.5, 1.5) == (2, 6) + assert idx2.slice_locs(10.5, -1) == (0, n) # int slicing with floats # GH 4892, these are all TypeErrors @@ -1206,35 +1205,35 @@ def test_slice_locs(self): 
def test_slice_locs_dup(self): idx = Index(['a', 'a', 'b', 'c', 'd', 'd']) - self.assertEqual(idx.slice_locs('a', 'd'), (0, 6)) - self.assertEqual(idx.slice_locs(end='d'), (0, 6)) - self.assertEqual(idx.slice_locs('a', 'c'), (0, 4)) - self.assertEqual(idx.slice_locs('b', 'd'), (2, 6)) + assert idx.slice_locs('a', 'd') == (0, 6) + assert idx.slice_locs(end='d') == (0, 6) + assert idx.slice_locs('a', 'c') == (0, 4) + assert idx.slice_locs('b', 'd') == (2, 6) idx2 = idx[::-1] - self.assertEqual(idx2.slice_locs('d', 'a'), (0, 6)) - self.assertEqual(idx2.slice_locs(end='a'), (0, 6)) - self.assertEqual(idx2.slice_locs('d', 'b'), (0, 4)) - self.assertEqual(idx2.slice_locs('c', 'a'), (2, 6)) + assert idx2.slice_locs('d', 'a') == (0, 6) + assert idx2.slice_locs(end='a') == (0, 6) + assert idx2.slice_locs('d', 'b') == (0, 4) + assert idx2.slice_locs('c', 'a') == (2, 6) for dtype in [int, float]: idx = Index(np.array([10, 12, 12, 14], dtype=dtype)) - self.assertEqual(idx.slice_locs(12, 12), (1, 3)) - self.assertEqual(idx.slice_locs(11, 13), (1, 3)) + assert idx.slice_locs(12, 12) == (1, 3) + assert idx.slice_locs(11, 13) == (1, 3) idx2 = idx[::-1] - self.assertEqual(idx2.slice_locs(12, 12), (1, 3)) - self.assertEqual(idx2.slice_locs(13, 11), (1, 3)) + assert idx2.slice_locs(12, 12) == (1, 3) + assert idx2.slice_locs(13, 11) == (1, 3) def test_slice_locs_na(self): idx = Index([np.nan, 1, 2]) pytest.raises(KeyError, idx.slice_locs, start=1.5) pytest.raises(KeyError, idx.slice_locs, end=1.5) - self.assertEqual(idx.slice_locs(1), (1, 3)) - self.assertEqual(idx.slice_locs(np.nan), (0, 3)) + assert idx.slice_locs(1) == (1, 3) + assert idx.slice_locs(np.nan) == (0, 3) idx = Index([0, np.nan, np.nan, 1, 2]) - self.assertEqual(idx.slice_locs(np.nan), (1, 5)) + assert idx.slice_locs(np.nan) == (1, 5) def test_slice_locs_negative_step(self): idx = Index(list('bcdxy')) @@ -1320,13 +1319,13 @@ def test_tuple_union_bug(self): int_idx = idx1.intersection(idx2) # needs to be 1d like idx1 and idx2 expected = idx1[:4] # pandas.Index(sorted(set(idx1) & set(idx2))) - self.assertEqual(int_idx.ndim, 1) + assert int_idx.ndim == 1 tm.assert_index_equal(int_idx, expected) # union broken union_idx = idx1.union(idx2) expected = idx2 - self.assertEqual(union_idx.ndim, 1) + assert union_idx.ndim == 1 tm.assert_index_equal(union_idx, expected) def test_is_monotonic_incomparable(self): @@ -1341,7 +1340,7 @@ def test_get_set_value(self): assert_almost_equal(self.dateIndex.get_value(values, date), values[67]) self.dateIndex.set_value(values, date, 10) - self.assertEqual(values[67], 10) + assert values[67] == 10 def test_isin(self): values = ['foo', 'bar', 'quux'] @@ -1358,8 +1357,8 @@ def test_isin(self): # empty, return dtype bool idx = Index([]) result = idx.isin(values) - self.assertEqual(len(result), 0) - self.assertEqual(result.dtype, np.bool_) + assert len(result) == 0 + assert result.dtype == np.bool_ def test_isin_nan(self): tm.assert_numpy_array_equal(Index(['a', np.nan]).isin([np.nan]), @@ -1423,7 +1422,7 @@ def test_get_level_values(self): def test_slice_keep_name(self): idx = Index(['a', 'b'], name='asdf') - self.assertEqual(idx.name, idx[1:].name) + assert idx.name == idx[1:].name def test_join_self(self): # instance attributes of the form self.Index @@ -1546,28 +1545,28 @@ def test_reindex_preserves_name_if_target_is_list_or_ndarray(self): dt_idx = pd.date_range('20130101', periods=3) idx.name = None - self.assertEqual(idx.reindex([])[0].name, None) - self.assertEqual(idx.reindex(np.array([]))[0].name, None) - 
self.assertEqual(idx.reindex(idx.tolist())[0].name, None) - self.assertEqual(idx.reindex(idx.tolist()[:-1])[0].name, None) - self.assertEqual(idx.reindex(idx.values)[0].name, None) - self.assertEqual(idx.reindex(idx.values[:-1])[0].name, None) + assert idx.reindex([])[0].name is None + assert idx.reindex(np.array([]))[0].name is None + assert idx.reindex(idx.tolist())[0].name is None + assert idx.reindex(idx.tolist()[:-1])[0].name is None + assert idx.reindex(idx.values)[0].name is None + assert idx.reindex(idx.values[:-1])[0].name is None # Must preserve name even if dtype changes. - self.assertEqual(idx.reindex(dt_idx.values)[0].name, None) - self.assertEqual(idx.reindex(dt_idx.tolist())[0].name, None) + assert idx.reindex(dt_idx.values)[0].name is None + assert idx.reindex(dt_idx.tolist())[0].name is None idx.name = 'foobar' - self.assertEqual(idx.reindex([])[0].name, 'foobar') - self.assertEqual(idx.reindex(np.array([]))[0].name, 'foobar') - self.assertEqual(idx.reindex(idx.tolist())[0].name, 'foobar') - self.assertEqual(idx.reindex(idx.tolist()[:-1])[0].name, 'foobar') - self.assertEqual(idx.reindex(idx.values)[0].name, 'foobar') - self.assertEqual(idx.reindex(idx.values[:-1])[0].name, 'foobar') + assert idx.reindex([])[0].name == 'foobar' + assert idx.reindex(np.array([]))[0].name == 'foobar' + assert idx.reindex(idx.tolist())[0].name == 'foobar' + assert idx.reindex(idx.tolist()[:-1])[0].name == 'foobar' + assert idx.reindex(idx.values)[0].name == 'foobar' + assert idx.reindex(idx.values[:-1])[0].name == 'foobar' # Must preserve name even if dtype changes. - self.assertEqual(idx.reindex(dt_idx.values)[0].name, 'foobar') - self.assertEqual(idx.reindex(dt_idx.tolist())[0].name, 'foobar') + assert idx.reindex(dt_idx.values)[0].name == 'foobar' + assert idx.reindex(dt_idx.tolist())[0].name == 'foobar' def test_reindex_preserves_type_if_target_is_empty_list_or_array(self): # GH7774 @@ -1576,10 +1575,9 @@ def test_reindex_preserves_type_if_target_is_empty_list_or_array(self): def get_reindex_type(target): return idx.reindex(target)[0].dtype.type - self.assertEqual(get_reindex_type([]), np.object_) - self.assertEqual(get_reindex_type(np.array([])), np.object_) - self.assertEqual(get_reindex_type(np.array([], dtype=np.int64)), - np.object_) + assert get_reindex_type([]) == np.object_ + assert get_reindex_type(np.array([])) == np.object_ + assert get_reindex_type(np.array([], dtype=np.int64)) == np.object_ def test_reindex_doesnt_preserve_type_if_target_is_empty_index(self): # GH7774 @@ -1588,14 +1586,14 @@ def test_reindex_doesnt_preserve_type_if_target_is_empty_index(self): def get_reindex_type(target): return idx.reindex(target)[0].dtype.type - self.assertEqual(get_reindex_type(pd.Int64Index([])), np.int64) - self.assertEqual(get_reindex_type(pd.Float64Index([])), np.float64) - self.assertEqual(get_reindex_type(pd.DatetimeIndex([])), np.datetime64) + assert get_reindex_type(pd.Int64Index([])) == np.int64 + assert get_reindex_type(pd.Float64Index([])) == np.float64 + assert get_reindex_type(pd.DatetimeIndex([])) == np.datetime64 reindexed = idx.reindex(pd.MultiIndex( [pd.Int64Index([]), pd.Float64Index([])], [[], []]))[0] - self.assertEqual(reindexed.levels[0].dtype.type, np.int64) - self.assertEqual(reindexed.levels[1].dtype.type, np.float64) + assert reindexed.levels[0].dtype.type == np.int64 + assert reindexed.levels[1].dtype.type == np.float64 def test_groupby(self): idx = Index(range(5)) @@ -1628,8 +1626,8 @@ def test_equals_op_multiindex(self): def 
test_conversion_preserves_name(self): # GH 10875 i = pd.Index(['01:02:03', '01:02:04'], name='label') - self.assertEqual(i.name, pd.to_datetime(i).name) - self.assertEqual(i.name, pd.to_timedelta(i).name) + assert i.name == pd.to_datetime(i).name + assert i.name == pd.to_timedelta(i).name def test_string_index_repr(self): # py3/py2 repr can differ because of "u" prefix @@ -1644,10 +1642,10 @@ def test_string_index_repr(self): idx = pd.Index(['a', 'bb', 'ccc']) if PY3: expected = u"""Index(['a', 'bb', 'ccc'], dtype='object')""" - self.assertEqual(repr(idx), expected) + assert repr(idx) == expected else: expected = u"""Index([u'a', u'bb', u'ccc'], dtype='object')""" - self.assertEqual(coerce(idx), expected) + assert coerce(idx) == expected # multiple lines idx = pd.Index(['a', 'bb', 'ccc'] * 10) @@ -1658,7 +1656,7 @@ def test_string_index_repr(self): 'a', 'bb', 'ccc', 'a', 'bb', 'ccc'], dtype='object')""" - self.assertEqual(repr(idx), expected) + assert repr(idx) == expected else: expected = u"""\ Index([u'a', u'bb', u'ccc', u'a', u'bb', u'ccc', u'a', u'bb', u'ccc', u'a', @@ -1666,7 +1664,7 @@ def test_string_index_repr(self): u'ccc', u'a', u'bb', u'ccc', u'a', u'bb', u'ccc', u'a', u'bb', u'ccc'], dtype='object')""" - self.assertEqual(coerce(idx), expected) + assert coerce(idx) == expected # truncated idx = pd.Index(['a', 'bb', 'ccc'] * 100) @@ -1677,7 +1675,7 @@ def test_string_index_repr(self): 'ccc', 'a', 'bb', 'ccc', 'a', 'bb', 'ccc', 'a', 'bb', 'ccc'], dtype='object', length=300)""" - self.assertEqual(repr(idx), expected) + assert repr(idx) == expected else: expected = u"""\ Index([u'a', u'bb', u'ccc', u'a', u'bb', u'ccc', u'a', u'bb', u'ccc', u'a', @@ -1685,16 +1683,16 @@ def test_string_index_repr(self): u'ccc', u'a', u'bb', u'ccc', u'a', u'bb', u'ccc', u'a', u'bb', u'ccc'], dtype='object', length=300)""" - self.assertEqual(coerce(idx), expected) + assert coerce(idx) == expected # short idx = pd.Index([u'あ', u'いい', u'ううう']) if PY3: expected = u"""Index(['あ', 'いい', 'ううう'], dtype='object')""" - self.assertEqual(repr(idx), expected) + assert repr(idx) == expected else: expected = u"""Index([u'あ', u'いい', u'ううう'], dtype='object')""" - self.assertEqual(coerce(idx), expected) + assert coerce(idx) == expected # multiple lines idx = pd.Index([u'あ', u'いい', u'ううう'] * 10) @@ -1706,7 +1704,7 @@ def test_string_index_repr(self): u" 'あ', 'いい', 'ううう', 'あ', 'いい', " u"'ううう'],\n" u" dtype='object')") - self.assertEqual(repr(idx), expected) + assert repr(idx) == expected else: expected = (u"Index([u'あ', u'いい', u'ううう', u'あ', u'いい', " u"u'ううう', u'あ', u'いい', u'ううう', u'あ',\n" @@ -1715,7 +1713,7 @@ def test_string_index_repr(self): u" u'ううう', u'あ', u'いい', u'ううう', u'あ', " u"u'いい', u'ううう', u'あ', u'いい', u'ううう'],\n" u" dtype='object')") - self.assertEqual(coerce(idx), expected) + assert coerce(idx) == expected # truncated idx = pd.Index([u'あ', u'いい', u'ううう'] * 100) @@ -1726,7 +1724,7 @@ def test_string_index_repr(self): u" 'ううう', 'あ', 'いい', 'ううう', 'あ', 'いい', " u"'ううう', 'あ', 'いい', 'ううう'],\n" u" dtype='object', length=300)") - self.assertEqual(repr(idx), expected) + assert repr(idx) == expected else: expected = (u"Index([u'あ', u'いい', u'ううう', u'あ', u'いい', " u"u'ううう', u'あ', u'いい', u'ううう', u'あ',\n" @@ -1735,7 +1733,7 @@ def test_string_index_repr(self): u"u'いい', u'ううう', u'あ', u'いい', u'ううう'],\n" u" dtype='object', length=300)") - self.assertEqual(coerce(idx), expected) + assert coerce(idx) == expected # Emable Unicode option ----------------------------------------- with 
cf.option_context('display.unicode.east_asian_width', True): @@ -1745,11 +1743,11 @@ def test_string_index_repr(self): if PY3: expected = (u"Index(['あ', 'いい', 'ううう'], " u"dtype='object')") - self.assertEqual(repr(idx), expected) + assert repr(idx) == expected else: expected = (u"Index([u'あ', u'いい', u'ううう'], " u"dtype='object')") - self.assertEqual(coerce(idx), expected) + assert coerce(idx) == expected # multiple lines idx = pd.Index([u'あ', u'いい', u'ううう'] * 10) @@ -1763,7 +1761,7 @@ def test_string_index_repr(self): u" 'あ', 'いい', 'ううう'],\n" u" dtype='object')""") - self.assertEqual(repr(idx), expected) + assert repr(idx) == expected else: expected = (u"Index([u'あ', u'いい', u'ううう', u'あ', u'いい', " u"u'ううう', u'あ', u'いい',\n" @@ -1775,7 +1773,7 @@ def test_string_index_repr(self): u"u'あ', u'いい', u'ううう'],\n" u" dtype='object')") - self.assertEqual(coerce(idx), expected) + assert coerce(idx) == expected # truncated idx = pd.Index([u'あ', u'いい', u'ううう'] * 100) @@ -1789,7 +1787,7 @@ def test_string_index_repr(self): u" 'ううう'],\n" u" dtype='object', length=300)") - self.assertEqual(repr(idx), expected) + assert repr(idx) == expected else: expected = (u"Index([u'あ', u'いい', u'ううう', u'あ', u'いい', " u"u'ううう', u'あ', u'いい',\n" @@ -1800,7 +1798,7 @@ def test_string_index_repr(self): u" u'いい', u'ううう'],\n" u" dtype='object', length=300)") - self.assertEqual(coerce(idx), expected) + assert coerce(idx) == expected class TestMixedIntIndex(Base, tm.TestCase): @@ -1876,22 +1874,22 @@ def test_copy_name2(self): idx1 = idx.copy() assert idx.equals(idx1) - self.assertEqual(idx.name, 'MyName') - self.assertEqual(idx1.name, 'MyName') + assert idx.name == 'MyName' + assert idx1.name == 'MyName' idx2 = idx.copy(name='NewName') assert idx.equals(idx2) - self.assertEqual(idx.name, 'MyName') - self.assertEqual(idx2.name, 'NewName') + assert idx.name == 'MyName' + assert idx2.name == 'NewName' idx3 = idx.copy(names=['NewName']) assert idx.equals(idx3) - self.assertEqual(idx.name, 'MyName') - self.assertEqual(idx.names, ['MyName']) - self.assertEqual(idx3.name, 'NewName') - self.assertEqual(idx3.names, ['NewName']) + assert idx.name == 'MyName' + assert idx.names == ['MyName'] + assert idx3.name == 'NewName' + assert idx3.names == ['NewName'] def test_union_base(self): idx = self.create_index() @@ -1960,8 +1958,8 @@ def test_symmetric_difference(self): def test_logical_compat(self): idx = self.create_index() - self.assertEqual(idx.all(), idx.values.all()) - self.assertEqual(idx.any(), idx.values.any()) + assert idx.all() == idx.values.all() + assert idx.any() == idx.values.any() def test_dropna(self): # GH 6194 @@ -2074,4 +2072,4 @@ def test_intersect_str_dates(self): i2 = Index(['aa'], dtype=object) res = i2.intersection(i1) - self.assertEqual(len(res), 0) + assert len(res) == 0 diff --git a/pandas/tests/indexes/test_category.py b/pandas/tests/indexes/test_category.py index 7b2d27c9b51a4..6a2eea0b84b72 100644 --- a/pandas/tests/indexes/test_category.py +++ b/pandas/tests/indexes/test_category.py @@ -198,8 +198,8 @@ def test_min_max(self): ci = self.create_index(ordered=True) - self.assertEqual(ci.min(), 'c') - self.assertEqual(ci.max(), 'b') + assert ci.min() == 'c' + assert ci.max() == 'b' def test_map(self): ci = pd.CategoricalIndex(list('ABABC'), categories=list('CBA'), @@ -450,8 +450,8 @@ def test_get_loc(self): # GH 12531 cidx1 = CategoricalIndex(list('abcde'), categories=list('edabc')) idx1 = Index(list('abcde')) - self.assertEqual(cidx1.get_loc('a'), idx1.get_loc('a')) - self.assertEqual(cidx1.get_loc('e'), 
idx1.get_loc('e')) + assert cidx1.get_loc('a') == idx1.get_loc('a') + assert cidx1.get_loc('e') == idx1.get_loc('e') for i in [cidx1, idx1]: with pytest.raises(KeyError): @@ -468,8 +468,8 @@ def test_get_loc(self): True, False, True])) # unique element results in scalar res = cidx2.get_loc('e') - self.assertEqual(res, idx2.get_loc('e')) - self.assertEqual(res, 4) + assert res == idx2.get_loc('e') + assert res == 4 for i in [cidx2, idx2]: with pytest.raises(KeyError): @@ -481,12 +481,12 @@ def test_get_loc(self): # results in slice res = cidx3.get_loc('a') - self.assertEqual(res, idx3.get_loc('a')) - self.assertEqual(res, slice(0, 2, None)) + assert res == idx3.get_loc('a') + assert res == slice(0, 2, None) res = cidx3.get_loc('b') - self.assertEqual(res, idx3.get_loc('b')) - self.assertEqual(res, slice(2, 5, None)) + assert res == idx3.get_loc('b') + assert res == slice(2, 5, None) for i in [cidx3, idx3]: with pytest.raises(KeyError): @@ -612,10 +612,10 @@ def test_string_categorical_index_repr(self): idx = pd.CategoricalIndex(['a', 'bb', 'ccc']) if PY3: expected = u"""CategoricalIndex(['a', 'bb', 'ccc'], categories=['a', 'bb', 'ccc'], ordered=False, dtype='category')""" # noqa - self.assertEqual(repr(idx), expected) + assert repr(idx) == expected else: expected = u"""CategoricalIndex([u'a', u'bb', u'ccc'], categories=[u'a', u'bb', u'ccc'], ordered=False, dtype='category')""" # noqa - self.assertEqual(unicode(idx), expected) + assert unicode(idx) == expected # multiple lines idx = pd.CategoricalIndex(['a', 'bb', 'ccc'] * 10) @@ -625,7 +625,7 @@ def test_string_categorical_index_repr(self): 'ccc', 'a', 'bb', 'ccc', 'a', 'bb', 'ccc', 'a', 'bb', 'ccc'], categories=['a', 'bb', 'ccc'], ordered=False, dtype='category')""" # noqa - self.assertEqual(repr(idx), expected) + assert repr(idx) == expected else: expected = u"""CategoricalIndex([u'a', u'bb', u'ccc', u'a', u'bb', u'ccc', u'a', u'bb', u'ccc', u'a', u'bb', u'ccc', u'a', u'bb', u'ccc', u'a', @@ -633,7 +633,7 @@ def test_string_categorical_index_repr(self): u'a', u'bb', u'ccc', u'a', u'bb', u'ccc'], categories=[u'a', u'bb', u'ccc'], ordered=False, dtype='category')""" # noqa - self.assertEqual(unicode(idx), expected) + assert unicode(idx) == expected # truncated idx = pd.CategoricalIndex(['a', 'bb', 'ccc'] * 100) @@ -643,7 +643,7 @@ def test_string_categorical_index_repr(self): 'ccc', 'a', 'bb', 'ccc', 'a', 'bb', 'ccc', 'a', 'bb', 'ccc'], categories=['a', 'bb', 'ccc'], ordered=False, dtype='category', length=300)""" # noqa - self.assertEqual(repr(idx), expected) + assert repr(idx) == expected else: expected = u"""CategoricalIndex([u'a', u'bb', u'ccc', u'a', u'bb', u'ccc', u'a', u'bb', u'ccc', u'a', @@ -652,7 +652,7 @@ def test_string_categorical_index_repr(self): u'bb', u'ccc'], categories=[u'a', u'bb', u'ccc'], ordered=False, dtype='category', length=300)""" # noqa - self.assertEqual(unicode(idx), expected) + assert unicode(idx) == expected # larger categories idx = pd.CategoricalIndex(list('abcdefghijklmmo')) @@ -661,22 +661,22 @@ def test_string_categorical_index_repr(self): 'm', 'm', 'o'], categories=['a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', ...], ordered=False, dtype='category')""" # noqa - self.assertEqual(repr(idx), expected) + assert repr(idx) == expected else: expected = u"""CategoricalIndex([u'a', u'b', u'c', u'd', u'e', u'f', u'g', u'h', u'i', u'j', u'k', u'l', u'm', u'm', u'o'], categories=[u'a', u'b', u'c', u'd', u'e', u'f', u'g', u'h', ...], ordered=False, dtype='category')""" # noqa - self.assertEqual(unicode(idx), expected) + 
assert unicode(idx) == expected # short idx = pd.CategoricalIndex([u'あ', u'いい', u'ううう']) if PY3: expected = u"""CategoricalIndex(['あ', 'いい', 'ううう'], categories=['あ', 'いい', 'ううう'], ordered=False, dtype='category')""" # noqa - self.assertEqual(repr(idx), expected) + assert repr(idx) == expected else: expected = u"""CategoricalIndex([u'あ', u'いい', u'ううう'], categories=[u'あ', u'いい', u'ううう'], ordered=False, dtype='category')""" # noqa - self.assertEqual(unicode(idx), expected) + assert unicode(idx) == expected # multiple lines idx = pd.CategoricalIndex([u'あ', u'いい', u'ううう'] * 10) @@ -686,7 +686,7 @@ def test_string_categorical_index_repr(self): 'ううう', 'あ', 'いい', 'ううう', 'あ', 'いい', 'ううう', 'あ', 'いい', 'ううう'], categories=['あ', 'いい', 'ううう'], ordered=False, dtype='category')""" # noqa - self.assertEqual(repr(idx), expected) + assert repr(idx) == expected else: expected = u"""CategoricalIndex([u'あ', u'いい', u'ううう', u'あ', u'いい', u'ううう', u'あ', u'いい', u'ううう', u'あ', u'いい', u'ううう', u'あ', u'いい', u'ううう', u'あ', @@ -694,7 +694,7 @@ def test_string_categorical_index_repr(self): u'あ', u'いい', u'ううう', u'あ', u'いい', u'ううう'], categories=[u'あ', u'いい', u'ううう'], ordered=False, dtype='category')""" # noqa - self.assertEqual(unicode(idx), expected) + assert unicode(idx) == expected # truncated idx = pd.CategoricalIndex([u'あ', u'いい', u'ううう'] * 100) @@ -704,7 +704,7 @@ def test_string_categorical_index_repr(self): 'ううう', 'あ', 'いい', 'ううう', 'あ', 'いい', 'ううう', 'あ', 'いい', 'ううう'], categories=['あ', 'いい', 'ううう'], ordered=False, dtype='category', length=300)""" # noqa - self.assertEqual(repr(idx), expected) + assert repr(idx) == expected else: expected = u"""CategoricalIndex([u'あ', u'いい', u'ううう', u'あ', u'いい', u'ううう', u'あ', u'いい', u'ううう', u'あ', @@ -713,7 +713,7 @@ def test_string_categorical_index_repr(self): u'いい', u'ううう'], categories=[u'あ', u'いい', u'ううう'], ordered=False, dtype='category', length=300)""" # noqa - self.assertEqual(unicode(idx), expected) + assert unicode(idx) == expected # larger categories idx = pd.CategoricalIndex(list(u'あいうえおかきくけこさしすせそ')) @@ -722,13 +722,13 @@ def test_string_categorical_index_repr(self): 'す', 'せ', 'そ'], categories=['あ', 'い', 'う', 'え', 'お', 'か', 'き', 'く', ...], ordered=False, dtype='category')""" # noqa - self.assertEqual(repr(idx), expected) + assert repr(idx) == expected else: expected = u"""CategoricalIndex([u'あ', u'い', u'う', u'え', u'お', u'か', u'き', u'く', u'け', u'こ', u'さ', u'し', u'す', u'せ', u'そ'], categories=[u'あ', u'い', u'う', u'え', u'お', u'か', u'き', u'く', ...], ordered=False, dtype='category')""" # noqa - self.assertEqual(unicode(idx), expected) + assert unicode(idx) == expected # Emable Unicode option ----------------------------------------- with cf.option_context('display.unicode.east_asian_width', True): @@ -737,10 +737,10 @@ def test_string_categorical_index_repr(self): idx = pd.CategoricalIndex([u'あ', u'いい', u'ううう']) if PY3: expected = u"""CategoricalIndex(['あ', 'いい', 'ううう'], categories=['あ', 'いい', 'ううう'], ordered=False, dtype='category')""" # noqa - self.assertEqual(repr(idx), expected) + assert repr(idx) == expected else: expected = u"""CategoricalIndex([u'あ', u'いい', u'ううう'], categories=[u'あ', u'いい', u'ううう'], ordered=False, dtype='category')""" # noqa - self.assertEqual(unicode(idx), expected) + assert unicode(idx) == expected # multiple lines idx = pd.CategoricalIndex([u'あ', u'いい', u'ううう'] * 10) @@ -751,7 +751,7 @@ def test_string_categorical_index_repr(self): 'ううう', 'あ', 'いい', 'ううう', 'あ', 'いい', 'ううう'], categories=['あ', 'いい', 'ううう'], ordered=False, dtype='category')""" # noqa - 
self.assertEqual(repr(idx), expected) + assert repr(idx) == expected else: expected = u"""CategoricalIndex([u'あ', u'いい', u'ううう', u'あ', u'いい', u'ううう', u'あ', u'いい', u'ううう', u'あ', u'いい', u'ううう', u'あ', @@ -760,7 +760,7 @@ def test_string_categorical_index_repr(self): u'いい', u'ううう', u'あ', u'いい', u'ううう'], categories=[u'あ', u'いい', u'ううう'], ordered=False, dtype='category')""" # noqa - self.assertEqual(unicode(idx), expected) + assert unicode(idx) == expected # truncated idx = pd.CategoricalIndex([u'あ', u'いい', u'ううう'] * 100) @@ -772,7 +772,7 @@ def test_string_categorical_index_repr(self): 'あ', 'いい', 'ううう'], categories=['あ', 'いい', 'ううう'], ordered=False, dtype='category', length=300)""" # noqa - self.assertEqual(repr(idx), expected) + assert repr(idx) == expected else: expected = u"""CategoricalIndex([u'あ', u'いい', u'ううう', u'あ', u'いい', u'ううう', u'あ', u'いい', u'ううう', u'あ', @@ -781,7 +781,7 @@ def test_string_categorical_index_repr(self): u'ううう', u'あ', u'いい', u'ううう'], categories=[u'あ', u'いい', u'ううう'], ordered=False, dtype='category', length=300)""" # noqa - self.assertEqual(unicode(idx), expected) + assert unicode(idx) == expected # larger categories idx = pd.CategoricalIndex(list(u'あいうえおかきくけこさしすせそ')) @@ -790,13 +790,13 @@ def test_string_categorical_index_repr(self): 'さ', 'し', 'す', 'せ', 'そ'], categories=['あ', 'い', 'う', 'え', 'お', 'か', 'き', 'く', ...], ordered=False, dtype='category')""" # noqa - self.assertEqual(repr(idx), expected) + assert repr(idx) == expected else: expected = u"""CategoricalIndex([u'あ', u'い', u'う', u'え', u'お', u'か', u'き', u'く', u'け', u'こ', u'さ', u'し', u'す', u'せ', u'そ'], categories=[u'あ', u'い', u'う', u'え', u'お', u'か', u'き', u'く', ...], ordered=False, dtype='category')""" # noqa - self.assertEqual(unicode(idx), expected) + assert unicode(idx) == expected def test_fillna_categorical(self): # GH 11343 diff --git a/pandas/tests/indexes/test_interval.py b/pandas/tests/indexes/test_interval.py index 815fefa813a9d..00897f290f292 100644 --- a/pandas/tests/indexes/test_interval.py +++ b/pandas/tests/indexes/test_interval.py @@ -118,15 +118,15 @@ def f(): def test_properties(self): index = self.index - self.assertEqual(len(index), 2) - self.assertEqual(index.size, 2) - self.assertEqual(index.shape, (2, )) + assert len(index) == 2 + assert index.size == 2 + assert index.shape == (2, ) tm.assert_index_equal(index.left, Index([0, 1])) tm.assert_index_equal(index.right, Index([1, 2])) tm.assert_index_equal(index.mid, Index([0.5, 1.5])) - self.assertEqual(index.closed, 'right') + assert index.closed == 'right' expected = np.array([Interval(0, 1), Interval(1, 2)], dtype=object) tm.assert_numpy_array_equal(np.asarray(index), expected) @@ -134,15 +134,15 @@ def test_properties(self): # with nans index = self.index_with_nan - self.assertEqual(len(index), 3) - self.assertEqual(index.size, 3) - self.assertEqual(index.shape, (3, )) + assert len(index) == 3 + assert index.size == 3 + assert index.shape == (3, ) tm.assert_index_equal(index.left, Index([0, np.nan, 1])) tm.assert_index_equal(index.right, Index([1, np.nan, 2])) tm.assert_index_equal(index.mid, Index([0.5, np.nan, 1.5])) - self.assertEqual(index.closed, 'right') + assert index.closed == 'right' expected = np.array([Interval(0, 1), np.nan, Interval(1, 2)], dtype=object) @@ -285,7 +285,7 @@ def test_repr(self): "\n right=[1, 2]," "\n closed='right'," "\n dtype='interval[int64]')") - self.assertEqual(repr(i), expected) + assert repr(i) == expected i = IntervalIndex.from_tuples((Timestamp('20130101'), Timestamp('20130102')), @@ -296,7 +296,7 @@ def 
test_repr(self): "\n right=['2013-01-02', '2013-01-03']," "\n closed='right'," "\n dtype='interval[datetime64[ns]]')") - self.assertEqual(repr(i), expected) + assert repr(i) == expected @pytest.mark.xfail(reason='not a valid repr as we use interval notation') def test_repr_max_seq_item_setting(self): @@ -328,21 +328,21 @@ def test_get_item(self): def test_get_loc_value(self): pytest.raises(KeyError, self.index.get_loc, 0) - self.assertEqual(self.index.get_loc(0.5), 0) - self.assertEqual(self.index.get_loc(1), 0) - self.assertEqual(self.index.get_loc(1.5), 1) - self.assertEqual(self.index.get_loc(2), 1) + assert self.index.get_loc(0.5) == 0 + assert self.index.get_loc(1) == 0 + assert self.index.get_loc(1.5) == 1 + assert self.index.get_loc(2) == 1 pytest.raises(KeyError, self.index.get_loc, -1) pytest.raises(KeyError, self.index.get_loc, 3) idx = IntervalIndex.from_tuples([(0, 2), (1, 3)]) - self.assertEqual(idx.get_loc(0.5), 0) - self.assertEqual(idx.get_loc(1), 0) + assert idx.get_loc(0.5) == 0 + assert idx.get_loc(1) == 0 tm.assert_numpy_array_equal(idx.get_loc(1.5), np.array([0, 1], dtype='int64')) tm.assert_numpy_array_equal(np.sort(idx.get_loc(2)), np.array([0, 1], dtype='int64')) - self.assertEqual(idx.get_loc(3), 1) + assert idx.get_loc(3) == 1 pytest.raises(KeyError, idx.get_loc, 3.5) idx = IntervalIndex.from_arrays([0, 2], [1, 3]) @@ -351,29 +351,29 @@ def test_get_loc_value(self): def slice_locs_cases(self, breaks): # TODO: same tests for more index types index = IntervalIndex.from_breaks([0, 1, 2], closed='right') - self.assertEqual(index.slice_locs(), (0, 2)) - self.assertEqual(index.slice_locs(0, 1), (0, 1)) - self.assertEqual(index.slice_locs(1, 1), (0, 1)) - self.assertEqual(index.slice_locs(0, 2), (0, 2)) - self.assertEqual(index.slice_locs(0.5, 1.5), (0, 2)) - self.assertEqual(index.slice_locs(0, 0.5), (0, 1)) - self.assertEqual(index.slice_locs(start=1), (0, 2)) - self.assertEqual(index.slice_locs(start=1.2), (1, 2)) - self.assertEqual(index.slice_locs(end=1), (0, 1)) - self.assertEqual(index.slice_locs(end=1.1), (0, 2)) - self.assertEqual(index.slice_locs(end=1.0), (0, 1)) - self.assertEqual(*index.slice_locs(-1, -1)) + assert index.slice_locs() == (0, 2) + assert index.slice_locs(0, 1) == (0, 1) + assert index.slice_locs(1, 1) == (0, 1) + assert index.slice_locs(0, 2) == (0, 2) + assert index.slice_locs(0.5, 1.5) == (0, 2) + assert index.slice_locs(0, 0.5) == (0, 1) + assert index.slice_locs(start=1) == (0, 2) + assert index.slice_locs(start=1.2) == (1, 2) + assert index.slice_locs(end=1) == (0, 1) + assert index.slice_locs(end=1.1) == (0, 2) + assert index.slice_locs(end=1.0) == (0, 1) + assert index.slice_locs(-1, -1) == (0, 0) index = IntervalIndex.from_breaks([0, 1, 2], closed='neither') - self.assertEqual(index.slice_locs(0, 1), (0, 1)) - self.assertEqual(index.slice_locs(0, 2), (0, 2)) - self.assertEqual(index.slice_locs(0.5, 1.5), (0, 2)) - self.assertEqual(index.slice_locs(1, 1), (1, 1)) - self.assertEqual(index.slice_locs(1, 2), (1, 2)) + assert index.slice_locs(0, 1) == (0, 1) + assert index.slice_locs(0, 2) == (0, 2) + assert index.slice_locs(0.5, 1.5) == (0, 2) + assert index.slice_locs(1, 1) == (1, 1) + assert index.slice_locs(1, 2) == (1, 2) index = IntervalIndex.from_breaks([0, 1, 2], closed='both') - self.assertEqual(index.slice_locs(1, 1), (0, 2)) - self.assertEqual(index.slice_locs(1, 2), (0, 2)) + assert index.slice_locs(1, 1) == (0, 2) + assert index.slice_locs(1, 2) == (0, 2) def test_slice_locs_int64(self): self.slice_locs_cases([0, 1, 2]) @@ 
-383,14 +383,16 @@ def test_slice_locs_float64(self): def slice_locs_decreasing_cases(self, tuples): index = IntervalIndex.from_tuples(tuples) - self.assertEqual(index.slice_locs(1.5, 0.5), (1, 3)) - self.assertEqual(index.slice_locs(2, 0), (1, 3)) - self.assertEqual(index.slice_locs(2, 1), (1, 3)) - self.assertEqual(index.slice_locs(3, 1.1), (0, 3)) - self.assertEqual(index.slice_locs(3, 3), (0, 2)) - self.assertEqual(index.slice_locs(3.5, 3.3), (0, 1)) - self.assertEqual(index.slice_locs(1, -3), (2, 3)) - self.assertEqual(*index.slice_locs(-1, -1)) + assert index.slice_locs(1.5, 0.5) == (1, 3) + assert index.slice_locs(2, 0) == (1, 3) + assert index.slice_locs(2, 1) == (1, 3) + assert index.slice_locs(3, 1.1) == (0, 3) + assert index.slice_locs(3, 3) == (0, 2) + assert index.slice_locs(3.5, 3.3) == (0, 1) + assert index.slice_locs(1, -3) == (2, 3) + + slice_locs = index.slice_locs(-1, -1) + assert slice_locs[0] == slice_locs[1] def test_slice_locs_decreasing_int64(self): self.slice_locs_cases([(2, 4), (1, 3), (0, 2)]) @@ -404,9 +406,9 @@ def test_slice_locs_fails(self): index.slice_locs(1, 2) def test_get_loc_interval(self): - self.assertEqual(self.index.get_loc(Interval(0, 1)), 0) - self.assertEqual(self.index.get_loc(Interval(0, 0.5)), 0) - self.assertEqual(self.index.get_loc(Interval(0, 1, 'left')), 0) + assert self.index.get_loc(Interval(0, 1)) == 0 + assert self.index.get_loc(Interval(0, 0.5)) == 0 + assert self.index.get_loc(Interval(0, 1, 'left')) == 0 pytest.raises(KeyError, self.index.get_loc, Interval(2, 3)) pytest.raises(KeyError, self.index.get_loc, Interval(-1, 0, 'left')) diff --git a/pandas/tests/indexes/test_multi.py b/pandas/tests/indexes/test_multi.py index 714e901532ed9..a840711e37fb0 100644 --- a/pandas/tests/indexes/test_multi.py +++ b/pandas/tests/indexes/test_multi.py @@ -128,35 +128,35 @@ def test_numpy_repeat(self): def test_set_name_methods(self): # so long as these are synonyms, we don't need to test set_names - self.assertEqual(self.index.rename, self.index.set_names) + assert self.index.rename == self.index.set_names new_names = [name + "SUFFIX" for name in self.index_names] ind = self.index.set_names(new_names) - self.assertEqual(self.index.names, self.index_names) - self.assertEqual(ind.names, new_names) + assert self.index.names == self.index_names + assert ind.names == new_names with tm.assert_raises_regex(ValueError, "^Length"): ind.set_names(new_names + new_names) new_names2 = [name + "SUFFIX2" for name in new_names] res = ind.set_names(new_names2, inplace=True) assert res is None - self.assertEqual(ind.names, new_names2) + assert ind.names == new_names2 # set names for specific level (# GH7792) ind = self.index.set_names(new_names[0], level=0) - self.assertEqual(self.index.names, self.index_names) - self.assertEqual(ind.names, [new_names[0], self.index_names[1]]) + assert self.index.names == self.index_names + assert ind.names == [new_names[0], self.index_names[1]] res = ind.set_names(new_names2[0], level=0, inplace=True) assert res is None - self.assertEqual(ind.names, [new_names2[0], self.index_names[1]]) + assert ind.names == [new_names2[0], self.index_names[1]] # set names for multiple levels ind = self.index.set_names(new_names, level=[0, 1]) - self.assertEqual(self.index.names, self.index_names) - self.assertEqual(ind.names, new_names) + assert self.index.names == self.index_names + assert ind.names == new_names res = ind.set_names(new_names2, level=[0, 1], inplace=True) assert res is None - self.assertEqual(ind.names, new_names2) + assert 
ind.names == new_names2 def test_set_levels(self): # side note - you probably wouldn't want to use levels and labels @@ -167,7 +167,7 @@ def test_set_levels(self): def assert_matching(actual, expected, check_dtype=False): # avoid specifying internal representation # as much as possible - self.assertEqual(len(actual), len(expected)) + assert len(actual) == len(expected) for act, exp in zip(actual, expected): act = np.asarray(act) exp = np.asarray(exp) @@ -256,7 +256,7 @@ def test_set_labels(self): def assert_matching(actual, expected): # avoid specifying internal representation # as much as possible - self.assertEqual(len(actual), len(expected)) + assert len(actual) == len(expected) for act, exp in zip(actual, expected): act = np.asarray(act) exp = np.asarray(exp, dtype=np.int8) @@ -439,12 +439,12 @@ def test_copy_in_constructor(self): val = labels[0] mi = MultiIndex(levels=[levels, levels], labels=[labels, labels], copy=True) - self.assertEqual(mi.labels[0][0], val) + assert mi.labels[0][0] == val labels[0] = 15 - self.assertEqual(mi.labels[0][0], val) + assert mi.labels[0][0] == val val = levels[0] levels[0] = "PANDA" - self.assertEqual(mi.levels[0][0], val) + assert mi.levels[0][0] == val def test_set_value_keeps_names(self): # motivating example from #3742 @@ -457,10 +457,10 @@ def test_set_value_keeps_names(self): index=idx) df = df.sort_index() assert df.is_copy is None - self.assertEqual(df.index.names, ('Name', 'Number')) + assert df.index.names == ('Name', 'Number') df = df.set_value(('grethe', '4'), 'one', 99.34) assert df.is_copy is None - self.assertEqual(df.index.names, ('Name', 'Number')) + assert df.index.names == ('Name', 'Number') def test_copy_names(self): # Check that adding a "names" parameter to the copy is honored @@ -469,27 +469,27 @@ def test_copy_names(self): multi_idx1 = multi_idx.copy() assert multi_idx.equals(multi_idx1) - self.assertEqual(multi_idx.names, ['MyName1', 'MyName2']) - self.assertEqual(multi_idx1.names, ['MyName1', 'MyName2']) + assert multi_idx.names == ['MyName1', 'MyName2'] + assert multi_idx1.names == ['MyName1', 'MyName2'] multi_idx2 = multi_idx.copy(names=['NewName1', 'NewName2']) assert multi_idx.equals(multi_idx2) - self.assertEqual(multi_idx.names, ['MyName1', 'MyName2']) - self.assertEqual(multi_idx2.names, ['NewName1', 'NewName2']) + assert multi_idx.names == ['MyName1', 'MyName2'] + assert multi_idx2.names == ['NewName1', 'NewName2'] multi_idx3 = multi_idx.copy(name=['NewName1', 'NewName2']) assert multi_idx.equals(multi_idx3) - self.assertEqual(multi_idx.names, ['MyName1', 'MyName2']) - self.assertEqual(multi_idx3.names, ['NewName1', 'NewName2']) + assert multi_idx.names == ['MyName1', 'MyName2'] + assert multi_idx3.names == ['NewName1', 'NewName2'] def test_names(self): # names are assigned in __init__ names = self.index_names level_names = [level.name for level in self.index.levels] - self.assertEqual(names, level_names) + assert names == level_names # setting bad names on existing index = self.index @@ -515,7 +515,7 @@ def test_names(self): index.names = ["a", "b"] ind_names = list(index.names) level_names = [level.name for level in index.levels] - self.assertEqual(ind_names, level_names) + assert ind_names == level_names def test_reference_duplicate_name(self): idx = MultiIndex.from_tuples( @@ -623,7 +623,7 @@ def test_view(self): self.assert_multiindex_copied(i_view, self.index) def check_level_names(self, index, names): - self.assertEqual([level.name for level in index.levels], list(names)) + assert [level.name for level in 
index.levels] == list(names) def test_changing_names(self): @@ -656,8 +656,8 @@ def test_duplicate_names(self): def test_get_level_number_integer(self): self.index.names = [1, 0] - self.assertEqual(self.index._get_level_number(1), 0) - self.assertEqual(self.index._get_level_number(0), 1) + assert self.index._get_level_number(1) == 0 + assert self.index._get_level_number(0) == 1 pytest.raises(IndexError, self.index._get_level_number, 2) tm.assert_raises_regex(KeyError, 'Level fourth not found', self.index._get_level_number, 'fourth') @@ -668,7 +668,7 @@ def test_from_arrays(self): arrays.append(np.asarray(lev).take(lab)) result = MultiIndex.from_arrays(arrays) - self.assertEqual(list(result), list(self.index)) + assert list(result) == list(self.index) # infer correctly result = MultiIndex.from_arrays([[pd.NaT, Timestamp('20130101')], @@ -819,7 +819,7 @@ def test_from_product(self): expected = MultiIndex.from_tuples(tuples, names=names) tm.assert_index_equal(result, expected) - self.assertEqual(result.names, names) + assert result.names == names def test_from_product_empty(self): # 0 levels @@ -914,7 +914,7 @@ def test_append_mixed_dtypes(self): [1.1, np.nan, 3.3], ['a', 'b', 'c'], dti, dti_tz, pi]) - self.assertEqual(mi.nlevels, 6) + assert mi.nlevels == 6 res = mi.append(mi) exp = MultiIndex.from_arrays([[1, 2, 3, 1, 2, 3], @@ -943,7 +943,7 @@ def test_get_level_values(self): expected = Index(['foo', 'foo', 'bar', 'baz', 'qux', 'qux'], name='first') tm.assert_index_equal(result, expected) - self.assertEqual(result.name, 'first') + assert result.name == 'first' result = self.index.get_level_values('first') expected = self.index.get_level_values(0) @@ -989,7 +989,7 @@ def test_get_level_values_na(self): arrays = [[], []] index = pd.MultiIndex.from_arrays(arrays) values = index.get_level_values(0) - self.assertEqual(values.shape, (0, )) + assert values.shape == (0, ) def test_reorder_levels(self): # this blows up @@ -997,13 +997,13 @@ def test_reorder_levels(self): self.index.reorder_levels, [2, 1, 0]) def test_nlevels(self): - self.assertEqual(self.index.nlevels, 2) + assert self.index.nlevels == 2 def test_iter(self): result = list(self.index) expected = [('foo', 'one'), ('foo', 'two'), ('bar', 'one'), ('baz', 'two'), ('qux', 'one'), ('qux', 'two')] - self.assertEqual(result, expected) + assert result == expected def test_legacy_pickle(self): if PY3: @@ -1089,7 +1089,7 @@ def test_is_numeric(self): def test_getitem(self): # scalar - self.assertEqual(self.index[2], ('bar', 'one')) + assert self.index[2] == ('bar', 'one') # slice result = self.index[2:5] @@ -1105,12 +1105,12 @@ def test_getitem(self): def test_getitem_group_select(self): sorted_idx, _ = self.index.sortlevel(0) - self.assertEqual(sorted_idx.get_loc('baz'), slice(3, 4)) - self.assertEqual(sorted_idx.get_loc('foo'), slice(0, 2)) + assert sorted_idx.get_loc('baz') == slice(3, 4) + assert sorted_idx.get_loc('foo') == slice(0, 2) def test_get_loc(self): - self.assertEqual(self.index.get_loc(('foo', 'two')), 1) - self.assertEqual(self.index.get_loc(('baz', 'two')), 3) + assert self.index.get_loc(('foo', 'two')) == 1 + assert self.index.get_loc(('baz', 'two')) == 3 pytest.raises(KeyError, self.index.get_loc, ('bar', 'two')) pytest.raises(KeyError, self.index.get_loc, 'quux') @@ -1122,19 +1122,19 @@ def test_get_loc(self): lrange(4))], labels=[np.array([0, 0, 1, 2, 2, 2, 3, 3]), np.array( [0, 1, 0, 0, 0, 1, 0, 1]), np.array([1, 0, 1, 1, 0, 0, 1, 0])]) pytest.raises(KeyError, index.get_loc, (1, 1)) - self.assertEqual(index.get_loc((2, 
0)), slice(3, 5)) + assert index.get_loc((2, 0)) == slice(3, 5) def test_get_loc_duplicates(self): index = Index([2, 2, 2, 2]) result = index.get_loc(2) expected = slice(0, 4) - self.assertEqual(result, expected) + assert result == expected # pytest.raises(Exception, index.get_loc, 2) index = Index(['c', 'a', 'a', 'b', 'b']) rs = index.get_loc('c') xp = 0 - assert (rs == xp) + assert rs == xp def test_get_value_duplicates(self): index = MultiIndex(levels=[['D', 'B', 'C'], @@ -1155,12 +1155,12 @@ def test_get_loc_level(self): loc, new_index = index.get_loc_level((0, 1)) expected = slice(1, 2) exp_index = index[expected].droplevel(0).droplevel(0) - self.assertEqual(loc, expected) + assert loc == expected assert new_index.equals(exp_index) loc, new_index = index.get_loc_level((0, 1, 0)) expected = 1 - self.assertEqual(loc, expected) + assert loc == expected assert new_index is None pytest.raises(KeyError, index.get_loc_level, (2, 2)) @@ -1169,7 +1169,7 @@ def test_get_loc_level(self): [0, 0, 0, 0]), np.array([0, 1, 2, 3])]) result, new_index = index.get_loc_level((2000, slice(None, None))) expected = slice(None, None) - self.assertEqual(result, expected) + assert result == expected assert new_index.equals(index.droplevel(0)) def test_slice_locs(self): @@ -1225,16 +1225,16 @@ def test_slice_locs_partial(self): sorted_idx, _ = self.index.sortlevel(0) result = sorted_idx.slice_locs(('foo', 'two'), ('qux', 'one')) - self.assertEqual(result, (1, 5)) + assert result == (1, 5) result = sorted_idx.slice_locs(None, ('qux', 'one')) - self.assertEqual(result, (0, 5)) + assert result == (0, 5) result = sorted_idx.slice_locs(('foo', 'two'), None) - self.assertEqual(result, (1, len(sorted_idx))) + assert result == (1, len(sorted_idx)) result = sorted_idx.slice_locs('bar', 'baz') - self.assertEqual(result, (2, 4)) + assert result == (2, 4) def test_slice_locs_not_contained(self): # some searchsorted action @@ -1244,22 +1244,22 @@ def test_slice_locs_not_contained(self): [0, 1, 2, 1, 2, 2, 0, 1, 2]], sortorder=0) result = index.slice_locs((1, 0), (5, 2)) - self.assertEqual(result, (3, 6)) + assert result == (3, 6) result = index.slice_locs(1, 5) - self.assertEqual(result, (3, 6)) + assert result == (3, 6) result = index.slice_locs((2, 2), (5, 2)) - self.assertEqual(result, (3, 6)) + assert result == (3, 6) result = index.slice_locs(2, 5) - self.assertEqual(result, (3, 6)) + assert result == (3, 6) result = index.slice_locs((1, 0), (6, 3)) - self.assertEqual(result, (3, 8)) + assert result == (3, 8) result = index.slice_locs(-1, 10) - self.assertEqual(result, (0, len(index))) + assert result == (0, len(index)) def test_consistency(self): # need to construct an overflow @@ -1374,7 +1374,7 @@ def test_hash_collisions(self): for i in [0, 1, len(index) - 2, len(index) - 1]: result = index.get_loc(index[i]) - self.assertEqual(result, i) + assert result == i def test_format(self): self.index.format() @@ -1391,7 +1391,7 @@ def test_format_sparse_display(self): [0, 1, 0, 0, 1, 0], [0, 0, 0, 0, 0, 0]]) result = index.format() - self.assertEqual(result[3], '1 0 0 0') + assert result[3] == '1 0 0 0' def test_format_sparse_config(self): warn_filters = warnings.filters @@ -1401,7 +1401,7 @@ def test_format_sparse_config(self): pd.set_option('display.multi_sparse', False) result = self.index.format() - self.assertEqual(result[1], 'foo two') + assert result[1] == 'foo two' tm.reset_display_options() @@ -1452,7 +1452,7 @@ def test_to_hierarchical(self): labels=[[0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1], [0, 0, 0, 1, 1, 1, 0, 0, 
0, 1, 1, 1]]) tm.assert_index_equal(result, expected) - self.assertEqual(result.names, index.names) + assert result.names == index.names # K > 1 result = index.to_hierarchical(3, 2) @@ -1460,7 +1460,7 @@ def test_to_hierarchical(self): labels=[[0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1], [0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1]]) tm.assert_index_equal(result, expected) - self.assertEqual(result.names, index.names) + assert result.names == index.names # non-sorted index = MultiIndex.from_tuples([(2, 'c'), (1, 'b'), @@ -1474,7 +1474,7 @@ def test_to_hierarchical(self): (2, 'b'), (2, 'b')], names=['N1', 'N2']) tm.assert_index_equal(result, expected) - self.assertEqual(result.names, index.names) + assert result.names == index.names def test_bounds(self): self.index._bounds @@ -1655,35 +1655,35 @@ def test_difference(self): assert isinstance(result, MultiIndex) assert result.equals(expected) - self.assertEqual(result.names, self.index.names) + assert result.names == self.index.names # empty difference: reflexive result = self.index.difference(self.index) expected = self.index[:0] assert result.equals(expected) - self.assertEqual(result.names, self.index.names) + assert result.names == self.index.names # empty difference: superset result = self.index[-3:].difference(self.index) expected = self.index[:0] assert result.equals(expected) - self.assertEqual(result.names, self.index.names) + assert result.names == self.index.names # empty difference: degenerate result = self.index[:0].difference(self.index) expected = self.index[:0] assert result.equals(expected) - self.assertEqual(result.names, self.index.names) + assert result.names == self.index.names # names not the same chunklet = self.index[-3:] chunklet.names = ['foo', 'baz'] result = first.difference(chunklet) - self.assertEqual(result.names, (None, None)) + assert result.names == (None, None) # empty, but non-equal result = self.index.difference(self.index.sortlevel(1)[0]) - self.assertEqual(len(result), 0) + assert len(result) == 0 # raise Exception called with non-MultiIndex result = first.difference(first.values) @@ -1692,14 +1692,14 @@ def test_difference(self): # name from empty array result = first.difference([]) assert first.equals(result) - self.assertEqual(first.names, result.names) + assert first.names == result.names # name from non-empty array result = first.difference([('foo', 'one')]) expected = pd.MultiIndex.from_tuples([('bar', 'one'), ('baz', 'two'), ( 'foo', 'two'), ('qux', 'one'), ('qux', 'two')]) expected.names = first.names - self.assertEqual(first.names, result.names) + assert first.names == result.names tm.assert_raises_regex(TypeError, "other must be a MultiIndex " "or a list of tuples", first.difference, [1, 2, 3, 4, 5]) @@ -1710,7 +1710,7 @@ def test_from_tuples(self): MultiIndex.from_tuples, []) idx = MultiIndex.from_tuples(((1, 2), (3, 4)), names=['a', 'b']) - self.assertEqual(len(idx), 2) + assert len(idx) == 2 def test_argsort(self): result = self.index.argsort() @@ -1824,14 +1824,14 @@ def test_drop(self): def test_droplevel_with_names(self): index = self.index[self.index.get_loc('foo')] dropped = index.droplevel(0) - self.assertEqual(dropped.name, 'second') + assert dropped.name == 'second' index = MultiIndex(levels=[Index(lrange(4)), Index(lrange(4)), Index( lrange(4))], labels=[np.array([0, 0, 1, 2, 2, 2, 3, 3]), np.array( [0, 1, 0, 0, 0, 1, 0, 1]), np.array([1, 0, 1, 1, 0, 0, 1, 0])], names=['one', 'two', 'three']) dropped = index.droplevel(0) - self.assertEqual(dropped.names, ('two', 'three')) + assert dropped.names 
== ('two', 'three') dropped = index.droplevel('two') expected = index.droplevel(1) @@ -1873,7 +1873,7 @@ def test_insert(self): # key contained in all levels new_index = self.index.insert(0, ('bar', 'two')) assert new_index.equal_levels(self.index) - self.assertEqual(new_index[0], ('bar', 'two')) + assert new_index[0] == ('bar', 'two') # key not contained in all levels new_index = self.index.insert(0, ('abc', 'three')) @@ -1883,7 +1883,7 @@ def test_insert(self): exp1 = Index(list(self.index.levels[1]) + ['three'], name='second') tm.assert_index_equal(new_index.levels[1], exp1) - self.assertEqual(new_index[0], ('abc', 'three')) + assert new_index[0] == ('abc', 'three') # key wrong length msg = "Item must have length equal to number of levels" @@ -1937,7 +1937,7 @@ def test_insert(self): def test_take_preserve_name(self): taken = self.index.take([3, 0, 1]) - self.assertEqual(taken.names, self.index.names) + assert taken.names == self.index.names def test_take_fill_value(self): # GH 12631 @@ -2203,7 +2203,7 @@ def check(nlevels, with_nulls): for a in [101, 102]: mi = MultiIndex.from_arrays([[101, a], [3.5, np.nan]]) assert not mi.has_duplicates - self.assertEqual(mi.get_duplicates(), []) + assert mi.get_duplicates() == [] tm.assert_numpy_array_equal(mi.duplicated(), np.zeros( 2, dtype='bool')) @@ -2213,9 +2213,9 @@ def check(nlevels, with_nulls): lab = product(range(-1, n), range(-1, m)) mi = MultiIndex(levels=[list('abcde')[:n], list('WXYZ')[:m]], labels=np.random.permutation(list(lab)).T) - self.assertEqual(len(mi), (n + 1) * (m + 1)) + assert len(mi) == (n + 1) * (m + 1) assert not mi.has_duplicates - self.assertEqual(mi.get_duplicates(), []) + assert mi.get_duplicates() == [] tm.assert_numpy_array_equal(mi.duplicated(), np.zeros( len(mi), dtype='bool')) @@ -2228,7 +2228,7 @@ def test_duplicate_meta_data(self): index.set_names([None, 'Num']), index.set_names(['Upper', 'Num']), ]: assert idx.has_duplicates - self.assertEqual(idx.drop_duplicates().names, idx.names) + assert idx.drop_duplicates().names == idx.names def test_get_unique_index(self): idx = self.index[[0, 1, 0, 1, 1, 0, 0]] @@ -2274,7 +2274,7 @@ def test_unique_datetimelike(self): def test_tolist(self): result = self.index.tolist() exp = list(self.index.values) - self.assertEqual(result, exp) + assert result == exp def test_repr_with_unicode_data(self): with pd.core.config.option_context("display.encoding", 'UTF-8'): @@ -2294,10 +2294,8 @@ def test_repr_roundtrip(self): result = eval(repr(mi)) # string coerces to unicode tm.assert_index_equal(result, mi, exact=False) - self.assertEqual( - mi.get_level_values('first').inferred_type, 'string') - self.assertEqual( - result.get_level_values('first').inferred_type, 'unicode') + assert mi.get_level_values('first').inferred_type == 'string' + assert result.get_level_values('first').inferred_type == 'unicode' mi_u = MultiIndex.from_product( [list(u'ab'), range(3)], names=['first', 'second']) @@ -2313,7 +2311,6 @@ def test_repr_roundtrip(self): # long format mi = MultiIndex.from_product([list('abcdefg'), range(10)], names=['first', 'second']) - result = str(mi) if PY3: tm.assert_index_equal(eval(repr(mi)), mi, exact=True) @@ -2321,13 +2318,9 @@ def test_repr_roundtrip(self): result = eval(repr(mi)) # string coerces to unicode tm.assert_index_equal(result, mi, exact=False) - self.assertEqual( - mi.get_level_values('first').inferred_type, 'string') - self.assertEqual( - result.get_level_values('first').inferred_type, 'unicode') + assert mi.get_level_values('first').inferred_type == 
'string' + assert result.get_level_values('first').inferred_type == 'unicode' - mi = MultiIndex.from_product( - [list(u'abcdefg'), range(10)], names=['first', 'second']) result = eval(repr(mi_u)) tm.assert_index_equal(result, mi_u, exact=True) @@ -2356,7 +2349,7 @@ def test_bytestring_with_unicode(self): def test_slice_keep_name(self): x = MultiIndex.from_tuples([('a', 'b'), (1, 2), ('c', 'd')], names=['x', 'y']) - self.assertEqual(x[1:].names, x.names) + assert x[1:].names == x.names def test_isnull_behavior(self): # should not segfault GH5123 @@ -2510,8 +2503,8 @@ def test_isin(self): # empty, return dtype bool idx = MultiIndex.from_arrays([[], []]) result = idx.isin(values) - self.assertEqual(len(result), 0) - self.assertEqual(result.dtype, np.bool_) + assert len(result) == 0 + assert result.dtype == np.bool_ def test_isin_nan(self): idx = MultiIndex.from_arrays([['foo', 'bar'], [1.0, np.nan]]) @@ -2556,39 +2549,33 @@ def test_reindex_preserves_names_when_target_is_list_or_ndarray(self): other_dtype = pd.MultiIndex.from_product([[1, 2], [3, 4]]) # list & ndarray cases - self.assertEqual(idx.reindex([])[0].names, [None, None]) - self.assertEqual(idx.reindex(np.array([]))[0].names, [None, None]) - self.assertEqual(idx.reindex(target.tolist())[0].names, [None, None]) - self.assertEqual(idx.reindex(target.values)[0].names, [None, None]) - self.assertEqual( - idx.reindex(other_dtype.tolist())[0].names, [None, None]) - self.assertEqual( - idx.reindex(other_dtype.values)[0].names, [None, None]) + assert idx.reindex([])[0].names == [None, None] + assert idx.reindex(np.array([]))[0].names == [None, None] + assert idx.reindex(target.tolist())[0].names == [None, None] + assert idx.reindex(target.values)[0].names == [None, None] + assert idx.reindex(other_dtype.tolist())[0].names == [None, None] + assert idx.reindex(other_dtype.values)[0].names == [None, None] idx.names = ['foo', 'bar'] - self.assertEqual(idx.reindex([])[0].names, ['foo', 'bar']) - self.assertEqual(idx.reindex(np.array([]))[0].names, ['foo', 'bar']) - self.assertEqual(idx.reindex(target.tolist())[0].names, ['foo', 'bar']) - self.assertEqual(idx.reindex(target.values)[0].names, ['foo', 'bar']) - self.assertEqual( - idx.reindex(other_dtype.tolist())[0].names, ['foo', 'bar']) - self.assertEqual( - idx.reindex(other_dtype.values)[0].names, ['foo', 'bar']) + assert idx.reindex([])[0].names == ['foo', 'bar'] + assert idx.reindex(np.array([]))[0].names == ['foo', 'bar'] + assert idx.reindex(target.tolist())[0].names == ['foo', 'bar'] + assert idx.reindex(target.values)[0].names == ['foo', 'bar'] + assert idx.reindex(other_dtype.tolist())[0].names == ['foo', 'bar'] + assert idx.reindex(other_dtype.values)[0].names == ['foo', 'bar'] def test_reindex_lvl_preserves_names_when_target_is_list_or_array(self): # GH7774 idx = pd.MultiIndex.from_product([[0, 1], ['a', 'b']], names=['foo', 'bar']) - self.assertEqual(idx.reindex([], level=0)[0].names, ['foo', 'bar']) - self.assertEqual(idx.reindex([], level=1)[0].names, ['foo', 'bar']) + assert idx.reindex([], level=0)[0].names == ['foo', 'bar'] + assert idx.reindex([], level=1)[0].names == ['foo', 'bar'] def test_reindex_lvl_preserves_type_if_target_is_empty_list_or_array(self): # GH7774 idx = pd.MultiIndex.from_product([[0, 1], ['a', 'b']]) - self.assertEqual(idx.reindex([], level=0)[0].levels[0].dtype.type, - np.int64) - self.assertEqual(idx.reindex([], level=1)[0].levels[1].dtype.type, - np.object_) + assert idx.reindex([], level=0)[0].levels[0].dtype.type == np.int64 + assert idx.reindex([], 
level=1)[0].levels[1].dtype.type == np.object_ def test_groupby(self): groups = self.index.groupby(np.array([1, 1, 1, 2, 2, 2])) @@ -2781,7 +2768,7 @@ def test_unsortedindex(self): with pytest.raises(UnsortedIndexError): df.loc(axis=0)['z', :] df.sort_index(inplace=True) - self.assertEqual(len(df.loc(axis=0)['z', :]), 2) + assert len(df.loc(axis=0)['z', :]) == 2 with pytest.raises(KeyError): df.loc(axis=0)['q', :] diff --git a/pandas/tests/indexes/test_numeric.py b/pandas/tests/indexes/test_numeric.py index 68a329a7f741f..19bca875e650d 100644 --- a/pandas/tests/indexes/test_numeric.py +++ b/pandas/tests/indexes/test_numeric.py @@ -216,15 +216,15 @@ def test_constructor(self): assert isinstance(index, Float64Index) index = Float64Index(np.array([1., 2, 3, 4, 5])) assert isinstance(index, Float64Index) - self.assertEqual(index.dtype, float) + assert index.dtype == float index = Float64Index(np.array([1., 2, 3, 4, 5]), dtype=np.float32) assert isinstance(index, Float64Index) - self.assertEqual(index.dtype, np.float64) + assert index.dtype == np.float64 index = Float64Index(np.array([1, 2, 3, 4, 5]), dtype=np.float32) assert isinstance(index, Float64Index) - self.assertEqual(index.dtype, np.float64) + assert index.dtype == np.float64 # nan handling result = Float64Index([np.nan, np.nan]) @@ -336,13 +336,13 @@ def test_get_indexer(self): def test_get_loc(self): idx = Float64Index([0.0, 1.0, 2.0]) for method in [None, 'pad', 'backfill', 'nearest']: - self.assertEqual(idx.get_loc(1, method), 1) + assert idx.get_loc(1, method) == 1 if method is not None: - self.assertEqual(idx.get_loc(1, method, tolerance=0), 1) + assert idx.get_loc(1, method, tolerance=0) == 1 for method, loc in [('pad', 1), ('backfill', 2), ('nearest', 1)]: - self.assertEqual(idx.get_loc(1.1, method), loc) - self.assertEqual(idx.get_loc(1.1, method, tolerance=0.9), loc) + assert idx.get_loc(1.1, method) == loc + assert idx.get_loc(1.1, method, tolerance=0.9) == loc pytest.raises(KeyError, idx.get_loc, 'foo') pytest.raises(KeyError, idx.get_loc, 1.5) @@ -354,21 +354,21 @@ def test_get_loc(self): def test_get_loc_na(self): idx = Float64Index([np.nan, 1, 2]) - self.assertEqual(idx.get_loc(1), 1) - self.assertEqual(idx.get_loc(np.nan), 0) + assert idx.get_loc(1) == 1 + assert idx.get_loc(np.nan) == 0 idx = Float64Index([np.nan, 1, np.nan]) - self.assertEqual(idx.get_loc(1), 1) + assert idx.get_loc(1) == 1 # representable by slice [0:2:2] # pytest.raises(KeyError, idx.slice_locs, np.nan) sliced = idx.slice_locs(np.nan) assert isinstance(sliced, tuple) - self.assertEqual(sliced, (0, 3)) + assert sliced == (0, 3) # not representable by slice idx = Float64Index([np.nan, 1, np.nan, np.nan]) - self.assertEqual(idx.get_loc(1), 1) + assert idx.get_loc(1) == 1 pytest.raises(KeyError, idx.slice_locs, np.nan) def test_contains_nans(self): @@ -400,7 +400,7 @@ def test_astype_from_object(self): index = Index([1.0, np.nan, 0.2], dtype='object') result = index.astype(float) expected = Float64Index([1.0, np.nan, 0.2]) - self.assertEqual(result.dtype, expected.dtype) + assert result.dtype == expected.dtype tm.assert_index_equal(result, expected) def test_fillna_float64(self): @@ -454,7 +454,7 @@ def test_view(self): i = self._holder([], name='Foo') i_view = i.view() - self.assertEqual(i_view.name, 'Foo') + assert i_view.name == 'Foo' i_view = i.view(self._dtype) tm.assert_index_equal(i, self._holder(i_view, name='Foo')) @@ -478,8 +478,8 @@ def test_is_monotonic(self): def test_logical_compat(self): idx = self.create_index() - 
self.assertEqual(idx.all(), idx.values.all()) - self.assertEqual(idx.any(), idx.values.any()) + assert idx.all() == idx.values.all() + assert idx.any() == idx.values.any() def test_identical(self): i = Index(self.index.copy()) @@ -546,12 +546,12 @@ def test_view_index(self): def test_prevent_casting(self): result = self.index.astype('O') - self.assertEqual(result.dtype, np.object_) + assert result.dtype == np.object_ def test_take_preserve_name(self): index = self._holder([1, 2, 3, 4], name='foo') taken = index.take([3, 0, 1]) - self.assertEqual(index.name, taken.name) + assert index.name == taken.name def test_take_fill_value(self): # see gh-12631 @@ -584,7 +584,7 @@ def test_take_fill_value(self): def test_slice_keep_name(self): idx = self._holder([1, 2], name='asdf') - self.assertEqual(idx.name, idx[1:].name) + assert idx.name == idx[1:].name def test_ufunc_coercions(self): idx = self._holder([1, 2, 3, 4, 5], name='x') @@ -666,7 +666,7 @@ def test_constructor(self): def test_constructor_corner(self): arr = np.array([1, 2, 3, 4], dtype=object) index = Int64Index(arr) - self.assertEqual(index.values.dtype, np.int64) + assert index.values.dtype == np.int64 tm.assert_index_equal(index, Index(arr)) # preventing casting diff --git a/pandas/tests/indexes/test_range.py b/pandas/tests/indexes/test_range.py index 49536be1aa57c..0379718b004e1 100644 --- a/pandas/tests/indexes/test_range.py +++ b/pandas/tests/indexes/test_range.py @@ -70,22 +70,22 @@ def test_constructor(self): index = RangeIndex(5) expected = np.arange(5, dtype=np.int64) assert isinstance(index, RangeIndex) - self.assertEqual(index._start, 0) - self.assertEqual(index._stop, 5) - self.assertEqual(index._step, 1) - self.assertEqual(index.name, None) + assert index._start == 0 + assert index._stop == 5 + assert index._step == 1 + assert index.name is None tm.assert_index_equal(Index(expected), index) index = RangeIndex(1, 5) expected = np.arange(1, 5, dtype=np.int64) assert isinstance(index, RangeIndex) - self.assertEqual(index._start, 1) + assert index._start == 1 tm.assert_index_equal(Index(expected), index) index = RangeIndex(1, 5, 2) expected = np.arange(1, 5, 2, dtype=np.int64) assert isinstance(index, RangeIndex) - self.assertEqual(index._step, 2) + assert index._step == 2 tm.assert_index_equal(Index(expected), index) msg = "RangeIndex\\(\\.\\.\\.\\) must be called with integers" @@ -96,9 +96,9 @@ def test_constructor(self): RangeIndex(0, 0)]: expected = np.empty(0, dtype=np.int64) assert isinstance(index, RangeIndex) - self.assertEqual(index._start, 0) - self.assertEqual(index._stop, 0) - self.assertEqual(index._step, 1) + assert index._start == 0 + assert index._stop == 0 + assert index._step == 1 tm.assert_index_equal(Index(expected), index) with tm.assert_raises_regex(TypeError, msg): @@ -109,7 +109,7 @@ def test_constructor(self): RangeIndex(stop=0, name='Foo'), RangeIndex(0, 0, name='Foo')]: assert isinstance(index, RangeIndex) - self.assertEqual(index.name, 'Foo') + assert index.name == 'Foo' # we don't allow on a bare Index pytest.raises(TypeError, lambda: Index(0, 1000)) @@ -246,7 +246,7 @@ def test_numeric_compat2(self): def test_constructor_corner(self): arr = np.array([1, 2, 3, 4], dtype=object) index = RangeIndex(1, 5) - self.assertEqual(index.values.dtype, np.int64) + assert index.values.dtype == np.int64 tm.assert_index_equal(index, Index(arr)) # non-int raise Exception @@ -261,10 +261,10 @@ def test_copy(self): i_copy = i.copy() assert i_copy is not i assert i_copy.identical(i) - self.assertEqual(i_copy._start, 
0) - self.assertEqual(i_copy._stop, 5) - self.assertEqual(i_copy._step, 1) - self.assertEqual(i_copy.name, 'Foo') + assert i_copy._start == 0 + assert i_copy._stop == 5 + assert i_copy._step == 1 + assert i_copy.name == 'Foo' def test_repr(self): i = RangeIndex(5, name='Foo') @@ -281,7 +281,7 @@ def test_repr(self): i = RangeIndex(5, 0, -1) result = repr(i) expected = "RangeIndex(start=5, stop=0, step=-1)" - self.assertEqual(result, expected) + assert result == expected result = eval(result) tm.assert_index_equal(result, i, exact=True) @@ -300,12 +300,12 @@ def test_delete(self): expected = idx[1:].astype(int) result = idx.delete(0) tm.assert_index_equal(result, expected) - self.assertEqual(result.name, expected.name) + assert result.name == expected.name expected = idx[:-1].astype(int) result = idx.delete(-1) tm.assert_index_equal(result, expected) - self.assertEqual(result.name, expected.name) + assert result.name == expected.name with pytest.raises((IndexError, ValueError)): # either depending on numpy version @@ -316,7 +316,7 @@ def test_view(self): i = RangeIndex(0, name='Foo') i_view = i.view() - self.assertEqual(i_view.name, 'Foo') + assert i_view.name == 'Foo' i_view = i.view('i8') tm.assert_numpy_array_equal(i.values, i_view) @@ -325,7 +325,7 @@ def test_view(self): tm.assert_index_equal(i, i_view) def test_dtype(self): - self.assertEqual(self.index.dtype, np.int64) + assert self.index.dtype == np.int64 def test_is_monotonic(self): assert self.index.is_monotonic @@ -362,8 +362,8 @@ def test_equals_range(self): def test_logical_compat(self): idx = self.create_index() - self.assertEqual(idx.all(), idx.values.all()) - self.assertEqual(idx.any(), idx.values.any()) + assert idx.all() == idx.values.all() + assert idx.any() == idx.values.any() def test_identical(self): i = Index(self.index.copy()) @@ -636,7 +636,7 @@ def test_intersect_str_dates(self): i2 = Index(['aa'], dtype=object) res = i2.intersection(i1) - self.assertEqual(len(res), 0) + assert len(res) == 0 def test_union_noncomparable(self): from datetime import datetime, timedelta @@ -692,7 +692,7 @@ def test_nbytes(self): # constant memory usage i2 = RangeIndex(0, 10) - self.assertEqual(i.nbytes, i2.nbytes) + assert i.nbytes == i2.nbytes def test_cant_or_shouldnt_cast(self): # can't @@ -706,12 +706,12 @@ def test_view_Index(self): def test_prevent_casting(self): result = self.index.astype('O') - self.assertEqual(result.dtype, np.object_) + assert result.dtype == np.object_ def test_take_preserve_name(self): index = RangeIndex(1, 5, name='foo') taken = index.take([3, 0, 1]) - self.assertEqual(index.name, taken.name) + assert index.name == taken.name def test_take_fill_value(self): # GH 12631 @@ -751,7 +751,7 @@ def test_repr_roundtrip(self): def test_slice_keep_name(self): idx = RangeIndex(1, 2, name='asdf') - self.assertEqual(idx.name, idx[1:].name) + assert idx.name == idx[1:].name def test_explicit_conversions(self): @@ -794,48 +794,48 @@ def test_ufunc_compat(self): def test_extended_gcd(self): result = self.index._extended_gcd(6, 10) - self.assertEqual(result[0], result[1] * 6 + result[2] * 10) - self.assertEqual(2, result[0]) + assert result[0] == result[1] * 6 + result[2] * 10 + assert 2 == result[0] result = self.index._extended_gcd(10, 6) - self.assertEqual(2, result[1] * 10 + result[2] * 6) - self.assertEqual(2, result[0]) + assert 2 == result[1] * 10 + result[2] * 6 + assert 2 == result[0] def test_min_fitting_element(self): result = RangeIndex(0, 20, 2)._min_fitting_element(1) - self.assertEqual(2, result) + assert 2 
== result result = RangeIndex(1, 6)._min_fitting_element(1) - self.assertEqual(1, result) + assert 1 == result result = RangeIndex(18, -2, -2)._min_fitting_element(1) - self.assertEqual(2, result) + assert 2 == result result = RangeIndex(5, 0, -1)._min_fitting_element(1) - self.assertEqual(1, result) + assert 1 == result big_num = 500000000000000000000000 result = RangeIndex(5, big_num * 2, 1)._min_fitting_element(big_num) - self.assertEqual(big_num, result) + assert big_num == result def test_max_fitting_element(self): result = RangeIndex(0, 20, 2)._max_fitting_element(17) - self.assertEqual(16, result) + assert 16 == result result = RangeIndex(1, 6)._max_fitting_element(4) - self.assertEqual(4, result) + assert 4 == result result = RangeIndex(18, -2, -2)._max_fitting_element(17) - self.assertEqual(16, result) + assert 16 == result result = RangeIndex(5, 0, -1)._max_fitting_element(4) - self.assertEqual(4, result) + assert 4 == result big_num = 500000000000000000000000 result = RangeIndex(5, big_num * 2, 1)._max_fitting_element(big_num) - self.assertEqual(big_num, result) + assert big_num == result def test_pickle_compat_construction(self): # RangeIndex() is a valid constructor @@ -846,11 +846,11 @@ def test_slice_specialised(self): # scalar indexing res = self.index[1] expected = 2 - self.assertEqual(res, expected) + assert res == expected res = self.index[-1] expected = 18 - self.assertEqual(res, expected) + assert res == expected # slicing # slice value completion @@ -903,19 +903,19 @@ def test_len_specialised(self): arr = np.arange(0, 5, step) i = RangeIndex(0, 5, step) - self.assertEqual(len(i), len(arr)) + assert len(i) == len(arr) i = RangeIndex(5, 0, step) - self.assertEqual(len(i), 0) + assert len(i) == 0 for step in np.arange(-6, -1, 1): arr = np.arange(5, 0, step) i = RangeIndex(5, 0, step) - self.assertEqual(len(i), len(arr)) + assert len(i) == len(arr) i = RangeIndex(0, 5, step) - self.assertEqual(len(i), 0) + assert len(i) == 0 def test_where(self): i = self.create_index() diff --git a/pandas/tests/indexes/timedeltas/test_construction.py b/pandas/tests/indexes/timedeltas/test_construction.py index 6681a03a3b271..bdaa62c5ce221 100644 --- a/pandas/tests/indexes/timedeltas/test_construction.py +++ b/pandas/tests/indexes/timedeltas/test_construction.py @@ -81,8 +81,8 @@ def test_constructor_coverage(self): def test_constructor_name(self): idx = TimedeltaIndex(start='1 days', periods=1, freq='D', name='TEST') - self.assertEqual(idx.name, 'TEST') + assert idx.name == 'TEST' # GH10025 idx2 = TimedeltaIndex(idx, name='something else') - self.assertEqual(idx2.name, 'something else') + assert idx2.name == 'something else' diff --git a/pandas/tests/indexes/timedeltas/test_indexing.py b/pandas/tests/indexes/timedeltas/test_indexing.py index 58b83dde5f402..6ffe3516c4a94 100644 --- a/pandas/tests/indexes/timedeltas/test_indexing.py +++ b/pandas/tests/indexes/timedeltas/test_indexing.py @@ -76,8 +76,8 @@ def test_delete(self): for n, expected in compat.iteritems(cases): result = idx.delete(n) tm.assert_index_equal(result, expected) - self.assertEqual(result.name, expected.name) - self.assertEqual(result.freq, expected.freq) + assert result.name == expected.name + assert result.freq == expected.freq with pytest.raises((IndexError, ValueError)): # either depeidnig on numpy version @@ -103,10 +103,10 @@ def test_delete_slice(self): for n, expected in compat.iteritems(cases): result = idx.delete(n) tm.assert_index_equal(result, expected) - self.assertEqual(result.name, expected.name) - 
self.assertEqual(result.freq, expected.freq) + assert result.name == expected.name + assert result.freq == expected.freq result = idx.delete(slice(n[0], n[-1] + 1)) tm.assert_index_equal(result, expected) - self.assertEqual(result.name, expected.name) - self.assertEqual(result.freq, expected.freq) + assert result.name == expected.name + assert result.freq == expected.freq diff --git a/pandas/tests/indexes/timedeltas/test_ops.py b/pandas/tests/indexes/timedeltas/test_ops.py index feaec50264872..474dd283530c5 100644 --- a/pandas/tests/indexes/timedeltas/test_ops.py +++ b/pandas/tests/indexes/timedeltas/test_ops.py @@ -35,10 +35,10 @@ def test_asobject_tolist(self): result = idx.asobject assert isinstance(result, Index) - self.assertEqual(result.dtype, object) + assert result.dtype == object tm.assert_index_equal(result, expected) - self.assertEqual(result.name, expected.name) - self.assertEqual(idx.tolist(), expected_list) + assert result.name == expected.name + assert idx.tolist() == expected_list idx = TimedeltaIndex([timedelta(days=1), timedelta(days=2), pd.NaT, timedelta(days=4)], name='idx') @@ -47,10 +47,10 @@ def test_asobject_tolist(self): expected = pd.Index(expected_list, dtype=object, name='idx') result = idx.asobject assert isinstance(result, Index) - self.assertEqual(result.dtype, object) + assert result.dtype == object tm.assert_index_equal(result, expected) - self.assertEqual(result.name, expected.name) - self.assertEqual(idx.tolist(), expected_list) + assert result.name == expected.name + assert idx.tolist() == expected_list def test_minmax(self): @@ -63,10 +63,10 @@ def test_minmax(self): assert not idx2.is_monotonic for idx in [idx1, idx2]: - self.assertEqual(idx.min(), Timedelta('1 days')), - self.assertEqual(idx.max(), Timedelta('3 days')), - self.assertEqual(idx.argmin(), 0) - self.assertEqual(idx.argmax(), 2) + assert idx.min() == Timedelta('1 days') + assert idx.max() == Timedelta('3 days') + assert idx.argmin() == 0 + assert idx.argmax() == 2 for op in ['min', 'max']: # Return NaT @@ -83,15 +83,15 @@ def test_numpy_minmax(self): dr = pd.date_range(start='2016-01-15', end='2016-01-20') td = TimedeltaIndex(np.asarray(dr)) - self.assertEqual(np.min(td), Timedelta('16815 days')) - self.assertEqual(np.max(td), Timedelta('16820 days')) + assert np.min(td) == Timedelta('16815 days') + assert np.max(td) == Timedelta('16820 days') errmsg = "the 'out' parameter is not supported" tm.assert_raises_regex(ValueError, errmsg, np.min, td, out=0) tm.assert_raises_regex(ValueError, errmsg, np.max, td, out=0) - self.assertEqual(np.argmin(td), 0) - self.assertEqual(np.argmax(td), 5) + assert np.argmin(td) == 0 + assert np.argmax(td) == 5 if not _np_version_under1p10: errmsg = "the 'out' parameter is not supported" @@ -114,7 +114,7 @@ def test_round(self): expected_elt = expected_rng[1] tm.assert_index_equal(td.round(freq='H'), expected_rng) - self.assertEqual(elt.round(freq='H'), expected_elt) + assert elt.round(freq='H') == expected_elt msg = pd.tseries.frequencies._INVALID_FREQ_ERROR with tm.assert_raises_regex(ValueError, msg): @@ -152,7 +152,7 @@ def test_representation(self): [exp1, exp2, exp3, exp4, exp5]): for func in ['__repr__', '__unicode__', '__str__']: result = getattr(idx, func)() - self.assertEqual(result, expected) + assert result == expected def test_representation_to_series(self): idx1 = TimedeltaIndex([], freq='D') @@ -184,7 +184,7 @@ def test_representation_to_series(self): for idx, expected in zip([idx1, idx2, idx3, idx4, idx5], [exp1, exp2, exp3, exp4, exp5]): result 
= repr(pd.Series(idx)) - self.assertEqual(result, expected) + assert result == expected def test_summary(self): # GH9116 @@ -212,7 +212,7 @@ def test_summary(self): for idx, expected in zip([idx1, idx2, idx3, idx4, idx5], [exp1, exp2, exp3, exp4, exp5]): result = idx.summary() - self.assertEqual(result, expected) + assert result == expected def test_add_iadd(self): @@ -355,7 +355,7 @@ def test_subtraction_ops_with_tz(self): td = Timedelta('1 days') def _check(result, expected): - self.assertEqual(result, expected) + assert result == expected assert isinstance(result, Timedelta) # scalars @@ -491,11 +491,11 @@ def test_addition_ops(self): result = dt + td expected = Timestamp('20130102') - self.assertEqual(result, expected) + assert result == expected result = td + dt expected = Timestamp('20130102') - self.assertEqual(result, expected) + assert result == expected def test_comp_nat(self): left = pd.TimedeltaIndex([pd.Timedelta('1 days'), pd.NaT, @@ -582,25 +582,25 @@ def test_order(self): for idx in [idx1, idx2]: ordered = idx.sort_values() tm.assert_index_equal(ordered, idx) - self.assertEqual(ordered.freq, idx.freq) + assert ordered.freq == idx.freq ordered = idx.sort_values(ascending=False) expected = idx[::-1] tm.assert_index_equal(ordered, expected) - self.assertEqual(ordered.freq, expected.freq) - self.assertEqual(ordered.freq.n, -1) + assert ordered.freq == expected.freq + assert ordered.freq.n == -1 ordered, indexer = idx.sort_values(return_indexer=True) tm.assert_index_equal(ordered, idx) tm.assert_numpy_array_equal(indexer, np.array([0, 1, 2]), check_dtype=False) - self.assertEqual(ordered.freq, idx.freq) + assert ordered.freq == idx.freq ordered, indexer = idx.sort_values(return_indexer=True, ascending=False) tm.assert_index_equal(ordered, idx[::-1]) - self.assertEqual(ordered.freq, expected.freq) - self.assertEqual(ordered.freq.n, -1) + assert ordered.freq == expected.freq + assert ordered.freq.n == -1 idx1 = TimedeltaIndex(['1 hour', '3 hour', '5 hour', '2 hour ', '1 hour'], name='idx1') @@ -648,39 +648,39 @@ def test_getitem(self): for idx in [idx1]: result = idx[0] - self.assertEqual(result, pd.Timedelta('1 day')) + assert result == pd.Timedelta('1 day') result = idx[0:5] expected = pd.timedelta_range('1 day', '5 day', freq='D', name='idx') tm.assert_index_equal(result, expected) - self.assertEqual(result.freq, expected.freq) + assert result.freq == expected.freq result = idx[0:10:2] expected = pd.timedelta_range('1 day', '9 day', freq='2D', name='idx') tm.assert_index_equal(result, expected) - self.assertEqual(result.freq, expected.freq) + assert result.freq == expected.freq result = idx[-20:-5:3] expected = pd.timedelta_range('12 day', '24 day', freq='3D', name='idx') tm.assert_index_equal(result, expected) - self.assertEqual(result.freq, expected.freq) + assert result.freq == expected.freq result = idx[4::-1] expected = TimedeltaIndex(['5 day', '4 day', '3 day', '2 day', '1 day'], freq='-1D', name='idx') tm.assert_index_equal(result, expected) - self.assertEqual(result.freq, expected.freq) + assert result.freq == expected.freq def test_drop_duplicates_metadata(self): # GH 10115 idx = pd.timedelta_range('1 day', '31 day', freq='D', name='idx') result = idx.drop_duplicates() tm.assert_index_equal(idx, result) - self.assertEqual(idx.freq, result.freq) + assert idx.freq == result.freq idx_dup = idx.append(idx) assert idx_dup.freq is None # freq is reset @@ -715,28 +715,28 @@ def test_take(self): for idx in [idx1]: result = idx.take([0]) - self.assertEqual(result, 
pd.Timedelta('1 day')) + assert result == pd.Timedelta('1 day') result = idx.take([-1]) - self.assertEqual(result, pd.Timedelta('31 day')) + assert result == pd.Timedelta('31 day') result = idx.take([0, 1, 2]) expected = pd.timedelta_range('1 day', '3 day', freq='D', name='idx') tm.assert_index_equal(result, expected) - self.assertEqual(result.freq, expected.freq) + assert result.freq == expected.freq result = idx.take([0, 2, 4]) expected = pd.timedelta_range('1 day', '5 day', freq='2D', name='idx') tm.assert_index_equal(result, expected) - self.assertEqual(result.freq, expected.freq) + assert result.freq == expected.freq result = idx.take([7, 4, 1]) expected = pd.timedelta_range('8 day', '2 day', freq='-3D', name='idx') tm.assert_index_equal(result, expected) - self.assertEqual(result.freq, expected.freq) + assert result.freq == expected.freq result = idx.take([3, 2, 5]) expected = TimedeltaIndex(['4 day', '3 day', '6 day'], name='idx') @@ -771,7 +771,7 @@ def test_infer_freq(self): idx = pd.timedelta_range('1', freq=freq, periods=10) result = pd.TimedeltaIndex(idx.asi8, freq='infer') tm.assert_index_equal(idx, result) - self.assertEqual(result.freq, freq) + assert result.freq == freq def test_nat_new(self): @@ -867,27 +867,27 @@ class TestTimedeltas(tm.TestCase): def test_ops(self): td = Timedelta(10, unit='d') - self.assertEqual(-td, Timedelta(-10, unit='d')) - self.assertEqual(+td, Timedelta(10, unit='d')) - self.assertEqual(td - td, Timedelta(0, unit='ns')) + assert -td == Timedelta(-10, unit='d') + assert +td == Timedelta(10, unit='d') + assert td - td == Timedelta(0, unit='ns') assert (td - pd.NaT) is pd.NaT - self.assertEqual(td + td, Timedelta(20, unit='d')) + assert td + td == Timedelta(20, unit='d') assert (td + pd.NaT) is pd.NaT - self.assertEqual(td * 2, Timedelta(20, unit='d')) + assert td * 2 == Timedelta(20, unit='d') assert (td * pd.NaT) is pd.NaT - self.assertEqual(td / 2, Timedelta(5, unit='d')) - self.assertEqual(td // 2, Timedelta(5, unit='d')) - self.assertEqual(abs(td), td) - self.assertEqual(abs(-td), td) - self.assertEqual(td / td, 1) + assert td / 2 == Timedelta(5, unit='d') + assert td // 2 == Timedelta(5, unit='d') + assert abs(td) == td + assert abs(-td) == td + assert td / td == 1 assert (td / pd.NaT) is np.nan assert (td // pd.NaT) is np.nan # invert - self.assertEqual(-td, Timedelta('-10d')) - self.assertEqual(td * -1, Timedelta('-10d')) - self.assertEqual(-1 * td, Timedelta('-10d')) - self.assertEqual(abs(-td), Timedelta('10d')) + assert -td == Timedelta('-10d') + assert td * -1 == Timedelta('-10d') + assert -1 * td == Timedelta('-10d') + assert abs(-td) == Timedelta('10d') # invalid multiply with another timedelta pytest.raises(TypeError, lambda: td * td) @@ -898,12 +898,12 @@ def test_ops(self): def test_ops_offsets(self): td = Timedelta(10, unit='d') - self.assertEqual(Timedelta(241, unit='h'), td + pd.offsets.Hour(1)) - self.assertEqual(Timedelta(241, unit='h'), pd.offsets.Hour(1) + td) - self.assertEqual(240, td / pd.offsets.Hour(1)) - self.assertEqual(1 / 240.0, pd.offsets.Hour(1) / td) - self.assertEqual(Timedelta(239, unit='h'), td - pd.offsets.Hour(1)) - self.assertEqual(Timedelta(-239, unit='h'), pd.offsets.Hour(1) - td) + assert Timedelta(241, unit='h') == td + pd.offsets.Hour(1) + assert Timedelta(241, unit='h') == pd.offsets.Hour(1) + td + assert 240 == td / pd.offsets.Hour(1) + assert 1 / 240.0 == pd.offsets.Hour(1) / td + assert Timedelta(239, unit='h') == td - pd.offsets.Hour(1) + assert Timedelta(-239, unit='h') == pd.offsets.Hour(1) - td 
def test_ops_ndarray(self): td = Timedelta('1 day') @@ -961,7 +961,7 @@ def test_ops_series_object(self): s = pd.Series([pd.Timestamp('2015-01-01', tz='US/Eastern'), pd.Timestamp('2015-01-01', tz='Asia/Tokyo')], name='xxx') - self.assertEqual(s.dtype, object) + assert s.dtype == object exp = pd.Series([pd.Timestamp('2015-01-02', tz='US/Eastern'), pd.Timestamp('2015-01-02', tz='Asia/Tokyo')], @@ -973,7 +973,7 @@ def test_ops_series_object(self): s2 = pd.Series([pd.Timestamp('2015-01-03', tz='US/Eastern'), pd.Timestamp('2015-01-05', tz='Asia/Tokyo')], name='xxx') - self.assertEqual(s2.dtype, object) + assert s2.dtype == object exp = pd.Series([pd.Timedelta('2 days'), pd.Timedelta('4 days')], name='xxx') tm.assert_series_equal(s2 - s, exp) @@ -981,7 +981,7 @@ def test_ops_series_object(self): s = pd.Series([pd.Timedelta('01:00:00'), pd.Timedelta('02:00:00')], name='xxx', dtype=object) - self.assertEqual(s.dtype, object) + assert s.dtype == object exp = pd.Series([pd.Timedelta('01:30:00'), pd.Timedelta('02:30:00')], name='xxx') @@ -1027,38 +1027,38 @@ def test_timedelta_ops(self): result = td.mean() expected = to_timedelta(timedelta(seconds=9)) - self.assertEqual(result, expected) + assert result == expected result = td.to_frame().mean() - self.assertEqual(result[0], expected) + assert result[0] == expected result = td.quantile(.1) expected = Timedelta(np.timedelta64(2600, 'ms')) - self.assertEqual(result, expected) + assert result == expected result = td.median() expected = to_timedelta('00:00:09') - self.assertEqual(result, expected) + assert result == expected result = td.to_frame().median() - self.assertEqual(result[0], expected) + assert result[0] == expected # GH 6462 # consistency in returned values for sum result = td.sum() expected = to_timedelta('00:01:21') - self.assertEqual(result, expected) + assert result == expected result = td.to_frame().sum() - self.assertEqual(result[0], expected) + assert result[0] == expected # std result = td.std() expected = to_timedelta(Series(td.dropna().values).std()) - self.assertEqual(result, expected) + assert result == expected result = td.to_frame().std() - self.assertEqual(result[0], expected) + assert result[0] == expected # invalid ops for op in ['skew', 'kurt', 'sem', 'prod']: @@ -1067,11 +1067,11 @@ def test_timedelta_ops(self): # GH 10040 # make sure NaT is properly handled by median() s = Series([Timestamp('2015-02-03'), Timestamp('2015-02-07')]) - self.assertEqual(s.diff().median(), timedelta(days=4)) + assert s.diff().median() == timedelta(days=4) s = Series([Timestamp('2015-02-03'), Timestamp('2015-02-07'), Timestamp('2015-02-15')]) - self.assertEqual(s.diff().median(), timedelta(days=6)) + assert s.diff().median() == timedelta(days=6) def test_timedelta_ops_scalar(self): # GH 6808 @@ -1084,10 +1084,10 @@ def test_timedelta_ops_scalar(self): np.timedelta64(10000000000, 'ns'), pd.offsets.Second(10)]: result = base + offset - self.assertEqual(result, expected_add) + assert result == expected_add result = base - offset - self.assertEqual(result, expected_sub) + assert result == expected_sub base = pd.to_datetime('20130102 09:01:12.123456') expected_add = pd.to_datetime('20130103 09:01:22.123456') @@ -1099,10 +1099,10 @@ def test_timedelta_ops_scalar(self): np.timedelta64(1, 'D') + np.timedelta64(10, 's'), pd.offsets.Day() + pd.offsets.Second(10)]: result = base + offset - self.assertEqual(result, expected_add) + assert result == expected_add result = base - offset - self.assertEqual(result, expected_sub) + assert result == expected_sub def 
test_timedelta_ops_with_missing_values(self): # setup @@ -1118,9 +1118,9 @@ def test_timedelta_ops_with_missing_values(self): NA = np.nan actual = scalar1 + scalar1 - self.assertEqual(actual, scalar2) + assert actual == scalar2 actual = scalar2 - scalar1 - self.assertEqual(actual, scalar1) + assert actual == scalar1 actual = s1 + s1 assert_series_equal(actual, s2) @@ -1217,27 +1217,27 @@ def test_tdi_ops_attributes(self): result = rng + 1 exp = timedelta_range('4 days', periods=5, freq='2D', name='x') tm.assert_index_equal(result, exp) - self.assertEqual(result.freq, '2D') + assert result.freq == '2D' result = rng - 2 exp = timedelta_range('-2 days', periods=5, freq='2D', name='x') tm.assert_index_equal(result, exp) - self.assertEqual(result.freq, '2D') + assert result.freq == '2D' result = rng * 2 exp = timedelta_range('4 days', periods=5, freq='4D', name='x') tm.assert_index_equal(result, exp) - self.assertEqual(result.freq, '4D') + assert result.freq == '4D' result = rng / 2 exp = timedelta_range('1 days', periods=5, freq='D', name='x') tm.assert_index_equal(result, exp) - self.assertEqual(result.freq, 'D') + assert result.freq == 'D' result = -rng exp = timedelta_range('-2 days', periods=5, freq='-2D', name='x') tm.assert_index_equal(result, exp) - self.assertEqual(result.freq, '-2D') + assert result.freq == '-2D' rng = pd.timedelta_range('-2 days', periods=5, freq='D', name='x') @@ -1245,7 +1245,7 @@ def test_tdi_ops_attributes(self): exp = TimedeltaIndex(['2 days', '1 days', '0 days', '1 days', '2 days'], name='x') tm.assert_index_equal(result, exp) - self.assertEqual(result.freq, None) + assert result.freq is None def test_add_overflow(self): # see gh-14068 diff --git a/pandas/tests/indexes/timedeltas/test_partial_slicing.py b/pandas/tests/indexes/timedeltas/test_partial_slicing.py index 230dbe91b4e34..5e6e1440a7c04 100644 --- a/pandas/tests/indexes/timedeltas/test_partial_slicing.py +++ b/pandas/tests/indexes/timedeltas/test_partial_slicing.py @@ -27,7 +27,7 @@ def test_partial_slice(self): assert_series_equal(result, expected) result = s['6 days, 23:11:12'] - self.assertEqual(result, s.iloc[133]) + assert result == s.iloc[133] pytest.raises(KeyError, s.__getitem__, '50 days') @@ -46,7 +46,7 @@ def test_partial_slice_high_reso(self): assert_series_equal(result, expected) result = s['1 days, 10:11:12.001001'] - self.assertEqual(result, s.iloc[1001]) + assert result == s.iloc[1001] def test_slice_with_negative_step(self): ts = Series(np.arange(20), timedelta_range('0', periods=20, freq='H')) diff --git a/pandas/tests/indexes/timedeltas/test_setops.py b/pandas/tests/indexes/timedeltas/test_setops.py index 45900788f7bda..8779f6d49cdd5 100644 --- a/pandas/tests/indexes/timedeltas/test_setops.py +++ b/pandas/tests/indexes/timedeltas/test_setops.py @@ -30,7 +30,7 @@ def test_union_coverage(self): result = ordered[:0].union(ordered) tm.assert_index_equal(result, ordered) - self.assertEqual(result.freq, ordered.freq) + assert result.freq == ordered.freq def test_union_bug_1730(self): @@ -66,7 +66,7 @@ def test_intersection_bug_1708(self): index_2 = index_1 + pd.offsets.Hour(5) result = index_1 & index_2 - self.assertEqual(len(result), 0) + assert len(result) == 0 index_1 = timedelta_range('1 day', periods=4, freq='h') index_2 = index_1 + pd.offsets.Hour(1) diff --git a/pandas/tests/indexes/timedeltas/test_timedelta.py b/pandas/tests/indexes/timedeltas/test_timedelta.py index 8a327d2ecb08f..d1379973dfec5 100644 --- a/pandas/tests/indexes/timedeltas/test_timedelta.py +++ 
b/pandas/tests/indexes/timedeltas/test_timedelta.py @@ -49,29 +49,30 @@ def test_get_loc(self): idx = pd.to_timedelta(['0 days', '1 days', '2 days']) for method in [None, 'pad', 'backfill', 'nearest']: - self.assertEqual(idx.get_loc(idx[1], method), 1) - self.assertEqual(idx.get_loc(idx[1].to_pytimedelta(), method), 1) - self.assertEqual(idx.get_loc(str(idx[1]), method), 1) + assert idx.get_loc(idx[1], method) == 1 + assert idx.get_loc(idx[1].to_pytimedelta(), method) == 1 + assert idx.get_loc(str(idx[1]), method) == 1 - self.assertEqual( - idx.get_loc(idx[1], 'pad', tolerance=pd.Timedelta(0)), 1) - self.assertEqual( - idx.get_loc(idx[1], 'pad', tolerance=np.timedelta64(0, 's')), 1) - self.assertEqual(idx.get_loc(idx[1], 'pad', tolerance=timedelta(0)), 1) + assert idx.get_loc(idx[1], 'pad', + tolerance=pd.Timedelta(0)) == 1 + assert idx.get_loc(idx[1], 'pad', + tolerance=np.timedelta64(0, 's')) == 1 + assert idx.get_loc(idx[1], 'pad', + tolerance=timedelta(0)) == 1 with tm.assert_raises_regex(ValueError, 'must be convertible'): idx.get_loc(idx[1], method='nearest', tolerance='foo') for method, loc in [('pad', 1), ('backfill', 2), ('nearest', 1)]: - self.assertEqual(idx.get_loc('1 day 1 hour', method), loc) + assert idx.get_loc('1 day 1 hour', method) == loc def test_get_loc_nat(self): tidx = TimedeltaIndex(['1 days 01:00:00', 'NaT', '2 days 01:00:00']) - self.assertEqual(tidx.get_loc(pd.NaT), 1) - self.assertEqual(tidx.get_loc(None), 1) - self.assertEqual(tidx.get_loc(float('nan')), 1) - self.assertEqual(tidx.get_loc(np.nan), 1) + assert tidx.get_loc(pd.NaT) == 1 + assert tidx.get_loc(None) == 1 + assert tidx.get_loc(float('nan')) == 1 + assert tidx.get_loc(np.nan) == 1 def test_get_indexer(self): idx = pd.to_timedelta(['0 days', '1 days', '2 days']) @@ -138,14 +139,14 @@ def test_ufunc_coercions(self): exp = TimedeltaIndex(['4H', '8H', '12H', '16H', '20H'], freq='4H', name='x') tm.assert_index_equal(result, exp) - self.assertEqual(result.freq, '4H') + assert result.freq == '4H' for result in [idx / 2, np.divide(idx, 2)]: assert isinstance(result, TimedeltaIndex) exp = TimedeltaIndex(['1H', '2H', '3H', '4H', '5H'], freq='H', name='x') tm.assert_index_equal(result, exp) - self.assertEqual(result.freq, 'H') + assert result.freq == 'H' idx = TimedeltaIndex(['2H', '4H', '6H', '8H', '10H'], freq='2H', name='x') @@ -154,7 +155,7 @@ def test_ufunc_coercions(self): exp = TimedeltaIndex(['-2H', '-4H', '-6H', '-8H', '-10H'], freq='-2H', name='x') tm.assert_index_equal(result, exp) - self.assertEqual(result.freq, '-2H') + assert result.freq == '-2H' idx = TimedeltaIndex(['-2H', '-1H', '0H', '1H', '2H'], freq='H', name='x') @@ -163,7 +164,7 @@ def test_ufunc_coercions(self): exp = TimedeltaIndex(['2H', '1H', '0H', '1H', '2H'], freq=None, name='x') tm.assert_index_equal(result, exp) - self.assertEqual(result.freq, None) + assert result.freq is None def test_fillna_timedelta(self): # GH 11343 @@ -209,7 +210,7 @@ def test_take(self): tm.assert_index_equal(taken, expected) assert isinstance(taken, TimedeltaIndex) assert taken.freq is None - self.assertEqual(taken.name, expected.name) + assert taken.name == expected.name def test_take_fill_value(self): # GH 12631 @@ -289,7 +290,7 @@ def test_slice_keeps_name(self): # GH4226 dr = pd.timedelta_range('1d', '5d', freq='H', name='timebucket') - self.assertEqual(dr[1:].name, dr.name) + assert dr[1:].name == dr.name def test_does_not_convert_mixed_integer(self): df = tm.makeCustomDataframe(10, 10, @@ -299,8 +300,8 @@ def test_does_not_convert_mixed_integer(self): 
cols = df.columns.join(df.index, how='outer') joined = cols.join(df.columns) - self.assertEqual(cols.dtype, np.dtype('O')) - self.assertEqual(cols.dtype, joined.dtype) + assert cols.dtype == np.dtype('O') + assert cols.dtype == joined.dtype tm.assert_index_equal(cols, joined) def test_sort_values(self): @@ -336,8 +337,8 @@ def test_get_duplicates(self): def test_argmin_argmax(self): idx = TimedeltaIndex(['1 day 00:00:05', '1 day 00:00:01', '1 day 00:00:02']) - self.assertEqual(idx.argmin(), 1) - self.assertEqual(idx.argmax(), 0) + assert idx.argmin() == 1 + assert idx.argmax() == 0 def test_misc_coverage(self): @@ -570,8 +571,8 @@ def test_timedelta(self): shifted = index + timedelta(1) back = shifted + timedelta(-1) assert tm.equalContents(index, back) - self.assertEqual(shifted.freq, index.freq) - self.assertEqual(shifted.freq, back.freq) + assert shifted.freq == index.freq + assert shifted.freq == back.freq result = index - timedelta(1) expected = index + timedelta(-1) diff --git a/pandas/tests/indexes/timedeltas/test_tools.py b/pandas/tests/indexes/timedeltas/test_tools.py index d69f78bfd73b1..faee627488dc0 100644 --- a/pandas/tests/indexes/timedeltas/test_tools.py +++ b/pandas/tests/indexes/timedeltas/test_tools.py @@ -20,16 +20,15 @@ def conv(v): d1 = np.timedelta64(1, 'D') - self.assertEqual(to_timedelta('1 days 06:05:01.00003', box=False), - conv(d1 + np.timedelta64(6 * 3600 + - 5 * 60 + 1, 's') + - np.timedelta64(30, 'us'))) - self.assertEqual(to_timedelta('15.5us', box=False), - conv(np.timedelta64(15500, 'ns'))) + assert (to_timedelta('1 days 06:05:01.00003', box=False) == + conv(d1 + np.timedelta64(6 * 3600 + 5 * 60 + 1, 's') + + np.timedelta64(30, 'us'))) + assert (to_timedelta('15.5us', box=False) == + conv(np.timedelta64(15500, 'ns'))) # empty string result = to_timedelta('', box=False) - self.assertEqual(result.astype('int64'), iNaT) + assert result.astype('int64') == iNaT result = to_timedelta(['', '']) assert isnull(result).all() @@ -42,7 +41,7 @@ def conv(v): # ints result = np.timedelta64(0, 'ns') expected = to_timedelta(0, box=False) - self.assertEqual(result, expected) + assert result == expected # Series expected = Series([timedelta(days=1), timedelta(days=1, seconds=1)]) @@ -59,12 +58,12 @@ def conv(v): v = timedelta(seconds=1) result = to_timedelta(v, box=False) expected = np.timedelta64(timedelta(seconds=1)) - self.assertEqual(result, expected) + assert result == expected v = np.timedelta64(timedelta(seconds=1)) result = to_timedelta(v, box=False) expected = np.timedelta64(timedelta(seconds=1)) - self.assertEqual(result, expected) + assert result == expected # arrays of various dtypes arr = np.array([1] * 5, dtype='int64') @@ -134,8 +133,7 @@ def test_to_timedelta_invalid(self): # gh-13613: these should not error because errors='ignore' invalid_data = 'apple' - self.assertEqual(invalid_data, to_timedelta( - invalid_data, errors='ignore')) + assert invalid_data == to_timedelta(invalid_data, errors='ignore') invalid_data = ['apple', '1 days'] tm.assert_numpy_array_equal( @@ -172,32 +170,32 @@ def test_to_timedelta_on_missing_values(self): assert_series_equal(actual, expected) actual = pd.to_timedelta(np.nan) - self.assertEqual(actual.value, timedelta_NaT.astype('int64')) + assert actual.value == timedelta_NaT.astype('int64') actual = pd.to_timedelta(pd.NaT) - self.assertEqual(actual.value, timedelta_NaT.astype('int64')) + assert actual.value == timedelta_NaT.astype('int64') def test_to_timedelta_on_nanoseconds(self): # GH 9273 result = Timedelta(nanoseconds=100) 
expected = Timedelta('100ns') - self.assertEqual(result, expected) + assert result == expected result = Timedelta(days=1, hours=1, minutes=1, weeks=1, seconds=1, milliseconds=1, microseconds=1, nanoseconds=1) expected = Timedelta(694861001001001) - self.assertEqual(result, expected) + assert result == expected result = Timedelta(microseconds=1) + Timedelta(nanoseconds=1) expected = Timedelta('1us1ns') - self.assertEqual(result, expected) + assert result == expected result = Timedelta(microseconds=1) - Timedelta(nanoseconds=1) expected = Timedelta('999ns') - self.assertEqual(result, expected) + assert result == expected result = Timedelta(microseconds=1) + 5 * Timedelta(nanoseconds=-2) expected = Timedelta('990ns') - self.assertEqual(result, expected) + assert result == expected pytest.raises(TypeError, lambda: Timedelta(nanoseconds='abc')) diff --git a/pandas/tests/indexing/common.py b/pandas/tests/indexing/common.py index b555a9c1fd0df..bd5b7f45a6f4c 100644 --- a/pandas/tests/indexing/common.py +++ b/pandas/tests/indexing/common.py @@ -201,7 +201,7 @@ def _print(result, error=None): try: if is_scalar(rs) and is_scalar(xp): - self.assertEqual(rs, xp) + assert rs == xp elif xp.ndim == 1: tm.assert_series_equal(rs, xp) elif xp.ndim == 2: diff --git a/pandas/tests/indexing/test_callable.py b/pandas/tests/indexing/test_callable.py index 1d70205076b86..727c87ac90872 100644 --- a/pandas/tests/indexing/test_callable.py +++ b/pandas/tests/indexing/test_callable.py @@ -59,10 +59,10 @@ def test_frame_loc_ix_callable(self): # scalar res = df.loc[lambda x: 1, lambda x: 'A'] - self.assertEqual(res, df.loc[1, 'A']) + assert res == df.loc[1, 'A'] res = df.loc[lambda x: 1, lambda x: 'A'] - self.assertEqual(res, df.loc[1, 'A']) + assert res == df.loc[1, 'A'] def test_frame_loc_ix_callable_mixture(self): # GH 11485 diff --git a/pandas/tests/indexing/test_chaining_and_caching.py b/pandas/tests/indexing/test_chaining_and_caching.py index b776d3c2d08ea..c39876a8c6e44 100644 --- a/pandas/tests/indexing/test_chaining_and_caching.py +++ b/pandas/tests/indexing/test_chaining_and_caching.py @@ -50,8 +50,8 @@ def test_setitem_cache_updating(self): # set it df.loc[7, 'c'] = 1 - self.assertEqual(df.loc[0, 'c'], 0.0) - self.assertEqual(df.loc[7, 'c'], 1.0) + assert df.loc[0, 'c'] == 0.0 + assert df.loc[7, 'c'] == 1.0 # GH 7084 # not updating cache on series setting with slices @@ -395,12 +395,12 @@ def test_cache_updating(self): # but actually works, since everything is a view df.loc[0]['z'].iloc[0] = 1. 
result = df.loc[(0, 0), 'z'] - self.assertEqual(result, 1) + assert result == 1 # correct setting df.loc[(0, 0), 'z'] = 2 result = df.loc[(0, 0), 'z'] - self.assertEqual(result, 2) + assert result == 2 # 10264 df = DataFrame(np.zeros((5, 5), dtype='int64'), columns=[ diff --git a/pandas/tests/indexing/test_coercion.py b/pandas/tests/indexing/test_coercion.py index b8030d84e7929..56bc8c1d72bb8 100644 --- a/pandas/tests/indexing/test_coercion.py +++ b/pandas/tests/indexing/test_coercion.py @@ -31,8 +31,8 @@ def _assert(self, left, right, dtype): tm.assert_index_equal(left, right) else: raise NotImplementedError - self.assertEqual(left.dtype, dtype) - self.assertEqual(right.dtype, dtype) + assert left.dtype == dtype + assert right.dtype == dtype def test_has_comprehensive_tests(self): for klass in self.klasses: @@ -55,7 +55,7 @@ def _assert_setitem_series_conversion(self, original_series, loc_value, temp[1] = loc_value tm.assert_series_equal(temp, expected_series) # check dtype explicitly for sure - self.assertEqual(temp.dtype, expected_dtype) + assert temp.dtype == expected_dtype # .loc works different rule, temporary disable # temp = original_series.copy() @@ -64,7 +64,7 @@ def _assert_setitem_series_conversion(self, original_series, loc_value, def test_setitem_series_object(self): obj = pd.Series(list('abcd')) - self.assertEqual(obj.dtype, np.object) + assert obj.dtype == np.object # object + int -> object exp = pd.Series(['a', 1, 'c', 'd']) @@ -84,7 +84,7 @@ def test_setitem_series_object(self): def test_setitem_series_int64(self): obj = pd.Series([1, 2, 3, 4]) - self.assertEqual(obj.dtype, np.int64) + assert obj.dtype == np.int64 # int + int -> int exp = pd.Series([1, 1, 3, 4]) @@ -93,7 +93,7 @@ def test_setitem_series_int64(self): # int + float -> float # TODO_GH12747 The result must be float # tm.assert_series_equal(temp, pd.Series([1, 1.1, 3, 4])) - # self.assertEqual(temp.dtype, np.float64) + # assert temp.dtype == np.float64 exp = pd.Series([1, 1, 3, 4]) self._assert_setitem_series_conversion(obj, 1.1, exp, np.int64) @@ -107,7 +107,7 @@ def test_setitem_series_int64(self): def test_setitem_series_float64(self): obj = pd.Series([1.1, 2.2, 3.3, 4.4]) - self.assertEqual(obj.dtype, np.float64) + assert obj.dtype == np.float64 # float + int -> float exp = pd.Series([1.1, 1.0, 3.3, 4.4]) @@ -128,7 +128,7 @@ def test_setitem_series_float64(self): def test_setitem_series_complex128(self): obj = pd.Series([1 + 1j, 2 + 2j, 3 + 3j, 4 + 4j]) - self.assertEqual(obj.dtype, np.complex128) + assert obj.dtype == np.complex128 # complex + int -> complex exp = pd.Series([1 + 1j, 1, 3 + 3j, 4 + 4j]) @@ -148,33 +148,33 @@ def test_setitem_series_complex128(self): def test_setitem_series_bool(self): obj = pd.Series([True, False, True, False]) - self.assertEqual(obj.dtype, np.bool) + assert obj.dtype == np.bool # bool + int -> int # TODO_GH12747 The result must be int # tm.assert_series_equal(temp, pd.Series([1, 1, 1, 0])) - # self.assertEqual(temp.dtype, np.int64) + # assert temp.dtype == np.int64 exp = pd.Series([True, True, True, False]) self._assert_setitem_series_conversion(obj, 1, exp, np.bool) # TODO_GH12747 The result must be int # assigning int greater than bool # tm.assert_series_equal(temp, pd.Series([1, 3, 1, 0])) - # self.assertEqual(temp.dtype, np.int64) + # assert temp.dtype == np.int64 exp = pd.Series([True, True, True, False]) self._assert_setitem_series_conversion(obj, 3, exp, np.bool) # bool + float -> float # TODO_GH12747 The result must be float # tm.assert_series_equal(temp, 
pd.Series([1., 1.1, 1., 0.])) - # self.assertEqual(temp.dtype, np.float64) + # assert temp.dtype == np.float64 exp = pd.Series([True, True, True, False]) self._assert_setitem_series_conversion(obj, 1.1, exp, np.bool) # bool + complex -> complex (buggy, results in bool) # TODO_GH12747 The result must be complex # tm.assert_series_equal(temp, pd.Series([1, 1 + 1j, 1, 0])) - # self.assertEqual(temp.dtype, np.complex128) + # assert temp.dtype == np.complex128 exp = pd.Series([True, True, True, False]) self._assert_setitem_series_conversion(obj, 1 + 1j, exp, np.bool) @@ -187,7 +187,7 @@ def test_setitem_series_datetime64(self): pd.Timestamp('2011-01-02'), pd.Timestamp('2011-01-03'), pd.Timestamp('2011-01-04')]) - self.assertEqual(obj.dtype, 'datetime64[ns]') + assert obj.dtype == 'datetime64[ns]' # datetime64 + datetime64 -> datetime64 exp = pd.Series([pd.Timestamp('2011-01-01'), @@ -213,7 +213,7 @@ def test_setitem_series_datetime64tz(self): pd.Timestamp('2011-01-02', tz=tz), pd.Timestamp('2011-01-03', tz=tz), pd.Timestamp('2011-01-04', tz=tz)]) - self.assertEqual(obj.dtype, 'datetime64[ns, US/Eastern]') + assert obj.dtype == 'datetime64[ns, US/Eastern]' # datetime64tz + datetime64tz -> datetime64tz exp = pd.Series([pd.Timestamp('2011-01-01', tz=tz), @@ -249,18 +249,18 @@ def _assert_setitem_index_conversion(self, original_series, loc_key, exp = pd.Series([1, 2, 3, 4, 5], index=expected_index) tm.assert_series_equal(temp, exp) # check dtype explicitly for sure - self.assertEqual(temp.index.dtype, expected_dtype) + assert temp.index.dtype == expected_dtype temp = original_series.copy() temp.loc[loc_key] = 5 exp = pd.Series([1, 2, 3, 4, 5], index=expected_index) tm.assert_series_equal(temp, exp) # check dtype explicitly for sure - self.assertEqual(temp.index.dtype, expected_dtype) + assert temp.index.dtype == expected_dtype def test_setitem_index_object(self): obj = pd.Series([1, 2, 3, 4], index=list('abcd')) - self.assertEqual(obj.index.dtype, np.object) + assert obj.index.dtype == np.object # object + object -> object exp_index = pd.Index(list('abcdx')) @@ -278,7 +278,7 @@ def test_setitem_index_object(self): def test_setitem_index_int64(self): # tests setitem with non-existing numeric key obj = pd.Series([1, 2, 3, 4]) - self.assertEqual(obj.index.dtype, np.int64) + assert obj.index.dtype == np.int64 # int + int -> int exp_index = pd.Index([0, 1, 2, 3, 5]) @@ -295,7 +295,7 @@ def test_setitem_index_int64(self): def test_setitem_index_float64(self): # tests setitem with non-existing numeric key obj = pd.Series([1, 2, 3, 4], index=[1.1, 2.1, 3.1, 4.1]) - self.assertEqual(obj.index.dtype, np.float64) + assert obj.index.dtype == np.float64 # float + int -> int temp = obj.copy() @@ -341,11 +341,11 @@ def _assert_insert_conversion(self, original, value, target = original.copy() res = target.insert(1, value) tm.assert_index_equal(res, expected) - self.assertEqual(res.dtype, expected_dtype) + assert res.dtype == expected_dtype def test_insert_index_object(self): obj = pd.Index(list('abcd')) - self.assertEqual(obj.dtype, np.object) + assert obj.dtype == np.object # object + int -> object exp = pd.Index(['a', 1, 'b', 'c', 'd']) @@ -358,7 +358,7 @@ def test_insert_index_object(self): # object + bool -> object res = obj.insert(1, False) tm.assert_index_equal(res, pd.Index(['a', False, 'b', 'c', 'd'])) - self.assertEqual(res.dtype, np.object) + assert res.dtype == np.object # object + object -> object exp = pd.Index(['a', 'x', 'b', 'c', 'd']) @@ -366,7 +366,7 @@ def test_insert_index_object(self): def 
test_insert_index_int64(self): obj = pd.Int64Index([1, 2, 3, 4]) - self.assertEqual(obj.dtype, np.int64) + assert obj.dtype == np.int64 # int + int -> int exp = pd.Index([1, 1, 2, 3, 4]) @@ -386,7 +386,7 @@ def test_insert_index_int64(self): def test_insert_index_float64(self): obj = pd.Float64Index([1., 2., 3., 4.]) - self.assertEqual(obj.dtype, np.float64) + assert obj.dtype == np.float64 # float + int -> int exp = pd.Index([1., 1., 2., 3., 4.]) @@ -413,7 +413,7 @@ def test_insert_index_bool(self): def test_insert_index_datetime64(self): obj = pd.DatetimeIndex(['2011-01-01', '2011-01-02', '2011-01-03', '2011-01-04']) - self.assertEqual(obj.dtype, 'datetime64[ns]') + assert obj.dtype == 'datetime64[ns]' # datetime64 + datetime64 => datetime64 exp = pd.DatetimeIndex(['2011-01-01', '2012-01-01', '2011-01-02', @@ -434,7 +434,7 @@ def test_insert_index_datetime64(self): def test_insert_index_datetime64tz(self): obj = pd.DatetimeIndex(['2011-01-01', '2011-01-02', '2011-01-03', '2011-01-04'], tz='US/Eastern') - self.assertEqual(obj.dtype, 'datetime64[ns, US/Eastern]') + assert obj.dtype == 'datetime64[ns, US/Eastern]' # datetime64tz + datetime64tz => datetime64 exp = pd.DatetimeIndex(['2011-01-01', '2012-01-01', '2011-01-02', @@ -460,7 +460,7 @@ def test_insert_index_datetime64tz(self): def test_insert_index_timedelta64(self): obj = pd.TimedeltaIndex(['1 day', '2 day', '3 day', '4 day']) - self.assertEqual(obj.dtype, 'timedelta64[ns]') + assert obj.dtype == 'timedelta64[ns]' # timedelta64 + timedelta64 => timedelta64 exp = pd.TimedeltaIndex(['1 day', '10 day', '2 day', '3 day', '4 day']) @@ -480,7 +480,7 @@ def test_insert_index_timedelta64(self): def test_insert_index_period(self): obj = pd.PeriodIndex(['2011-01', '2011-02', '2011-03', '2011-04'], freq='M') - self.assertEqual(obj.dtype, 'period[M]') + assert obj.dtype == 'period[M]' # period + period => period exp = pd.PeriodIndex(['2011-01', '2012-01', '2011-02', @@ -527,7 +527,7 @@ def _assert_where_conversion(self, original, cond, values, def _where_object_common(self, klass): obj = klass(list('abcd')) - self.assertEqual(obj.dtype, np.object) + assert obj.dtype == np.object cond = klass([True, False, True, False]) # object + int -> object @@ -580,7 +580,7 @@ def test_where_index_object(self): def _where_int64_common(self, klass): obj = klass([1, 2, 3, 4]) - self.assertEqual(obj.dtype, np.int64) + assert obj.dtype == np.int64 cond = klass([True, False, True, False]) # int + int -> int @@ -626,7 +626,7 @@ def test_where_index_int64(self): def _where_float64_common(self, klass): obj = klass([1.1, 2.2, 3.3, 4.4]) - self.assertEqual(obj.dtype, np.float64) + assert obj.dtype == np.float64 cond = klass([True, False, True, False]) # float + int -> float @@ -672,7 +672,7 @@ def test_where_index_float64(self): def test_where_series_complex128(self): obj = pd.Series([1 + 1j, 2 + 2j, 3 + 3j, 4 + 4j]) - self.assertEqual(obj.dtype, np.complex128) + assert obj.dtype == np.complex128 cond = pd.Series([True, False, True, False]) # complex + int -> complex @@ -712,7 +712,7 @@ def test_where_index_complex128(self): def test_where_series_bool(self): obj = pd.Series([True, False, True, False]) - self.assertEqual(obj.dtype, np.bool) + assert obj.dtype == np.bool cond = pd.Series([True, False, True, False]) # bool + int -> int @@ -755,7 +755,7 @@ def test_where_series_datetime64(self): pd.Timestamp('2011-01-02'), pd.Timestamp('2011-01-03'), pd.Timestamp('2011-01-04')]) - self.assertEqual(obj.dtype, 'datetime64[ns]') + assert obj.dtype == 'datetime64[ns]' cond = 
pd.Series([True, False, True, False]) # datetime64 + datetime64 -> datetime64 @@ -797,7 +797,7 @@ def test_where_index_datetime64(self): pd.Timestamp('2011-01-02'), pd.Timestamp('2011-01-03'), pd.Timestamp('2011-01-04')]) - self.assertEqual(obj.dtype, 'datetime64[ns]') + assert obj.dtype == 'datetime64[ns]' cond = pd.Index([True, False, True, False]) # datetime64 + datetime64 -> datetime64 @@ -867,7 +867,7 @@ def _assert_fillna_conversion(self, original, value, def _fillna_object_common(self, klass): obj = klass(['a', np.nan, 'c', 'd']) - self.assertEqual(obj.dtype, np.object) + assert obj.dtype == np.object # object + int -> object exp = klass(['a', 1, 'c', 'd']) @@ -900,7 +900,7 @@ def test_fillna_index_int64(self): def _fillna_float64_common(self, klass): obj = klass([1.1, np.nan, 3.3, 4.4]) - self.assertEqual(obj.dtype, np.float64) + assert obj.dtype == np.float64 # float + int -> float exp = klass([1.1, 1.0, 3.3, 4.4]) @@ -933,7 +933,7 @@ def test_fillna_index_float64(self): def test_fillna_series_complex128(self): obj = pd.Series([1 + 1j, np.nan, 3 + 3j, 4 + 4j]) - self.assertEqual(obj.dtype, np.complex128) + assert obj.dtype == np.complex128 # complex + int -> complex exp = pd.Series([1 + 1j, 1, 3 + 3j, 4 + 4j]) @@ -966,7 +966,7 @@ def test_fillna_series_datetime64(self): pd.NaT, pd.Timestamp('2011-01-03'), pd.Timestamp('2011-01-04')]) - self.assertEqual(obj.dtype, 'datetime64[ns]') + assert obj.dtype == 'datetime64[ns]' # datetime64 + datetime64 => datetime64 exp = pd.Series([pd.Timestamp('2011-01-01'), @@ -1006,7 +1006,7 @@ def test_fillna_series_datetime64tz(self): pd.NaT, pd.Timestamp('2011-01-03', tz=tz), pd.Timestamp('2011-01-04', tz=tz)]) - self.assertEqual(obj.dtype, 'datetime64[ns, US/Eastern]') + assert obj.dtype == 'datetime64[ns, US/Eastern]' # datetime64tz + datetime64tz => datetime64tz exp = pd.Series([pd.Timestamp('2011-01-01', tz=tz), @@ -1058,7 +1058,7 @@ def test_fillna_series_period(self): def test_fillna_index_datetime64(self): obj = pd.DatetimeIndex(['2011-01-01', 'NaT', '2011-01-03', '2011-01-04']) - self.assertEqual(obj.dtype, 'datetime64[ns]') + assert obj.dtype == 'datetime64[ns]' # datetime64 + datetime64 => datetime64 exp = pd.DatetimeIndex(['2011-01-01', '2012-01-01', @@ -1093,7 +1093,7 @@ def test_fillna_index_datetime64tz(self): obj = pd.DatetimeIndex(['2011-01-01', 'NaT', '2011-01-03', '2011-01-04'], tz=tz) - self.assertEqual(obj.dtype, 'datetime64[ns, US/Eastern]') + assert obj.dtype == 'datetime64[ns, US/Eastern]' # datetime64tz + datetime64tz => datetime64tz exp = pd.DatetimeIndex(['2011-01-01', '2012-01-01', @@ -1168,7 +1168,7 @@ def setUp(self): def _assert_replace_conversion(self, from_key, to_key, how): index = pd.Index([3, 4], name='xxx') obj = pd.Series(self.rep[from_key], index=index, name='yyy') - self.assertEqual(obj.dtype, from_key) + assert obj.dtype == from_key if (from_key.startswith('datetime') and to_key.startswith('datetime')): # different tz, currently mask_missing raises SystemError @@ -1198,7 +1198,7 @@ def _assert_replace_conversion(self, from_key, to_key, how): else: exp = pd.Series(self.rep[to_key], index=index, name='yyy') - self.assertEqual(exp.dtype, to_key) + assert exp.dtype == to_key tm.assert_series_equal(result, exp) diff --git a/pandas/tests/indexing/test_datetime.py b/pandas/tests/indexing/test_datetime.py index 9b224ba796268..3089bc1dbddea 100644 --- a/pandas/tests/indexing/test_datetime.py +++ b/pandas/tests/indexing/test_datetime.py @@ -37,10 +37,10 @@ def test_indexing_with_datetime_tz(self): df = DataFrame({'a': 
date_range('2014-01-01', periods=10, tz='UTC')}) result = df.iloc[5] expected = Timestamp('2014-01-06 00:00:00+0000', tz='UTC', freq='D') - self.assertEqual(result, expected) + assert result == expected result = df.loc[5] - self.assertEqual(result, expected) + assert result == expected # indexing - boolean result = df[df.a > df.a[3]] @@ -129,7 +129,7 @@ def test_indexing_with_datetimeindex_tz(self): # single element indexing # getitem - self.assertEqual(ser[index[1]], 1) + assert ser[index[1]] == 1 # setitem result = ser.copy() @@ -138,7 +138,7 @@ def test_indexing_with_datetimeindex_tz(self): tm.assert_series_equal(result, expected) # .loc getitem - self.assertEqual(ser.loc[index[1]], 1) + assert ser.loc[index[1]] == 1 # .loc setitem result = ser.copy() diff --git a/pandas/tests/indexing/test_floats.py b/pandas/tests/indexing/test_floats.py index 4d4ef65b40074..1701dd9f6ba90 100644 --- a/pandas/tests/indexing/test_floats.py +++ b/pandas/tests/indexing/test_floats.py @@ -165,7 +165,7 @@ def f(): result = s2.loc['b'] expected = 2 - self.assertEqual(result, expected) + assert result == expected # mixed index so we have label # indexing @@ -180,14 +180,14 @@ def f(): result = idxr(s3)[1] expected = 2 - self.assertEqual(result, expected) + assert result == expected pytest.raises(TypeError, lambda: s3.iloc[1.0]) pytest.raises(KeyError, lambda: s3.loc[1.0]) result = s3.loc[1.5] expected = 3 - self.assertEqual(result, expected) + assert result == expected def test_scalar_integer(self): @@ -216,7 +216,8 @@ def test_scalar_integer(self): (lambda x: x, True)]: if isinstance(s, Series): - compare = self.assertEqual + def compare(x, y): + assert x == y expected = 100 else: compare = tm.assert_series_equal @@ -576,10 +577,10 @@ def test_floating_index_doc_example(self): index = Index([1.5, 2, 3, 4.5, 5]) s = Series(range(5), index=index) - self.assertEqual(s[3], 2) - self.assertEqual(s.loc[3], 2) - self.assertEqual(s.loc[3], 2) - self.assertEqual(s.iloc[3], 3) + assert s[3] == 2 + assert s.loc[3] == 2 + assert s.loc[3] == 2 + assert s.iloc[3] == 3 def test_floating_misc(self): @@ -598,16 +599,16 @@ def test_floating_misc(self): result1 = s[5.0] result2 = s.loc[5.0] result3 = s.loc[5.0] - self.assertEqual(result1, result2) - self.assertEqual(result1, result3) + assert result1 == result2 + assert result1 == result3 result1 = s[5] result2 = s.loc[5] result3 = s.loc[5] - self.assertEqual(result1, result2) - self.assertEqual(result1, result3) + assert result1 == result2 + assert result1 == result3 - self.assertEqual(s[5.0], s[5]) + assert s[5.0] == s[5] # value not found (and no fallbacking at all) @@ -702,15 +703,17 @@ def test_floating_misc(self): assert_series_equal(result1, Series([1], index=[2.5])) def test_floating_tuples(self): - # GH13509 + # see gh-13509 s = Series([(1, 1), (2, 2), (3, 3)], index=[0.0, 0.1, 0.2], name='foo') + result = s[0.0] - self.assertEqual(result, (1, 1)) + assert result == (1, 1) + expected = Series([(1, 1), (2, 2)], index=[0.0, 0.0], name='foo') s = Series([(1, 1), (2, 2), (3, 3)], index=[0.0, 0.0, 0.2], name='foo') + result = s[0.0] - expected = Series([(1, 1), (2, 2)], index=[0.0, 0.0], name='foo') - assert_series_equal(result, expected) + tm.assert_series_equal(result, expected) def test_float64index_slicing_bug(self): # GH 5557, related to slicing a float index diff --git a/pandas/tests/indexing/test_iloc.py b/pandas/tests/indexing/test_iloc.py index baced46923fd4..3e625fa483f7b 100644 --- a/pandas/tests/indexing/test_iloc.py +++ b/pandas/tests/indexing/test_iloc.py @@ 
-166,7 +166,7 @@ def test_iloc_getitem_neg_int_can_reach_first_index(self): expected = s.iloc[0] result = s.iloc[-3] - self.assertEqual(result, expected) + assert result == expected expected = s.iloc[[0]] result = s.iloc[[-3]] @@ -256,7 +256,7 @@ def test_iloc_setitem(self): df.iloc[1, 1] = 1 result = df.iloc[1, 1] - self.assertEqual(result, 1) + assert result == 1 df.iloc[:, 2:3] = 0 expected = df.iloc[:, 2:3] @@ -326,7 +326,7 @@ def test_iloc_getitem_frame(self): result = df.iloc[2, 2] with catch_warnings(record=True): exp = df.ix[4, 4] - self.assertEqual(result, exp) + assert result == exp # slice result = df.iloc[4:8] @@ -376,7 +376,7 @@ def test_iloc_getitem_labelled_frame(self): result = df.iloc[1, 1] exp = df.loc['b', 'B'] - self.assertEqual(result, exp) + assert result == exp result = df.iloc[:, 2:3] expected = df.loc[:, ['C']] @@ -385,7 +385,7 @@ def test_iloc_getitem_labelled_frame(self): # negative indexing result = df.iloc[-1, -1] exp = df.loc['j', 'D'] - self.assertEqual(result, exp) + assert result == exp # out-of-bounds exception pytest.raises(IndexError, df.iloc.__getitem__, tuple([10, 5])) @@ -444,7 +444,7 @@ def test_iloc_setitem_series(self): df.iloc[1, 1] = 1 result = df.iloc[1, 1] - self.assertEqual(result, 1) + assert result == 1 df.iloc[:, 2:3] = 0 expected = df.iloc[:, 2:3] @@ -455,7 +455,7 @@ def test_iloc_setitem_series(self): s.iloc[1] = 1 result = s.iloc[1] - self.assertEqual(result, 1) + assert result == 1 s.iloc[:4] = 0 expected = s.iloc[:4] diff --git a/pandas/tests/indexing/test_indexing.py b/pandas/tests/indexing/test_indexing.py index 5924dba488043..0759dc2333ad5 100644 --- a/pandas/tests/indexing/test_indexing.py +++ b/pandas/tests/indexing/test_indexing.py @@ -68,7 +68,7 @@ def test_setitem_dtype_upcast(self): # GH3216 df = DataFrame([{"a": 1}, {"a": 3, "b": 2}]) df['c'] = np.nan - self.assertEqual(df['c'].dtype, np.float64) + assert df['c'].dtype == np.float64 df.loc[0, 'c'] = 'foo' expected = DataFrame([{"a": 1, "c": 'foo'}, @@ -231,7 +231,7 @@ def test_indexing_mixed_frame_bug(self): idx = df['test'] == '_' temp = df.loc[idx, 'a'].apply(lambda x: '-----' if x == 'aaa' else x) df.loc[idx, 'test'] = temp - self.assertEqual(df.iloc[0, 2], '-----') + assert df.iloc[0, 2] == '-----' # if I look at df, then element [0,2] equals '_'. 
If instead I type # df.ix[idx,'test'], I get '-----', finally by typing df.iloc[0,2] I @@ -244,7 +244,7 @@ def test_multitype_list_index_access(self): with pytest.raises(KeyError): df[[22, 26, -8]] - self.assertEqual(df[21].shape[0], df.shape[0]) + assert df[21].shape[0] == df.shape[0] def test_set_index_nan(self): @@ -638,9 +638,9 @@ def test_float_index_non_scalar_assignment(self): def test_float_index_at_iat(self): s = pd.Series([1, 2, 3], index=[0.1, 0.2, 0.3]) for el, item in s.iteritems(): - self.assertEqual(s.at[el], item) + assert s.at[el] == item for i in range(len(s)): - self.assertEqual(s.iat[i], i + 1) + assert s.iat[i] == i + 1 def test_rhs_alignment(self): # GH8258, tests that both rows & columns are aligned to what is @@ -741,7 +741,7 @@ def test_indexing_dtypes_on_empty(self): with catch_warnings(record=True): df2 = df.ix[[], :] - self.assertEqual(df2.loc[:, 'a'].dtype, np.int64) + assert df2.loc[:, 'a'].dtype == np.int64 tm.assert_series_equal(df2.loc[:, 'a'], df2.iloc[:, 0]) with catch_warnings(record=True): tm.assert_series_equal(df2.loc[:, 'a'], df2.ix[:, 0]) @@ -791,13 +791,13 @@ def test_maybe_numeric_slice(self): df = pd.DataFrame({'A': [1, 2], 'B': ['c', 'd'], 'C': [True, False]}) result = _maybe_numeric_slice(df, slice_=None) expected = pd.IndexSlice[:, ['A']] - self.assertEqual(result, expected) + assert result == expected result = _maybe_numeric_slice(df, None, include_bool=True) expected = pd.IndexSlice[:, ['A', 'C']] result = _maybe_numeric_slice(df, [1]) expected = [1] - self.assertEqual(result, expected) + assert result == expected class TestSeriesNoneCoercion(tm.TestCase): diff --git a/pandas/tests/indexing/test_ix.py b/pandas/tests/indexing/test_ix.py index 433b44c952ca1..8290bc80edac1 100644 --- a/pandas/tests/indexing/test_ix.py +++ b/pandas/tests/indexing/test_ix.py @@ -82,7 +82,7 @@ def test_ix_loc_consistency(self): def compare(result, expected): if is_scalar(expected): - self.assertEqual(result, expected) + assert result == expected else: assert expected.equals(result) @@ -216,7 +216,7 @@ def test_ix_assign_column_mixed(self): indexer = i * 2 v = 1000 + i * 200 expected.loc[indexer, 'y'] = v - self.assertEqual(expected.loc[indexer, 'y'], v) + assert expected.loc[indexer, 'y'] == v df.loc[df.x % 2 == 0, 'y'] = df.loc[df.x % 2 == 0, 'y'] * 100 tm.assert_frame_equal(df, expected) @@ -252,21 +252,21 @@ def test_ix_get_set_consistency(self): index=['e', 7, 'f', 'g']) with catch_warnings(record=True): - self.assertEqual(df.ix['e', 8], 2) - self.assertEqual(df.loc['e', 8], 2) + assert df.ix['e', 8] == 2 + assert df.loc['e', 8] == 2 with catch_warnings(record=True): df.ix['e', 8] = 42 - self.assertEqual(df.ix['e', 8], 42) - self.assertEqual(df.loc['e', 8], 42) + assert df.ix['e', 8] == 42 + assert df.loc['e', 8] == 42 df.loc['e', 8] = 45 with catch_warnings(record=True): - self.assertEqual(df.ix['e', 8], 45) - self.assertEqual(df.loc['e', 8], 45) + assert df.ix['e', 8] == 45 + assert df.loc['e', 8] == 45 def test_ix_slicing_strings(self): - # GH3836 + # see gh-3836 data = {'Classification': ['SA EQUITY CFD', 'bbb', 'SA EQUITY', 'SA SSF', 'aaa'], 'Random': [1, 2, 3, 4, 5], diff --git a/pandas/tests/indexing/test_loc.py b/pandas/tests/indexing/test_loc.py index b430f458d48b5..410d01431ef5a 100644 --- a/pandas/tests/indexing/test_loc.py +++ b/pandas/tests/indexing/test_loc.py @@ -58,7 +58,7 @@ def test_loc_setitem_dups(self): indexer = tuple(['r', 'bar']) df = df_orig.copy() df.loc[indexer] *= 2.0 - self.assertEqual(df.loc[indexer], 2.0 * df_orig.loc[indexer]) + 
assert df.loc[indexer] == 2.0 * df_orig.loc[indexer] indexer = tuple(['t', ['bar', 'bar2']]) df = df_orig.copy() @@ -332,7 +332,7 @@ def test_loc_general(self): result = DataFrame({'a': [Timestamp('20130101')], 'b': [1]}).iloc[0] expected = Series([Timestamp('20130101'), 1], index=['a', 'b'], name=0) tm.assert_series_equal(result, expected) - self.assertEqual(result.dtype, object) + assert result.dtype == object def test_loc_setitem_consistency(self): # GH 6149 @@ -415,10 +415,10 @@ def test_loc_setitem_frame(self): df.loc['a', 'A'] = 1 result = df.loc['a', 'A'] - self.assertEqual(result, 1) + assert result == 1 result = df.iloc[0, 0] - self.assertEqual(result, 1) + assert result == 1 df.loc[:, 'B':'D'] = 0 expected = df.loc[:, 'B':'D'] @@ -608,14 +608,14 @@ def test_loc_name(self): df = DataFrame([[1, 1], [1, 1]]) df.index.name = 'index_name' result = df.iloc[[0, 1]].index.name - self.assertEqual(result, 'index_name') + assert result == 'index_name' with catch_warnings(record=True): result = df.ix[[0, 1]].index.name - self.assertEqual(result, 'index_name') + assert result == 'index_name' result = df.loc[[0, 1]].index.name - self.assertEqual(result, 'index_name') + assert result == 'index_name' def test_loc_empty_list_indexer_is_ok(self): from pandas.util.testing import makeCustomDataframe as mkdf diff --git a/pandas/tests/indexing/test_multiindex.py b/pandas/tests/indexing/test_multiindex.py index dbd0f5a9e6e1c..b8c34f9f28d83 100644 --- a/pandas/tests/indexing/test_multiindex.py +++ b/pandas/tests/indexing/test_multiindex.py @@ -30,7 +30,7 @@ def test_iloc_getitem_multiindex2(self): rs = df.iloc[2, 2] xp = df.values[2, 2] - self.assertEqual(rs, xp) + assert rs == xp # for multiple items # GH 5528 @@ -50,6 +50,9 @@ def test_setitem_multiindex(self): for index_fn in ('ix', 'loc'): + def assert_equal(a, b): + assert a == b + def check(target, indexers, value, compare_fn, expected=None): fn = getattr(target, index_fn) fn.__setitem__(indexers, value) @@ -66,28 +69,28 @@ def check(target, indexers, value, compare_fn, expected=None): 'X', 'd', 'profit'], index=index) check(target=df, indexers=((t, n), 'X'), value=0, - compare_fn=self.assertEqual) + compare_fn=assert_equal) df = DataFrame(-999, columns=['A', 'w', 'l', 'a', 'x', 'X', 'd', 'profit'], index=index) check(target=df, indexers=((t, n), 'X'), value=1, - compare_fn=self.assertEqual) + compare_fn=assert_equal) df = DataFrame(columns=['A', 'w', 'l', 'a', 'x', 'X', 'd', 'profit'], index=index) check(target=df, indexers=((t, n), 'X'), value=2, - compare_fn=self.assertEqual) + compare_fn=assert_equal) - # GH 7218, assinging with 0-dim arrays + # gh-7218: assigning with 0-dim arrays df = DataFrame(-999, columns=['A', 'w', 'l', 'a', 'x', 'X', 'd', 'profit'], index=index) check(target=df, indexers=((t, n), 'X'), value=np.array(3), - compare_fn=self.assertEqual, + compare_fn=assert_equal, expected=3, ) # GH5206 @@ -215,8 +218,8 @@ def test_iloc_getitem_multiindex(self): with catch_warnings(record=True): xp = mi_int.ix[4].ix[8] tm.assert_series_equal(rs, xp, check_names=False) - self.assertEqual(rs.name, (4, 8)) - self.assertEqual(xp.name, 8) + assert rs.name == (4, 8) + assert xp.name == 8 # 2nd (last) columns rs = mi_int.iloc[:, 2] @@ -228,13 +231,13 @@ def test_iloc_getitem_multiindex(self): rs = mi_int.iloc[2, 2] with catch_warnings(record=True): xp = mi_int.ix[:, 2].ix[2] - self.assertEqual(rs, xp) + assert rs == xp # this is basically regular indexing rs = mi_labels.iloc[2, 2] with catch_warnings(record=True): xp = mi_labels.ix['j'].ix[:, 
'j'].ix[0, 0] - self.assertEqual(rs, xp) + assert rs == xp def test_loc_multiindex(self): @@ -572,7 +575,7 @@ def f(): ('functs', 'median')]), index=['function', 'name']) result = df.loc['function', ('functs', 'mean')] - self.assertEqual(result, np.mean) + assert result == np.mean def test_multiindex_assignment(self): @@ -798,9 +801,9 @@ def f(): tm.assert_frame_equal(result, expected) # not lexsorted - self.assertEqual(df.index.lexsort_depth, 2) + assert df.index.lexsort_depth == 2 df = df.sort_index(level=1, axis=0) - self.assertEqual(df.index.lexsort_depth, 0) + assert df.index.lexsort_depth == 0 with tm.assert_raises_regex( UnsortedIndexError, 'MultiIndex Slicing requires the index to be fully ' diff --git a/pandas/tests/indexing/test_panel.py b/pandas/tests/indexing/test_panel.py index 8aa35a163babc..b704e15b81502 100644 --- a/pandas/tests/indexing/test_panel.py +++ b/pandas/tests/indexing/test_panel.py @@ -27,7 +27,7 @@ def test_iloc_getitem_panel(self): result = p.iloc[1, 1, 1] expected = p.loc['B', 'b', 'two'] - self.assertEqual(result, expected) + assert result == expected # slice result = p.iloc[1:3] @@ -99,16 +99,16 @@ def f(): def test_iloc_panel_issue(self): with catch_warnings(record=True): - # GH 3617 + # see gh-3617 p = Panel(np.random.randn(4, 4, 4)) - self.assertEqual(p.iloc[:3, :3, :3].shape, (3, 3, 3)) - self.assertEqual(p.iloc[1, :3, :3].shape, (3, 3)) - self.assertEqual(p.iloc[:3, 1, :3].shape, (3, 3)) - self.assertEqual(p.iloc[:3, :3, 1].shape, (3, 3)) - self.assertEqual(p.iloc[1, 1, :3].shape, (3, )) - self.assertEqual(p.iloc[1, :3, 1].shape, (3, )) - self.assertEqual(p.iloc[:3, 1, 1].shape, (3, )) + assert p.iloc[:3, :3, :3].shape == (3, 3, 3) + assert p.iloc[1, :3, :3].shape == (3, 3) + assert p.iloc[:3, 1, :3].shape == (3, 3) + assert p.iloc[:3, :3, 1].shape == (3, 3) + assert p.iloc[1, 1, :3].shape == (3, ) + assert p.iloc[1, :3, 1].shape == (3, ) + assert p.iloc[:3, 1, 1].shape == (3, ) def test_panel_getitem(self): diff --git a/pandas/tests/indexing/test_partial.py b/pandas/tests/indexing/test_partial.py index 80d2d5729c610..20cec2a3aa7db 100644 --- a/pandas/tests/indexing/test_partial.py +++ b/pandas/tests/indexing/test_partial.py @@ -392,7 +392,7 @@ def f(): tm.assert_frame_equal(df, exp) tm.assert_index_equal(df.index, pd.Index(orig.index.tolist() + ['a'])) - self.assertEqual(df.index.dtype, 'object') + assert df.index.dtype == 'object' def test_partial_set_empty_series(self): diff --git a/pandas/tests/indexing/test_scalar.py b/pandas/tests/indexing/test_scalar.py index 70c7eaf7446db..fb40c539e16ba 100644 --- a/pandas/tests/indexing/test_scalar.py +++ b/pandas/tests/indexing/test_scalar.py @@ -77,7 +77,7 @@ def test_at_iat_coercion(self): result = s.at[dates[5]] xp = s.values[5] - self.assertEqual(result, xp) + assert result == xp # GH 7729 # make sure we are boxing the returns @@ -86,14 +86,14 @@ def test_at_iat_coercion(self): for r in [lambda: s.iat[1], lambda: s.iloc[1]]: result = r() - self.assertEqual(result, expected) + assert result == expected s = Series(['1 days', '2 days'], dtype='timedelta64[ns]') expected = Timedelta('2 days') for r in [lambda: s.iat[1], lambda: s.iloc[1]]: result = r() - self.assertEqual(result, expected) + assert result == expected def test_iat_invalid_args(self): pass @@ -105,9 +105,9 @@ def test_imethods_with_dups(self): s = Series(range(5), index=[1, 1, 2, 2, 3], dtype='int64') result = s.iloc[2] - self.assertEqual(result, 2) + assert result == 2 result = s.iat[2] - self.assertEqual(result, 2) + assert result == 2 
pytest.raises(IndexError, lambda: s.iat[10]) pytest.raises(IndexError, lambda: s.iat[-10]) @@ -123,29 +123,29 @@ def test_imethods_with_dups(self): result = df.iat[2, 0] expected = 2 - self.assertEqual(result, 2) + assert result == 2 def test_at_to_fail(self): # at should not fallback # GH 7814 s = Series([1, 2, 3], index=list('abc')) result = s.at['a'] - self.assertEqual(result, 1) + assert result == 1 pytest.raises(ValueError, lambda: s.at[0]) df = DataFrame({'A': [1, 2, 3]}, index=list('abc')) result = df.at['a', 'A'] - self.assertEqual(result, 1) + assert result == 1 pytest.raises(ValueError, lambda: df.at['a', 0]) s = Series([1, 2, 3], index=[3, 2, 1]) result = s.at[1] - self.assertEqual(result, 3) + assert result == 3 pytest.raises(ValueError, lambda: s.at['a']) df = DataFrame({0: [1, 2, 3]}, index=[3, 2, 1]) result = df.at[1, 0] - self.assertEqual(result, 3) + assert result == 3 pytest.raises(ValueError, lambda: df.at['a', 0]) # GH 13822, incorrect error string with non-unique columns when missing diff --git a/pandas/tests/io/formats/test_eng_formatting.py b/pandas/tests/io/formats/test_eng_formatting.py index 41bb95964b4a2..e064d1200d672 100644 --- a/pandas/tests/io/formats/test_eng_formatting.py +++ b/pandas/tests/io/formats/test_eng_formatting.py @@ -18,7 +18,7 @@ def test_eng_float_formatter(self): '1 141.000E+00\n' '2 14.100E+03\n' '3 1.410E+06') - self.assertEqual(result, expected) + assert result == expected fmt.set_eng_float_format(use_eng_prefix=True) result = df.to_string() @@ -27,7 +27,7 @@ def test_eng_float_formatter(self): '1 141.000\n' '2 14.100k\n' '3 1.410M') - self.assertEqual(result, expected) + assert result == expected fmt.set_eng_float_format(accuracy=0) result = df.to_string() @@ -36,15 +36,13 @@ def test_eng_float_formatter(self): '1 141E+00\n' '2 14E+03\n' '3 1E+06') - self.assertEqual(result, expected) + assert result == expected tm.reset_display_options() def compare(self, formatter, input, output): formatted_input = formatter(input) - msg = ("formatting of %s results in '%s', expected '%s'" % - (str(input), formatted_input, output)) - self.assertEqual(formatted_input, output, msg) + assert formatted_input == output def compare_all(self, formatter, in_out): """ @@ -169,14 +167,14 @@ def test_rounding(self): formatter = fmt.EngFormatter(accuracy=3, use_eng_prefix=True) result = formatter(0) - self.assertEqual(result, u(' 0.000')) + assert result == u(' 0.000') def test_nan(self): # Issue #11981 formatter = fmt.EngFormatter(accuracy=1, use_eng_prefix=True) result = formatter(np.nan) - self.assertEqual(result, u('NaN')) + assert result == u('NaN') df = pd.DataFrame({'a': [1.5, 10.3, 20.5], 'b': [50.3, 60.67, 70.12], @@ -192,4 +190,4 @@ def test_inf(self): formatter = fmt.EngFormatter(accuracy=1, use_eng_prefix=True) result = formatter(np.inf) - self.assertEqual(result, u('inf')) + assert result == u('inf') diff --git a/pandas/tests/io/formats/test_format.py b/pandas/tests/io/formats/test_format.py index 6f19a4a126118..dee645e9d70ec 100644 --- a/pandas/tests/io/formats/test_format.py +++ b/pandas/tests/io/formats/test_format.py @@ -196,16 +196,16 @@ def test_repr_truncation(self): def test_repr_chop_threshold(self): df = DataFrame([[0.1, 0.5], [0.5, -0.1]]) pd.reset_option("display.chop_threshold") # default None - self.assertEqual(repr(df), ' 0 1\n0 0.1 0.5\n1 0.5 -0.1') + assert repr(df) == ' 0 1\n0 0.1 0.5\n1 0.5 -0.1' with option_context("display.chop_threshold", 0.2): - self.assertEqual(repr(df), ' 0 1\n0 0.0 0.5\n1 0.5 0.0') + assert repr(df) == ' 0 1\n0 
0.0 0.5\n1 0.5 0.0' with option_context("display.chop_threshold", 0.6): - self.assertEqual(repr(df), ' 0 1\n0 0.0 0.0\n1 0.0 0.0') + assert repr(df) == ' 0 1\n0 0.0 0.0\n1 0.0 0.0' with option_context("display.chop_threshold", None): - self.assertEqual(repr(df), ' 0 1\n0 0.1 0.5\n1 0.5 -0.1') + assert repr(df) == ' 0 1\n0 0.1 0.5\n1 0.5 -0.1' def test_repr_obeys_max_seq_limit(self): with option_context("display.max_seq_items", 2000): @@ -215,7 +215,7 @@ def test_repr_obeys_max_seq_limit(self): assert len(printing.pprint_thing(lrange(1000))) < 100 def test_repr_set(self): - self.assertEqual(printing.pprint_thing(set([1])), '{1}') + assert printing.pprint_thing(set([1])) == '{1}' def test_repr_is_valid_construction_code(self): # for the case of Index, where the repr is traditional rather then @@ -389,7 +389,7 @@ def test_to_string_repr_unicode(self): except: pass if not line.startswith('dtype:'): - self.assertEqual(len(line), line_len) + assert len(line) == line_len # it works even if sys.stdin in None _stdin = sys.stdin @@ -441,11 +441,11 @@ def test_to_string_with_formatters(self): ('object', lambda x: '-%s-' % str(x))] result = df.to_string(formatters=dict(formatters)) result2 = df.to_string(formatters=lzip(*formatters)[1]) - self.assertEqual(result, (' int float object\n' - '0 0x1 [ 1.0] -(1, 2)-\n' - '1 0x2 [ 2.0] -True-\n' - '2 0x3 [ 3.0] -False-')) - self.assertEqual(result, result2) + assert result == (' int float object\n' + '0 0x1 [ 1.0] -(1, 2)-\n' + '1 0x2 [ 2.0] -True-\n' + '2 0x3 [ 3.0] -False-') + assert result == result2 def test_to_string_with_datetime64_monthformatter(self): months = [datetime(2016, 1, 1), datetime(2016, 2, 2)] @@ -455,7 +455,7 @@ def format_func(x): return x.strftime('%Y-%m') result = x.to_string(formatters={'months': format_func}) expected = 'months\n0 2016-01\n1 2016-02' - self.assertEqual(result.strip(), expected) + assert result.strip() == expected def test_to_string_with_datetime64_hourformatter(self): @@ -467,12 +467,12 @@ def format_func(x): result = x.to_string(formatters={'hod': format_func}) expected = 'hod\n0 10:10\n1 12:12' - self.assertEqual(result.strip(), expected) + assert result.strip() == expected def test_to_string_with_formatters_unicode(self): df = DataFrame({u('c/\u03c3'): [1, 2, 3]}) result = df.to_string(formatters={u('c/\u03c3'): lambda x: '%s' % x}) - self.assertEqual(result, u(' c/\u03c3\n') + '0 1\n1 2\n2 3') + assert result == u(' c/\u03c3\n') + '0 1\n1 2\n2 3' def test_east_asian_unicode_frame(self): if PY3: @@ -489,7 +489,7 @@ def test_east_asian_unicode_frame(self): expected = (u" a b\na あ 1\n" u"bb いいい 222\nc う 33333\n" u"ddd ええええええ 4") - self.assertEqual(_rep(df), expected) + assert _rep(df) == expected # last col df = DataFrame({'a': [1, 222, 33333, 4], @@ -498,7 +498,7 @@ def test_east_asian_unicode_frame(self): expected = (u" a b\na 1 あ\n" u"bb 222 いいい\nc 33333 う\n" u"ddd 4 ええええええ") - self.assertEqual(_rep(df), expected) + assert _rep(df) == expected # all col df = DataFrame({'a': [u'あああああ', u'い', u'う', u'えええ'], @@ -507,7 +507,7 @@ def test_east_asian_unicode_frame(self): expected = (u" a b\na あああああ あ\n" u"bb い いいい\nc う う\n" u"ddd えええ ええええええ") - self.assertEqual(_rep(df), expected) + assert _rep(df) == expected # column name df = DataFrame({u'あああああ': [1, 222, 33333, 4], @@ -516,7 +516,7 @@ def test_east_asian_unicode_frame(self): expected = (u" b あああああ\na あ 1\n" u"bb いいい 222\nc う 33333\n" u"ddd ええええええ 4") - self.assertEqual(_rep(df), expected) + assert _rep(df) == expected # index df = DataFrame({'a': [u'あああああ', u'い', 
u'う', u'えええ'], @@ -525,7 +525,7 @@ def test_east_asian_unicode_frame(self): expected = (u" a b\nあああ あああああ あ\n" u"いいいいいい い いいい\nうう う う\n" u"え えええ ええええええ") - self.assertEqual(_rep(df), expected) + assert _rep(df) == expected # index name df = DataFrame({'a': [u'あああああ', u'い', u'う', u'えええ'], @@ -538,7 +538,7 @@ def test_east_asian_unicode_frame(self): u"い い いいい\n" u"うう う う\n" u"え えええ ええええええ") - self.assertEqual(_rep(df), expected) + assert _rep(df) == expected # all df = DataFrame({u'あああ': [u'あああ', u'い', u'う', u'えええええ'], @@ -551,7 +551,7 @@ def test_east_asian_unicode_frame(self): u"いいい い いいい\n" u"うう う う\n" u"え えええええ ええ") - self.assertEqual(_rep(df), expected) + assert _rep(df) == expected # MultiIndex idx = pd.MultiIndex.from_tuples([(u'あ', u'いい'), (u'う', u'え'), ( @@ -564,7 +564,7 @@ def test_east_asian_unicode_frame(self): u"う え い いいい\n" u"おおお かかかか う う\n" u"き くく えええ ええええええ") - self.assertEqual(_rep(df), expected) + assert _rep(df) == expected # truncate with option_context('display.max_rows', 3, 'display.max_columns', 3): @@ -577,13 +577,13 @@ def test_east_asian_unicode_frame(self): expected = (u" a ... ああああ\n0 あああああ ... さ\n" u".. ... ... ...\n3 えええ ... せ\n" u"\n[4 rows x 4 columns]") - self.assertEqual(_rep(df), expected) + assert _rep(df) == expected df.index = [u'あああ', u'いいいい', u'う', 'aaa'] expected = (u" a ... ああああ\nあああ あああああ ... さ\n" u".. ... ... ...\naaa えええ ... せ\n" u"\n[4 rows x 4 columns]") - self.assertEqual(_rep(df), expected) + assert _rep(df) == expected # Emable Unicode option ----------------------------------------- with option_context('display.unicode.east_asian_width', True): @@ -595,7 +595,7 @@ def test_east_asian_unicode_frame(self): expected = (u" a b\na あ 1\n" u"bb いいい 222\nc う 33333\n" u"ddd ええええええ 4") - self.assertEqual(_rep(df), expected) + assert _rep(df) == expected # last col df = DataFrame({'a': [1, 222, 33333, 4], @@ -604,7 +604,7 @@ def test_east_asian_unicode_frame(self): expected = (u" a b\na 1 あ\n" u"bb 222 いいい\nc 33333 う\n" u"ddd 4 ええええええ") - self.assertEqual(_rep(df), expected) + assert _rep(df) == expected # all col df = DataFrame({'a': [u'あああああ', u'い', u'う', u'えええ'], @@ -615,7 +615,7 @@ def test_east_asian_unicode_frame(self): u"bb い いいい\n" u"c う う\n" u"ddd えええ ええええええ") - self.assertEqual(_rep(df), expected) + assert _rep(df) == expected # column name df = DataFrame({u'あああああ': [1, 222, 33333, 4], @@ -626,7 +626,7 @@ def test_east_asian_unicode_frame(self): u"bb いいい 222\n" u"c う 33333\n" u"ddd ええええええ 4") - self.assertEqual(_rep(df), expected) + assert _rep(df) == expected # index df = DataFrame({'a': [u'あああああ', u'い', u'う', u'えええ'], @@ -637,7 +637,7 @@ def test_east_asian_unicode_frame(self): u"いいいいいい い いいい\n" u"うう う う\n" u"え えええ ええええええ") - self.assertEqual(_rep(df), expected) + assert _rep(df) == expected # index name df = DataFrame({'a': [u'あああああ', u'い', u'う', u'えええ'], @@ -650,7 +650,7 @@ def test_east_asian_unicode_frame(self): u"い い いいい\n" u"うう う う\n" u"え えええ ええええええ") - self.assertEqual(_rep(df), expected) + assert _rep(df) == expected # all df = DataFrame({u'あああ': [u'あああ', u'い', u'う', u'えええええ'], @@ -663,7 +663,7 @@ def test_east_asian_unicode_frame(self): u"いいい い いいい\n" u"うう う う\n" u"え えええええ ええ") - self.assertEqual(_rep(df), expected) + assert _rep(df) == expected # MultiIndex idx = pd.MultiIndex.from_tuples([(u'あ', u'いい'), (u'う', u'え'), ( @@ -676,7 +676,7 @@ def test_east_asian_unicode_frame(self): u"う え い いいい\n" u"おおお かかかか う う\n" u"き くく えええ ええええええ") - self.assertEqual(_rep(df), expected) + assert _rep(df) == expected # truncate with 
option_context('display.max_rows', 3, 'display.max_columns', @@ -693,7 +693,7 @@ def test_east_asian_unicode_frame(self): u".. ... ... ...\n" u"3 えええ ... せ\n" u"\n[4 rows x 4 columns]") - self.assertEqual(_rep(df), expected) + assert _rep(df) == expected df.index = [u'あああ', u'いいいい', u'う', 'aaa'] expected = (u" a ... ああああ\n" @@ -701,7 +701,7 @@ def test_east_asian_unicode_frame(self): u"... ... ... ...\n" u"aaa えええ ... せ\n" u"\n[4 rows x 4 columns]") - self.assertEqual(_rep(df), expected) + assert _rep(df) == expected # ambiguous unicode df = DataFrame({u'あああああ': [1, 222, 33333, 4], @@ -712,7 +712,7 @@ def test_east_asian_unicode_frame(self): u"bb いいい 222\n" u"c ¡¡ 33333\n" u"¡¡¡ ええええええ 4") - self.assertEqual(_rep(df), expected) + assert _rep(df) == expected def test_to_string_buffer_all_unicode(self): buf = StringIO() @@ -738,7 +738,7 @@ def test_to_string_with_col_space(self): with_header = df.to_string(col_space=20) with_header_row1 = with_header.splitlines()[1] no_header = df.to_string(col_space=20, header=False) - self.assertEqual(len(with_header_row1), len(no_header)) + assert len(with_header_row1) == len(no_header) def test_to_string_truncate_indices(self): for index in [tm.makeStringIndex, tm.makeUnicodeIndex, tm.makeIntIndex, @@ -825,7 +825,7 @@ def test_datetimelike_frame(self): '8 NaT 9\n' '9 NaT 10\n\n' '[10 rows x 2 columns]') - self.assertEqual(repr(df), expected) + assert repr(df) == expected dts = [pd.NaT] * 5 + [pd.Timestamp('2011-01-01', tz='US/Eastern')] * 5 df = pd.DataFrame({"dt": dts, @@ -838,7 +838,7 @@ def test_datetimelike_frame(self): '8 2011-01-01 00:00:00-05:00 9\n' '9 2011-01-01 00:00:00-05:00 10\n\n' '[10 rows x 2 columns]') - self.assertEqual(repr(df), expected) + assert repr(df) == expected dts = ([pd.Timestamp('2011-01-01', tz='Asia/Tokyo')] * 5 + [pd.Timestamp('2011-01-01', tz='US/Eastern')] * 5) @@ -852,13 +852,13 @@ def test_datetimelike_frame(self): '8 2011-01-01 00:00:00-05:00 9\n' '9 2011-01-01 00:00:00-05:00 10\n\n' '[10 rows x 2 columns]') - self.assertEqual(repr(df), expected) + assert repr(df) == expected def test_nonunicode_nonascii_alignment(self): df = DataFrame([["aa\xc3\xa4\xc3\xa4", 1], ["bbbb", 2]]) rep_str = df.to_string() lines = rep_str.split('\n') - self.assertEqual(len(lines[1]), len(lines[2])) + assert len(lines[1]) == len(lines[2]) def test_unicode_problem_decoding_as_ascii(self): dm = DataFrame({u('c/\u03c3'): Series({'test': np.nan})}) @@ -890,25 +890,21 @@ def test_pprint_thing(self): if PY3: pytest.skip("doesn't work on Python 3") - self.assertEqual(pp_t('a'), u('a')) - self.assertEqual(pp_t(u('a')), u('a')) - self.assertEqual(pp_t(None), 'None') - self.assertEqual(pp_t(u('\u05d0'), quote_strings=True), u("u'\u05d0'")) - self.assertEqual(pp_t(u('\u05d0'), quote_strings=False), u('\u05d0')) - self.assertEqual(pp_t((u('\u05d0'), - u('\u05d1')), quote_strings=True), - u("(u'\u05d0', u'\u05d1')")) - self.assertEqual(pp_t((u('\u05d0'), (u('\u05d1'), - u('\u05d2'))), - quote_strings=True), - u("(u'\u05d0', (u'\u05d1', u'\u05d2'))")) - self.assertEqual(pp_t(('foo', u('\u05d0'), (u('\u05d0'), - u('\u05d0'))), - quote_strings=True), - u("(u'foo', u'\u05d0', (u'\u05d0', u'\u05d0'))")) - - # escape embedded tabs in string - # GH #2038 + assert pp_t('a') == u('a') + assert pp_t(u('a')) == u('a') + assert pp_t(None) == 'None' + assert pp_t(u('\u05d0'), quote_strings=True) == u("u'\u05d0'") + assert pp_t(u('\u05d0'), quote_strings=False) == u('\u05d0') + assert (pp_t((u('\u05d0'), u('\u05d1')), quote_strings=True) == + u("(u'\u05d0', 
u'\u05d1')")) + assert (pp_t((u('\u05d0'), (u('\u05d1'), u('\u05d2'))), + quote_strings=True) == u("(u'\u05d0', " + "(u'\u05d1', u'\u05d2'))")) + assert (pp_t(('foo', u('\u05d0'), (u('\u05d0'), u('\u05d0'))), + quote_strings=True) == u("(u'foo', u'\u05d0', " + "(u'\u05d0', u'\u05d0'))")) + + # gh-2038: escape embedded tabs in string assert "\t" not in pp_t("a\tb", escape_chars=("\t", )) def test_wide_repr(self): @@ -936,7 +932,7 @@ def test_wide_repr_wide_columns(self): columns=['a' * 90, 'b' * 90, 'c' * 90]) rep_str = repr(df) - self.assertEqual(len(rep_str.splitlines()), 20) + assert len(rep_str.splitlines()) == 20 def test_wide_repr_named(self): with option_context('mode.sim_interactive', True): @@ -1036,7 +1032,7 @@ def test_long_series(self): import re str_rep = str(s) nmatches = len(re.findall('dtype', str_rep)) - self.assertEqual(nmatches, 1) + assert nmatches == 1 def test_index_with_nan(self): # GH 2850 @@ -1055,7 +1051,7 @@ def test_index_with_nan(self): expected = u( ' value\nid1 id2 id3 \n' '1a3 NaN 78d 123\n9h4 d67 79d 64') - self.assertEqual(result, expected) + assert result == expected # index y = df.set_index('id2') @@ -1063,7 +1059,7 @@ def test_index_with_nan(self): expected = u( ' id1 id3 value\nid2 \n' 'NaN 1a3 78d 123\nd67 9h4 79d 64') - self.assertEqual(result, expected) + assert result == expected # with append (this failed in 0.12) y = df.set_index(['id1', 'id2']).set_index('id3', append=True) @@ -1071,7 +1067,7 @@ def test_index_with_nan(self): expected = u( ' value\nid1 id2 id3 \n' '1a3 NaN 78d 123\n9h4 d67 79d 64') - self.assertEqual(result, expected) + assert result == expected # all-nan in mi df2 = df.copy() @@ -1081,7 +1077,7 @@ def test_index_with_nan(self): expected = u( ' id1 id3 value\nid2 \n' 'NaN 1a3 78d 123\nNaN 9h4 79d 64') - self.assertEqual(result, expected) + assert result == expected # partial nan in mi df2 = df.copy() @@ -1091,7 +1087,7 @@ def test_index_with_nan(self): expected = u( ' id1 value\nid2 id3 \n' 'NaN 78d 1a3 123\n 79d 9h4 64') - self.assertEqual(result, expected) + assert result == expected df = DataFrame({'id1': {0: np.nan, 1: '9h4'}, @@ -1107,7 +1103,7 @@ def test_index_with_nan(self): expected = u( ' value\nid1 id2 id3 \n' 'NaN NaN NaN 123\n9h4 d67 79d 64') - self.assertEqual(result, expected) + assert result == expected def test_to_string(self): @@ -1123,7 +1119,7 @@ def test_to_string(self): buf = StringIO() retval = biggie.to_string(buf=buf) assert retval is None - self.assertEqual(buf.getvalue(), s) + assert buf.getvalue() == s assert isinstance(s, compat.string_types) @@ -1136,17 +1132,17 @@ def test_to_string(self): recons = read_table(StringIO(joined), names=header, header=None, sep=' ') tm.assert_series_equal(recons['B'], biggie['B']) - self.assertEqual(recons['A'].count(), biggie['A'].count()) + assert recons['A'].count() == biggie['A'].count() assert (np.abs(recons['A'].dropna() - biggie['A'].dropna()) < 0.1).all() # expected = ['B', 'A'] - # self.assertEqual(header, expected) + # assert header == expected result = biggie.to_string(columns=['A'], col_space=17) header = result.split('\n')[0].strip().split() expected = ['A'] - self.assertEqual(header, expected) + assert header == expected biggie.to_string(columns=['B', 'A'], formatters={'A': lambda x: '%.1f' % x}) @@ -1163,7 +1159,7 @@ def test_to_string_no_header(self): df_s = df.to_string(header=False) expected = "0 1 4\n1 2 5\n2 3 6" - self.assertEqual(df_s, expected) + assert df_s == expected def test_to_string_specified_header(self): df = DataFrame({'x': [1, 2, 3], 
'y': [4, 5, 6]}) @@ -1171,7 +1167,7 @@ def test_to_string_specified_header(self): df_s = df.to_string(header=['X', 'Y']) expected = ' X Y\n0 1 4\n1 2 5\n2 3 6' - self.assertEqual(df_s, expected) + assert df_s == expected with pytest.raises(ValueError): df.to_string(header=['X']) @@ -1182,7 +1178,7 @@ def test_to_string_no_index(self): df_s = df.to_string(index=False) expected = "x y\n1 4\n2 5\n3 6" - self.assertEqual(df_s, expected) + assert df_s == expected def test_to_string_line_width_no_index(self): df = DataFrame({'x': [1, 2, 3], 'y': [4, 5, 6]}) @@ -1190,7 +1186,7 @@ def test_to_string_line_width_no_index(self): df_s = df.to_string(line_width=1, index=False) expected = "x \\\n1 \n2 \n3 \n\ny \n4 \n5 \n6" - self.assertEqual(df_s, expected) + assert df_s == expected def test_to_string_float_formatting(self): tm.reset_display_options() @@ -1214,16 +1210,16 @@ def test_to_string_float_formatting(self): '2 3.45600e+03\n3 1.20000e+46\n4 1.64000e+06\n' '5 1.70000e+08\n6 1.25346e+00\n7 3.14159e+00\n' '8 -1.00000e+06') - self.assertEqual(df_s, expected) + assert df_s == expected df = DataFrame({'x': [3234, 0.253]}) df_s = df.to_string() expected = (' x\n' '0 3234.000\n' '1 0.253') - self.assertEqual(df_s, expected) + assert df_s == expected tm.reset_display_options() - self.assertEqual(get_option("display.precision"), 6) + assert get_option("display.precision") == 6 df = DataFrame({'x': [1e9, 0.2512]}) df_s = df.to_string() @@ -1237,7 +1233,7 @@ def test_to_string_float_formatting(self): expected = (' x\n' '0 1.000000e+09\n' '1 2.512000e-01') - self.assertEqual(df_s, expected) + assert df_s == expected def test_to_string_small_float_values(self): df = DataFrame({'a': [1.5, 1e-17, -5.5e-7]}) @@ -1254,7 +1250,7 @@ def test_to_string_small_float_values(self): '0 1.500000e+00\n' '1 1.000000e-17\n' '2 -5.500000e-07') - self.assertEqual(result, expected) + assert result == expected # but not all exactly zero df = df * 0 @@ -1272,7 +1268,7 @@ def test_to_string_float_index(self): '3.0 2\n' '4.0 3\n' '5.0 4') - self.assertEqual(result, expected) + assert result == expected def test_to_string_ascii_error(self): data = [('0 ', u(' .gitignore '), u(' 5 '), @@ -1289,7 +1285,7 @@ def test_to_string_int_formatting(self): output = df.to_string() expected = (' x\n' '0 -15\n' '1 20\n' '2 25\n' '3 -35') - self.assertEqual(output, expected) + assert output == expected def test_to_string_index_formatter(self): df = DataFrame([lrange(5), lrange(5, 10), lrange(10, 15)]) @@ -1303,14 +1299,14 @@ def test_to_string_index_formatter(self): c 10 11 12 13 14\ """ - self.assertEqual(rs, xp) + assert rs == xp def test_to_string_left_justify_cols(self): tm.reset_display_options() df = DataFrame({'x': [3234, 0.253]}) df_s = df.to_string(justify='left') expected = (' x \n' '0 3234.000\n' '1 0.253') - self.assertEqual(df_s, expected) + assert df_s == expected def test_to_string_format_na(self): tm.reset_display_options() @@ -1324,7 +1320,7 @@ def test_to_string_format_na(self): '2 -2.1234 foooo\n' '3 3.0000 fooooo\n' '4 4.0000 bar') - self.assertEqual(result, expected) + assert result == expected df = DataFrame({'A': [np.nan, -1., -2., 3., 4.], 'B': [np.nan, 'foo', 'foooo', 'fooooo', 'bar']}) @@ -1336,12 +1332,12 @@ def test_to_string_format_na(self): '2 -2.0 foooo\n' '3 3.0 fooooo\n' '4 4.0 bar') - self.assertEqual(result, expected) + assert result == expected def test_to_string_line_width(self): df = DataFrame(123, lrange(10, 15), lrange(30)) s = df.to_string(line_width=80) - self.assertEqual(max(len(l) for l in 
s.split('\n')), 80) + assert max(len(l) for l in s.split('\n')) == 80 def test_show_dimensions(self): df = DataFrame(123, lrange(10, 15), lrange(30)) @@ -1596,7 +1592,7 @@ def test_period(self): exp = (" A B C\n0 2013-01 2011-01 a\n" "1 2013-02 2011-02-01 b\n2 2013-03 2011-03-01 09:00 c\n" "3 2013-04 2011-04 d") - self.assertEqual(str(df), exp) + assert str(df) == exp def gen_series_formatting(): @@ -1628,30 +1624,29 @@ def test_to_string(self): retval = self.ts.to_string(buf=buf) assert retval is None - self.assertEqual(buf.getvalue().strip(), s) + assert buf.getvalue().strip() == s # pass float_format format = '%.4f'.__mod__ result = self.ts.to_string(float_format=format) result = [x.split()[1] for x in result.split('\n')[:-1]] expected = [format(x) for x in self.ts] - self.assertEqual(result, expected) + assert result == expected # empty string result = self.ts[:0].to_string() - self.assertEqual(result, 'Series([], Freq: B)') + assert result == 'Series([], Freq: B)' result = self.ts[:0].to_string(length=0) - self.assertEqual(result, 'Series([], Freq: B)') + assert result == 'Series([], Freq: B)' # name and length cp = self.ts.copy() cp.name = 'foo' result = cp.to_string(length=True, name=True, dtype=True) last_line = result.split('\n')[-1].strip() - self.assertEqual(last_line, - "Freq: B, Name: foo, Length: %d, dtype: float64" % - len(cp)) + assert last_line == ("Freq: B, Name: foo, " + "Length: %d, dtype: float64" % len(cp)) def test_freq_name_separation(self): s = Series(np.random.randn(10), @@ -1665,18 +1660,18 @@ def test_to_string_mixed(self): result = s.to_string() expected = (u('0 foo\n') + u('1 NaN\n') + u('2 -1.23\n') + u('3 4.56')) - self.assertEqual(result, expected) + assert result == expected # but don't count NAs as floats s = Series(['foo', np.nan, 'bar', 'baz']) result = s.to_string() expected = (u('0 foo\n') + '1 NaN\n' + '2 bar\n' + '3 baz') - self.assertEqual(result, expected) + assert result == expected s = Series(['foo', 5, 'bar', 'baz']) result = s.to_string() expected = (u('0 foo\n') + '1 5\n' + '2 bar\n' + '3 baz') - self.assertEqual(result, expected) + assert result == expected def test_to_string_float_na_spacing(self): s = Series([0., 1.5678, 2., -3., 4.]) @@ -1685,14 +1680,14 @@ def test_to_string_float_na_spacing(self): result = s.to_string() expected = (u('0 NaN\n') + '1 1.5678\n' + '2 NaN\n' + '3 -3.0000\n' + '4 NaN') - self.assertEqual(result, expected) + assert result == expected def test_to_string_without_index(self): # GH 11729 Test index=False option s = Series([1, 2, 3, 4]) result = s.to_string(index=False) expected = (u('1\n') + '2\n' + '3\n' + '4') - self.assertEqual(result, expected) + assert result == expected def test_unicode_name_in_footer(self): s = Series([1, 2], name=u('\u05e2\u05d1\u05e8\u05d9\u05ea')) @@ -1711,21 +1706,21 @@ def test_east_asian_unicode_series(self): index=[u'あ', u'いい', u'ううう', u'ええええ']) expected = (u"あ a\nいい bb\nううう CCC\n" u"ええええ D\ndtype: object") - self.assertEqual(_rep(s), expected) + assert _rep(s) == expected # unicode values s = Series([u'あ', u'いい', u'ううう', u'ええええ'], index=['a', 'bb', 'c', 'ddd']) expected = (u"a あ\nbb いい\nc ううう\n" u"ddd ええええ\ndtype: object") - self.assertEqual(_rep(s), expected) + assert _rep(s) == expected # both s = Series([u'あ', u'いい', u'ううう', u'ええええ'], index=[u'ああ', u'いいいい', u'う', u'えええ']) expected = (u"ああ あ\nいいいい いい\nう ううう\n" u"えええ ええええ\ndtype: object") - self.assertEqual(_rep(s), expected) + assert _rep(s) == expected # unicode footer s = Series([u'あ', u'いい', u'ううう', u'ええええ'], @@ -1733,7 
+1728,7 @@ def test_east_asian_unicode_series(self): name=u'おおおおおおお') expected = (u"ああ あ\nいいいい いい\nう ううう\n" u"えええ ええええ\nName: おおおおおおお, dtype: object") - self.assertEqual(_rep(s), expected) + assert _rep(s) == expected # MultiIndex idx = pd.MultiIndex.from_tuples([(u'あ', u'いい'), (u'う', u'え'), ( @@ -1743,13 +1738,13 @@ def test_east_asian_unicode_series(self): u"う え 22\n" u"おおお かかかか 3333\n" u"き くく 44444\ndtype: int64") - self.assertEqual(_rep(s), expected) + assert _rep(s) == expected # object dtype, shorter than unicode repr s = Series([1, 22, 3333, 44444], index=[1, 'AB', np.nan, u'あああ']) expected = (u"1 1\nAB 22\nNaN 3333\n" u"あああ 44444\ndtype: int64") - self.assertEqual(_rep(s), expected) + assert _rep(s) == expected # object dtype, longer than unicode repr s = Series([1, 22, 3333, 44444], @@ -1758,7 +1753,7 @@ def test_east_asian_unicode_series(self): u"AB 22\n" u"2011-01-01 00:00:00 3333\n" u"あああ 44444\ndtype: int64") - self.assertEqual(_rep(s), expected) + assert _rep(s) == expected # truncate with option_context('display.max_rows', 3): @@ -1768,13 +1763,13 @@ def test_east_asian_unicode_series(self): expected = (u"0 あ\n ... \n" u"3 ええええ\n" u"Name: おおおおおおお, Length: 4, dtype: object") - self.assertEqual(_rep(s), expected) + assert _rep(s) == expected s.index = [u'ああ', u'いいいい', u'う', u'えええ'] expected = (u"ああ あ\n ... \n" u"えええ ええええ\n" u"Name: おおおおおおお, Length: 4, dtype: object") - self.assertEqual(_rep(s), expected) + assert _rep(s) == expected # Emable Unicode option ----------------------------------------- with option_context('display.unicode.east_asian_width', True): @@ -1784,14 +1779,14 @@ def test_east_asian_unicode_series(self): index=[u'あ', u'いい', u'ううう', u'ええええ']) expected = (u"あ a\nいい bb\nううう CCC\n" u"ええええ D\ndtype: object") - self.assertEqual(_rep(s), expected) + assert _rep(s) == expected # unicode values s = Series([u'あ', u'いい', u'ううう', u'ええええ'], index=['a', 'bb', 'c', 'ddd']) expected = (u"a あ\nbb いい\nc ううう\n" u"ddd ええええ\ndtype: object") - self.assertEqual(_rep(s), expected) + assert _rep(s) == expected # both s = Series([u'あ', u'いい', u'ううう', u'ええええ'], @@ -1800,7 +1795,7 @@ def test_east_asian_unicode_series(self): u"いいいい いい\n" u"う ううう\n" u"えええ ええええ\ndtype: object") - self.assertEqual(_rep(s), expected) + assert _rep(s) == expected # unicode footer s = Series([u'あ', u'いい', u'ううう', u'ええええ'], @@ -1811,7 +1806,7 @@ def test_east_asian_unicode_series(self): u"う ううう\n" u"えええ ええええ\n" u"Name: おおおおおおお, dtype: object") - self.assertEqual(_rep(s), expected) + assert _rep(s) == expected # MultiIndex idx = pd.MultiIndex.from_tuples([(u'あ', u'いい'), (u'う', u'え'), ( @@ -1822,13 +1817,13 @@ def test_east_asian_unicode_series(self): u"おおお かかかか 3333\n" u"き くく 44444\n" u"dtype: int64") - self.assertEqual(_rep(s), expected) + assert _rep(s) == expected # object dtype, shorter than unicode repr s = Series([1, 22, 3333, 44444], index=[1, 'AB', np.nan, u'あああ']) expected = (u"1 1\nAB 22\nNaN 3333\n" u"あああ 44444\ndtype: int64") - self.assertEqual(_rep(s), expected) + assert _rep(s) == expected # object dtype, longer than unicode repr s = Series([1, 22, 3333, 44444], @@ -1837,7 +1832,7 @@ def test_east_asian_unicode_series(self): u"AB 22\n" u"2011-01-01 00:00:00 3333\n" u"あああ 44444\ndtype: int64") - self.assertEqual(_rep(s), expected) + assert _rep(s) == expected # truncate with option_context('display.max_rows', 3): @@ -1846,14 +1841,14 @@ def test_east_asian_unicode_series(self): expected = (u"0 あ\n ... 
\n" u"3 ええええ\n" u"Name: おおおおおおお, Length: 4, dtype: object") - self.assertEqual(_rep(s), expected) + assert _rep(s) == expected s.index = [u'ああ', u'いいいい', u'う', u'えええ'] expected = (u"ああ あ\n" u" ... \n" u"えええ ええええ\n" u"Name: おおおおおおお, Length: 4, dtype: object") - self.assertEqual(_rep(s), expected) + assert _rep(s) == expected # ambiguous unicode s = Series([u'¡¡', u'い¡¡', u'ううう', u'ええええ'], @@ -1862,7 +1857,7 @@ def test_east_asian_unicode_series(self): u"¡¡¡¡いい い¡¡\n" u"¡¡ ううう\n" u"えええ ええええ\ndtype: object") - self.assertEqual(_rep(s), expected) + assert _rep(s) == expected def test_float_trim_zeros(self): vals = [2.08430917305e+10, 3.52205017305e+10, 2.30674817305e+10, @@ -1950,7 +1945,7 @@ def test_timedelta64(self): # no boxing of the actual elements td = Series(pd.timedelta_range('1 days', periods=3)) result = td.to_string() - self.assertEqual(result, u("0 1 days\n1 2 days\n2 3 days")) + assert result == u("0 1 days\n1 2 days\n2 3 days") def test_mixed_datetime64(self): df = DataFrame({'A': [1, 2], 'B': ['2012-01-01', '2012-01-02']}) @@ -1965,12 +1960,12 @@ def test_period(self): s = Series(np.arange(6, dtype='int64'), index=index) exp = ("2013-01 0\n2013-02 1\n2013-03 2\n2013-04 3\n" "2013-05 4\n2013-06 5\nFreq: M, dtype: int64") - self.assertEqual(str(s), exp) + assert str(s) == exp s = Series(index) exp = ("0 2013-01\n1 2013-02\n2 2013-03\n3 2013-04\n" "4 2013-05\n5 2013-06\ndtype: object") - self.assertEqual(str(s), exp) + assert str(s) == exp # periods with mixed freq s = Series([pd.Period('2011-01', freq='M'), @@ -1978,7 +1973,7 @@ def test_period(self): pd.Period('2011-03-01 09:00', freq='H')]) exp = ("0 2011-01\n1 2011-02-01\n" "2 2011-03-01 09:00\ndtype: object") - self.assertEqual(str(s), exp) + assert str(s) == exp def test_max_multi_index_display(self): # GH 7101 @@ -1993,29 +1988,29 @@ def test_max_multi_index_display(self): s = Series(np.random.randn(8), index=index) with option_context("display.max_rows", 10): - self.assertEqual(len(str(s).split('\n')), 10) + assert len(str(s).split('\n')) == 10 with option_context("display.max_rows", 3): - self.assertEqual(len(str(s).split('\n')), 5) + assert len(str(s).split('\n')) == 5 with option_context("display.max_rows", 2): - self.assertEqual(len(str(s).split('\n')), 5) + assert len(str(s).split('\n')) == 5 with option_context("display.max_rows", 1): - self.assertEqual(len(str(s).split('\n')), 4) + assert len(str(s).split('\n')) == 4 with option_context("display.max_rows", 0): - self.assertEqual(len(str(s).split('\n')), 10) + assert len(str(s).split('\n')) == 10 # index s = Series(np.random.randn(8), None) with option_context("display.max_rows", 10): - self.assertEqual(len(str(s).split('\n')), 9) + assert len(str(s).split('\n')) == 9 with option_context("display.max_rows", 3): - self.assertEqual(len(str(s).split('\n')), 4) + assert len(str(s).split('\n')) == 4 with option_context("display.max_rows", 2): - self.assertEqual(len(str(s).split('\n')), 4) + assert len(str(s).split('\n')) == 4 with option_context("display.max_rows", 1): - self.assertEqual(len(str(s).split('\n')), 3) + assert len(str(s).split('\n')) == 3 with option_context("display.max_rows", 0): - self.assertEqual(len(str(s).split('\n')), 9) + assert len(str(s).split('\n')) == 9 # Make sure #8532 is fixed def test_consistent_format(self): @@ -2027,7 +2022,7 @@ def test_consistent_format(self): '1.0000\n4 1.0000\n ... 
\n125 ' '1.0000\n126 1.0000\n127 0.9999\n128 ' '1.0000\n129 1.0000\ndtype: float64') - self.assertEqual(res, exp) + assert res == exp def chck_ncols(self, s): with option_context("display.max_rows", 10): @@ -2036,7 +2031,7 @@ def chck_ncols(self, s): lines = [line for line in repr(s).split('\n') if not re.match(r'[^\.]*\.+', line)][:-1] ncolsizes = len(set(len(line.strip()) for line in lines)) - self.assertEqual(ncolsizes, 1) + assert ncolsizes == 1 def test_format_explicit(self): test_sers = gen_series_formatting() @@ -2044,19 +2039,19 @@ def test_format_explicit(self): "display.show_dimensions", False): res = repr(test_sers['onel']) exp = '0 a\n1 a\n ..\n98 a\n99 a\ndtype: object' - self.assertEqual(exp, res) + assert exp == res res = repr(test_sers['twol']) exp = ('0 ab\n1 ab\n ..\n98 ab\n99 ab\ndtype:' ' object') - self.assertEqual(exp, res) + assert exp == res res = repr(test_sers['asc']) exp = ('0 a\n1 ab\n ... \n4 abcde\n5' ' abcdef\ndtype: object') - self.assertEqual(exp, res) + assert exp == res res = repr(test_sers['desc']) exp = ('5 abcdef\n4 abcde\n ... \n1 ab\n0' ' a\ndtype: object') - self.assertEqual(exp, res) + assert exp == res def test_ncols(self): test_sers = gen_series_formatting() @@ -2069,10 +2064,10 @@ def test_max_rows_eq_one(self): strrepr = repr(s).split('\n') exp1 = ['0', '0'] res1 = strrepr[0].split() - self.assertEqual(exp1, res1) + assert exp1 == res1 exp2 = ['..'] res2 = strrepr[1].split() - self.assertEqual(exp2, res2) + assert exp2 == res2 def test_truncate_ndots(self): def getndots(s): @@ -2081,12 +2076,12 @@ def getndots(s): s = Series([0, 2, 3, 6]) with option_context("display.max_rows", 2): strrepr = repr(s).replace('\n', '') - self.assertEqual(getndots(strrepr), 2) + assert getndots(strrepr) == 2 s = Series([0, 100, 200, 400]) with option_context("display.max_rows", 2): strrepr = repr(s).replace('\n', '') - self.assertEqual(getndots(strrepr), 3) + assert getndots(strrepr) == 3 def test_show_dimensions(self): # gh-7117 @@ -2109,48 +2104,48 @@ def test_to_string_name(self): s.name = 'myser' res = s.to_string(max_rows=2, name=True) exp = '0 0\n ..\n99 99\nName: myser' - self.assertEqual(res, exp) + assert res == exp res = s.to_string(max_rows=2, name=False) exp = '0 0\n ..\n99 99' - self.assertEqual(res, exp) + assert res == exp def test_to_string_dtype(self): s = Series(range(100), dtype='int64') res = s.to_string(max_rows=2, dtype=True) exp = '0 0\n ..\n99 99\ndtype: int64' - self.assertEqual(res, exp) + assert res == exp res = s.to_string(max_rows=2, dtype=False) exp = '0 0\n ..\n99 99' - self.assertEqual(res, exp) + assert res == exp def test_to_string_length(self): s = Series(range(100), dtype='int64') res = s.to_string(max_rows=2, length=True) exp = '0 0\n ..\n99 99\nLength: 100' - self.assertEqual(res, exp) + assert res == exp def test_to_string_na_rep(self): s = pd.Series(index=range(100)) res = s.to_string(na_rep='foo', max_rows=2) exp = '0 foo\n ..\n99 foo' - self.assertEqual(res, exp) + assert res == exp def test_to_string_float_format(self): s = pd.Series(range(10), dtype='float64') res = s.to_string(float_format=lambda x: '{0:2.1f}'.format(x), max_rows=2) exp = '0 0.0\n ..\n9 9.0' - self.assertEqual(res, exp) + assert res == exp def test_to_string_header(self): s = pd.Series(range(10), dtype='int64') s.index.name = 'foo' res = s.to_string(header=True, max_rows=2) exp = 'foo\n0 0\n ..\n9 9' - self.assertEqual(res, exp) + assert res == exp res = s.to_string(header=False, max_rows=2) exp = '0 0\n ..\n9 9' - self.assertEqual(res, exp) + assert 
res == exp def _three_digit_exp(): @@ -2167,8 +2162,8 @@ def test_misc(self): def test_format(self): obj = fmt.FloatArrayFormatter(np.array([12, 0], dtype=np.float64)) result = obj.get_result() - self.assertEqual(result[0], " 12.0") - self.assertEqual(result[1], " 0.0") + assert result[0] == " 12.0" + assert result[1] == " 0.0" def test_output_significant_digits(self): # Issue #9764 @@ -2228,7 +2223,7 @@ def test_output_significant_digits(self): } for (start, stop), v in expected_output.items(): - self.assertEqual(str(d[start:stop]), v) + assert str(d[start:stop]) == v def test_too_long(self): # GH 10451 @@ -2236,12 +2231,11 @@ def test_too_long(self): # need both a number > 1e6 and something that normally formats to # having length > display.precision + 6 df = pd.DataFrame(dict(x=[12345.6789])) - self.assertEqual(str(df), ' x\n0 12345.6789') + assert str(df) == ' x\n0 12345.6789' df = pd.DataFrame(dict(x=[2e6])) - self.assertEqual(str(df), ' x\n0 2000000.0') + assert str(df) == ' x\n0 2000000.0' df = pd.DataFrame(dict(x=[12345.6789, 2e6])) - self.assertEqual( - str(df), ' x\n0 1.2346e+04\n1 2.0000e+06') + assert str(df) == ' x\n0 1.2346e+04\n1 2.0000e+06' class TestRepr_timedelta64(tm.TestCase): @@ -2253,14 +2247,13 @@ def test_none(self): delta_500ms = pd.to_timedelta(500, unit='ms') drepr = lambda x: x._repr_base() - self.assertEqual(drepr(delta_1d), "1 days") - self.assertEqual(drepr(-delta_1d), "-1 days") - self.assertEqual(drepr(delta_0d), "0 days") - self.assertEqual(drepr(delta_1s), "0 days 00:00:01") - self.assertEqual(drepr(delta_500ms), "0 days 00:00:00.500000") - self.assertEqual(drepr(delta_1d + delta_1s), "1 days 00:00:01") - self.assertEqual( - drepr(delta_1d + delta_500ms), "1 days 00:00:00.500000") + assert drepr(delta_1d) == "1 days" + assert drepr(-delta_1d) == "-1 days" + assert drepr(delta_0d) == "0 days" + assert drepr(delta_1s) == "0 days 00:00:01" + assert drepr(delta_500ms) == "0 days 00:00:00.500000" + assert drepr(delta_1d + delta_1s) == "1 days 00:00:01" + assert drepr(delta_1d + delta_500ms) == "1 days 00:00:00.500000" def test_even_day(self): delta_1d = pd.to_timedelta(1, unit='D') @@ -2269,14 +2262,13 @@ def test_even_day(self): delta_500ms = pd.to_timedelta(500, unit='ms') drepr = lambda x: x._repr_base(format='even_day') - self.assertEqual(drepr(delta_1d), "1 days") - self.assertEqual(drepr(-delta_1d), "-1 days") - self.assertEqual(drepr(delta_0d), "0 days") - self.assertEqual(drepr(delta_1s), "0 days 00:00:01") - self.assertEqual(drepr(delta_500ms), "0 days 00:00:00.500000") - self.assertEqual(drepr(delta_1d + delta_1s), "1 days 00:00:01") - self.assertEqual( - drepr(delta_1d + delta_500ms), "1 days 00:00:00.500000") + assert drepr(delta_1d) == "1 days" + assert drepr(-delta_1d) == "-1 days" + assert drepr(delta_0d) == "0 days" + assert drepr(delta_1s) == "0 days 00:00:01" + assert drepr(delta_500ms) == "0 days 00:00:00.500000" + assert drepr(delta_1d + delta_1s) == "1 days 00:00:01" + assert drepr(delta_1d + delta_500ms) == "1 days 00:00:00.500000" def test_sub_day(self): delta_1d = pd.to_timedelta(1, unit='D') @@ -2285,14 +2277,13 @@ def test_sub_day(self): delta_500ms = pd.to_timedelta(500, unit='ms') drepr = lambda x: x._repr_base(format='sub_day') - self.assertEqual(drepr(delta_1d), "1 days") - self.assertEqual(drepr(-delta_1d), "-1 days") - self.assertEqual(drepr(delta_0d), "00:00:00") - self.assertEqual(drepr(delta_1s), "00:00:01") - self.assertEqual(drepr(delta_500ms), "00:00:00.500000") - self.assertEqual(drepr(delta_1d + delta_1s), "1 days 
00:00:01") - self.assertEqual( - drepr(delta_1d + delta_500ms), "1 days 00:00:00.500000") + assert drepr(delta_1d) == "1 days" + assert drepr(-delta_1d) == "-1 days" + assert drepr(delta_0d) == "00:00:00" + assert drepr(delta_1s) == "00:00:01" + assert drepr(delta_500ms) == "00:00:00.500000" + assert drepr(delta_1d + delta_1s) == "1 days 00:00:01" + assert drepr(delta_1d + delta_500ms) == "1 days 00:00:00.500000" def test_long(self): delta_1d = pd.to_timedelta(1, unit='D') @@ -2301,14 +2292,13 @@ def test_long(self): delta_500ms = pd.to_timedelta(500, unit='ms') drepr = lambda x: x._repr_base(format='long') - self.assertEqual(drepr(delta_1d), "1 days 00:00:00") - self.assertEqual(drepr(-delta_1d), "-1 days +00:00:00") - self.assertEqual(drepr(delta_0d), "0 days 00:00:00") - self.assertEqual(drepr(delta_1s), "0 days 00:00:01") - self.assertEqual(drepr(delta_500ms), "0 days 00:00:00.500000") - self.assertEqual(drepr(delta_1d + delta_1s), "1 days 00:00:01") - self.assertEqual( - drepr(delta_1d + delta_500ms), "1 days 00:00:00.500000") + assert drepr(delta_1d) == "1 days 00:00:00" + assert drepr(-delta_1d) == "-1 days +00:00:00" + assert drepr(delta_0d) == "0 days 00:00:00" + assert drepr(delta_1s) == "0 days 00:00:01" + assert drepr(delta_500ms) == "0 days 00:00:00.500000" + assert drepr(delta_1d + delta_1s) == "1 days 00:00:01" + assert drepr(delta_1d + delta_500ms) == "1 days 00:00:00.500000" def test_all(self): delta_1d = pd.to_timedelta(1, unit='D') @@ -2316,9 +2306,9 @@ def test_all(self): delta_1ns = pd.to_timedelta(1, unit='ns') drepr = lambda x: x._repr_base(format='all') - self.assertEqual(drepr(delta_1d), "1 days 00:00:00.000000000") - self.assertEqual(drepr(delta_0d), "0 days 00:00:00.000000000") - self.assertEqual(drepr(delta_1ns), "0 days 00:00:00.000000001") + assert drepr(delta_1d) == "1 days 00:00:00.000000000" + assert drepr(delta_0d) == "0 days 00:00:00.000000000" + assert drepr(delta_1ns) == "0 days 00:00:00.000000001" class TestTimedelta64Formatter(tm.TestCase): @@ -2326,45 +2316,45 @@ class TestTimedelta64Formatter(tm.TestCase): def test_days(self): x = pd.to_timedelta(list(range(5)) + [pd.NaT], unit='D') result = fmt.Timedelta64Formatter(x, box=True).get_result() - self.assertEqual(result[0].strip(), "'0 days'") - self.assertEqual(result[1].strip(), "'1 days'") + assert result[0].strip() == "'0 days'" + assert result[1].strip() == "'1 days'" result = fmt.Timedelta64Formatter(x[1:2], box=True).get_result() - self.assertEqual(result[0].strip(), "'1 days'") + assert result[0].strip() == "'1 days'" result = fmt.Timedelta64Formatter(x, box=False).get_result() - self.assertEqual(result[0].strip(), "0 days") - self.assertEqual(result[1].strip(), "1 days") + assert result[0].strip() == "0 days" + assert result[1].strip() == "1 days" result = fmt.Timedelta64Formatter(x[1:2], box=False).get_result() - self.assertEqual(result[0].strip(), "1 days") + assert result[0].strip() == "1 days" def test_days_neg(self): x = pd.to_timedelta(list(range(5)) + [pd.NaT], unit='D') result = fmt.Timedelta64Formatter(-x, box=True).get_result() - self.assertEqual(result[0].strip(), "'0 days'") - self.assertEqual(result[1].strip(), "'-1 days'") + assert result[0].strip() == "'0 days'" + assert result[1].strip() == "'-1 days'" def test_subdays(self): y = pd.to_timedelta(list(range(5)) + [pd.NaT], unit='s') result = fmt.Timedelta64Formatter(y, box=True).get_result() - self.assertEqual(result[0].strip(), "'00:00:00'") - self.assertEqual(result[1].strip(), "'00:00:01'") + assert result[0].strip() == 
"'00:00:00'" + assert result[1].strip() == "'00:00:01'" def test_subdays_neg(self): y = pd.to_timedelta(list(range(5)) + [pd.NaT], unit='s') result = fmt.Timedelta64Formatter(-y, box=True).get_result() - self.assertEqual(result[0].strip(), "'00:00:00'") - self.assertEqual(result[1].strip(), "'-1 days +23:59:59'") + assert result[0].strip() == "'00:00:00'" + assert result[1].strip() == "'-1 days +23:59:59'" def test_zero(self): x = pd.to_timedelta(list(range(1)) + [pd.NaT], unit='D') result = fmt.Timedelta64Formatter(x, box=True).get_result() - self.assertEqual(result[0].strip(), "'0 days'") + assert result[0].strip() == "'0 days'" x = pd.to_timedelta(list(range(1)), unit='D') result = fmt.Timedelta64Formatter(x, box=True).get_result() - self.assertEqual(result[0].strip(), "'0 days'") + assert result[0].strip() == "'0 days'" class TestDatetime64Formatter(tm.TestCase): @@ -2372,19 +2362,19 @@ class TestDatetime64Formatter(tm.TestCase): def test_mixed(self): x = Series([datetime(2013, 1, 1), datetime(2013, 1, 1, 12), pd.NaT]) result = fmt.Datetime64Formatter(x).get_result() - self.assertEqual(result[0].strip(), "2013-01-01 00:00:00") - self.assertEqual(result[1].strip(), "2013-01-01 12:00:00") + assert result[0].strip() == "2013-01-01 00:00:00" + assert result[1].strip() == "2013-01-01 12:00:00" def test_dates(self): x = Series([datetime(2013, 1, 1), datetime(2013, 1, 2), pd.NaT]) result = fmt.Datetime64Formatter(x).get_result() - self.assertEqual(result[0].strip(), "2013-01-01") - self.assertEqual(result[1].strip(), "2013-01-02") + assert result[0].strip() == "2013-01-01" + assert result[1].strip() == "2013-01-02" def test_date_nanos(self): x = Series([Timestamp(200)]) result = fmt.Datetime64Formatter(x).get_result() - self.assertEqual(result[0].strip(), "1970-01-01 00:00:00.000000200") + assert result[0].strip() == "1970-01-01 00:00:00.000000200" def test_dates_display(self): @@ -2393,37 +2383,37 @@ def test_dates_display(self): x = Series(date_range('20130101 09:00:00', periods=5, freq='D')) x.iloc[1] = np.nan result = fmt.Datetime64Formatter(x).get_result() - self.assertEqual(result[0].strip(), "2013-01-01 09:00:00") - self.assertEqual(result[1].strip(), "NaT") - self.assertEqual(result[4].strip(), "2013-01-05 09:00:00") + assert result[0].strip() == "2013-01-01 09:00:00" + assert result[1].strip() == "NaT" + assert result[4].strip() == "2013-01-05 09:00:00" x = Series(date_range('20130101 09:00:00', periods=5, freq='s')) x.iloc[1] = np.nan result = fmt.Datetime64Formatter(x).get_result() - self.assertEqual(result[0].strip(), "2013-01-01 09:00:00") - self.assertEqual(result[1].strip(), "NaT") - self.assertEqual(result[4].strip(), "2013-01-01 09:00:04") + assert result[0].strip() == "2013-01-01 09:00:00" + assert result[1].strip() == "NaT" + assert result[4].strip() == "2013-01-01 09:00:04" x = Series(date_range('20130101 09:00:00', periods=5, freq='ms')) x.iloc[1] = np.nan result = fmt.Datetime64Formatter(x).get_result() - self.assertEqual(result[0].strip(), "2013-01-01 09:00:00.000") - self.assertEqual(result[1].strip(), "NaT") - self.assertEqual(result[4].strip(), "2013-01-01 09:00:00.004") + assert result[0].strip() == "2013-01-01 09:00:00.000" + assert result[1].strip() == "NaT" + assert result[4].strip() == "2013-01-01 09:00:00.004" x = Series(date_range('20130101 09:00:00', periods=5, freq='us')) x.iloc[1] = np.nan result = fmt.Datetime64Formatter(x).get_result() - self.assertEqual(result[0].strip(), "2013-01-01 09:00:00.000000") - self.assertEqual(result[1].strip(), "NaT") - 
self.assertEqual(result[4].strip(), "2013-01-01 09:00:00.000004") + assert result[0].strip() == "2013-01-01 09:00:00.000000" + assert result[1].strip() == "NaT" + assert result[4].strip() == "2013-01-01 09:00:00.000004" x = Series(date_range('20130101 09:00:00', periods=5, freq='N')) x.iloc[1] = np.nan result = fmt.Datetime64Formatter(x).get_result() - self.assertEqual(result[0].strip(), "2013-01-01 09:00:00.000000000") - self.assertEqual(result[1].strip(), "NaT") - self.assertEqual(result[4].strip(), "2013-01-01 09:00:00.000000004") + assert result[0].strip() == "2013-01-01 09:00:00.000000000" + assert result[1].strip() == "NaT" + assert result[4].strip() == "2013-01-01 09:00:00.000000004" def test_datetime64formatter_yearmonth(self): x = Series([datetime(2016, 1, 1), datetime(2016, 2, 2)]) @@ -2433,7 +2423,7 @@ def format_func(x): formatter = fmt.Datetime64Formatter(x, formatter=format_func) result = formatter.get_result() - self.assertEqual(result, ['2016-01', '2016-02']) + assert result == ['2016-01', '2016-02'] def test_datetime64formatter_hoursecond(self): @@ -2445,43 +2435,43 @@ def format_func(x): formatter = fmt.Datetime64Formatter(x, formatter=format_func) result = formatter.get_result() - self.assertEqual(result, ['10:10', '12:12']) + assert result == ['10:10', '12:12'] class TestNaTFormatting(tm.TestCase): def test_repr(self): - self.assertEqual(repr(pd.NaT), "NaT") + assert repr(pd.NaT) == "NaT" def test_str(self): - self.assertEqual(str(pd.NaT), "NaT") + assert str(pd.NaT) == "NaT" class TestDatetimeIndexFormat(tm.TestCase): def test_datetime(self): formatted = pd.to_datetime([datetime(2003, 1, 1, 12), pd.NaT]).format() - self.assertEqual(formatted[0], "2003-01-01 12:00:00") - self.assertEqual(formatted[1], "NaT") + assert formatted[0] == "2003-01-01 12:00:00" + assert formatted[1] == "NaT" def test_date(self): formatted = pd.to_datetime([datetime(2003, 1, 1), pd.NaT]).format() - self.assertEqual(formatted[0], "2003-01-01") - self.assertEqual(formatted[1], "NaT") + assert formatted[0] == "2003-01-01" + assert formatted[1] == "NaT" def test_date_tz(self): formatted = pd.to_datetime([datetime(2013, 1, 1)], utc=True).format() - self.assertEqual(formatted[0], "2013-01-01 00:00:00+00:00") + assert formatted[0] == "2013-01-01 00:00:00+00:00" formatted = pd.to_datetime( [datetime(2013, 1, 1), pd.NaT], utc=True).format() - self.assertEqual(formatted[0], "2013-01-01 00:00:00+00:00") + assert formatted[0] == "2013-01-01 00:00:00+00:00" def test_date_explict_date_format(self): formatted = pd.to_datetime([datetime(2003, 2, 1), pd.NaT]).format( date_format="%m-%d-%Y", na_rep="UT") - self.assertEqual(formatted[0], "02-01-2003") - self.assertEqual(formatted[1], "UT") + assert formatted[0] == "02-01-2003" + assert formatted[1] == "UT" class TestDatetimeIndexUnicode(tm.TestCase): @@ -2503,19 +2493,19 @@ class TestStringRepTimestamp(tm.TestCase): def test_no_tz(self): dt_date = datetime(2013, 1, 2) - self.assertEqual(str(dt_date), str(Timestamp(dt_date))) + assert str(dt_date) == str(Timestamp(dt_date)) dt_datetime = datetime(2013, 1, 2, 12, 1, 3) - self.assertEqual(str(dt_datetime), str(Timestamp(dt_datetime))) + assert str(dt_datetime) == str(Timestamp(dt_datetime)) dt_datetime_us = datetime(2013, 1, 2, 12, 1, 3, 45) - self.assertEqual(str(dt_datetime_us), str(Timestamp(dt_datetime_us))) + assert str(dt_datetime_us) == str(Timestamp(dt_datetime_us)) ts_nanos_only = Timestamp(200) - self.assertEqual(str(ts_nanos_only), "1970-01-01 00:00:00.000000200") + assert str(ts_nanos_only) == 
"1970-01-01 00:00:00.000000200" ts_nanos_micros = Timestamp(1200) - self.assertEqual(str(ts_nanos_micros), "1970-01-01 00:00:00.000001200") + assert str(ts_nanos_micros) == "1970-01-01 00:00:00.000001200" def test_tz_pytz(self): tm._skip_if_no_pytz() @@ -2523,13 +2513,13 @@ def test_tz_pytz(self): import pytz dt_date = datetime(2013, 1, 2, tzinfo=pytz.utc) - self.assertEqual(str(dt_date), str(Timestamp(dt_date))) + assert str(dt_date) == str(Timestamp(dt_date)) dt_datetime = datetime(2013, 1, 2, 12, 1, 3, tzinfo=pytz.utc) - self.assertEqual(str(dt_datetime), str(Timestamp(dt_datetime))) + assert str(dt_datetime) == str(Timestamp(dt_datetime)) dt_datetime_us = datetime(2013, 1, 2, 12, 1, 3, 45, tzinfo=pytz.utc) - self.assertEqual(str(dt_datetime_us), str(Timestamp(dt_datetime_us))) + assert str(dt_datetime_us) == str(Timestamp(dt_datetime_us)) def test_tz_dateutil(self): tm._skip_if_no_dateutil() @@ -2537,17 +2527,17 @@ def test_tz_dateutil(self): utc = dateutil.tz.tzutc() dt_date = datetime(2013, 1, 2, tzinfo=utc) - self.assertEqual(str(dt_date), str(Timestamp(dt_date))) + assert str(dt_date) == str(Timestamp(dt_date)) dt_datetime = datetime(2013, 1, 2, 12, 1, 3, tzinfo=utc) - self.assertEqual(str(dt_datetime), str(Timestamp(dt_datetime))) + assert str(dt_datetime) == str(Timestamp(dt_datetime)) dt_datetime_us = datetime(2013, 1, 2, 12, 1, 3, 45, tzinfo=utc) - self.assertEqual(str(dt_datetime_us), str(Timestamp(dt_datetime_us))) + assert str(dt_datetime_us) == str(Timestamp(dt_datetime_us)) def test_nat_representations(self): for f in (str, repr, methodcaller('isoformat')): - self.assertEqual(f(pd.NaT), 'NaT') + assert f(pd.NaT) == 'NaT' def test_format_percentiles(): diff --git a/pandas/tests/io/formats/test_printing.py b/pandas/tests/io/formats/test_printing.py index 63cd08545610f..7725b2063c7b6 100644 --- a/pandas/tests/io/formats/test_printing.py +++ b/pandas/tests/io/formats/test_printing.py @@ -44,13 +44,13 @@ def test_adjoin(self): adjoined = printing.adjoin(2, *data) - self.assertEqual(adjoined, expected) + assert adjoined == expected def test_adjoin_unicode(self): data = [[u'あ', 'b', 'c'], ['dd', u'ええ', 'ff'], ['ggg', 'hhh', u'いいい']] expected = u'あ dd ggg\nb ええ hhh\nc ff いいい' adjoined = printing.adjoin(2, *data) - self.assertEqual(adjoined, expected) + assert adjoined == expected adj = fmt.EastAsianTextAdjustment() @@ -59,22 +59,22 @@ def test_adjoin_unicode(self): c ff いいい""" adjoined = adj.adjoin(2, *data) - self.assertEqual(adjoined, expected) + assert adjoined == expected cols = adjoined.split('\n') - self.assertEqual(adj.len(cols[0]), 13) - self.assertEqual(adj.len(cols[1]), 13) - self.assertEqual(adj.len(cols[2]), 16) + assert adj.len(cols[0]) == 13 + assert adj.len(cols[1]) == 13 + assert adj.len(cols[2]) == 16 expected = u"""あ dd ggg b ええ hhh c ff いいい""" adjoined = adj.adjoin(7, *data) - self.assertEqual(adjoined, expected) + assert adjoined == expected cols = adjoined.split('\n') - self.assertEqual(adj.len(cols[0]), 23) - self.assertEqual(adj.len(cols[1]), 23) - self.assertEqual(adj.len(cols[2]), 26) + assert adj.len(cols[0]) == 23 + assert adj.len(cols[1]) == 23 + assert adj.len(cols[2]) == 26 def test_justify(self): adj = fmt.EastAsianTextAdjustment() @@ -83,45 +83,45 @@ def just(x, *args, **kwargs): # wrapper to test single str return adj.justify([x], *args, **kwargs)[0] - self.assertEqual(just('abc', 5, mode='left'), 'abc ') - self.assertEqual(just('abc', 5, mode='center'), ' abc ') - self.assertEqual(just('abc', 5, mode='right'), ' abc') - 
self.assertEqual(just(u'abc', 5, mode='left'), 'abc ') - self.assertEqual(just(u'abc', 5, mode='center'), ' abc ') - self.assertEqual(just(u'abc', 5, mode='right'), ' abc') + assert just('abc', 5, mode='left') == 'abc ' + assert just('abc', 5, mode='center') == ' abc ' + assert just('abc', 5, mode='right') == ' abc' + assert just(u'abc', 5, mode='left') == 'abc ' + assert just(u'abc', 5, mode='center') == ' abc ' + assert just(u'abc', 5, mode='right') == ' abc' - self.assertEqual(just(u'パンダ', 5, mode='left'), u'パンダ') - self.assertEqual(just(u'パンダ', 5, mode='center'), u'パンダ') - self.assertEqual(just(u'パンダ', 5, mode='right'), u'パンダ') + assert just(u'パンダ', 5, mode='left') == u'パンダ' + assert just(u'パンダ', 5, mode='center') == u'パンダ' + assert just(u'パンダ', 5, mode='right') == u'パンダ' - self.assertEqual(just(u'パンダ', 10, mode='left'), u'パンダ ') - self.assertEqual(just(u'パンダ', 10, mode='center'), u' パンダ ') - self.assertEqual(just(u'パンダ', 10, mode='right'), u' パンダ') + assert just(u'パンダ', 10, mode='left') == u'パンダ ' + assert just(u'パンダ', 10, mode='center') == u' パンダ ' + assert just(u'パンダ', 10, mode='right') == u' パンダ' def test_east_asian_len(self): adj = fmt.EastAsianTextAdjustment() - self.assertEqual(adj.len('abc'), 3) - self.assertEqual(adj.len(u'abc'), 3) + assert adj.len('abc') == 3 + assert adj.len(u'abc') == 3 - self.assertEqual(adj.len(u'パンダ'), 6) - self.assertEqual(adj.len(u'パンダ'), 5) - self.assertEqual(adj.len(u'パンダpanda'), 11) - self.assertEqual(adj.len(u'パンダpanda'), 10) + assert adj.len(u'パンダ') == 6 + assert adj.len(u'パンダ') == 5 + assert adj.len(u'パンダpanda') == 11 + assert adj.len(u'パンダpanda') == 10 def test_ambiguous_width(self): adj = fmt.EastAsianTextAdjustment() - self.assertEqual(adj.len(u'¡¡ab'), 4) + assert adj.len(u'¡¡ab') == 4 with cf.option_context('display.unicode.ambiguous_as_wide', True): adj = fmt.EastAsianTextAdjustment() - self.assertEqual(adj.len(u'¡¡ab'), 6) + assert adj.len(u'¡¡ab') == 6 data = [[u'あ', 'b', 'c'], ['dd', u'ええ', 'ff'], ['ggg', u'¡¡ab', u'いいい']] expected = u'あ dd ggg \nb ええ ¡¡ab\nc ff いいい' adjoined = adj.adjoin(2, *data) - self.assertEqual(adjoined, expected) + assert adjoined == expected class TestTableSchemaRepr(tm.TestCase): @@ -151,13 +151,13 @@ def test_publishes(self): for obj, expected in zip(objects, expected_keys): with opt, make_patch as mock_display: handle = obj._ipython_display_() - self.assertEqual(mock_display.call_count, 1) + assert mock_display.call_count == 1 assert handle is None args, kwargs = mock_display.call_args arg, = args # just one argument - self.assertEqual(kwargs, {"raw": True}) - self.assertEqual(set(arg.keys()), expected) + assert kwargs == {"raw": True} + assert set(arg.keys()) == expected with_latex = pd.option_context('display.latex.repr', True) @@ -168,7 +168,7 @@ def test_publishes(self): expected = {'text/plain', 'text/html', 'text/latex', 'application/vnd.dataresource+json'} - self.assertEqual(set(arg.keys()), expected) + assert set(arg.keys()) == expected def test_publishes_not_implemented(self): # column MultiIndex diff --git a/pandas/tests/io/formats/test_style.py b/pandas/tests/io/formats/test_style.py index 7d8ac6f81c31e..371cc2b61634a 100644 --- a/pandas/tests/io/formats/test_style.py +++ b/pandas/tests/io/formats/test_style.py @@ -39,7 +39,7 @@ def test_init_non_pandas(self): def test_init_series(self): result = Styler(pd.Series([1, 2])) - self.assertEqual(result.data.ndim, 2) + assert result.data.ndim == 2 def test_repr_html_ok(self): self.styler._repr_html_() @@ -48,7 +48,7 @@ def test_update_ctx(self): 
self.styler._update_ctx(self.attrs) expected = {(0, 0): ['color: red'], (1, 0): ['color: blue']} - self.assertEqual(self.styler.ctx, expected) + assert self.styler.ctx == expected def test_update_ctx_flatten_multi(self): attrs = DataFrame({"A": ['color: red; foo: bar', @@ -56,7 +56,7 @@ def test_update_ctx_flatten_multi(self): self.styler._update_ctx(attrs) expected = {(0, 0): ['color: red', ' foo: bar'], (1, 0): ['color: blue', ' foo: baz']} - self.assertEqual(self.styler.ctx, expected) + assert self.styler.ctx == expected def test_update_ctx_flatten_multi_traliing_semi(self): attrs = DataFrame({"A": ['color: red; foo: bar;', @@ -64,7 +64,7 @@ def test_update_ctx_flatten_multi_traliing_semi(self): self.styler._update_ctx(attrs) expected = {(0, 0): ['color: red', ' foo: bar'], (1, 0): ['color: blue', ' foo: baz']} - self.assertEqual(self.styler.ctx, expected) + assert self.styler.ctx == expected def test_copy(self): s2 = copy.copy(self.styler) @@ -74,8 +74,8 @@ def test_copy(self): self.styler._update_ctx(self.attrs) self.styler.highlight_max() - self.assertEqual(self.styler.ctx, s2.ctx) - self.assertEqual(self.styler._todo, s2._todo) + assert self.styler.ctx == s2.ctx + assert self.styler._todo == s2._todo def test_deepcopy(self): s2 = copy.deepcopy(self.styler) @@ -86,7 +86,7 @@ def test_deepcopy(self): self.styler._update_ctx(self.attrs) self.styler.highlight_max() self.assertNotEqual(self.styler.ctx, s2.ctx) - self.assertEqual(s2._todo, []) + assert s2._todo == [] self.assertNotEqual(self.styler._todo, s2._todo) def test_clear(self): @@ -119,16 +119,16 @@ def test_set_properties(self): # order is deterministic v = ["color: white", "size: 10px"] expected = {(0, 0): v, (1, 0): v} - self.assertEqual(result.keys(), expected.keys()) + assert result.keys() == expected.keys() for v1, v2 in zip(result.values(), expected.values()): - self.assertEqual(sorted(v1), sorted(v2)) + assert sorted(v1) == sorted(v2) def test_set_properties_subset(self): df = pd.DataFrame({'A': [0, 1]}) result = df.style.set_properties(subset=pd.IndexSlice[0, 'A'], color='white')._compute().ctx expected = {(0, 0): ['color: white']} - self.assertEqual(result, expected) + assert result == expected def test_empty_index_name_doesnt_display(self): # https://github.com/pandas-dev/pandas/pull/12090#issuecomment-180695902 @@ -156,7 +156,7 @@ def test_empty_index_name_doesnt_display(self): 'is_visible': True, }]] - self.assertEqual(result['head'], expected) + assert result['head'] == expected def test_index_name(self): # https://github.com/pandas-dev/pandas/issues/11655 @@ -174,7 +174,7 @@ def test_index_name(self): {'class': 'blank', 'type': 'th', 'value': ''}, {'class': 'blank', 'type': 'th', 'value': ''}]] - self.assertEqual(result['head'], expected) + assert result['head'] == expected def test_multiindex_name(self): # https://github.com/pandas-dev/pandas/issues/11655 @@ -194,7 +194,7 @@ def test_multiindex_name(self): 'value': 'B'}, {'class': 'blank', 'type': 'th', 'value': ''}]] - self.assertEqual(result['head'], expected) + assert result['head'] == expected def test_numeric_columns(self): # https://github.com/pandas-dev/pandas/issues/12125 @@ -206,21 +206,21 @@ def test_apply_axis(self): df = pd.DataFrame({'A': [0, 0], 'B': [1, 1]}) f = lambda x: ['val: %s' % x.max() for v in x] result = df.style.apply(f, axis=1) - self.assertEqual(len(result._todo), 1) - self.assertEqual(len(result.ctx), 0) + assert len(result._todo) == 1 + assert len(result.ctx) == 0 result._compute() expected = 
{(0, 0): ['val: 1'], (0, 1): ['val: 1'], (1, 0): ['val: 1'], (1, 1): ['val: 1']} - self.assertEqual(result.ctx, expected) + assert result.ctx == expected result = df.style.apply(f, axis=0) expected = {(0, 0): ['val: 0'], (0, 1): ['val: 1'], (1, 0): ['val: 0'], (1, 1): ['val: 1']} result._compute() - self.assertEqual(result.ctx, expected) + assert result.ctx == expected result = df.style.apply(f) # default result._compute() - self.assertEqual(result.ctx, expected) + assert result.ctx == expected def test_apply_subset(self): axes = [0, 1] @@ -236,7 +236,7 @@ def test_apply_subset(self): for c, col in enumerate(self.df.columns) if row in self.df.loc[slice_].index and col in self.df.loc[slice_].columns) - self.assertEqual(result, expected) + assert result == expected def test_applymap_subset(self): def f(x): @@ -253,7 +253,7 @@ def f(x): for c, col in enumerate(self.df.columns) if row in self.df.loc[slice_].index and col in self.df.loc[slice_].columns) - self.assertEqual(result, expected) + assert result == expected def test_empty(self): df = pd.DataFrame({'A': [1, 0]}) @@ -264,7 +264,7 @@ def test_empty(self): result = s._translate()['cellstyle'] expected = [{'props': [['color', ' red']], 'selector': 'row0_col0'}, {'props': [['', '']], 'selector': 'row1_col0'}] - self.assertEqual(result, expected) + assert result == expected def test_bar(self): df = pd.DataFrame({'A': [0, 1, 2]}) @@ -278,7 +278,7 @@ def test_bar(self): 'background: linear-gradient(' '90deg,#d65f5f 100.0%, transparent 0%)'] } - self.assertEqual(result, expected) + assert result == expected result = df.style.bar(color='red', width=50)._compute().ctx expected = { @@ -290,14 +290,14 @@ def test_bar(self): 'background: linear-gradient(' '90deg,red 50.0%, transparent 0%)'] } - self.assertEqual(result, expected) + assert result == expected df['C'] = ['a'] * len(df) result = df.style.bar(color='red', width=50)._compute().ctx - self.assertEqual(result, expected) + assert result == expected df['C'] = df['C'].astype('category') result = df.style.bar(color='red', width=50)._compute().ctx - self.assertEqual(result, expected) + assert result == expected def test_bar_0points(self): df = pd.DataFrame([[1, 2, 3], [4, 5, 6], [7, 8, 9]]) @@ -323,7 +323,7 @@ def test_bar_0points(self): (2, 2): ['width: 10em', ' height: 80%', 'background: linear-gradient(90deg,#d65f5f 100.0%' ', transparent 0%)']} - self.assertEqual(result, expected) + assert result == expected result = df.style.bar(axis=1)._compute().ctx expected = {(0, 0): ['width: 10em', ' height: 80%'], @@ -347,14 +347,14 @@ def test_bar_0points(self): (2, 2): ['width: 10em', ' height: 80%', 'background: linear-gradient(90deg,#d65f5f 100.0%' ', transparent 0%)']} - self.assertEqual(result, expected) + assert result == expected def test_highlight_null(self, null_color='red'): df = pd.DataFrame({'A': [0, np.nan]}) result = df.style.highlight_null()._compute().ctx expected = {(0, 0): [''], (1, 0): ['background-color: red']} - self.assertEqual(result, expected) + assert result == expected def test_nonunique_raises(self): df = pd.DataFrame([[1, 2]], columns=['A', 'A']) @@ -372,7 +372,7 @@ def test_caption(self): styler = self.df.style result = styler.set_caption('baz') assert styler is result - self.assertEqual(styler.caption, 'baz') + assert styler.caption == 'baz' def test_uuid(self): styler = Styler(self.df, uuid='abc123') @@ -382,7 +382,7 @@ def test_uuid(self): styler = self.df.style result = styler.set_uuid('aaa') assert result is styler - self.assertEqual(result.uuid, 'aaa') + assert 
result.uuid == 'aaa' def test_table_styles(self): style = [{'selector': 'th', 'props': [('foo', 'bar')]}] @@ -393,7 +393,7 @@ def test_table_styles(self): styler = self.df.style result = styler.set_table_styles(style) assert styler is result - self.assertEqual(styler.table_styles, style) + assert styler.table_styles == style def test_table_attributes(self): attributes = 'class="foo" data-bar' @@ -407,13 +407,13 @@ def test_table_attributes(self): def test_precision(self): with pd.option_context('display.precision', 10): s = Styler(self.df) - self.assertEqual(s.precision, 10) + assert s.precision == 10 s = Styler(self.df, precision=2) - self.assertEqual(s.precision, 2) + assert s.precision == 2 s2 = s.set_precision(4) assert s is s2 - self.assertEqual(s.precision, 4) + assert s.precision == 4 def test_apply_none(self): def f(x): @@ -421,14 +421,14 @@ def f(x): index=x.index, columns=x.columns) result = (pd.DataFrame([[1, 2], [3, 4]]) .style.apply(f, axis=None)._compute().ctx) - self.assertEqual(result[(1, 1)], ['color: red']) + assert result[(1, 1)] == ['color: red'] def test_trim(self): result = self.df.style.render() # trim=True - self.assertEqual(result.count('#'), 0) + assert result.count('#') == 0 result = self.df.style.highlight_max().render() - self.assertEqual(result.count('#'), len(self.df.columns)) + assert result.count('#') == len(self.df.columns) def test_highlight_max(self): df = pd.DataFrame([[1, 2], [3, 4]], columns=['A', 'B']) @@ -440,25 +440,25 @@ def test_highlight_max(self): df = -df attr = 'highlight_min' result = getattr(df.style, attr)()._compute().ctx - self.assertEqual(result[(1, 1)], ['background-color: yellow']) + assert result[(1, 1)] == ['background-color: yellow'] result = getattr(df.style, attr)(color='green')._compute().ctx - self.assertEqual(result[(1, 1)], ['background-color: green']) + assert result[(1, 1)] == ['background-color: green'] result = getattr(df.style, attr)(subset='A')._compute().ctx - self.assertEqual(result[(1, 0)], ['background-color: yellow']) + assert result[(1, 0)] == ['background-color: yellow'] result = getattr(df.style, attr)(axis=0)._compute().ctx expected = {(1, 0): ['background-color: yellow'], (1, 1): ['background-color: yellow'], (0, 1): [''], (0, 0): ['']} - self.assertEqual(result, expected) + assert result == expected result = getattr(df.style, attr)(axis=1)._compute().ctx expected = {(0, 1): ['background-color: yellow'], (1, 1): ['background-color: yellow'], (0, 0): [''], (1, 0): ['']} - self.assertEqual(result, expected) + assert result == expected # separate since we cant negate the strs df['C'] = ['a', 'b'] @@ -478,7 +478,7 @@ def test_export(self): result = style1.export() style2 = self.df.style style2.use(result) - self.assertEqual(style1._todo, style2._todo) + assert style1._todo == style2._todo style2.render() def test_display_format(self): @@ -503,48 +503,48 @@ def test_display_subset(self): ctx = df.style.format({"a": "{:0.1f}", "b": "{0:.2%}"}, subset=pd.IndexSlice[0, :])._translate() expected = '0.1' - self.assertEqual(ctx['body'][0][1]['display_value'], expected) - self.assertEqual(ctx['body'][1][1]['display_value'], '1.1234') - self.assertEqual(ctx['body'][0][2]['display_value'], '12.34%') + assert ctx['body'][0][1]['display_value'] == expected + assert ctx['body'][1][1]['display_value'] == '1.1234' + assert ctx['body'][0][2]['display_value'] == '12.34%' raw_11 = '1.1234' ctx = df.style.format("{:0.1f}", subset=pd.IndexSlice[0, :])._translate() - self.assertEqual(ctx['body'][0][1]['display_value'], expected) - 
self.assertEqual(ctx['body'][1][1]['display_value'], raw_11) + assert ctx['body'][0][1]['display_value'] == expected + assert ctx['body'][1][1]['display_value'] == raw_11 ctx = df.style.format("{:0.1f}", subset=pd.IndexSlice[0, :])._translate() - self.assertEqual(ctx['body'][0][1]['display_value'], expected) - self.assertEqual(ctx['body'][1][1]['display_value'], raw_11) + assert ctx['body'][0][1]['display_value'] == expected + assert ctx['body'][1][1]['display_value'] == raw_11 ctx = df.style.format("{:0.1f}", subset=pd.IndexSlice['a'])._translate() - self.assertEqual(ctx['body'][0][1]['display_value'], expected) - self.assertEqual(ctx['body'][0][2]['display_value'], '0.1234') + assert ctx['body'][0][1]['display_value'] == expected + assert ctx['body'][0][2]['display_value'] == '0.1234' ctx = df.style.format("{:0.1f}", subset=pd.IndexSlice[0, 'a'])._translate() - self.assertEqual(ctx['body'][0][1]['display_value'], expected) - self.assertEqual(ctx['body'][1][1]['display_value'], raw_11) + assert ctx['body'][0][1]['display_value'] == expected + assert ctx['body'][1][1]['display_value'] == raw_11 ctx = df.style.format("{:0.1f}", subset=pd.IndexSlice[[0, 1], ['a']])._translate() - self.assertEqual(ctx['body'][0][1]['display_value'], expected) - self.assertEqual(ctx['body'][1][1]['display_value'], '1.1') - self.assertEqual(ctx['body'][0][2]['display_value'], '0.1234') - self.assertEqual(ctx['body'][1][2]['display_value'], '1.1234') + assert ctx['body'][0][1]['display_value'] == expected + assert ctx['body'][1][1]['display_value'] == '1.1' + assert ctx['body'][0][2]['display_value'] == '0.1234' + assert ctx['body'][1][2]['display_value'] == '1.1234' def test_display_dict(self): df = pd.DataFrame([[.1234, .1234], [1.1234, 1.1234]], columns=['a', 'b']) ctx = df.style.format({"a": "{:0.1f}", "b": "{0:.2%}"})._translate() - self.assertEqual(ctx['body'][0][1]['display_value'], '0.1') - self.assertEqual(ctx['body'][0][2]['display_value'], '12.34%') + assert ctx['body'][0][1]['display_value'] == '0.1' + assert ctx['body'][0][2]['display_value'] == '12.34%' df['c'] = ['aaa', 'bbb'] ctx = df.style.format({"a": "{:0.1f}", "c": str.upper})._translate() - self.assertEqual(ctx['body'][0][1]['display_value'], '0.1') - self.assertEqual(ctx['body'][0][3]['display_value'], 'AAA') + assert ctx['body'][0][1]['display_value'] == '0.1' + assert ctx['body'][0][3]['display_value'] == 'AAA' def test_bad_apply_shape(self): df = pd.DataFrame([[1, 2], [3, 4]]) @@ -629,7 +629,7 @@ def test_mi_sparse(self): 'is_visible': True, 'display_value': ''}, {'type': 'th', 'class': 'col_heading level0 col0', 'value': 'A', 'is_visible': True, 'display_value': 'A'}] - self.assertEqual(head, expected) + assert head == expected def test_mi_sparse_disabled(self): with pd.option_context('display.multi_sparse', False): @@ -655,7 +655,7 @@ def test_mi_sparse_index_names(self): 'type': 'th'}, {'class': 'blank', 'value': '', 'type': 'th'}] - self.assertEqual(head, expected) + assert head == expected def test_mi_sparse_column_names(self): df = pd.DataFrame( @@ -698,7 +698,7 @@ def test_mi_sparse_column_names(self): 'type': 'th', 'value': 0}, ] - self.assertEqual(head, expected) + assert head == expected @tm.mplskip @@ -706,16 +706,16 @@ class TestStylerMatplotlibDep(TestCase): def test_background_gradient(self): df = pd.DataFrame([[1, 2], [2, 4]], columns=['A', 'B']) - for axis in [0, 1, 'index', 'columns']: - for cmap in [None, 'YlOrRd']: - result = df.style.background_gradient(cmap=cmap)._compute().ctx - assert all("#" in x[0] for x in 
result.values()) - self.assertEqual(result[(0, 0)], result[(0, 1)]) - self.assertEqual(result[(1, 0)], result[(1, 1)]) - - result = (df.style.background_gradient(subset=pd.IndexSlice[1, 'A']) - ._compute().ctx) - self.assertEqual(result[(1, 0)], ['background-color: #fff7fb']) + + for c_map in [None, 'YlOrRd']: + result = df.style.background_gradient(cmap=c_map)._compute().ctx + assert all("#" in x[0] for x in result.values()) + assert result[(0, 0)] == result[(0, 1)] + assert result[(1, 0)] == result[(1, 1)] + + result = df.style.background_gradient( + subset=pd.IndexSlice[1, 'A'])._compute().ctx + assert result[(1, 0)] == ['background-color: #fff7fb'] def test_block_names(): diff --git a/pandas/tests/io/formats/test_to_csv.py b/pandas/tests/io/formats/test_to_csv.py index 02c73019b0f65..552fb77bb54cc 100644 --- a/pandas/tests/io/formats/test_to_csv.py +++ b/pandas/tests/io/formats/test_to_csv.py @@ -17,7 +17,7 @@ def test_to_csv_quotechar(self): with tm.ensure_clean('test.csv') as path: df.to_csv(path, quoting=1) # 1=QUOTE_ALL with open(path, 'r') as f: - self.assertEqual(f.read(), expected) + assert f.read() == expected expected = """\ $$,$col$ @@ -28,7 +28,7 @@ def test_to_csv_quotechar(self): with tm.ensure_clean('test.csv') as path: df.to_csv(path, quoting=1, quotechar="$") with open(path, 'r') as f: - self.assertEqual(f.read(), expected) + assert f.read() == expected with tm.ensure_clean('test.csv') as path: with tm.assert_raises_regex(TypeError, 'quotechar'): @@ -45,7 +45,7 @@ def test_to_csv_doublequote(self): with tm.ensure_clean('test.csv') as path: df.to_csv(path, quoting=1, doublequote=True) # QUOTE_ALL with open(path, 'r') as f: - self.assertEqual(f.read(), expected) + assert f.read() == expected from _csv import Error with tm.ensure_clean('test.csv') as path: @@ -63,7 +63,7 @@ def test_to_csv_escapechar(self): with tm.ensure_clean('test.csv') as path: # QUOTE_ALL df.to_csv(path, quoting=1, doublequote=False, escapechar='\\') with open(path, 'r') as f: - self.assertEqual(f.read(), expected) + assert f.read() == expected df = DataFrame({'col': ['a,a', ',bb,']}) expected = """\ @@ -75,76 +75,71 @@ def test_to_csv_escapechar(self): with tm.ensure_clean('test.csv') as path: df.to_csv(path, quoting=3, escapechar='\\') # QUOTE_NONE with open(path, 'r') as f: - self.assertEqual(f.read(), expected) + assert f.read() == expected def test_csv_to_string(self): df = DataFrame({'col': [1, 2]}) expected = ',col\n0,1\n1,2\n' - self.assertEqual(df.to_csv(), expected) + assert df.to_csv() == expected def test_to_csv_decimal(self): # GH 781 df = DataFrame({'col1': [1], 'col2': ['a'], 'col3': [10.1]}) expected_default = ',col1,col2,col3\n0,1,a,10.1\n' - self.assertEqual(df.to_csv(), expected_default) + assert df.to_csv() == expected_default expected_european_excel = ';col1;col2;col3\n0;1;a;10,1\n' - self.assertEqual( - df.to_csv(decimal=',', sep=';'), expected_european_excel) + assert df.to_csv(decimal=',', sep=';') == expected_european_excel expected_float_format_default = ',col1,col2,col3\n0,1,a,10.10\n' - self.assertEqual( - df.to_csv(float_format='%.2f'), expected_float_format_default) + assert df.to_csv(float_format='%.2f') == expected_float_format_default expected_float_format = ';col1;col2;col3\n0;1;a;10,10\n' - self.assertEqual( - df.to_csv(decimal=',', sep=';', - float_format='%.2f'), expected_float_format) + assert df.to_csv(decimal=',', sep=';', + float_format='%.2f') == expected_float_format # GH 11553: testing if decimal is taken into account for '0.0' df = pd.DataFrame({'a': [0, 
1.1], 'b': [2.2, 3.3], 'c': 1}) expected = 'a,b,c\n0^0,2^2,1\n1^1,3^3,1\n' - self.assertEqual(df.to_csv(index=False, decimal='^'), expected) + assert df.to_csv(index=False, decimal='^') == expected # same but for an index - self.assertEqual(df.set_index('a').to_csv(decimal='^'), expected) + assert df.set_index('a').to_csv(decimal='^') == expected # same for a multi-index - self.assertEqual( - df.set_index(['a', 'b']).to_csv(decimal="^"), expected) + assert df.set_index(['a', 'b']).to_csv(decimal="^") == expected def test_to_csv_float_format(self): # testing if float_format is taken into account for the index # GH 11553 df = pd.DataFrame({'a': [0, 1], 'b': [2.2, 3.3], 'c': 1}) expected = 'a,b,c\n0,2.20,1\n1,3.30,1\n' - self.assertEqual( - df.set_index('a').to_csv(float_format='%.2f'), expected) + assert df.set_index('a').to_csv(float_format='%.2f') == expected # same for a multi-index - self.assertEqual( - df.set_index(['a', 'b']).to_csv(float_format='%.2f'), expected) + assert df.set_index(['a', 'b']).to_csv( + float_format='%.2f') == expected def test_to_csv_na_rep(self): # testing if NaN values are correctly represented in the index # GH 11553 df = DataFrame({'a': [0, np.NaN], 'b': [0, 1], 'c': [2, 3]}) expected = "a,b,c\n0.0,0,2\n_,1,3\n" - self.assertEqual(df.set_index('a').to_csv(na_rep='_'), expected) - self.assertEqual(df.set_index(['a', 'b']).to_csv(na_rep='_'), expected) + assert df.set_index('a').to_csv(na_rep='_') == expected + assert df.set_index(['a', 'b']).to_csv(na_rep='_') == expected # now with an index containing only NaNs df = DataFrame({'a': np.NaN, 'b': [0, 1], 'c': [2, 3]}) expected = "a,b,c\n_,0,2\n_,1,3\n" - self.assertEqual(df.set_index('a').to_csv(na_rep='_'), expected) - self.assertEqual(df.set_index(['a', 'b']).to_csv(na_rep='_'), expected) + assert df.set_index('a').to_csv(na_rep='_') == expected + assert df.set_index(['a', 'b']).to_csv(na_rep='_') == expected # check if na_rep parameter does not break anything when no NaN df = DataFrame({'a': 0, 'b': [0, 1], 'c': [2, 3]}) expected = "a,b,c\n0,0,2\n0,1,3\n" - self.assertEqual(df.set_index('a').to_csv(na_rep='_'), expected) - self.assertEqual(df.set_index(['a', 'b']).to_csv(na_rep='_'), expected) + assert df.set_index('a').to_csv(na_rep='_') == expected + assert df.set_index(['a', 'b']).to_csv(na_rep='_') == expected def test_to_csv_date_format(self): # GH 10209 @@ -157,26 +152,23 @@ def test_to_csv_date_format(self): '2013-01-01 00:00:01\n2,2013-01-01 00:00:02' '\n3,2013-01-01 00:00:03\n4,' '2013-01-01 00:00:04\n') - self.assertEqual(df_sec.to_csv(), expected_default_sec) + assert df_sec.to_csv() == expected_default_sec expected_ymdhms_day = (',A\n0,2013-01-01 00:00:00\n1,' '2013-01-02 00:00:00\n2,2013-01-03 00:00:00' '\n3,2013-01-04 00:00:00\n4,' '2013-01-05 00:00:00\n') - self.assertEqual( - df_day.to_csv( - date_format='%Y-%m-%d %H:%M:%S'), expected_ymdhms_day) + assert (df_day.to_csv(date_format='%Y-%m-%d %H:%M:%S') == + expected_ymdhms_day) expected_ymd_sec = (',A\n0,2013-01-01\n1,2013-01-01\n2,' '2013-01-01\n3,2013-01-01\n4,2013-01-01\n') - self.assertEqual( - df_sec.to_csv(date_format='%Y-%m-%d'), expected_ymd_sec) + assert df_sec.to_csv(date_format='%Y-%m-%d') == expected_ymd_sec expected_default_day = (',A\n0,2013-01-01\n1,2013-01-02\n2,' '2013-01-03\n3,2013-01-04\n4,2013-01-05\n') - self.assertEqual(df_day.to_csv(), expected_default_day) - self.assertEqual( - df_day.to_csv(date_format='%Y-%m-%d'), expected_default_day) + assert df_day.to_csv() == expected_default_day + assert 
df_day.to_csv(date_format='%Y-%m-%d') == expected_default_day # testing if date_format parameter is taken into account for # multi-indexed dataframes (GH 7791) @@ -184,33 +176,33 @@ def test_to_csv_date_format(self): df_sec['C'] = 1 expected_ymd_sec = 'A,B,C\n2013-01-01,0,1\n' df_sec_grouped = df_sec.groupby([pd.Grouper(key='A', freq='1h'), 'B']) - self.assertEqual(df_sec_grouped.mean().to_csv(date_format='%Y-%m-%d'), - expected_ymd_sec) + assert (df_sec_grouped.mean().to_csv(date_format='%Y-%m-%d') == + expected_ymd_sec) def test_to_csv_multi_index(self): # see gh-6618 df = DataFrame([1], columns=pd.MultiIndex.from_arrays([[1], [2]])) exp = ",1\n,2\n0,1\n" - self.assertEqual(df.to_csv(), exp) + assert df.to_csv() == exp exp = "1\n2\n1\n" - self.assertEqual(df.to_csv(index=False), exp) + assert df.to_csv(index=False) == exp df = DataFrame([1], columns=pd.MultiIndex.from_arrays([[1], [2]]), index=pd.MultiIndex.from_arrays([[1], [2]])) exp = ",,1\n,,2\n1,2,1\n" - self.assertEqual(df.to_csv(), exp) + assert df.to_csv() == exp exp = "1\n2\n1\n" - self.assertEqual(df.to_csv(index=False), exp) + assert df.to_csv(index=False) == exp df = DataFrame( [1], columns=pd.MultiIndex.from_arrays([['foo'], ['bar']])) exp = ",foo\n,bar\n0,1\n" - self.assertEqual(df.to_csv(), exp) + assert df.to_csv() == exp exp = "foo\nbar\n1\n" - self.assertEqual(df.to_csv(index=False), exp) + assert df.to_csv(index=False) == exp diff --git a/pandas/tests/io/formats/test_to_html.py b/pandas/tests/io/formats/test_to_html.py index fd9ae0851635a..4a4546dd807f1 100644 --- a/pandas/tests/io/formats/test_to_html.py +++ b/pandas/tests/io/formats/test_to_html.py @@ -50,10 +50,10 @@ def test_to_html_with_empty_string_label(self): def test_to_html_unicode(self): df = DataFrame({u('\u03c3'): np.arange(10.)}) expected = u'\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n
\u03c3 00.0 11.0 22.0 33.0 44.0 55.0 66.0 77.0 88.0 99.0
' # noqa - self.assertEqual(df.to_html(), expected) + assert df.to_html() == expected df = DataFrame({'A': [u('\u03c3')]}) expected = u'\n \n \n \n \n \n \n \n \n \n \n \n \n
A 0\u03c3
' # noqa - self.assertEqual(df.to_html(), expected) + assert df.to_html() == expected def test_to_html_decimal(self): # GH 12031 @@ -81,7 +81,7 @@ def test_to_html_decimal(self): ' \n' ' \n' '') - self.assertEqual(result, expected) + assert result == expected def test_to_html_escaped(self): a = 'str """ - self.assertEqual(xp, rs) + assert xp == rs def test_to_html_escape_disabled(self): a = 'str """ - self.assertEqual(xp, rs) + assert xp == rs def test_to_html_multiindex_index_false(self): # issue 8452 @@ -189,11 +189,11 @@ def test_to_html_multiindex_index_false(self): """ - self.assertEqual(result, expected) + assert result == expected df.index = Index(df.index.values, name='idx') result = df.to_html(index=False) - self.assertEqual(result, expected) + assert result == expected def test_to_html_multiindex_sparsify_false_multi_sparse(self): with option_context('display.multi_sparse', False): @@ -247,7 +247,7 @@ def test_to_html_multiindex_sparsify_false_multi_sparse(self): """ - self.assertEqual(result, expected) + assert result == expected df = DataFrame([[0, 1], [2, 3], [4, 5], [6, 7]], columns=index[::2], index=index) @@ -303,7 +303,7 @@ def test_to_html_multiindex_sparsify_false_multi_sparse(self): """ - self.assertEqual(result, expected) + assert result == expected def test_to_html_multiindex_sparsify(self): index = MultiIndex.from_arrays([[0, 0, 1, 1], [0, 1, 0, 1]], @@ -353,7 +353,7 @@ def test_to_html_multiindex_sparsify(self): """ - self.assertEqual(result, expected) + assert result == expected df = DataFrame([[0, 1], [2, 3], [4, 5], [6, 7]], columns=index[::2], index=index) @@ -407,7 +407,7 @@ def test_to_html_multiindex_sparsify(self): """ - self.assertEqual(result, expected) + assert result == expected def test_to_html_multiindex_odd_even_truncate(self): # GH 14882 - Issue on truncation with odd length DataFrame @@ -692,7 +692,7 @@ def test_to_html_multiindex_odd_even_truncate(self): """ - self.assertEqual(result, expected) + assert result == expected # Test that ... 
appears in a middle level result = df.to_html(max_rows=56) @@ -955,7 +955,7 @@ def test_to_html_multiindex_odd_even_truncate(self): """ - self.assertEqual(result, expected) + assert result == expected def test_to_html_index_formatter(self): df = DataFrame([[0, 1], [2, 3], [4, 5], [6, 7]], columns=['foo', None], @@ -996,7 +996,7 @@ def test_to_html_index_formatter(self): """ - self.assertEqual(result, expected) + assert result == expected def test_to_html_datetime64_monthformatter(self): months = [datetime(2016, 1, 1), datetime(2016, 2, 2)] @@ -1024,7 +1024,7 @@ def format_func(x): """ - self.assertEqual(result, expected) + assert result == expected def test_to_html_datetime64_hourformatter(self): @@ -1053,7 +1053,7 @@ def format_func(x): """ - self.assertEqual(result, expected) + assert result == expected def test_to_html_regression_GH6098(self): df = DataFrame({ @@ -1164,7 +1164,7 @@ def test_to_html_truncate(self): '''.format(div_style) if compat.PY2: expected = expected.decode('utf-8') - self.assertEqual(result, expected) + assert result == expected def test_to_html_truncate_multi_index(self): pytest.skip("unreliable on travis") @@ -1281,7 +1281,7 @@ def test_to_html_truncate_multi_index(self): '''.format(div_style) if compat.PY2: expected = expected.decode('utf-8') - self.assertEqual(result, expected) + assert result == expected def test_to_html_truncate_multi_index_sparse_off(self): pytest.skip("unreliable on travis") @@ -1392,7 +1392,7 @@ def test_to_html_truncate_multi_index_sparse_off(self): '''.format(div_style) if compat.PY2: expected = expected.decode('utf-8') - self.assertEqual(result, expected) + assert result == expected def test_to_html_border(self): df = DataFrame({'A': [1, 2]}) @@ -1424,7 +1424,7 @@ def test_to_html(self): buf = StringIO() retval = biggie.to_html(buf=buf) assert retval is None - self.assertEqual(buf.getvalue(), s) + assert buf.getvalue() == s assert isinstance(s, compat.string_types) @@ -1450,13 +1450,13 @@ def test_to_html_filename(self): with open(path, 'r') as f: s = biggie.to_html() s2 = f.read() - self.assertEqual(s, s2) + assert s == s2 frame = DataFrame(index=np.arange(200)) with tm.ensure_clean('test.html') as path: frame.to_html(path) with open(path, 'r') as f: - self.assertEqual(frame.to_html(), f.read()) + assert frame.to_html() == f.read() def test_to_html_with_no_bold(self): x = DataFrame({'x': np.random.randn(5)}) @@ -1507,7 +1507,7 @@ def test_to_html_multiindex(self): ' \n' '') - self.assertEqual(result, expected) + assert result == expected columns = MultiIndex.from_tuples(list(zip( range(4), np.mod( @@ -1550,7 +1550,7 @@ def test_to_html_multiindex(self): ' \n' '') - self.assertEqual(result, expected) + assert result == expected def test_to_html_justify(self): df = DataFrame({'A': [6, 30000, 2], @@ -1588,7 +1588,7 @@ def test_to_html_justify(self): ' \n' ' \n' '') - self.assertEqual(result, expected) + assert result == expected result = df.to_html(justify='right') expected = ('\n' @@ -1621,7 +1621,7 @@ def test_to_html_justify(self): ' \n' ' \n' '
') - self.assertEqual(result, expected) + assert result == expected def test_to_html_index(self): index = ['foo', 'bar', 'baz'] @@ -1836,10 +1836,10 @@ def test_to_html_with_classes(self): """).strip() - self.assertEqual(result, expected) + assert result == expected result = df.to_html(classes=["sortable", "draggable"]) - self.assertEqual(result, expected) + assert result == expected def test_to_html_no_index_max_rows(self): # GH https://github.com/pandas-dev/pandas/issues/14998 @@ -1858,7 +1858,7 @@ def test_to_html_no_index_max_rows(self): """) - self.assertEqual(result, expected) + assert result == expected def test_to_html_notebook_has_style(self): df = pd.DataFrame({"A": [1, 2, 3]}) diff --git a/pandas/tests/io/json/test_json_table_schema.py b/pandas/tests/io/json/test_json_table_schema.py index 4ec13fa667452..0f77a886dd302 100644 --- a/pandas/tests/io/json/test_json_table_schema.py +++ b/pandas/tests/io/json/test_json_table_schema.py @@ -39,7 +39,7 @@ def test_build_table_schema(self): ], 'primaryKey': ['idx'] } - self.assertEqual(result, expected) + assert result == expected result = build_table_schema(self.df) assert "pandas_version" in result @@ -49,7 +49,7 @@ def test_series(self): expected = {'fields': [{'name': 'index', 'type': 'integer'}, {'name': 'foo', 'type': 'integer'}], 'primaryKey': ['index']} - self.assertEqual(result, expected) + assert result == expected result = build_table_schema(s) assert 'pandas_version' in result @@ -58,7 +58,7 @@ def tets_series_unnamed(self): expected = {'fields': [{'name': 'index', 'type': 'integer'}, {'name': 'values', 'type': 'integer'}], 'primaryKey': ['index']} - self.assertEqual(result, expected) + assert result == expected def test_multiindex(self): df = self.df.copy() @@ -76,13 +76,13 @@ def test_multiindex(self): ], 'primaryKey': ['level_0', 'level_1'] } - self.assertEqual(result, expected) + assert result == expected df.index.names = ['idx0', None] expected['fields'][0]['name'] = 'idx0' expected['primaryKey'] = ['idx0', 'level_1'] result = build_table_schema(df, version=False) - self.assertEqual(result, expected) + assert result == expected class TestTableSchemaType(tm.TestCase): @@ -91,23 +91,22 @@ def test_as_json_table_type_int_data(self): int_data = [1, 2, 3] int_types = [np.int, np.int16, np.int32, np.int64] for t in int_types: - self.assertEqual(as_json_table_type(np.array(int_data, dtype=t)), - 'integer') + assert as_json_table_type(np.array( + int_data, dtype=t)) == 'integer' def test_as_json_table_type_float_data(self): float_data = [1., 2., 3.] 
float_types = [np.float, np.float16, np.float32, np.float64] for t in float_types: - self.assertEqual(as_json_table_type(np.array(float_data, - dtype=t)), - 'number') + assert as_json_table_type(np.array( + float_data, dtype=t)) == 'number' def test_as_json_table_type_bool_data(self): bool_data = [True, False] bool_types = [bool, np.bool] for t in bool_types: - self.assertEqual(as_json_table_type(np.array(bool_data, dtype=t)), - 'boolean') + assert as_json_table_type(np.array( + bool_data, dtype=t)) == 'boolean' def test_as_json_table_type_date_data(self): date_data = [pd.to_datetime(['2016']), @@ -116,20 +115,19 @@ def test_as_json_table_type_date_data(self): pd.Series(pd.to_datetime(['2016'], utc=True)), pd.period_range('2016', freq='A', periods=3)] for arr in date_data: - self.assertEqual(as_json_table_type(arr), 'datetime') + assert as_json_table_type(arr) == 'datetime' def test_as_json_table_type_string_data(self): strings = [pd.Series(['a', 'b']), pd.Index(['a', 'b'])] for t in strings: - self.assertEqual(as_json_table_type(t), 'string') + assert as_json_table_type(t) == 'string' def test_as_json_table_type_categorical_data(self): - self.assertEqual(as_json_table_type(pd.Categorical(['a'])), 'any') - self.assertEqual(as_json_table_type(pd.Categorical([1])), 'any') - self.assertEqual(as_json_table_type( - pd.Series(pd.Categorical([1]))), 'any') - self.assertEqual(as_json_table_type(pd.CategoricalIndex([1])), 'any') - self.assertEqual(as_json_table_type(pd.Categorical([1])), 'any') + assert as_json_table_type(pd.Categorical(['a'])) == 'any' + assert as_json_table_type(pd.Categorical([1])) == 'any' + assert as_json_table_type(pd.Series(pd.Categorical([1]))) == 'any' + assert as_json_table_type(pd.CategoricalIndex([1])) == 'any' + assert as_json_table_type(pd.Categorical([1])) == 'any' # ------ # dtypes @@ -137,38 +135,38 @@ def test_as_json_table_type_categorical_data(self): def test_as_json_table_type_int_dtypes(self): integers = [np.int, np.int16, np.int32, np.int64] for t in integers: - self.assertEqual(as_json_table_type(t), 'integer') + assert as_json_table_type(t) == 'integer' def test_as_json_table_type_float_dtypes(self): floats = [np.float, np.float16, np.float32, np.float64] for t in floats: - self.assertEqual(as_json_table_type(t), 'number') + assert as_json_table_type(t) == 'number' def test_as_json_table_type_bool_dtypes(self): bools = [bool, np.bool] for t in bools: - self.assertEqual(as_json_table_type(t), 'boolean') + assert as_json_table_type(t) == 'boolean' def test_as_json_table_type_date_dtypes(self): # TODO: datedate.date? datetime.time? 
dates = [np.datetime64, np.dtype("= 0) frame.to_excel(path, @@ -1274,7 +1274,7 @@ def test_roundtrip_indexlabels(self): index_col=0, ).astype(np.int64) frame.index.names = ['test'] - self.assertEqual(frame.index.names, recons.index.names) + assert frame.index.names == recons.index.names frame = (DataFrame(np.random.randn(10, 2)) >= 0) frame.to_excel(path, @@ -1316,7 +1316,7 @@ def test_excel_roundtrip_indexname(self): index_col=0) tm.assert_frame_equal(result, df) - self.assertEqual(result.index.name, 'foo') + assert result.index.name == 'foo' def test_excel_roundtrip_datetime(self): _skip_if_no_xlrd() @@ -1463,7 +1463,7 @@ def test_to_excel_multiindex_dates(self): index_col=[0, 1]) tm.assert_frame_equal(tsframe, recons) - self.assertEqual(recons.index.names, ('time', 'foo')) + assert recons.index.names == ('time', 'foo') def test_to_excel_multiindex_no_write_index(self): _skip_if_no_xlrd() @@ -1577,21 +1577,20 @@ def test_to_excel_unicode_filename(self): # wbk = xlrd.open_workbook(filename, # formatting_info=True) - # self.assertEqual(["test1"], wbk.sheet_names()) + # assert ["test1"] == wbk.sheet_names() # ws = wbk.sheet_by_name('test1') - # self.assertEqual([(0, 1, 5, 7), (0, 1, 3, 5), (0, 1, 1, 3)], - # ws.merged_cells) + # assert [(0, 1, 5, 7), (0, 1, 3, 5), (0, 1, 1, 3)] == ws.merged_cells # for i in range(0, 2): # for j in range(0, 7): # xfx = ws.cell_xf_index(0, 0) # cell_xf = wbk.xf_list[xfx] # font = wbk.font_list - # self.assertEqual(1, font[cell_xf.font_index].bold) - # self.assertEqual(1, cell_xf.border.top_line_style) - # self.assertEqual(1, cell_xf.border.right_line_style) - # self.assertEqual(1, cell_xf.border.bottom_line_style) - # self.assertEqual(1, cell_xf.border.left_line_style) - # self.assertEqual(2, cell_xf.alignment.hor_align) + # assert 1 == font[cell_xf.font_index].bold + # assert 1 == cell_xf.border.top_line_style + # assert 1 == cell_xf.border.right_line_style + # assert 1 == cell_xf.border.bottom_line_style + # assert 1 == cell_xf.border.left_line_style + # assert 2 == cell_xf.alignment.hor_align # os.remove(filename) # def test_to_excel_header_styling_xlsx(self): # import StringIO @@ -1623,7 +1622,7 @@ def test_to_excel_unicode_filename(self): # filename = '__tmp_to_excel_header_styling_xlsx__.xlsx' # pdf.to_excel(filename, 'test1') # wbk = openpyxl.load_workbook(filename) - # self.assertEqual(["test1"], wbk.get_sheet_names()) + # assert ["test1"] == wbk.get_sheet_names() # ws = wbk.get_sheet_by_name('test1') # xlsaddrs = ["%s2" % chr(i) for i in range(ord('A'), ord('H'))] # xlsaddrs += ["A%s" % i for i in range(1, 6)] @@ -1631,16 +1630,16 @@ def test_to_excel_unicode_filename(self): # for xlsaddr in xlsaddrs: # cell = ws.cell(xlsaddr) # assert cell.style.font.bold - # self.assertEqual(openpyxl.style.Border.BORDER_THIN, - # cell.style.borders.top.border_style) - # self.assertEqual(openpyxl.style.Border.BORDER_THIN, - # cell.style.borders.right.border_style) - # self.assertEqual(openpyxl.style.Border.BORDER_THIN, - # cell.style.borders.bottom.border_style) - # self.assertEqual(openpyxl.style.Border.BORDER_THIN, - # cell.style.borders.left.border_style) - # self.assertEqual(openpyxl.style.Alignment.HORIZONTAL_CENTER, - # cell.style.alignment.horizontal) + # assert (openpyxl.style.Border.BORDER_THIN == + # cell.style.borders.top.border_style) + # assert (openpyxl.style.Border.BORDER_THIN == + # cell.style.borders.right.border_style) + # assert (openpyxl.style.Border.BORDER_THIN == + # cell.style.borders.bottom.border_style) + # assert 
(openpyxl.style.Border.BORDER_THIN == + # cell.style.borders.left.border_style) + # assert (openpyxl.style.Alignment.HORIZONTAL_CENTER == + # cell.style.alignment.horizontal) # mergedcells_addrs = ["C1", "E1", "G1"] # for maddr in mergedcells_addrs: # assert ws.cell(maddr).merged @@ -1681,10 +1680,10 @@ def roundtrip(df, header=True, parser_hdr=0, index=True): res = roundtrip(df, use_headers) if use_headers: - self.assertEqual(res.shape, (nrows, ncols + i)) + assert res.shape == (nrows, ncols + i) else: # first row taken as columns - self.assertEqual(res.shape, (nrows - 1, ncols + i)) + assert res.shape == (nrows - 1, ncols + i) # no nans for r in range(len(res.index)): @@ -1692,11 +1691,11 @@ def roundtrip(df, header=True, parser_hdr=0, index=True): assert res.iloc[r, c] is not np.nan res = roundtrip(DataFrame([0])) - self.assertEqual(res.shape, (1, 1)) + assert res.shape == (1, 1) assert res.iloc[0, 0] is not np.nan res = roundtrip(DataFrame([0]), False, None) - self.assertEqual(res.shape, (1, 2)) + assert res.shape == (1, 2) assert res.iloc[0, 0] is not np.nan def test_excel_010_hemstring_raises_NotImplementedError(self): @@ -1909,18 +1908,18 @@ def test_to_excel_styleconverter(self): xlsx_style = _Openpyxl1Writer._convert_to_style(hstyle) assert xlsx_style.font.bold - self.assertEqual(openpyxl.style.Border.BORDER_THIN, - xlsx_style.borders.top.border_style) - self.assertEqual(openpyxl.style.Border.BORDER_THIN, - xlsx_style.borders.right.border_style) - self.assertEqual(openpyxl.style.Border.BORDER_THIN, - xlsx_style.borders.bottom.border_style) - self.assertEqual(openpyxl.style.Border.BORDER_THIN, - xlsx_style.borders.left.border_style) - self.assertEqual(openpyxl.style.Alignment.HORIZONTAL_CENTER, - xlsx_style.alignment.horizontal) - self.assertEqual(openpyxl.style.Alignment.VERTICAL_TOP, - xlsx_style.alignment.vertical) + assert (openpyxl.style.Border.BORDER_THIN == + xlsx_style.borders.top.border_style) + assert (openpyxl.style.Border.BORDER_THIN == + xlsx_style.borders.right.border_style) + assert (openpyxl.style.Border.BORDER_THIN == + xlsx_style.borders.bottom.border_style) + assert (openpyxl.style.Border.BORDER_THIN == + xlsx_style.borders.left.border_style) + assert (openpyxl.style.Alignment.HORIZONTAL_CENTER == + xlsx_style.alignment.horizontal) + assert (openpyxl.style.Alignment.VERTICAL_TOP == + xlsx_style.alignment.vertical) def skip_openpyxl_gt21(cls): @@ -1999,12 +1998,12 @@ def test_to_excel_styleconverter(self): protection = styles.Protection(locked=True, hidden=False) kw = _Openpyxl20Writer._convert_to_style_kwargs(hstyle) - self.assertEqual(kw['font'], font) - self.assertEqual(kw['border'], border) - self.assertEqual(kw['alignment'], alignment) - self.assertEqual(kw['fill'], fill) - self.assertEqual(kw['number_format'], number_format) - self.assertEqual(kw['protection'], protection) + assert kw['font'] == font + assert kw['border'] == border + assert kw['alignment'] == alignment + assert kw['fill'] == fill + assert kw['number_format'] == number_format + assert kw['protection'] == protection def test_write_cells_merge_styled(self): from pandas.io.formats.excel import ExcelCell @@ -2036,8 +2035,8 @@ def test_write_cells_merge_styled(self): wks = writer.sheets[sheet_name] xcell_b1 = wks['B1'] xcell_a2 = wks['A2'] - self.assertEqual(xcell_b1.style, openpyxl_sty_merged) - self.assertEqual(xcell_a2.style, openpyxl_sty_merged) + assert xcell_b1.style == openpyxl_sty_merged + assert xcell_a2.style == openpyxl_sty_merged def skip_openpyxl_lt22(cls): @@ -2109,12 +2108,12 @@ def 
test_to_excel_styleconverter(self): protection = styles.Protection(locked=True, hidden=False) kw = _Openpyxl22Writer._convert_to_style_kwargs(hstyle) - self.assertEqual(kw['font'], font) - self.assertEqual(kw['border'], border) - self.assertEqual(kw['alignment'], alignment) - self.assertEqual(kw['fill'], fill) - self.assertEqual(kw['number_format'], number_format) - self.assertEqual(kw['protection'], protection) + assert kw['font'] == font + assert kw['border'] == border + assert kw['alignment'] == alignment + assert kw['fill'] == fill + assert kw['number_format'] == number_format + assert kw['protection'] == protection def test_write_cells_merge_styled(self): if not openpyxl_compat.is_compat(major_ver=2): @@ -2148,8 +2147,8 @@ def test_write_cells_merge_styled(self): wks = writer.sheets[sheet_name] xcell_b1 = wks['B1'] xcell_a2 = wks['A2'] - self.assertEqual(xcell_b1.font, openpyxl_sty_merged) - self.assertEqual(xcell_a2.font, openpyxl_sty_merged) + assert xcell_b1.font == openpyxl_sty_merged + assert xcell_a2.font == openpyxl_sty_merged class XlwtTests(ExcelWriterBase, tm.TestCase): @@ -2201,12 +2200,12 @@ def test_to_excel_styleconverter(self): xls_style = _XlwtWriter._convert_to_style(hstyle) assert xls_style.font.bold - self.assertEqual(xlwt.Borders.THIN, xls_style.borders.top) - self.assertEqual(xlwt.Borders.THIN, xls_style.borders.right) - self.assertEqual(xlwt.Borders.THIN, xls_style.borders.bottom) - self.assertEqual(xlwt.Borders.THIN, xls_style.borders.left) - self.assertEqual(xlwt.Alignment.HORZ_CENTER, xls_style.alignment.horz) - self.assertEqual(xlwt.Alignment.VERT_TOP, xls_style.alignment.vert) + assert xlwt.Borders.THIN == xls_style.borders.top + assert xlwt.Borders.THIN == xls_style.borders.right + assert xlwt.Borders.THIN == xls_style.borders.bottom + assert xlwt.Borders.THIN == xls_style.borders.left + assert xlwt.Alignment.HORZ_CENTER == xls_style.alignment.horz + assert xlwt.Alignment.VERT_TOP == xls_style.alignment.vert class XlsxWriterTests(ExcelWriterBase, tm.TestCase): @@ -2259,7 +2258,7 @@ def test_column_format(self): except: read_num_format = cell.style.number_format._format_code - self.assertEqual(read_num_format, num_format) + assert read_num_format == num_format class OpenpyxlTests_NoMerge(ExcelWriterBase, tm.TestCase): diff --git a/pandas/tests/io/test_gbq.py b/pandas/tests/io/test_gbq.py index 13529e7b54714..138def3ea1ac9 100644 --- a/pandas/tests/io/test_gbq.py +++ b/pandas/tests/io/test_gbq.py @@ -133,4 +133,4 @@ def test_roundtrip(self): .format(destination_table), project_id=_get_project_id(), private_key=_get_private_key_path()) - self.assertEqual(result['num_rows'][0], test_size) + assert result['num_rows'][0] == test_size diff --git a/pandas/tests/io/test_html.py b/pandas/tests/io/test_html.py index db6ab236ee793..0a79173df731c 100644 --- a/pandas/tests/io/test_html.py +++ b/pandas/tests/io/test_html.py @@ -144,16 +144,16 @@ def test_spam_no_types(self): df2 = self.read_html(self.spam_data, 'Unit') assert_framelist_equal(df1, df2) - self.assertEqual(df1[0].iloc[0, 0], 'Proximates') - self.assertEqual(df1[0].columns[0], 'Nutrient') + assert df1[0].iloc[0, 0] == 'Proximates' + assert df1[0].columns[0] == 'Nutrient' def test_spam_with_types(self): df1 = self.read_html(self.spam_data, '.*Water.*') df2 = self.read_html(self.spam_data, 'Unit') assert_framelist_equal(df1, df2) - self.assertEqual(df1[0].iloc[0, 0], 'Proximates') - self.assertEqual(df1[0].columns[0], 'Nutrient') + assert df1[0].iloc[0, 0] == 'Proximates' + assert df1[0].columns[0] == 
'Nutrient' def test_spam_no_match(self): dfs = self.read_html(self.spam_data) @@ -167,7 +167,7 @@ def test_banklist_no_match(self): def test_spam_header(self): df = self.read_html(self.spam_data, '.*Water.*', header=1)[0] - self.assertEqual(df.columns[0], 'Proximates') + assert df.columns[0] == 'Proximates' assert not df.empty def test_skiprows_int(self): @@ -288,7 +288,7 @@ def test_invalid_url(self): self.read_html('http://www.a23950sdfa908sd.com', match='.*Water.*') except ValueError as e: - self.assertEqual(str(e), 'No tables found') + assert str(e) == 'No tables found' @tm.slow def test_file_url(self): @@ -368,7 +368,7 @@ def test_python_docs_table(self): url = 'https://docs.python.org/2/' dfs = self.read_html(url, match='Python') zz = [df.iloc[0, 0][0:4] for df in dfs] - self.assertEqual(sorted(zz), sorted(['Repo', 'What'])) + assert sorted(zz) == sorted(['Repo', 'What']) @tm.slow def test_thousands_macau_stats(self): @@ -518,7 +518,7 @@ def test_nyse_wsj_commas_table(self): columns = Index(['Issue(Roll over for charts and headlines)', 'Volume', 'Price', 'Chg', '% Chg']) nrows = 100 - self.assertEqual(df.shape[0], nrows) + assert df.shape[0] == nrows tm.assert_index_equal(df.columns, columns) @tm.slow @@ -536,7 +536,7 @@ def try_remove_ws(x): ground_truth = read_csv(os.path.join(DATA_PATH, 'banklist.csv'), converters={'Updated Date': Timestamp, 'Closing Date': Timestamp}) - self.assertEqual(df.shape, ground_truth.shape) + assert df.shape == ground_truth.shape old = ['First Vietnamese American BankIn Vietnamese', 'Westernbank Puerto RicoEn Espanol', 'R-G Premier Bank of Puerto RicoEn Espanol', @@ -663,7 +663,7 @@ def test_wikipedia_states_table(self): assert os.path.isfile(data), '%r is not a file' % data assert os.path.getsize(data), '%r is an empty file' % data result = self.read_html(data, 'Arizona', header=1)[0] - self.assertEqual(result['sq mi'].dtype, np.dtype('float64')) + assert result['sq mi'].dtype == np.dtype('float64') def test_decimal_rows(self): diff --git a/pandas/tests/io/test_packers.py b/pandas/tests/io/test_packers.py index ae1cadcd41496..451cce125e228 100644 --- a/pandas/tests/io/test_packers.py +++ b/pandas/tests/io/test_packers.py @@ -217,9 +217,10 @@ def test_dict_float(self): def test_dict_complex(self): x = {'foo': 1.0 + 1.0j, 'bar': 2.0 + 2.0j} x_rec = self.encode_decode(x) - self.assertEqual(x, x_rec) + tm.assert_dict_equal(x, x_rec) + for key in x: - self.assertEqual(type(x[key]), type(x_rec[key])) + tm.assert_class_equal(x[key], x_rec[key], obj="complex value") def test_dict_numpy_float(self): x = {'foo': np.float32(1.0), 'bar': np.float32(2.0)} @@ -230,9 +231,10 @@ def test_dict_numpy_complex(self): x = {'foo': np.complex128(1.0 + 1.0j), 'bar': np.complex128(2.0 + 2.0j)} x_rec = self.encode_decode(x) - self.assertEqual(x, x_rec) + tm.assert_dict_equal(x, x_rec) + for key in x: - self.assertEqual(type(x[key]), type(x_rec[key])) + tm.assert_class_equal(x[key], x_rec[key], obj="numpy complex128") def test_numpy_array_float(self): @@ -268,7 +270,7 @@ def test_timestamp(self): '20130101'), Timestamp('20130101', tz='US/Eastern'), Timestamp('201301010501')]: i_rec = self.encode_decode(i) - self.assertEqual(i, i_rec) + assert i == i_rec def test_nat(self): nat_rec = self.encode_decode(NaT) @@ -286,7 +288,7 @@ def test_datetimes(self): datetime.date(2013, 1, 1), np.datetime64(datetime.datetime(2013, 1, 5, 2, 15))]: i_rec = self.encode_decode(i) - self.assertEqual(i, i_rec) + assert i == i_rec def test_timedeltas(self): @@ -294,7 +296,7 @@ def 
test_timedeltas(self): datetime.timedelta(days=1, seconds=10), np.timedelta64(1000000)]: i_rec = self.encode_decode(i) - self.assertEqual(i, i_rec) + assert i == i_rec class TestIndex(TestPackers): @@ -668,16 +670,14 @@ def decompress(ob): for w in ws: # check the messages from our warnings - self.assertEqual( - str(w.message), - 'copying data after decompressing; this may mean that' - ' decompress is caching its result', - ) + assert str(w.message) == ('copying data after decompressing; ' + 'this may mean that decompress is ' + 'caching its result') for buf, control_buf in zip(not_garbage, control): # make sure none of our mutations above affected the # original buffers - self.assertEqual(buf, control_buf) + assert buf == control_buf def test_compression_warns_when_decompress_caches_zlib(self): if not _ZLIB_INSTALLED: @@ -710,7 +710,7 @@ def _test_small_strings_no_warn(self, compress): # we compare the ord of bytes b'a' with unicode u'a' because the should # always be the same (unless we were able to mutate the shared # character singleton in which case ord(b'a') == ord(b'b'). - self.assertEqual(ord(b'a'), ord(u'a')) + assert ord(b'a') == ord(u'a') tm.assert_numpy_array_equal( char_unpacked, np.array([ord(b'b')], dtype='uint8'), @@ -801,7 +801,7 @@ def test_default_encoding(self): for frame in compat.itervalues(self.frame): result = frame.to_msgpack() expected = frame.to_msgpack(encoding='utf8') - self.assertEqual(result, expected) + assert result == expected result = self.encode_decode(frame) assert_frame_equal(result, frame) diff --git a/pandas/tests/io/test_pytables.py b/pandas/tests/io/test_pytables.py index ae1b4137c354f..a268fa96175cf 100644 --- a/pandas/tests/io/test_pytables.py +++ b/pandas/tests/io/test_pytables.py @@ -165,8 +165,8 @@ def test_factory_fun(self): with catch_warnings(record=True): with get_store(path) as tbl: - self.assertEqual(len(tbl), 1) - self.assertEqual(type(tbl['a']), DataFrame) + assert len(tbl) == 1 + assert type(tbl['a']) == DataFrame finally: safe_remove(self.path) @@ -185,8 +185,8 @@ def test_context(self): tbl['a'] = tm.makeDataFrame() with HDFStore(path) as tbl: - self.assertEqual(len(tbl), 1) - self.assertEqual(type(tbl['a']), DataFrame) + assert len(tbl) == 1 + assert type(tbl['a']) == DataFrame finally: safe_remove(path) @@ -374,7 +374,7 @@ def test_keys(self): with catch_warnings(record=True): store['d'] = tm.makePanel() store['foo/bar'] = tm.makePanel() - self.assertEqual(len(store), 5) + assert len(store) == 5 expected = set(['/a', '/b', '/c', '/d', '/foo/bar']) assert set(store.keys()) == expected assert set(store) == expected @@ -461,9 +461,9 @@ def test_versioning(self): _maybe_remove(store, 'df1') store.append('df1', df[:10]) store.append('df1', df[10:]) - self.assertEqual(store.root.a._v_attrs.pandas_version, '0.15.2') - self.assertEqual(store.root.b._v_attrs.pandas_version, '0.15.2') - self.assertEqual(store.root.df1._v_attrs.pandas_version, '0.15.2') + assert store.root.a._v_attrs.pandas_version == '0.15.2' + assert store.root.b._v_attrs.pandas_version == '0.15.2' + assert store.root.df1._v_attrs.pandas_version == '0.15.2' # write a file and wipe its versioning _maybe_remove(store, 'df2') @@ -488,7 +488,7 @@ def check(mode): else: store = HDFStore(path, mode=mode) - self.assertEqual(store._handle.mode, mode) + assert store._handle.mode == mode store.close() with ensure_clean_path(self.path) as path: @@ -501,7 +501,7 @@ def f(): pytest.raises(IOError, f) else: with HDFStore(path, mode=mode) as store: - self.assertEqual(store._handle.mode, 
mode) + assert store._handle.mode == mode with ensure_clean_path(self.path) as path: @@ -550,7 +550,7 @@ def test_reopen_handle(self): # truncation ok here store.open('w') assert store.is_open - self.assertEqual(len(store), 0) + assert len(store) == 0 store.close() assert not store.is_open @@ -560,24 +560,24 @@ def test_reopen_handle(self): # reopen as read store.open('r') assert store.is_open - self.assertEqual(len(store), 1) - self.assertEqual(store._mode, 'r') + assert len(store) == 1 + assert store._mode == 'r' store.close() assert not store.is_open # reopen as append store.open('a') assert store.is_open - self.assertEqual(len(store), 1) - self.assertEqual(store._mode, 'a') + assert len(store) == 1 + assert store._mode == 'a' store.close() assert not store.is_open # reopen as append (again) store.open('a') assert store.is_open - self.assertEqual(len(store), 1) - self.assertEqual(store._mode, 'a') + assert len(store) == 1 + assert store._mode == 'a' store.close() assert not store.is_open @@ -889,7 +889,7 @@ def test_append_series(self): store.append('ns', ns) result = store['ns'] tm.assert_series_equal(result, ns) - self.assertEqual(result.name, ns.name) + assert result.name == ns.name # select on the values expected = ns[ns > 60] @@ -1300,8 +1300,8 @@ def test_append_with_strings(self): dict([(x, "%s_extra" % x) for x in wp.minor_axis]), axis=2) def check_col(key, name, size): - self.assertEqual(getattr(store.get_storer( - key).table.description, name).itemsize, size) + assert getattr(store.get_storer(key) + .table.description, name).itemsize == size store.append('s1', wp, min_itemsize=20) store.append('s1', wp2) @@ -1395,8 +1395,8 @@ def check_col(key, name, size): with ensure_clean_store(self.path) as store: def check_col(key, name, size): - self.assertEqual(getattr(store.get_storer( - key).table.description, name).itemsize, size) + assert getattr(store.get_storer(key) + .table.description, name).itemsize == size df = DataFrame(dict(A='foo', B='bar'), index=range(10)) # a min_itemsize that creates a data_column _maybe_remove(store, 'df') store.append('df', df, min_itemsize={'A': 200}) check_col('df', 'A', 200) - self.assertEqual(store.get_storer('df').data_columns, ['A']) + assert store.get_storer('df').data_columns == ['A'] # a min_itemsize that creates a data_column2 _maybe_remove(store, 'df') store.append('df', df, data_columns=['B'], min_itemsize={'A': 200}) check_col('df', 'A', 200) - self.assertEqual(store.get_storer('df').data_columns, ['B', 'A']) + assert store.get_storer('df').data_columns == ['B', 'A'] # a min_itemsize that creates a data_column2 _maybe_remove(store, 'df') @@ -1418,7 +1418,7 @@ def check_col(key, name, size): 'B'], min_itemsize={'values': 200}) check_col('df', 'B', 200) check_col('df', 'values_block_0', 200) - self.assertEqual(store.get_storer('df').data_columns, ['B']) + assert store.get_storer('df').data_columns == ['B'] # infer the .typ on subsequent appends _maybe_remove(store, 'df') @@ -1492,8 +1492,8 @@ def test_append_with_data_columns(self): # using min_itemsize and a data column def check_col(key, name, size): - self.assertEqual(getattr(store.get_storer( - key).table.description, name).itemsize, size) + assert getattr(store.get_storer(key) + .table.description, name).itemsize == size with ensure_clean_store(self.path) as store: _maybe_remove(store, 'df') @@ -1985,7 +1985,7 @@ def test_append_raise(self): # list in column df = tm.makeDataFrame() df['invalid'] = [['a']] * len(df) - self.assertEqual(df.dtypes['invalid'], np.object_) + assert 
df.dtypes['invalid'] == np.object_ pytest.raises(TypeError, store.append, 'df', df) # multiple invalid columns @@ -1999,7 +1999,7 @@ def test_append_raise(self): s = s.astype(object) s[0:5] = np.nan df['invalid'] = s - self.assertEqual(df.dtypes['invalid'], np.object_) + assert df.dtypes['invalid'] == np.object_ pytest.raises(TypeError, store.append, 'df', df) # directy ndarray @@ -2227,11 +2227,11 @@ def test_remove(self): store['a'] = ts store['b'] = df _maybe_remove(store, 'a') - self.assertEqual(len(store), 1) + assert len(store) == 1 tm.assert_frame_equal(df, store['b']) _maybe_remove(store, 'b') - self.assertEqual(len(store), 0) + assert len(store) == 0 # nonexistence pytest.raises(KeyError, store.remove, 'a_nonexistent_store') @@ -2241,19 +2241,19 @@ def test_remove(self): store['b/foo'] = df _maybe_remove(store, 'foo') _maybe_remove(store, 'b/foo') - self.assertEqual(len(store), 1) + assert len(store) == 1 store['a'] = ts store['b/foo'] = df _maybe_remove(store, 'b') - self.assertEqual(len(store), 1) + assert len(store) == 1 # __delitem__ store['a'] = ts store['b'] = df del store['a'] del store['b'] - self.assertEqual(len(store), 0) + assert len(store) == 0 def test_remove_where(self): @@ -3281,14 +3281,14 @@ def test_select_with_many_inputs(self): result = store.select('df', 'B=selector') expected = df[df.B.isin(selector)] tm.assert_frame_equal(expected, result) - self.assertEqual(len(result), 100) + assert len(result) == 100 # big selector along the index selector = Index(df.ts[0:100].values) result = store.select('df', 'ts=selector') expected = df[df.ts.isin(selector.values)] tm.assert_frame_equal(expected, result) - self.assertEqual(len(result), 100) + assert len(result) == 100 def test_select_iterator(self): @@ -3306,7 +3306,7 @@ def test_select_iterator(self): tm.assert_frame_equal(expected, result) results = [s for s in store.select('df', chunksize=100)] - self.assertEqual(len(results), 5) + assert len(results) == 5 result = concat(results) tm.assert_frame_equal(expected, result) @@ -3331,7 +3331,7 @@ def test_select_iterator(self): results = [s for s in read_hdf(path, 'df', chunksize=100)] result = concat(results) - self.assertEqual(len(results), 5) + assert len(results) == 5 tm.assert_frame_equal(result, df) tm.assert_frame_equal(result, read_hdf(path, 'df')) @@ -3484,7 +3484,7 @@ def test_select_iterator_non_complete_8014(self): where = "index > '%s'" % end_dt results = [s for s in store.select( 'df', where=where, chunksize=chunksize)] - self.assertEqual(0, len(results)) + assert 0 == len(results) def test_select_iterator_many_empty_frames(self): @@ -3563,8 +3563,8 @@ def test_retain_index_attributes(self): for attr in ['freq', 'tz', 'name']: for idx in ['index', 'columns']: - self.assertEqual(getattr(getattr(df, idx), attr, None), - getattr(getattr(result, idx), attr, None)) + assert (getattr(getattr(df, idx), attr, None) == + getattr(getattr(result, idx), attr, None)) # try to append a table with a different frequency with catch_warnings(record=True): @@ -3610,7 +3610,7 @@ def test_retain_index_attributes2(self): df = DataFrame(dict(A=Series(lrange(3), index=idx))) df.to_hdf(path, 'data', mode='w', append=True) - self.assertEqual(read_hdf(path, 'data').index.name, 'foo') + assert read_hdf(path, 'data').index.name == 'foo' with catch_warnings(record=True): @@ -3655,7 +3655,7 @@ def test_frame_select(self): date = df.index[len(df) // 2] crit1 = Term('index>=date') - self.assertEqual(crit1.env.scope['date'], date) + assert crit1.env.scope['date'] == date crit2 = 
("columns=['A', 'D']") crit3 = ('columns=A') @@ -4481,7 +4481,7 @@ def do_copy(f=None, new_f=None, keys=None, # check keys if keys is None: keys = store.keys() - self.assertEqual(set(keys), set(tstore.keys())) + assert set(keys) == set(tstore.keys()) # check indicies & nrows for k in tstore.keys(): @@ -4489,7 +4489,7 @@ def do_copy(f=None, new_f=None, keys=None, new_t = tstore.get_storer(k) orig_t = store.get_storer(k) - self.assertEqual(orig_t.nrows, new_t.nrows) + assert orig_t.nrows == new_t.nrows # check propindixes if propindexes: @@ -4554,7 +4554,7 @@ def test_store_datetime_fractional_secs(self): dt = datetime.datetime(2012, 1, 2, 3, 4, 5, 123456) series = Series([0], [dt]) store['a'] = series - self.assertEqual(store['a'].index[0], dt) + assert store['a'].index[0] == dt def test_tseries_indices_series(self): @@ -4564,18 +4564,18 @@ def test_tseries_indices_series(self): store['a'] = ser result = store['a'] - assert_series_equal(result, ser) - self.assertEqual(type(result.index), type(ser.index)) - self.assertEqual(result.index.freq, ser.index.freq) + tm.assert_series_equal(result, ser) + assert result.index.freq == ser.index.freq + tm.assert_class_equal(result.index, ser.index, obj="series index") idx = tm.makePeriodIndex(10) ser = Series(np.random.randn(len(idx)), idx) store['a'] = ser result = store['a'] - assert_series_equal(result, ser) - self.assertEqual(type(result.index), type(ser.index)) - self.assertEqual(result.index.freq, ser.index.freq) + tm.assert_series_equal(result, ser) + assert result.index.freq == ser.index.freq + tm.assert_class_equal(result.index, ser.index, obj="series index") def test_tseries_indices_frame(self): @@ -4586,8 +4586,9 @@ def test_tseries_indices_frame(self): result = store['a'] assert_frame_equal(result, df) - self.assertEqual(type(result.index), type(df.index)) - self.assertEqual(result.index.freq, df.index.freq) + assert result.index.freq == df.index.freq + tm.assert_class_equal(result.index, df.index, + obj="dataframe index") idx = tm.makePeriodIndex(10) df = DataFrame(np.random.randn(len(idx), 3), idx) @@ -4595,8 +4596,9 @@ def test_tseries_indices_frame(self): result = store['a'] assert_frame_equal(result, df) - self.assertEqual(type(result.index), type(df.index)) - self.assertEqual(result.index.freq, df.index.freq) + assert result.index.freq == df.index.freq + tm.assert_class_equal(result.index, df.index, + obj="dataframe index") def test_unicode_index(self): @@ -5394,7 +5396,7 @@ def test_tseries_select_index_column(self): with ensure_clean_store(self.path) as store: store.append('frame', frame) result = store.select_column('frame', 'index') - self.assertEqual(rng.tz, DatetimeIndex(result.values).tz) + assert rng.tz == DatetimeIndex(result.values).tz # check utc rng = date_range('1/1/2000', '1/30/2000', tz='UTC') @@ -5403,7 +5405,7 @@ def test_tseries_select_index_column(self): with ensure_clean_store(self.path) as store: store.append('frame', frame) result = store.select_column('frame', 'index') - self.assertEqual(rng.tz, result.dt.tz) + assert rng.tz == result.dt.tz # double check non-utc rng = date_range('1/1/2000', '1/30/2000', tz='US/Eastern') @@ -5412,7 +5414,7 @@ def test_tseries_select_index_column(self): with ensure_clean_store(self.path) as store: store.append('frame', frame) result = store.select_column('frame', 'index') - self.assertEqual(rng.tz, result.dt.tz) + assert rng.tz == result.dt.tz def test_timezones_fixed(self): with ensure_clean_store(self.path) as store: @@ -5443,7 +5445,7 @@ def test_fixed_offset_tz(self): 
store['frame'] = frame recons = store['frame'] tm.assert_index_equal(recons.index, rng) - self.assertEqual(rng.tz, recons.index.tz) + assert rng.tz == recons.index.tz def test_store_timezone(self): # GH2852 diff --git a/pandas/tests/io/test_sql.py b/pandas/tests/io/test_sql.py index fd883c9c0ff00..52883a41b08c2 100644 --- a/pandas/tests/io/test_sql.py +++ b/pandas/tests/io/test_sql.py @@ -405,9 +405,7 @@ def _to_sql_replace(self): num_entries = len(self.test_frame1) num_rows = self._count_rows('test_frame1') - self.assertEqual( - num_rows, num_entries, "not the same number of rows as entries") - + assert num_rows == num_entries self.drop_table('test_frame1') def _to_sql_append(self): @@ -425,9 +423,7 @@ def _to_sql_append(self): num_entries = 2 * len(self.test_frame1) num_rows = self._count_rows('test_frame1') - self.assertEqual( - num_rows, num_entries, "not the same number of rows as entries") - + assert num_rows == num_entries self.drop_table('test_frame1') def _roundtrip(self): @@ -454,7 +450,7 @@ def _to_sql_save_index(self): columns=['A', 'B', 'C'], index=['A']) self.pandasSQL.to_sql(df, 'test_to_sql_saves_index') ix_cols = self._get_index_columns('test_to_sql_saves_index') - self.assertEqual(ix_cols, [['A', ], ]) + assert ix_cols == [['A', ], ] def _transaction_test(self): self.pandasSQL.execute("CREATE TABLE test_trans (A INT, B TEXT)") @@ -470,13 +466,13 @@ def _transaction_test(self): # ignore raised exception pass res = self.pandasSQL.read_query('SELECT * FROM test_trans') - self.assertEqual(len(res), 0) + assert len(res) == 0 # Make sure when transaction is committed, rows do get inserted with self.pandasSQL.run_transaction() as trans: trans.execute(ins_sql) res2 = self.pandasSQL.read_query('SELECT * FROM test_trans') - self.assertEqual(len(res2), 1) + assert len(res2) == 1 # ----------------------------------------------------------------------------- @@ -544,8 +540,7 @@ def test_to_sql_replace(self): num_entries = len(self.test_frame1) num_rows = self._count_rows('test_frame3') - self.assertEqual( - num_rows, num_entries, "not the same number of rows as entries") + assert num_rows == num_entries def test_to_sql_append(self): sql.to_sql(self.test_frame1, 'test_frame4', @@ -559,8 +554,7 @@ def test_to_sql_append(self): num_entries = 2 * len(self.test_frame1) num_rows = self._count_rows('test_frame4') - self.assertEqual( - num_rows, num_entries, "not the same number of rows as entries") + assert num_rows == num_entries def test_to_sql_type_mapping(self): sql.to_sql(self.test_frame3, 'test_frame5', self.conn, index=False) @@ -663,44 +657,39 @@ def test_to_sql_index_label(self): # no index name, defaults to 'index' sql.to_sql(temp_frame, 'test_index_label', self.conn) frame = sql.read_sql_query('SELECT * FROM test_index_label', self.conn) - self.assertEqual(frame.columns[0], 'index') + assert frame.columns[0] == 'index' # specifying index_label sql.to_sql(temp_frame, 'test_index_label', self.conn, if_exists='replace', index_label='other_label') frame = sql.read_sql_query('SELECT * FROM test_index_label', self.conn) - self.assertEqual(frame.columns[0], 'other_label', - "Specified index_label not written to database") + assert frame.columns[0] == "other_label" # using the index name temp_frame.index.name = 'index_name' sql.to_sql(temp_frame, 'test_index_label', self.conn, if_exists='replace') frame = sql.read_sql_query('SELECT * FROM test_index_label', self.conn) - self.assertEqual(frame.columns[0], 'index_name', - "Index name not written to database") + assert frame.columns[0] == 
"index_name" # has index name, but specifying index_label sql.to_sql(temp_frame, 'test_index_label', self.conn, if_exists='replace', index_label='other_label') frame = sql.read_sql_query('SELECT * FROM test_index_label', self.conn) - self.assertEqual(frame.columns[0], 'other_label', - "Specified index_label not written to database") + assert frame.columns[0] == "other_label" # index name is integer temp_frame.index.name = 0 sql.to_sql(temp_frame, 'test_index_label', self.conn, if_exists='replace') frame = sql.read_sql_query('SELECT * FROM test_index_label', self.conn) - self.assertEqual(frame.columns[0], '0', - "Integer index label not written to database") + assert frame.columns[0] == "0" temp_frame.index.name = None sql.to_sql(temp_frame, 'test_index_label', self.conn, if_exists='replace', index_label=0) frame = sql.read_sql_query('SELECT * FROM test_index_label', self.conn) - self.assertEqual(frame.columns[0], '0', - "Integer index label not written to database") + assert frame.columns[0] == "0" def test_to_sql_index_label_multiindex(self): temp_frame = DataFrame({'col1': range(4)}, @@ -710,30 +699,27 @@ def test_to_sql_index_label_multiindex(self): # no index name, defaults to 'level_0' and 'level_1' sql.to_sql(temp_frame, 'test_index_label', self.conn) frame = sql.read_sql_query('SELECT * FROM test_index_label', self.conn) - self.assertEqual(frame.columns[0], 'level_0') - self.assertEqual(frame.columns[1], 'level_1') + assert frame.columns[0] == 'level_0' + assert frame.columns[1] == 'level_1' # specifying index_label sql.to_sql(temp_frame, 'test_index_label', self.conn, if_exists='replace', index_label=['A', 'B']) frame = sql.read_sql_query('SELECT * FROM test_index_label', self.conn) - self.assertEqual(frame.columns[:2].tolist(), ['A', 'B'], - "Specified index_labels not written to database") + assert frame.columns[:2].tolist() == ['A', 'B'] # using the index name temp_frame.index.names = ['A', 'B'] sql.to_sql(temp_frame, 'test_index_label', self.conn, if_exists='replace') frame = sql.read_sql_query('SELECT * FROM test_index_label', self.conn) - self.assertEqual(frame.columns[:2].tolist(), ['A', 'B'], - "Index names not written to database") + assert frame.columns[:2].tolist() == ['A', 'B'] # has index name, but specifying index_label sql.to_sql(temp_frame, 'test_index_label', self.conn, if_exists='replace', index_label=['C', 'D']) frame = sql.read_sql_query('SELECT * FROM test_index_label', self.conn) - self.assertEqual(frame.columns[:2].tolist(), ['C', 'D'], - "Specified index_labels not written to database") + assert frame.columns[:2].tolist() == ['C', 'D'] # wrong length of index_label pytest.raises(ValueError, sql.to_sql, temp_frame, @@ -793,7 +779,7 @@ def test_chunksize_read(self): for chunk in sql.read_sql_query("select * from test_chunksize", self.conn, chunksize=5): res2 = concat([res2, chunk], ignore_index=True) - self.assertEqual(len(chunk), sizes[i]) + assert len(chunk) == sizes[i] i += 1 tm.assert_frame_equal(res1, res2) @@ -807,7 +793,7 @@ def test_chunksize_read(self): for chunk in sql.read_sql_table("test_chunksize", self.conn, chunksize=5): res3 = concat([res3, chunk], ignore_index=True) - self.assertEqual(len(chunk), sizes[i]) + assert len(chunk) == sizes[i] i += 1 tm.assert_frame_equal(res1, res3) @@ -856,29 +842,24 @@ def test_read_table_columns(self): cols = ['A', 'B'] result = sql.read_sql_table('test_frame', self.conn, columns=cols) - self.assertEqual(result.columns.tolist(), cols, - "Columns not correctly selected") + assert result.columns.tolist() == cols 
def test_read_table_index_col(self): # test columns argument in read_table sql.to_sql(self.test_frame1, 'test_frame', self.conn) result = sql.read_sql_table('test_frame', self.conn, index_col="index") - self.assertEqual(result.index.names, ["index"], - "index_col not correctly set") + assert result.index.names == ["index"] result = sql.read_sql_table( 'test_frame', self.conn, index_col=["A", "B"]) - self.assertEqual(result.index.names, ["A", "B"], - "index_col not correctly set") + assert result.index.names == ["A", "B"] result = sql.read_sql_table('test_frame', self.conn, index_col=["A", "B"], columns=["C", "D"]) - self.assertEqual(result.index.names, ["A", "B"], - "index_col not correctly set") - self.assertEqual(result.columns.tolist(), ["C", "D"], - "columns not set correctly whith index_col") + assert result.index.names == ["A", "B"] + assert result.columns.tolist() == ["C", "D"] def test_read_sql_delegate(self): iris_frame1 = sql.read_sql_query( @@ -905,10 +886,11 @@ def test_not_reflect_all_tables(self): sql.read_sql_table('other_table', self.conn) sql.read_sql_query('SELECT * FROM other_table', self.conn) # Verify some things - self.assertEqual(len(w), 0, "Warning triggered for other table") + assert len(w) == 0 def test_warning_case_insensitive_table_name(self): - # see GH7815. + # see gh-7815 + # # We can't test that this warning is triggered, a the database # configuration would have to be altered. But here we test that # the warning is certainly NOT triggered in a normal case. @@ -918,8 +900,7 @@ def test_warning_case_insensitive_table_name(self): # This should not trigger a Warning self.test_frame1.to_sql('CaseSensitive', self.conn) # Verify some things - self.assertEqual( - len(w), 0, "Warning triggered for writing a table") + assert len(w) == 0 def _get_index_columns(self, tbl_name): from sqlalchemy.engine import reflection @@ -981,7 +962,7 @@ def test_query_by_text_obj(self): iris_df = sql.read_sql(name_text, self.conn, params={ 'name': 'Iris-versicolor'}) all_names = set(iris_df['Name']) - self.assertEqual(all_names, set(['Iris-versicolor'])) + assert all_names == set(['Iris-versicolor']) def test_query_by_select_obj(self): # WIP : GH10846 @@ -992,7 +973,7 @@ def test_query_by_select_obj(self): iris_df = sql.read_sql(name_select, self.conn, params={'name': 'Iris-setosa'}) all_names = set(iris_df['Name']) - self.assertEqual(all_names, set(['Iris-setosa'])) + assert all_names == set(['Iris-setosa']) class _EngineToConnMixin(object): @@ -1094,8 +1075,7 @@ def test_sqlite_type_mapping(self): db = sql.SQLiteDatabase(self.conn) table = sql.SQLiteTable("test_type", db, frame=df) schema = table.sql_schema() - self.assertEqual(self._get_sqlite_column_type(schema, 'time'), - "TIMESTAMP") + assert self._get_sqlite_column_type(schema, 'time') == "TIMESTAMP" # ----------------------------------------------------------------------------- @@ -1264,24 +1244,22 @@ def check(col): # "2000-01-01 00:00:00-08:00" should convert to # "2000-01-01 08:00:00" - self.assertEqual(col[0], Timestamp('2000-01-01 08:00:00')) + assert col[0] == Timestamp('2000-01-01 08:00:00') # "2000-06-01 00:00:00-07:00" should convert to # "2000-06-01 07:00:00" - self.assertEqual(col[1], Timestamp('2000-06-01 07:00:00')) + assert col[1] == Timestamp('2000-06-01 07:00:00') elif is_datetime64tz_dtype(col.dtype): assert str(col.dt.tz) == 'UTC' # "2000-01-01 00:00:00-08:00" should convert to # "2000-01-01 08:00:00" - self.assertEqual(col[0], Timestamp( - '2000-01-01 08:00:00', tz='UTC')) + assert col[0] == 
Timestamp('2000-01-01 08:00:00', tz='UTC') # "2000-06-01 00:00:00-07:00" should convert to # "2000-06-01 07:00:00" - self.assertEqual(col[1], Timestamp( - '2000-06-01 07:00:00', tz='UTC')) + assert col[1] == Timestamp('2000-06-01 07:00:00', tz='UTC') else: raise AssertionError("DateCol loaded with incorrect type " @@ -1525,7 +1503,7 @@ def test_dtype(self): meta.reflect() sqltype = meta.tables['dtype_test3'].columns['B'].type assert isinstance(sqltype, sqlalchemy.String) - self.assertEqual(sqltype.length, 10) + assert sqltype.length == 10 # single dtype df.to_sql('single_dtype_test', self.conn, dtype=sqlalchemy.TEXT) @@ -1576,15 +1554,14 @@ def test_double_precision(self): res = sql.read_sql_table('test_dtypes', self.conn) # check precision of float64 - self.assertEqual(np.round(df['f64'].iloc[0], 14), - np.round(res['f64'].iloc[0], 14)) + assert (np.round(df['f64'].iloc[0], 14) == + np.round(res['f64'].iloc[0], 14)) # check sql types meta = sqlalchemy.schema.MetaData(bind=self.conn) meta.reflect() col_dict = meta.tables['test_dtypes'].columns - self.assertEqual(str(col_dict['f32'].type), - str(col_dict['f64_as_f32'].type)) + assert str(col_dict['f32'].type) == str(col_dict['f64_as_f32'].type) assert isinstance(col_dict['f32'].type, sqltypes.Float) assert isinstance(col_dict['f64'].type, sqltypes.Float) assert isinstance(col_dict['i32'].type, sqltypes.Integer) @@ -1690,7 +1667,7 @@ def test_bigint_warning(self): with warnings.catch_warnings(record=True) as w: warnings.simplefilter("always") sql.read_sql_table('test_bigintwarning', self.conn) - self.assertEqual(len(w), 0, "Warning triggered for other table") + assert len(w) == 0 class _TestMySQLAlchemy(object): @@ -2002,20 +1979,20 @@ def test_dtype(self): df.to_sql('dtype_test2', self.conn, dtype={'B': 'STRING'}) # sqlite stores Boolean values as INTEGER - self.assertEqual(self._get_sqlite_column_type( - 'dtype_test', 'B'), 'INTEGER') + assert self._get_sqlite_column_type( + 'dtype_test', 'B') == 'INTEGER' - self.assertEqual(self._get_sqlite_column_type( - 'dtype_test2', 'B'), 'STRING') + assert self._get_sqlite_column_type( + 'dtype_test2', 'B') == 'STRING' pytest.raises(ValueError, df.to_sql, 'error', self.conn, dtype={'B': bool}) # single dtype df.to_sql('single_dtype_test', self.conn, dtype='STRING') - self.assertEqual( - self._get_sqlite_column_type('single_dtype_test', 'A'), 'STRING') - self.assertEqual( - self._get_sqlite_column_type('single_dtype_test', 'B'), 'STRING') + assert self._get_sqlite_column_type( + 'single_dtype_test', 'A') == 'STRING' + assert self._get_sqlite_column_type( + 'single_dtype_test', 'B') == 'STRING' def test_notnull_dtype(self): if self.flavor == 'mysql': @@ -2031,11 +2008,10 @@ def test_notnull_dtype(self): tbl = 'notnull_dtype_test' df.to_sql(tbl, self.conn) - self.assertEqual(self._get_sqlite_column_type(tbl, 'Bool'), 'INTEGER') - self.assertEqual(self._get_sqlite_column_type( - tbl, 'Date'), 'TIMESTAMP') - self.assertEqual(self._get_sqlite_column_type(tbl, 'Int'), 'INTEGER') - self.assertEqual(self._get_sqlite_column_type(tbl, 'Float'), 'REAL') + assert self._get_sqlite_column_type(tbl, 'Bool') == 'INTEGER' + assert self._get_sqlite_column_type(tbl, 'Date') == 'TIMESTAMP' + assert self._get_sqlite_column_type(tbl, 'Int') == 'INTEGER' + assert self._get_sqlite_column_type(tbl, 'Float') == 'REAL' def test_illegal_names(self): # For sqlite, these should work fine @@ -2251,7 +2227,7 @@ def test_onecolumn_of_integer(self): the_sum = sum([my_c0[0] for my_c0 in con_x.execute("select * from mono_df")]) # it 
should not fail, and gives 3 ( Issue #3628 ) - self.assertEqual(the_sum, 3) + assert the_sum == 3 result = sql.read_sql("select * from mono_df", con_x) tm.assert_frame_equal(result, mono_df) @@ -2292,23 +2268,21 @@ def clean_up(test_table_to_drop): # test if_exists='replace' sql.to_sql(frame=df_if_exists_1, con=self.conn, name=table_name, if_exists='replace', index=False) - self.assertEqual(tquery(sql_select, con=self.conn), - [(1, 'A'), (2, 'B')]) + assert tquery(sql_select, con=self.conn) == [(1, 'A'), (2, 'B')] sql.to_sql(frame=df_if_exists_2, con=self.conn, name=table_name, if_exists='replace', index=False) - self.assertEqual(tquery(sql_select, con=self.conn), - [(3, 'C'), (4, 'D'), (5, 'E')]) + assert (tquery(sql_select, con=self.conn) == + [(3, 'C'), (4, 'D'), (5, 'E')]) clean_up(table_name) # test if_exists='append' sql.to_sql(frame=df_if_exists_1, con=self.conn, name=table_name, if_exists='fail', index=False) - self.assertEqual(tquery(sql_select, con=self.conn), - [(1, 'A'), (2, 'B')]) + assert tquery(sql_select, con=self.conn) == [(1, 'A'), (2, 'B')] sql.to_sql(frame=df_if_exists_2, con=self.conn, name=table_name, if_exists='append', index=False) - self.assertEqual(tquery(sql_select, con=self.conn), - [(1, 'A'), (2, 'B'), (3, 'C'), (4, 'D'), (5, 'E')]) + assert (tquery(sql_select, con=self.conn) == + [(1, 'A'), (2, 'B'), (3, 'C'), (4, 'D'), (5, 'E')]) clean_up(table_name) @@ -2610,21 +2584,19 @@ def clean_up(test_table_to_drop): # test if_exists='replace' sql.to_sql(frame=df_if_exists_1, con=self.conn, name=table_name, if_exists='replace', index=False) - self.assertEqual(tquery(sql_select, con=self.conn), - [(1, 'A'), (2, 'B')]) + assert tquery(sql_select, con=self.conn) == [(1, 'A'), (2, 'B')] sql.to_sql(frame=df_if_exists_2, con=self.conn, name=table_name, if_exists='replace', index=False) - self.assertEqual(tquery(sql_select, con=self.conn), - [(3, 'C'), (4, 'D'), (5, 'E')]) + assert (tquery(sql_select, con=self.conn) == + [(3, 'C'), (4, 'D'), (5, 'E')]) clean_up(table_name) # test if_exists='append' sql.to_sql(frame=df_if_exists_1, con=self.conn, name=table_name, if_exists='fail', index=False) - self.assertEqual(tquery(sql_select, con=self.conn), - [(1, 'A'), (2, 'B')]) + assert tquery(sql_select, con=self.conn) == [(1, 'A'), (2, 'B')] sql.to_sql(frame=df_if_exists_2, con=self.conn, name=table_name, if_exists='append', index=False) - self.assertEqual(tquery(sql_select, con=self.conn), - [(1, 'A'), (2, 'B'), (3, 'C'), (4, 'D'), (5, 'E')]) + assert (tquery(sql_select, con=self.conn) == + [(1, 'A'), (2, 'B'), (3, 'C'), (4, 'D'), (5, 'E')]) clean_up(table_name) diff --git a/pandas/tests/io/test_stata.py b/pandas/tests/io/test_stata.py index 72023c77e7c88..945f0b009a9da 100644 --- a/pandas/tests/io/test_stata.py +++ b/pandas/tests/io/test_stata.py @@ -181,7 +181,7 @@ def test_read_dta2(self): w = [x for x in w if x.category is UserWarning] # should get warning for each call to read_dta - self.assertEqual(len(w), 3) + assert len(w) == 3 # buggy test because of the NaT comparison on certain platforms # Format 113 test fails since it does not support tc and tC formats @@ -283,7 +283,7 @@ def test_read_dta18(self): u'Floats': u'float data'} tm.assert_dict_equal(vl, vl_expected) - self.assertEqual(rdr.data_label, u'This is a Ünicode data label') + assert rdr.data_label == u'This is a Ünicode data label' def test_read_write_dta5(self): original = DataFrame([(np.nan, np.nan, np.nan, np.nan, np.nan)], @@ -351,11 +351,11 @@ def test_encoding(self): if compat.PY3: expected = raw.kreis1849[0] 
- self.assertEqual(result, expected) + assert result == expected assert isinstance(result, compat.string_types) else: expected = raw.kreis1849.str.decode("latin-1")[0] - self.assertEqual(result, expected) + assert result == expected assert isinstance(result, unicode) # noqa with tm.ensure_clean() as path: @@ -377,7 +377,7 @@ def test_read_write_dta11(self): with warnings.catch_warnings(record=True) as w: original.to_stata(path, None) # should get a warning for that format. - self.assertEqual(len(w), 1) + assert len(w) == 1 written_and_read_again = self.read_dta(path) tm.assert_frame_equal( @@ -405,7 +405,7 @@ def test_read_write_dta12(self): with warnings.catch_warnings(record=True) as w: original.to_stata(path, None) # should get a warning for that format. - self.assertEqual(len(w), 1) + assert len(w) == 1 written_and_read_again = self.read_dta(path) tm.assert_frame_equal( @@ -904,7 +904,7 @@ def test_categorical_warnings_and_errors(self): with warnings.catch_warnings(record=True) as w: original.to_stata(path) # should get a warning for mixed content - self.assertEqual(len(w), 1) + assert len(w) == 1 def test_categorical_with_stata_missing_values(self): values = [['a' + str(i)] for i in range(120)] @@ -986,10 +986,10 @@ def test_categorical_ordering(self): for col in parsed_115: if not is_categorical_dtype(parsed_115[col]): continue - self.assertEqual(True, parsed_115[col].cat.ordered) - self.assertEqual(True, parsed_117[col].cat.ordered) - self.assertEqual(False, parsed_115_unordered[col].cat.ordered) - self.assertEqual(False, parsed_117_unordered[col].cat.ordered) + assert parsed_115[col].cat.ordered + assert parsed_117[col].cat.ordered + assert not parsed_115_unordered[col].cat.ordered + assert not parsed_117_unordered[col].cat.ordered def test_read_chunks_117(self): files_117 = [self.dta1_117, self.dta2_117, self.dta3_117, diff --git a/pandas/tests/plotting/common.py b/pandas/tests/plotting/common.py index 64bcb55cb4e6a..7d0c39dae6e4b 100644 --- a/pandas/tests/plotting/common.py +++ b/pandas/tests/plotting/common.py @@ -149,7 +149,7 @@ def check_line(xpl, rsl): rsdata = rsl.get_xydata() tm.assert_almost_equal(xpdata, rsdata) - self.assertEqual(len(xp_lines), len(rs_lines)) + assert len(xp_lines) == len(rs_lines) [check_line(xpl, rsl) for xpl, rsl in zip(xp_lines, rs_lines)] tm.close() @@ -170,7 +170,7 @@ def _check_visible(self, collections, visible=True): collections = [collections] for patch in collections: - self.assertEqual(patch.get_visible(), visible) + assert patch.get_visible() == visible def _get_colors_mapped(self, series, colors): unique = series.unique() @@ -208,7 +208,7 @@ def _check_colors(self, collections, linecolors=None, facecolors=None, linecolors = self._get_colors_mapped(mapping, linecolors) linecolors = linecolors[:len(collections)] - self.assertEqual(len(collections), len(linecolors)) + assert len(collections) == len(linecolors) for patch, color in zip(collections, linecolors): if isinstance(patch, Line2D): result = patch.get_color() @@ -220,7 +220,7 @@ def _check_colors(self, collections, linecolors=None, facecolors=None, result = patch.get_edgecolor() expected = conv.to_rgba(color) - self.assertEqual(result, expected) + assert result == expected if facecolors is not None: @@ -228,7 +228,7 @@ def _check_colors(self, collections, linecolors=None, facecolors=None, facecolors = self._get_colors_mapped(mapping, facecolors) facecolors = facecolors[:len(collections)] - self.assertEqual(len(collections), len(facecolors)) + assert len(collections) == len(facecolors) 
for patch, color in zip(collections, facecolors): if isinstance(patch, Collection): # returned as list of np.array @@ -240,7 +240,7 @@ def _check_colors(self, collections, linecolors=None, facecolors=None, result = tuple(result) expected = conv.to_rgba(color) - self.assertEqual(result, expected) + assert result == expected def _check_text_labels(self, texts, expected): """ @@ -254,12 +254,12 @@ def _check_text_labels(self, texts, expected): expected text label, or its list """ if not is_list_like(texts): - self.assertEqual(texts.get_text(), expected) + assert texts.get_text() == expected else: labels = [t.get_text() for t in texts] - self.assertEqual(len(labels), len(expected)) + assert len(labels) == len(expected) for l, e in zip(labels, expected): - self.assertEqual(l, e) + assert l == e def _check_ticks_props(self, axes, xlabelsize=None, xrot=None, ylabelsize=None, yrot=None): @@ -325,8 +325,8 @@ def _check_ax_scales(self, axes, xaxis='linear', yaxis='linear'): """ axes = self._flatten_visible(axes) for ax in axes: - self.assertEqual(ax.xaxis.get_scale(), xaxis) - self.assertEqual(ax.yaxis.get_scale(), yaxis) + assert ax.xaxis.get_scale() == xaxis + assert ax.yaxis.get_scale() == yaxis def _check_axes_shape(self, axes, axes_num=None, layout=None, figsize=None): @@ -349,14 +349,14 @@ def _check_axes_shape(self, axes, axes_num=None, layout=None, visible_axes = self._flatten_visible(axes) if axes_num is not None: - self.assertEqual(len(visible_axes), axes_num) + assert len(visible_axes) == axes_num for ax in visible_axes: # check something drawn on visible axes assert len(ax.get_children()) > 0 if layout is not None: result = self._get_axes_layout(_flatten(axes)) - self.assertEqual(result, layout) + assert result == layout tm.assert_numpy_array_equal( visible_axes[0].figure.get_size_inches(), @@ -409,8 +409,8 @@ def _check_has_errorbars(self, axes, xerr=0, yerr=0): xerr_count += 1 if has_yerr: yerr_count += 1 - self.assertEqual(xerr, xerr_count) - self.assertEqual(yerr, yerr_count) + assert xerr == xerr_count + assert yerr == yerr_count def _check_box_return_type(self, returned, return_type, expected_keys=None, check_ax_title=True): @@ -450,23 +450,23 @@ def _check_box_return_type(self, returned, return_type, expected_keys=None, assert isinstance(returned, Series) - self.assertEqual(sorted(returned.keys()), sorted(expected_keys)) + assert sorted(returned.keys()) == sorted(expected_keys) for key, value in iteritems(returned): assert isinstance(value, types[return_type]) # check returned dict has correct mapping if return_type == 'axes': if check_ax_title: - self.assertEqual(value.get_title(), key) + assert value.get_title() == key elif return_type == 'both': if check_ax_title: - self.assertEqual(value.ax.get_title(), key) + assert value.ax.get_title() == key assert isinstance(value.ax, Axes) assert isinstance(value.lines, dict) elif return_type == 'dict': line = value['medians'][0] axes = line.axes if self.mpl_ge_1_5_0 else line.get_axes() if check_ax_title: - self.assertEqual(axes.get_title(), key) + assert axes.get_title() == key else: raise AssertionError diff --git a/pandas/tests/plotting/test_boxplot_method.py b/pandas/tests/plotting/test_boxplot_method.py index fe6d5e5cf148f..1f70d408767f3 100644 --- a/pandas/tests/plotting/test_boxplot_method.py +++ b/pandas/tests/plotting/test_boxplot_method.py @@ -90,7 +90,7 @@ def test_boxplot_legacy(self): fig, ax = self.plt.subplots() d = df.boxplot(ax=ax, return_type='dict') lines = list(itertools.chain.from_iterable(d.values())) - 
self.assertEqual(len(ax.get_lines()), len(lines)) + assert len(ax.get_lines()) == len(lines) @slow def test_boxplot_return_type_none(self): @@ -138,7 +138,7 @@ def _check_ax_limits(col, ax): height_ax, weight_ax = df.boxplot(['height', 'weight'], by='category') _check_ax_limits(df['height'], height_ax) _check_ax_limits(df['weight'], weight_ax) - self.assertEqual(weight_ax._sharey, height_ax) + assert weight_ax._sharey == height_ax # Two rows, one partial p = df.boxplot(['height', 'weight', 'age'], by='category') @@ -148,8 +148,8 @@ def _check_ax_limits(col, ax): _check_ax_limits(df['height'], height_ax) _check_ax_limits(df['weight'], weight_ax) _check_ax_limits(df['age'], age_ax) - self.assertEqual(weight_ax._sharey, height_ax) - self.assertEqual(age_ax._sharey, height_ax) + assert weight_ax._sharey == height_ax + assert age_ax._sharey == height_ax assert dummy_ax._sharey is None @slow @@ -209,13 +209,13 @@ def test_grouped_plot_fignums(self): gb = df.groupby('gender') res = gb.plot() - self.assertEqual(len(self.plt.get_fignums()), 2) - self.assertEqual(len(res), 2) + assert len(self.plt.get_fignums()) == 2 + assert len(res) == 2 tm.close() res = gb.boxplot(return_type='axes') - self.assertEqual(len(self.plt.get_fignums()), 1) - self.assertEqual(len(res), 2) + assert len(self.plt.get_fignums()) == 1 + assert len(res) == 2 tm.close() # now works with GH 5610 as gender is excluded diff --git a/pandas/tests/plotting/test_converter.py b/pandas/tests/plotting/test_converter.py index 30eb3ef24fe30..e23bc2ef6c563 100644 --- a/pandas/tests/plotting/test_converter.py +++ b/pandas/tests/plotting/test_converter.py @@ -29,35 +29,35 @@ def test_convert_accepts_unicode(self): def test_conversion(self): rs = self.dtc.convert(['2012-1-1'], None, None)[0] xp = datetime(2012, 1, 1).toordinal() - self.assertEqual(rs, xp) + assert rs == xp rs = self.dtc.convert('2012-1-1', None, None) - self.assertEqual(rs, xp) + assert rs == xp rs = self.dtc.convert(date(2012, 1, 1), None, None) - self.assertEqual(rs, xp) + assert rs == xp rs = self.dtc.convert(datetime(2012, 1, 1).toordinal(), None, None) - self.assertEqual(rs, xp) + assert rs == xp rs = self.dtc.convert('2012-1-1', None, None) - self.assertEqual(rs, xp) + assert rs == xp rs = self.dtc.convert(Timestamp('2012-1-1'), None, None) - self.assertEqual(rs, xp) + assert rs == xp # also testing datetime64 dtype (GH8614) rs = self.dtc.convert(np_datetime64_compat('2012-01-01'), None, None) - self.assertEqual(rs, xp) + assert rs == xp rs = self.dtc.convert(np_datetime64_compat( '2012-01-01 00:00:00+0000'), None, None) - self.assertEqual(rs, xp) + assert rs == xp rs = self.dtc.convert(np.array([ np_datetime64_compat('2012-01-01 00:00:00+0000'), np_datetime64_compat('2012-01-02 00:00:00+0000')]), None, None) - self.assertEqual(rs[0], xp) + assert rs[0] == xp # we have a tz-aware date (constructed to that when we turn to utc it # is the same as our sample) @@ -66,17 +66,17 @@ def test_conversion(self): .tz_convert('US/Eastern') ) rs = self.dtc.convert(ts, None, None) - self.assertEqual(rs, xp) + assert rs == xp rs = self.dtc.convert(ts.to_pydatetime(), None, None) - self.assertEqual(rs, xp) + assert rs == xp rs = self.dtc.convert(Index([ts - Day(1), ts]), None, None) - self.assertEqual(rs[1], xp) + assert rs[1] == xp rs = self.dtc.convert(Index([ts - Day(1), ts]).to_pydatetime(), None, None) - self.assertEqual(rs[1], xp) + assert rs[1] == xp def test_conversion_float(self): decimals = 9 @@ -101,7 +101,7 @@ def test_conversion_outofbounds_datetime(self): 
tm.assert_numpy_array_equal(rs, xp) rs = self.dtc.convert(values[0], None, None) xp = converter.dates.date2num(values[0]) - self.assertEqual(rs, xp) + assert rs == xp values = [datetime(1677, 1, 1, 12), datetime(1677, 1, 2, 12)] rs = self.dtc.convert(values, None, None) @@ -109,7 +109,7 @@ def test_conversion_outofbounds_datetime(self): tm.assert_numpy_array_equal(rs, xp) rs = self.dtc.convert(values[0], None, None) xp = converter.dates.date2num(values[0]) - self.assertEqual(rs, xp) + assert rs == xp def test_time_formatter(self): self.tc(90000) @@ -165,44 +165,44 @@ def test_convert_accepts_unicode(self): def test_conversion(self): rs = self.pc.convert(['2012-1-1'], None, self.axis)[0] xp = Period('2012-1-1').ordinal - self.assertEqual(rs, xp) + assert rs == xp rs = self.pc.convert('2012-1-1', None, self.axis) - self.assertEqual(rs, xp) + assert rs == xp rs = self.pc.convert([date(2012, 1, 1)], None, self.axis)[0] - self.assertEqual(rs, xp) + assert rs == xp rs = self.pc.convert(date(2012, 1, 1), None, self.axis) - self.assertEqual(rs, xp) + assert rs == xp rs = self.pc.convert([Timestamp('2012-1-1')], None, self.axis)[0] - self.assertEqual(rs, xp) + assert rs == xp rs = self.pc.convert(Timestamp('2012-1-1'), None, self.axis) - self.assertEqual(rs, xp) + assert rs == xp # FIXME # rs = self.pc.convert( # np_datetime64_compat('2012-01-01'), None, self.axis) - # self.assertEqual(rs, xp) + # assert rs == xp # # rs = self.pc.convert( # np_datetime64_compat('2012-01-01 00:00:00+0000'), # None, self.axis) - # self.assertEqual(rs, xp) + # assert rs == xp # # rs = self.pc.convert(np.array([ # np_datetime64_compat('2012-01-01 00:00:00+0000'), # np_datetime64_compat('2012-01-02 00:00:00+0000')]), # None, self.axis) - # self.assertEqual(rs[0], xp) + # assert rs[0] == xp def test_integer_passthrough(self): # GH9012 rs = self.pc.convert([0, 1], None, self.axis) xp = [0, 1] - self.assertEqual(rs, xp) + assert rs == xp def test_convert_nested(self): data = ['2012-1-1', '2012-1-2'] diff --git a/pandas/tests/plotting/test_datetimelike.py b/pandas/tests/plotting/test_datetimelike.py index 30d67630afa41..ae8faa031174e 100644 --- a/pandas/tests/plotting/test_datetimelike.py +++ b/pandas/tests/plotting/test_datetimelike.py @@ -58,7 +58,7 @@ def test_fontsize_set_correctly(self): df = DataFrame(np.random.randn(10, 9), index=range(10)) ax = df.plot(fontsize=2) for label in (ax.get_xticklabels() + ax.get_yticklabels()): - self.assertEqual(label.get_fontsize(), 2) + assert label.get_fontsize() == 2 @slow def test_frame_inferred(self): @@ -95,7 +95,7 @@ def test_nonnumeric_exclude(self): df = DataFrame({'A': ["x", "y", "z"], 'B': [1, 2, 3]}, idx) ax = df.plot() # it works - self.assertEqual(len(ax.get_lines()), 1) # B was plotted + assert len(ax.get_lines()) == 1 # B was plotted plt.close(plt.gcf()) pytest.raises(TypeError, df['A'].plot) @@ -124,7 +124,7 @@ def test_tsplot(self): ax = ts.plot(style='k') color = (0., 0., 0., 1) if self.mpl_ge_2_0_0 else (0., 0., 0.) 
- self.assertEqual(color, ax.get_lines()[0].get_color()) + assert color == ax.get_lines()[0].get_color() def test_both_style_and_color(self): import matplotlib.pyplot as plt # noqa @@ -146,11 +146,11 @@ def test_high_freq(self): def test_get_datevalue(self): from pandas.plotting._converter import get_datevalue assert get_datevalue(None, 'D') is None - self.assertEqual(get_datevalue(1987, 'A'), 1987) - self.assertEqual(get_datevalue(Period(1987, 'A'), 'M'), - Period('1987-12', 'M').ordinal) - self.assertEqual(get_datevalue('1/1/1987', 'D'), - Period('1987-1-1', 'D').ordinal) + assert get_datevalue(1987, 'A') == 1987 + assert (get_datevalue(Period(1987, 'A'), 'M') == + Period('1987-12', 'M').ordinal) + assert (get_datevalue('1/1/1987', 'D') == + Period('1987-1-1', 'D').ordinal) @slow def test_ts_plot_format_coord(self): @@ -159,8 +159,7 @@ def check_format_of_first_point(ax, expected_string): first_x = first_line.get_xdata()[0].ordinal first_y = first_line.get_ydata()[0] try: - self.assertEqual(expected_string, - ax.format_coord(first_x, first_y)) + assert expected_string == ax.format_coord(first_x, first_y) except (ValueError): pytest.skip("skipping test because issue forming " "test comparison GH7664") @@ -261,7 +260,7 @@ def test_uhf(self): xp = conv._from_ordinal(loc).strftime('%H:%M:%S.%f') rs = str(label.get_text()) if len(rs): - self.assertEqual(xp, rs) + assert xp == rs @slow def test_irreg_hf(self): @@ -308,10 +307,9 @@ def test_business_freq(self): import matplotlib.pyplot as plt # noqa bts = tm.makePeriodSeries() ax = bts.plot() - self.assertEqual(ax.get_lines()[0].get_xydata()[0, 0], - bts.index[0].ordinal) + assert ax.get_lines()[0].get_xydata()[0, 0] == bts.index[0].ordinal idx = ax.get_lines()[0].get_xdata() - self.assertEqual(PeriodIndex(data=idx).freqstr, 'B') + assert PeriodIndex(data=idx).freqstr == 'B' @slow def test_business_freq_convert(self): @@ -321,10 +319,9 @@ def test_business_freq_convert(self): tm.N = n ts = bts.to_period('M') ax = bts.plot() - self.assertEqual(ax.get_lines()[0].get_xydata()[0, 0], - ts.index[0].ordinal) + assert ax.get_lines()[0].get_xydata()[0, 0] == ts.index[0].ordinal idx = ax.get_lines()[0].get_xdata() - self.assertEqual(PeriodIndex(data=idx).freqstr, 'M') + assert PeriodIndex(data=idx).freqstr == 'M' def test_nonzero_base(self): # GH2571 @@ -350,8 +347,8 @@ def _test(ax): ax.set_xlim(xlim[0] - 5, xlim[1] + 10) ax.get_figure().canvas.draw() result = ax.get_xlim() - self.assertEqual(result[0], xlim[0] - 5) - self.assertEqual(result[1], xlim[1] + 10) + assert result[0] == xlim[0] - 5 + assert result[1] == xlim[1] + 10 # string expected = (Period('1/1/2000', ax.freq), @@ -359,8 +356,8 @@ def _test(ax): ax.set_xlim('1/1/2000', '4/1/2000') ax.get_figure().canvas.draw() result = ax.get_xlim() - self.assertEqual(int(result[0]), expected[0].ordinal) - self.assertEqual(int(result[1]), expected[1].ordinal) + assert int(result[0]) == expected[0].ordinal + assert int(result[1]) == expected[1].ordinal # datetim expected = (Period('1/1/2000', ax.freq), @@ -368,8 +365,8 @@ def _test(ax): ax.set_xlim(datetime(2000, 1, 1), datetime(2000, 4, 1)) ax.get_figure().canvas.draw() result = ax.get_xlim() - self.assertEqual(int(result[0]), expected[0].ordinal) - self.assertEqual(int(result[1]), expected[1].ordinal) + assert int(result[0]) == expected[0].ordinal + assert int(result[1]) == expected[1].ordinal fig = ax.get_figure() plt.close(fig) @@ -390,12 +387,12 @@ def _test(ax): def test_get_finder(self): import pandas.plotting._converter as conv - 
self.assertEqual(conv.get_finder('B'), conv._daily_finder) - self.assertEqual(conv.get_finder('D'), conv._daily_finder) - self.assertEqual(conv.get_finder('M'), conv._monthly_finder) - self.assertEqual(conv.get_finder('Q'), conv._quarterly_finder) - self.assertEqual(conv.get_finder('A'), conv._annual_finder) - self.assertEqual(conv.get_finder('W'), conv._daily_finder) + assert conv.get_finder('B') == conv._daily_finder + assert conv.get_finder('D') == conv._daily_finder + assert conv.get_finder('M') == conv._monthly_finder + assert conv.get_finder('Q') == conv._quarterly_finder + assert conv.get_finder('A') == conv._annual_finder + assert conv.get_finder('W') == conv._daily_finder @slow def test_finder_daily(self): @@ -408,11 +405,11 @@ def test_finder_daily(self): ax = ser.plot() xaxis = ax.get_xaxis() rs = xaxis.get_majorticklocs()[0] - self.assertEqual(xp, rs) + assert xp == rs vmin, vmax = ax.get_xlim() ax.set_xlim(vmin + 0.9, vmax) rs = xaxis.get_majorticklocs()[0] - self.assertEqual(xp, rs) + assert xp == rs plt.close(ax.get_figure()) @slow @@ -426,11 +423,11 @@ def test_finder_quarterly(self): ax = ser.plot() xaxis = ax.get_xaxis() rs = xaxis.get_majorticklocs()[0] - self.assertEqual(rs, xp) + assert rs == xp (vmin, vmax) = ax.get_xlim() ax.set_xlim(vmin + 0.9, vmax) rs = xaxis.get_majorticklocs()[0] - self.assertEqual(xp, rs) + assert xp == rs plt.close(ax.get_figure()) @slow @@ -444,11 +441,11 @@ def test_finder_monthly(self): ax = ser.plot() xaxis = ax.get_xaxis() rs = xaxis.get_majorticklocs()[0] - self.assertEqual(rs, xp) + assert rs == xp vmin, vmax = ax.get_xlim() ax.set_xlim(vmin + 0.9, vmax) rs = xaxis.get_majorticklocs()[0] - self.assertEqual(xp, rs) + assert xp == rs plt.close(ax.get_figure()) def test_finder_monthly_long(self): @@ -458,7 +455,7 @@ def test_finder_monthly_long(self): xaxis = ax.get_xaxis() rs = xaxis.get_majorticklocs()[0] xp = Period('1989Q1', 'M').ordinal - self.assertEqual(rs, xp) + assert rs == xp @slow def test_finder_annual(self): @@ -470,7 +467,7 @@ def test_finder_annual(self): ax = ser.plot() xaxis = ax.get_xaxis() rs = xaxis.get_majorticklocs()[0] - self.assertEqual(rs, Period(xp[i], freq='A').ordinal) + assert rs == Period(xp[i], freq='A').ordinal plt.close(ax.get_figure()) @slow @@ -482,7 +479,7 @@ def test_finder_minutely(self): xaxis = ax.get_xaxis() rs = xaxis.get_majorticklocs()[0] xp = Period('1/1/1999', freq='Min').ordinal - self.assertEqual(rs, xp) + assert rs == xp def test_finder_hourly(self): nhours = 23 @@ -492,7 +489,7 @@ def test_finder_hourly(self): xaxis = ax.get_xaxis() rs = xaxis.get_majorticklocs()[0] xp = Period('1/1/1999', freq='H').ordinal - self.assertEqual(rs, xp) + assert rs == xp @slow def test_gaps(self): @@ -503,7 +500,7 @@ def test_gaps(self): ax = ts.plot() lines = ax.get_lines() tm._skip_if_mpl_1_5() - self.assertEqual(len(lines), 1) + assert len(lines) == 1 l = lines[0] data = l.get_xydata() assert isinstance(data, np.ma.core.MaskedArray) @@ -517,7 +514,7 @@ def test_gaps(self): ts[2:5] = np.nan ax = ts.plot() lines = ax.get_lines() - self.assertEqual(len(lines), 1) + assert len(lines) == 1 l = lines[0] data = l.get_xydata() assert isinstance(data, np.ma.core.MaskedArray) @@ -531,7 +528,7 @@ def test_gaps(self): ser[2:5] = np.nan ax = ser.plot() lines = ax.get_lines() - self.assertEqual(len(lines), 1) + assert len(lines) == 1 l = lines[0] data = l.get_xydata() assert isinstance(data, np.ma.core.MaskedArray) @@ -548,8 +545,8 @@ def test_gap_upsample(self): s = Series(np.random.randn(len(idxh)), idxh) 
s.plot(secondary_y=True) lines = ax.get_lines() - self.assertEqual(len(lines), 1) - self.assertEqual(len(ax.right_ax.get_lines()), 1) + assert len(lines) == 1 + assert len(ax.right_ax.get_lines()) == 1 l = lines[0] data = l.get_xydata() @@ -573,13 +570,13 @@ def test_secondary_y(self): l = ax.get_lines()[0] xp = Series(l.get_ydata(), l.get_xdata()) assert_series_equal(ser, xp) - self.assertEqual(ax.get_yaxis().get_ticks_position(), 'right') + assert ax.get_yaxis().get_ticks_position() == 'right' assert not axes[0].get_yaxis().get_visible() plt.close(fig) ax2 = ser2.plot() - self.assertEqual(ax2.get_yaxis().get_ticks_position(), - self.default_tick_position) + assert (ax2.get_yaxis().get_ticks_position() == + self.default_tick_position) plt.close(ax2.get_figure()) ax = ser2.plot() @@ -604,13 +601,13 @@ def test_secondary_y_ts(self): l = ax.get_lines()[0] xp = Series(l.get_ydata(), l.get_xdata()).to_timestamp() assert_series_equal(ser, xp) - self.assertEqual(ax.get_yaxis().get_ticks_position(), 'right') + assert ax.get_yaxis().get_ticks_position() == 'right' assert not axes[0].get_yaxis().get_visible() plt.close(fig) ax2 = ser2.plot() - self.assertEqual(ax2.get_yaxis().get_ticks_position(), - self.default_tick_position) + assert (ax2.get_yaxis().get_ticks_position() == + self.default_tick_position) plt.close(ax2.get_figure()) ax = ser2.plot() @@ -629,7 +626,7 @@ def test_secondary_kde(self): assert not hasattr(ax, 'right_ax') fig = ax.get_figure() axes = fig.get_axes() - self.assertEqual(axes[1].get_yaxis().get_ticks_position(), 'right') + assert axes[1].get_yaxis().get_ticks_position() == 'right' @slow def test_secondary_bar(self): @@ -637,25 +634,25 @@ def test_secondary_bar(self): ax = ser.plot(secondary_y=True, kind='bar') fig = ax.get_figure() axes = fig.get_axes() - self.assertEqual(axes[1].get_yaxis().get_ticks_position(), 'right') + assert axes[1].get_yaxis().get_ticks_position() == 'right' @slow def test_secondary_frame(self): df = DataFrame(np.random.randn(5, 3), columns=['a', 'b', 'c']) axes = df.plot(secondary_y=['a', 'c'], subplots=True) - self.assertEqual(axes[0].get_yaxis().get_ticks_position(), 'right') - self.assertEqual(axes[1].get_yaxis().get_ticks_position(), - self.default_tick_position) - self.assertEqual(axes[2].get_yaxis().get_ticks_position(), 'right') + assert axes[0].get_yaxis().get_ticks_position() == 'right' + assert (axes[1].get_yaxis().get_ticks_position() == + self.default_tick_position) + assert axes[2].get_yaxis().get_ticks_position() == 'right' @slow def test_secondary_bar_frame(self): df = DataFrame(np.random.randn(5, 3), columns=['a', 'b', 'c']) axes = df.plot(kind='bar', secondary_y=['a', 'c'], subplots=True) - self.assertEqual(axes[0].get_yaxis().get_ticks_position(), 'right') - self.assertEqual(axes[1].get_yaxis().get_ticks_position(), - self.default_tick_position) - self.assertEqual(axes[2].get_yaxis().get_ticks_position(), 'right') + assert axes[0].get_yaxis().get_ticks_position() == 'right' + assert (axes[1].get_yaxis().get_ticks_position() == + self.default_tick_position) + assert axes[2].get_yaxis().get_ticks_position() == 'right' def test_mixed_freq_regular_first(self): import matplotlib.pyplot as plt # noqa @@ -673,8 +670,8 @@ def test_mixed_freq_regular_first(self): assert idx2.equals(s2.index.to_period('B')) left, right = ax2.get_xlim() pidx = s1.index.to_period() - self.assertEqual(left, pidx[0].ordinal) - self.assertEqual(right, pidx[-1].ordinal) + assert left == pidx[0].ordinal + assert right == pidx[-1].ordinal @slow def 
test_mixed_freq_irregular_first(self): @@ -704,8 +701,8 @@ def test_mixed_freq_regular_first_df(self): assert idx2.equals(s2.index.to_period('B')) left, right = ax2.get_xlim() pidx = s1.index.to_period() - self.assertEqual(left, pidx[0].ordinal) - self.assertEqual(right, pidx[-1].ordinal) + assert left == pidx[0].ordinal + assert right == pidx[-1].ordinal @slow def test_mixed_freq_irregular_first_df(self): @@ -730,7 +727,7 @@ def test_mixed_freq_hf_first(self): high.plot() ax = low.plot() for l in ax.get_lines(): - self.assertEqual(PeriodIndex(data=l.get_xdata()).freq, 'D') + assert PeriodIndex(data=l.get_xdata()).freq == 'D' @slow def test_mixed_freq_alignment(self): @@ -743,8 +740,7 @@ def test_mixed_freq_alignment(self): ax = ts.plot() ts2.plot(style='r') - self.assertEqual(ax.lines[0].get_xdata()[0], - ax.lines[1].get_xdata()[0]) + assert ax.lines[0].get_xdata()[0] == ax.lines[1].get_xdata()[0] @slow def test_mixed_freq_lf_first(self): @@ -757,9 +753,9 @@ def test_mixed_freq_lf_first(self): low.plot(legend=True) ax = high.plot(legend=True) for l in ax.get_lines(): - self.assertEqual(PeriodIndex(data=l.get_xdata()).freq, 'D') + assert PeriodIndex(data=l.get_xdata()).freq == 'D' leg = ax.get_legend() - self.assertEqual(len(leg.texts), 2) + assert len(leg.texts) == 2 plt.close(ax.get_figure()) idxh = date_range('1/1/1999', periods=240, freq='T') @@ -769,7 +765,7 @@ def test_mixed_freq_lf_first(self): low.plot() ax = high.plot() for l in ax.get_lines(): - self.assertEqual(PeriodIndex(data=l.get_xdata()).freq, 'T') + assert PeriodIndex(data=l.get_xdata()).freq == 'T' def test_mixed_freq_irreg_period(self): ts = tm.makeTimeSeries() @@ -791,10 +787,10 @@ def test_mixed_freq_shared_ax(self): s1.plot(ax=ax1) s2.plot(ax=ax2) - self.assertEqual(ax1.freq, 'M') - self.assertEqual(ax2.freq, 'M') - self.assertEqual(ax1.lines[0].get_xydata()[0, 0], - ax2.lines[0].get_xydata()[0, 0]) + assert ax1.freq == 'M' + assert ax2.freq == 'M' + assert (ax1.lines[0].get_xydata()[0, 0] == + ax2.lines[0].get_xydata()[0, 0]) # using twinx fig, ax1 = self.plt.subplots() @@ -802,8 +798,8 @@ def test_mixed_freq_shared_ax(self): s1.plot(ax=ax1) s2.plot(ax=ax2) - self.assertEqual(ax1.lines[0].get_xydata()[0, 0], - ax2.lines[0].get_xydata()[0, 0]) + assert (ax1.lines[0].get_xydata()[0, 0] == + ax2.lines[0].get_xydata()[0, 0]) # TODO (GH14330, GH14322) # plotting the irregular first does not yet work @@ -811,8 +807,8 @@ def test_mixed_freq_shared_ax(self): # ax2 = ax1.twinx() # s2.plot(ax=ax1) # s1.plot(ax=ax2) - # self.assertEqual(ax1.lines[0].get_xydata()[0, 0], - # ax2.lines[0].get_xydata()[0, 0]) + # assert (ax1.lines[0].get_xydata()[0, 0] == + # ax2.lines[0].get_xydata()[0, 0]) @slow def test_to_weekly_resampling(self): @@ -823,7 +819,7 @@ def test_to_weekly_resampling(self): high.plot() ax = low.plot() for l in ax.get_lines(): - self.assertEqual(PeriodIndex(data=l.get_xdata()).freq, idxh.freq) + assert PeriodIndex(data=l.get_xdata()).freq == idxh.freq # tsplot from pandas.tseries.plotting import tsplot @@ -890,7 +886,7 @@ def test_from_resampling_area_line_mixed(self): expected_y = np.zeros(len(expected_x), dtype=np.float64) for i in range(3): l = ax.lines[i] - self.assertEqual(PeriodIndex(l.get_xdata()).freq, idxh.freq) + assert PeriodIndex(l.get_xdata()).freq == idxh.freq tm.assert_numpy_array_equal(l.get_xdata(orig=False), expected_x) # check stacked values are correct @@ -951,17 +947,17 @@ def test_mixed_freq_second_millisecond(self): # high to low high.plot() ax = low.plot() - 
self.assertEqual(len(ax.get_lines()), 2) + assert len(ax.get_lines()) == 2 for l in ax.get_lines(): - self.assertEqual(PeriodIndex(data=l.get_xdata()).freq, 'L') + assert PeriodIndex(data=l.get_xdata()).freq == 'L' tm.close() # low to high low.plot() ax = high.plot() - self.assertEqual(len(ax.get_lines()), 2) + assert len(ax.get_lines()) == 2 for l in ax.get_lines(): - self.assertEqual(PeriodIndex(data=l.get_xdata()).freq, 'L') + assert PeriodIndex(data=l.get_xdata()).freq == 'L' @slow def test_irreg_dtypes(self): @@ -995,7 +991,7 @@ def test_time(self): xp = l.get_text() if len(xp) > 0: rs = time(h, m, s).strftime('%H:%M:%S') - self.assertEqual(xp, rs) + assert xp == rs # change xlim ax.set_xlim('1:30', '5:00') @@ -1009,7 +1005,7 @@ def test_time(self): xp = l.get_text() if len(xp) > 0: rs = time(h, m, s).strftime('%H:%M:%S') - self.assertEqual(xp, rs) + assert xp == rs @slow def test_time_musec(self): @@ -1035,7 +1031,7 @@ def test_time_musec(self): xp = l.get_text() if len(xp) > 0: rs = time(h, m, s).strftime('%H:%M:%S.%f') - self.assertEqual(xp, rs) + assert xp == rs @slow def test_secondary_upsample(self): @@ -1046,11 +1042,11 @@ def test_secondary_upsample(self): low.plot() ax = high.plot(secondary_y=True) for l in ax.get_lines(): - self.assertEqual(PeriodIndex(l.get_xdata()).freq, 'D') + assert PeriodIndex(l.get_xdata()).freq == 'D' assert hasattr(ax, 'left_ax') assert not hasattr(ax, 'right_ax') for l in ax.left_ax.get_lines(): - self.assertEqual(PeriodIndex(l.get_xdata()).freq, 'D') + assert PeriodIndex(l.get_xdata()).freq == 'D' @slow def test_secondary_legend(self): @@ -1063,54 +1059,54 @@ def test_secondary_legend(self): df = tm.makeTimeDataFrame() ax = df.plot(secondary_y=['A', 'B']) leg = ax.get_legend() - self.assertEqual(len(leg.get_lines()), 4) - self.assertEqual(leg.get_texts()[0].get_text(), 'A (right)') - self.assertEqual(leg.get_texts()[1].get_text(), 'B (right)') - self.assertEqual(leg.get_texts()[2].get_text(), 'C') - self.assertEqual(leg.get_texts()[3].get_text(), 'D') + assert len(leg.get_lines()) == 4 + assert leg.get_texts()[0].get_text() == 'A (right)' + assert leg.get_texts()[1].get_text() == 'B (right)' + assert leg.get_texts()[2].get_text() == 'C' + assert leg.get_texts()[3].get_text() == 'D' assert ax.right_ax.get_legend() is None colors = set() for line in leg.get_lines(): colors.add(line.get_color()) # TODO: color cycle problems - self.assertEqual(len(colors), 4) + assert len(colors) == 4 plt.clf() ax = fig.add_subplot(211) ax = df.plot(secondary_y=['A', 'C'], mark_right=False) leg = ax.get_legend() - self.assertEqual(len(leg.get_lines()), 4) - self.assertEqual(leg.get_texts()[0].get_text(), 'A') - self.assertEqual(leg.get_texts()[1].get_text(), 'B') - self.assertEqual(leg.get_texts()[2].get_text(), 'C') - self.assertEqual(leg.get_texts()[3].get_text(), 'D') + assert len(leg.get_lines()) == 4 + assert leg.get_texts()[0].get_text() == 'A' + assert leg.get_texts()[1].get_text() == 'B' + assert leg.get_texts()[2].get_text() == 'C' + assert leg.get_texts()[3].get_text() == 'D' plt.clf() ax = df.plot(kind='bar', secondary_y=['A']) leg = ax.get_legend() - self.assertEqual(leg.get_texts()[0].get_text(), 'A (right)') - self.assertEqual(leg.get_texts()[1].get_text(), 'B') + assert leg.get_texts()[0].get_text() == 'A (right)' + assert leg.get_texts()[1].get_text() == 'B' plt.clf() ax = df.plot(kind='bar', secondary_y=['A'], mark_right=False) leg = ax.get_legend() - self.assertEqual(leg.get_texts()[0].get_text(), 'A') - self.assertEqual(leg.get_texts()[1].get_text(), 
'B') + assert leg.get_texts()[0].get_text() == 'A' + assert leg.get_texts()[1].get_text() == 'B' plt.clf() ax = fig.add_subplot(211) df = tm.makeTimeDataFrame() ax = df.plot(secondary_y=['C', 'D']) leg = ax.get_legend() - self.assertEqual(len(leg.get_lines()), 4) + assert len(leg.get_lines()) == 4 assert ax.right_ax.get_legend() is None colors = set() for line in leg.get_lines(): colors.add(line.get_color()) # TODO: color cycle problems - self.assertEqual(len(colors), 4) + assert len(colors) == 4 # non-ts df = tm.makeDataFrame() @@ -1118,27 +1114,27 @@ def test_secondary_legend(self): ax = fig.add_subplot(211) ax = df.plot(secondary_y=['A', 'B']) leg = ax.get_legend() - self.assertEqual(len(leg.get_lines()), 4) + assert len(leg.get_lines()) == 4 assert ax.right_ax.get_legend() is None colors = set() for line in leg.get_lines(): colors.add(line.get_color()) # TODO: color cycle problems - self.assertEqual(len(colors), 4) + assert len(colors) == 4 plt.clf() ax = fig.add_subplot(211) ax = df.plot(secondary_y=['C', 'D']) leg = ax.get_legend() - self.assertEqual(len(leg.get_lines()), 4) + assert len(leg.get_lines()) == 4 assert ax.right_ax.get_legend() is None colors = set() for line in leg.get_lines(): colors.add(line.get_color()) # TODO: color cycle problems - self.assertEqual(len(colors), 4) + assert len(colors) == 4 def test_format_date_axis(self): rng = date_range('1/1/2012', periods=12, freq='M') @@ -1147,7 +1143,7 @@ def test_format_date_axis(self): xaxis = ax.get_xaxis() for l in xaxis.get_ticklabels(): if len(l.get_text()) > 0: - self.assertEqual(l.get_rotation(), 30) + assert l.get_rotation() == 30 @slow def test_ax_plot(self): @@ -1195,8 +1191,8 @@ def test_irregular_ts_shared_ax_xlim(self): # check that axis limits are correct left, right = ax.get_xlim() - self.assertEqual(left, ts_irregular.index.min().toordinal()) - self.assertEqual(right, ts_irregular.index.max().toordinal()) + assert left == ts_irregular.index.min().toordinal() + assert right == ts_irregular.index.max().toordinal() @slow def test_secondary_y_non_ts_xlim(self): @@ -1211,7 +1207,7 @@ def test_secondary_y_non_ts_xlim(self): s2.plot(secondary_y=True, ax=ax) left_after, right_after = ax.get_xlim() - self.assertEqual(left_before, left_after) + assert left_before == left_after assert right_before < right_after @slow @@ -1227,7 +1223,7 @@ def test_secondary_y_regular_ts_xlim(self): s2.plot(secondary_y=True, ax=ax) left_after, right_after = ax.get_xlim() - self.assertEqual(left_before, left_after) + assert left_before == left_after assert right_before < right_after @slow @@ -1242,8 +1238,8 @@ def test_secondary_y_mixed_freq_ts_xlim(self): left_after, right_after = ax.get_xlim() # a downsample should not have changed either limit - self.assertEqual(left_before, left_after) - self.assertEqual(right_before, right_after) + assert left_before == left_after + assert right_before == right_after @slow def test_secondary_y_irregular_ts_xlim(self): @@ -1258,8 +1254,8 @@ def test_secondary_y_irregular_ts_xlim(self): ts_irregular[:5].plot(ax=ax) left, right = ax.get_xlim() - self.assertEqual(left, ts_irregular.index.min().toordinal()) - self.assertEqual(right, ts_irregular.index.max().toordinal()) + assert left == ts_irregular.index.min().toordinal() + assert right == ts_irregular.index.max().toordinal() def test_plot_outofbounds_datetime(self): # 2579 - checking this does not raise @@ -1283,9 +1279,9 @@ def test_format_timedelta_ticks_narrow(self): fig = ax.get_figure() fig.canvas.draw() labels = ax.get_xticklabels() - 
self.assertEqual(len(labels), len(expected_labels)) + assert len(labels) == len(expected_labels) for l, l_expected in zip(labels, expected_labels): - self.assertEqual(l.get_text(), l_expected) + assert l.get_text() == l_expected def test_format_timedelta_ticks_wide(self): if is_platform_mac(): @@ -1309,9 +1305,9 @@ def test_format_timedelta_ticks_wide(self): fig = ax.get_figure() fig.canvas.draw() labels = ax.get_xticklabels() - self.assertEqual(len(labels), len(expected_labels)) + assert len(labels) == len(expected_labels) for l, l_expected in zip(labels, expected_labels): - self.assertEqual(l.get_text(), l_expected) + assert l.get_text() == l_expected def test_timedelta_plot(self): # test issue #8711 diff --git a/pandas/tests/plotting/test_frame.py b/pandas/tests/plotting/test_frame.py index c550504063b3e..7297e3548b956 100644 --- a/pandas/tests/plotting/test_frame.py +++ b/pandas/tests/plotting/test_frame.py @@ -134,7 +134,7 @@ def test_plot(self): # passed ax should be used: fig, ax = self.plt.subplots() axes = df.plot.bar(subplots=True, ax=ax) - self.assertEqual(len(axes), 1) + assert len(axes) == 1 if self.mpl_ge_1_5_0: result = ax.axes else: @@ -164,10 +164,10 @@ def test_color_and_style_arguments(self): ax = df.plot(color=['red', 'black'], style=['-', '--']) # check that the linestyles are correctly set: linestyle = [line.get_linestyle() for line in ax.lines] - self.assertEqual(linestyle, ['-', '--']) + assert linestyle == ['-', '--'] # check that the colors are correctly set: color = [line.get_color() for line in ax.lines] - self.assertEqual(color, ['red', 'black']) + assert color == ['red', 'black'] # passing both 'color' and 'style' arguments should not be allowed # if there is a color symbol in the style strings: with pytest.raises(ValueError): @@ -176,7 +176,7 @@ def test_color_and_style_arguments(self): def test_nonnumeric_exclude(self): df = DataFrame({'A': ["x", "y", "z"], 'B': [1, 2, 3]}) ax = df.plot() - self.assertEqual(len(ax.get_lines()), 1) # B was plotted + assert len(ax.get_lines()) == 1 # B was plotted @slow def test_implicit_label(self): @@ -190,7 +190,7 @@ def test_donot_overwrite_index_name(self): df = DataFrame(randn(2, 2), columns=['a', 'b']) df.index.name = 'NAME' df.plot(y='b', label='LABEL') - self.assertEqual(df.index.name, 'NAME') + assert df.index.name == 'NAME' @slow def test_plot_xy(self): @@ -303,7 +303,7 @@ def test_subplots(self): for kind in ['bar', 'barh', 'line', 'area']: axes = df.plot(kind=kind, subplots=True, sharex=True, legend=True) self._check_axes_shape(axes, axes_num=3, layout=(3, 1)) - self.assertEqual(axes.shape, (3, )) + assert axes.shape == (3, ) for ax, column in zip(axes, df.columns): self._check_legend_labels(ax, @@ -379,43 +379,43 @@ def test_subplots_layout(self): axes = df.plot(subplots=True, layout=(2, 2)) self._check_axes_shape(axes, axes_num=3, layout=(2, 2)) - self.assertEqual(axes.shape, (2, 2)) + assert axes.shape == (2, 2) axes = df.plot(subplots=True, layout=(-1, 2)) self._check_axes_shape(axes, axes_num=3, layout=(2, 2)) - self.assertEqual(axes.shape, (2, 2)) + assert axes.shape == (2, 2) axes = df.plot(subplots=True, layout=(2, -1)) self._check_axes_shape(axes, axes_num=3, layout=(2, 2)) - self.assertEqual(axes.shape, (2, 2)) + assert axes.shape == (2, 2) axes = df.plot(subplots=True, layout=(1, 4)) self._check_axes_shape(axes, axes_num=3, layout=(1, 4)) - self.assertEqual(axes.shape, (1, 4)) + assert axes.shape == (1, 4) axes = df.plot(subplots=True, layout=(-1, 4)) self._check_axes_shape(axes, axes_num=3, layout=(1, 
4)) - self.assertEqual(axes.shape, (1, 4)) + assert axes.shape == (1, 4) axes = df.plot(subplots=True, layout=(4, -1)) self._check_axes_shape(axes, axes_num=3, layout=(4, 1)) - self.assertEqual(axes.shape, (4, 1)) + assert axes.shape == (4, 1) with pytest.raises(ValueError): - axes = df.plot(subplots=True, layout=(1, 1)) + df.plot(subplots=True, layout=(1, 1)) with pytest.raises(ValueError): - axes = df.plot(subplots=True, layout=(-1, -1)) + df.plot(subplots=True, layout=(-1, -1)) # single column df = DataFrame(np.random.rand(10, 1), index=list(string.ascii_letters[:10])) axes = df.plot(subplots=True) self._check_axes_shape(axes, axes_num=1, layout=(1, 1)) - self.assertEqual(axes.shape, (1, )) + assert axes.shape == (1, ) axes = df.plot(subplots=True, layout=(3, 3)) self._check_axes_shape(axes, axes_num=1, layout=(3, 3)) - self.assertEqual(axes.shape, (3, 3)) + assert axes.shape == (3, 3) @slow def test_subplots_warnings(self): @@ -442,13 +442,13 @@ def test_subplots_multiple_axes(self): returned = df.plot(subplots=True, ax=axes[0], sharex=False, sharey=False) self._check_axes_shape(returned, axes_num=3, layout=(1, 3)) - self.assertEqual(returned.shape, (3, )) + assert returned.shape == (3, ) assert returned[0].figure is fig # draw on second row returned = df.plot(subplots=True, ax=axes[1], sharex=False, sharey=False) self._check_axes_shape(returned, axes_num=3, layout=(1, 3)) - self.assertEqual(returned.shape, (3, )) + assert returned.shape == (3, ) assert returned[0].figure is fig self._check_axes_shape(axes, axes_num=6, layout=(2, 3)) tm.close() @@ -471,17 +471,17 @@ def test_subplots_multiple_axes(self): returned = df.plot(subplots=True, ax=axes, layout=(2, 1), sharex=False, sharey=False) self._check_axes_shape(returned, axes_num=4, layout=(2, 2)) - self.assertEqual(returned.shape, (4, )) + assert returned.shape == (4, ) returned = df.plot(subplots=True, ax=axes, layout=(2, -1), sharex=False, sharey=False) self._check_axes_shape(returned, axes_num=4, layout=(2, 2)) - self.assertEqual(returned.shape, (4, )) + assert returned.shape == (4, ) returned = df.plot(subplots=True, ax=axes, layout=(-1, 2), sharex=False, sharey=False) self._check_axes_shape(returned, axes_num=4, layout=(2, 2)) - self.assertEqual(returned.shape, (4, )) + assert returned.shape == (4, ) # single column fig, axes = self.plt.subplots(1, 1) @@ -490,7 +490,7 @@ def test_subplots_multiple_axes(self): axes = df.plot(subplots=True, ax=[axes], sharex=False, sharey=False) self._check_axes_shape(axes, axes_num=1, layout=(1, 1)) - self.assertEqual(axes.shape, (1, )) + assert axes.shape == (1, ) def test_subplots_ts_share_axes(self): # GH 3964 @@ -540,20 +540,20 @@ def test_subplots_dup_columns(self): axes = df.plot(subplots=True) for ax in axes: self._check_legend_labels(ax, labels=['a']) - self.assertEqual(len(ax.lines), 1) + assert len(ax.lines) == 1 tm.close() axes = df.plot(subplots=True, secondary_y='a') for ax in axes: # (right) is only attached when subplots=False self._check_legend_labels(ax, labels=['a']) - self.assertEqual(len(ax.lines), 1) + assert len(ax.lines) == 1 tm.close() ax = df.plot(secondary_y='a') self._check_legend_labels(ax, labels=['a (right)'] * 5) - self.assertEqual(len(ax.lines), 0) - self.assertEqual(len(ax.right_ax.lines), 5) + assert len(ax.lines) == 0 + assert len(ax.right_ax.lines) == 5 def test_negative_log(self): df = - DataFrame(rand(6, 4), @@ -651,14 +651,14 @@ def test_line_lim(self): ax = df.plot() xmin, xmax = ax.get_xlim() lines = ax.get_lines() - self.assertEqual(xmin, 
lines[0].get_data()[0][0]) - self.assertEqual(xmax, lines[0].get_data()[0][-1]) + assert xmin == lines[0].get_data()[0][0] + assert xmax == lines[0].get_data()[0][-1] ax = df.plot(secondary_y=True) xmin, xmax = ax.get_xlim() lines = ax.get_lines() - self.assertEqual(xmin, lines[0].get_data()[0][0]) - self.assertEqual(xmax, lines[0].get_data()[0][-1]) + assert xmin == lines[0].get_data()[0][0] + assert xmax == lines[0].get_data()[0][-1] axes = df.plot(secondary_y=True, subplots=True) self._check_axes_shape(axes, axes_num=3, layout=(3, 1)) @@ -667,8 +667,8 @@ def test_line_lim(self): assert not hasattr(ax, 'right_ax') xmin, xmax = ax.get_xlim() lines = ax.get_lines() - self.assertEqual(xmin, lines[0].get_data()[0][0]) - self.assertEqual(xmax, lines[0].get_data()[0][-1]) + assert xmin == lines[0].get_data()[0][0] + assert xmax == lines[0].get_data()[0][-1] def test_area_lim(self): df = DataFrame(rand(6, 4), columns=['x', 'y', 'z', 'four']) @@ -679,13 +679,13 @@ def test_area_lim(self): xmin, xmax = ax.get_xlim() ymin, ymax = ax.get_ylim() lines = ax.get_lines() - self.assertEqual(xmin, lines[0].get_data()[0][0]) - self.assertEqual(xmax, lines[0].get_data()[0][-1]) - self.assertEqual(ymin, 0) + assert xmin == lines[0].get_data()[0][0] + assert xmax == lines[0].get_data()[0][-1] + assert ymin == 0 ax = _check_plot_works(neg_df.plot.area, stacked=stacked) ymin, ymax = ax.get_ylim() - self.assertEqual(ymax, 0) + assert ymax == 0 @slow def test_bar_colors(self): @@ -730,19 +730,19 @@ def test_bar_linewidth(self): # regular ax = df.plot.bar(linewidth=2) for r in ax.patches: - self.assertEqual(r.get_linewidth(), 2) + assert r.get_linewidth() == 2 # stacked ax = df.plot.bar(stacked=True, linewidth=2) for r in ax.patches: - self.assertEqual(r.get_linewidth(), 2) + assert r.get_linewidth() == 2 # subplots axes = df.plot.bar(linewidth=2, subplots=True) self._check_axes_shape(axes, axes_num=5, layout=(5, 1)) for ax in axes: for r in ax.patches: - self.assertEqual(r.get_linewidth(), 2) + assert r.get_linewidth() == 2 @slow def test_bar_barwidth(self): @@ -753,34 +753,34 @@ def test_bar_barwidth(self): # regular ax = df.plot.bar(width=width) for r in ax.patches: - self.assertEqual(r.get_width(), width / len(df.columns)) + assert r.get_width() == width / len(df.columns) # stacked ax = df.plot.bar(stacked=True, width=width) for r in ax.patches: - self.assertEqual(r.get_width(), width) + assert r.get_width() == width # horizontal regular ax = df.plot.barh(width=width) for r in ax.patches: - self.assertEqual(r.get_height(), width / len(df.columns)) + assert r.get_height() == width / len(df.columns) # horizontal stacked ax = df.plot.barh(stacked=True, width=width) for r in ax.patches: - self.assertEqual(r.get_height(), width) + assert r.get_height() == width # subplots axes = df.plot.bar(width=width, subplots=True) for ax in axes: for r in ax.patches: - self.assertEqual(r.get_width(), width) + assert r.get_width() == width # horizontal subplots axes = df.plot.barh(width=width, subplots=True) for ax in axes: for r in ax.patches: - self.assertEqual(r.get_height(), width) + assert r.get_height() == width @slow def test_bar_barwidth_position(self): @@ -807,10 +807,10 @@ def test_bar_barwidth_position_int(self): ax = df.plot.bar(stacked=True, width=w) ticks = ax.xaxis.get_ticklocs() tm.assert_numpy_array_equal(ticks, np.array([0, 1, 2, 3, 4])) - self.assertEqual(ax.get_xlim(), (-0.75, 4.75)) + assert ax.get_xlim() == (-0.75, 4.75) # check left-edge of bars - self.assertEqual(ax.patches[0].get_x(), -0.5) - 
self.assertEqual(ax.patches[-1].get_x(), 3.5) + assert ax.patches[0].get_x() == -0.5 + assert ax.patches[-1].get_x() == 3.5 self._check_bar_alignment(df, kind='bar', stacked=True, width=1) self._check_bar_alignment(df, kind='barh', stacked=False, width=1) @@ -823,29 +823,29 @@ def test_bar_bottom_left(self): df = DataFrame(rand(5, 5)) ax = df.plot.bar(stacked=False, bottom=1) result = [p.get_y() for p in ax.patches] - self.assertEqual(result, [1] * 25) + assert result == [1] * 25 ax = df.plot.bar(stacked=True, bottom=[-1, -2, -3, -4, -5]) result = [p.get_y() for p in ax.patches[:5]] - self.assertEqual(result, [-1, -2, -3, -4, -5]) + assert result == [-1, -2, -3, -4, -5] ax = df.plot.barh(stacked=False, left=np.array([1, 1, 1, 1, 1])) result = [p.get_x() for p in ax.patches] - self.assertEqual(result, [1] * 25) + assert result == [1] * 25 ax = df.plot.barh(stacked=True, left=[1, 2, 3, 4, 5]) result = [p.get_x() for p in ax.patches[:5]] - self.assertEqual(result, [1, 2, 3, 4, 5]) + assert result == [1, 2, 3, 4, 5] axes = df.plot.bar(subplots=True, bottom=-1) for ax in axes: result = [p.get_y() for p in ax.patches] - self.assertEqual(result, [-1] * 5) + assert result == [-1] * 5 axes = df.plot.barh(subplots=True, left=np.array([1, 1, 1, 1, 1])) for ax in axes: result = [p.get_x() for p in ax.patches] - self.assertEqual(result, [1] * 5) + assert result == [1] * 5 @slow def test_bar_nan(self): @@ -855,15 +855,15 @@ def test_bar_nan(self): ax = df.plot.bar() expected = [10, 0, 20, 5, 10, 20, 1, 2, 3] result = [p.get_height() for p in ax.patches] - self.assertEqual(result, expected) + assert result == expected ax = df.plot.bar(stacked=True) result = [p.get_height() for p in ax.patches] - self.assertEqual(result, expected) + assert result == expected result = [p.get_y() for p in ax.patches] expected = [0.0, 0.0, 0.0, 10.0, 0.0, 20.0, 15.0, 10.0, 40.0] - self.assertEqual(result, expected) + assert result == expected @slow def test_bar_categorical(self): @@ -880,16 +880,16 @@ def test_bar_categorical(self): ax = df.plot.bar() ticks = ax.xaxis.get_ticklocs() tm.assert_numpy_array_equal(ticks, np.array([0, 1, 2, 3, 4, 5])) - self.assertEqual(ax.get_xlim(), (-0.5, 5.5)) + assert ax.get_xlim() == (-0.5, 5.5) # check left-edge of bars - self.assertEqual(ax.patches[0].get_x(), -0.25) - self.assertEqual(ax.patches[-1].get_x(), 5.15) + assert ax.patches[0].get_x() == -0.25 + assert ax.patches[-1].get_x() == 5.15 ax = df.plot.bar(stacked=True) tm.assert_numpy_array_equal(ticks, np.array([0, 1, 2, 3, 4, 5])) - self.assertEqual(ax.get_xlim(), (-0.5, 5.5)) - self.assertEqual(ax.patches[0].get_x(), -0.25) - self.assertEqual(ax.patches[-1].get_x(), 4.75) + assert ax.get_xlim() == (-0.5, 5.5) + assert ax.patches[0].get_x() == -0.25 + assert ax.patches[-1].get_x() == 4.75 @slow def test_plot_scatter(self): @@ -919,17 +919,17 @@ def test_plot_scatter_with_c(self): df.plot.scatter(x=0, y=1, c=2)] for ax in axes: # default to Greys - self.assertEqual(ax.collections[0].cmap.name, 'Greys') + assert ax.collections[0].cmap.name == 'Greys' if self.mpl_ge_1_3_1: # n.b. 
there appears to be no public method to get the colorbar # label - self.assertEqual(ax.collections[0].colorbar._label, 'z') + assert ax.collections[0].colorbar._label == 'z' cm = 'cubehelix' ax = df.plot.scatter(x='x', y='y', c='z', colormap=cm) - self.assertEqual(ax.collections[0].cmap.name, cm) + assert ax.collections[0].cmap.name == cm # verify turning off colorbar works ax = df.plot.scatter(x='x', y='y', c='z', colorbar=False) @@ -1167,7 +1167,7 @@ def test_boxplot(self): self._check_text_labels(ax.get_xticklabels(), labels) tm.assert_numpy_array_equal(ax.xaxis.get_ticklocs(), np.arange(1, len(numeric_cols) + 1)) - self.assertEqual(len(ax.lines), self.bp_n_objects * len(numeric_cols)) + assert len(ax.lines) == self.bp_n_objects * len(numeric_cols) # different warning on py3 if not PY3: @@ -1178,7 +1178,7 @@ def test_boxplot(self): self._check_ax_scales(axes, yaxis='log') for ax, label in zip(axes, labels): self._check_text_labels(ax.get_xticklabels(), [label]) - self.assertEqual(len(ax.lines), self.bp_n_objects) + assert len(ax.lines) == self.bp_n_objects axes = series.plot.box(rot=40) self._check_ticks_props(axes, xrot=40, yrot=0) @@ -1192,7 +1192,7 @@ def test_boxplot(self): labels = [pprint_thing(c) for c in numeric_cols] self._check_text_labels(ax.get_xticklabels(), labels) tm.assert_numpy_array_equal(ax.xaxis.get_ticklocs(), positions) - self.assertEqual(len(ax.lines), self.bp_n_objects * len(numeric_cols)) + assert len(ax.lines) == self.bp_n_objects * len(numeric_cols) @slow def test_boxplot_vertical(self): @@ -1204,7 +1204,7 @@ def test_boxplot_vertical(self): ax = df.plot.box(rot=50, fontsize=8, vert=False) self._check_ticks_props(ax, xrot=0, yrot=50, ylabelsize=8) self._check_text_labels(ax.get_yticklabels(), labels) - self.assertEqual(len(ax.lines), self.bp_n_objects * len(numeric_cols)) + assert len(ax.lines) == self.bp_n_objects * len(numeric_cols) # _check_plot_works adds an ax so catch warning. 
see GH #13188 with tm.assert_produces_warning(UserWarning): @@ -1214,13 +1214,13 @@ def test_boxplot_vertical(self): self._check_ax_scales(axes, xaxis='log') for ax, label in zip(axes, labels): self._check_text_labels(ax.get_yticklabels(), [label]) - self.assertEqual(len(ax.lines), self.bp_n_objects) + assert len(ax.lines) == self.bp_n_objects positions = np.array([3, 2, 8]) ax = df.plot.box(positions=positions, vert=False) self._check_text_labels(ax.get_yticklabels(), labels) tm.assert_numpy_array_equal(ax.yaxis.get_ticklocs(), positions) - self.assertEqual(len(ax.lines), self.bp_n_objects * len(numeric_cols)) + assert len(ax.lines) == self.bp_n_objects * len(numeric_cols) @slow def test_boxplot_return_type(self): @@ -1563,16 +1563,16 @@ def test_style_by_column(self): fig.add_subplot(111) ax = df.plot(style=markers) for i, l in enumerate(ax.get_lines()[:len(markers)]): - self.assertEqual(l.get_marker(), markers[i]) + assert l.get_marker() == markers[i] @slow def test_line_label_none(self): s = Series([1, 2]) ax = s.plot() - self.assertEqual(ax.get_legend(), None) + assert ax.get_legend() is None ax = s.plot(legend=True) - self.assertEqual(ax.get_legend().get_texts()[0].get_text(), 'None') + assert ax.get_legend().get_texts()[0].get_text() == 'None' @slow @tm.capture_stdout @@ -1591,7 +1591,7 @@ def test_line_colors(self): lines2 = ax2.get_lines() for l1, l2 in zip(ax.get_lines(), lines2): - self.assertEqual(l1.get_color(), l2.get_color()) + assert l1.get_color() == l2.get_color() tm.close() @@ -1630,7 +1630,7 @@ def test_line_colors(self): def test_dont_modify_colors(self): colors = ['r', 'g', 'b'] pd.DataFrame(np.random.rand(10, 2)).plot(color=colors) - self.assertEqual(len(colors), 3) + assert len(colors) == 3 @slow def test_line_colors_and_styles_subplots(self): @@ -1768,7 +1768,7 @@ def test_area_colors(self): linecolors = jet_colors self._check_colors(handles[:len(jet_colors)], linecolors=linecolors) for h in handles: - self.assertEqual(h.get_alpha(), 0.5) + assert h.get_alpha() == 0.5 @slow def test_hist_colors(self): @@ -2028,13 +2028,13 @@ def test_hexbin_basic(self): ax = df.plot.hexbin(x='A', y='B', gridsize=10) # TODO: need better way to test. This just does existence. 
- self.assertEqual(len(ax.collections), 1) + assert len(ax.collections) == 1 # GH 6951 axes = df.plot.hexbin(x='A', y='B', subplots=True) # hexbin should have 2 axes in the figure, 1 for plotting and another # is colorbar - self.assertEqual(len(axes[0].figure.axes), 2) + assert len(axes[0].figure.axes) == 2 # return value is single axes self._check_axes_shape(axes, axes_num=1, layout=(1, 1)) @@ -2043,10 +2043,10 @@ def test_hexbin_with_c(self): df = self.hexbin_df ax = df.plot.hexbin(x='A', y='B', C='C') - self.assertEqual(len(ax.collections), 1) + assert len(ax.collections) == 1 ax = df.plot.hexbin(x='A', y='B', C='C', reduce_C_function=np.std) - self.assertEqual(len(ax.collections), 1) + assert len(ax.collections) == 1 @slow def test_hexbin_cmap(self): @@ -2054,11 +2054,11 @@ def test_hexbin_cmap(self): # Default to BuGn ax = df.plot.hexbin(x='A', y='B') - self.assertEqual(ax.collections[0].cmap.name, 'BuGn') + assert ax.collections[0].cmap.name == 'BuGn' cm = 'cubehelix' ax = df.plot.hexbin(x='A', y='B', colormap=cm) - self.assertEqual(ax.collections[0].cmap.name, cm) + assert ax.collections[0].cmap.name == cm @slow def test_no_color_bar(self): @@ -2072,7 +2072,7 @@ def test_allow_cmap(self): df = self.hexbin_df ax = df.plot.hexbin(x='A', y='B', cmap='YlGn') - self.assertEqual(ax.collections[0].cmap.name, 'YlGn') + assert ax.collections[0].cmap.name == 'YlGn' with pytest.raises(TypeError): df.plot.hexbin(x='A', y='B', cmap='YlGn', colormap='BuGn') @@ -2094,11 +2094,11 @@ def test_pie_df(self): with tm.assert_produces_warning(UserWarning): axes = _check_plot_works(df.plot.pie, subplots=True) - self.assertEqual(len(axes), len(df.columns)) + assert len(axes) == len(df.columns) for ax in axes: self._check_text_labels(ax.texts, df.index) for ax, ylabel in zip(axes, df.columns): - self.assertEqual(ax.get_ylabel(), ylabel) + assert ax.get_ylabel() == ylabel labels = ['A', 'B', 'C', 'D', 'E'] color_args = ['r', 'g', 'b', 'c', 'm'] @@ -2106,7 +2106,7 @@ def test_pie_df(self): axes = _check_plot_works(df.plot.pie, subplots=True, labels=labels, colors=color_args) - self.assertEqual(len(axes), len(df.columns)) + assert len(axes) == len(df.columns) for ax in axes: self._check_text_labels(ax.texts, labels) @@ -2124,13 +2124,12 @@ def test_pie_df_nan(self): expected = list(base_expected) # force copy expected[i] = '' result = [x.get_text() for x in ax.texts] - self.assertEqual(result, expected) + assert result == expected # legend labels # NaN's not included in legend with subplots # see https://github.com/pandas-dev/pandas/issues/8390 - self.assertEqual([x.get_text() for x in - ax.get_legend().get_texts()], - base_expected[:i] + base_expected[i + 1:]) + assert ([x.get_text() for x in ax.get_legend().get_texts()] == + base_expected[:i] + base_expected[i + 1:]) @slow def test_errorbar_plot(self): @@ -2280,13 +2279,10 @@ def test_errorbar_asymmetrical(self): expected_0_0 = err[0, :, 0] * np.array([-1, 1]) tm.assert_almost_equal(yerr_0_0, expected_0_0) else: - self.assertEqual(ax.lines[7].get_ydata()[0], - data[0, 1] - err[1, 0, 0]) - self.assertEqual(ax.lines[8].get_ydata()[0], - data[0, 1] + err[1, 1, 0]) - - self.assertEqual(ax.lines[5].get_xdata()[0], -err[1, 0, 0] / 2) - self.assertEqual(ax.lines[6].get_xdata()[0], err[1, 1, 0] / 2) + assert ax.lines[7].get_ydata()[0] == data[0, 1] - err[1, 0, 0] + assert ax.lines[8].get_ydata()[0] == data[0, 1] + err[1, 1, 0] + assert ax.lines[5].get_xdata()[0] == -err[1, 0, 0] / 2 + assert ax.lines[6].get_xdata()[0] == err[1, 1, 0] / 2 with 
pytest.raises(ValueError): df.plot(yerr=err.T) @@ -2362,7 +2358,7 @@ def test_sharex_and_ax(self): def _check(axes): for ax in axes: - self.assertEqual(len(ax.lines), 1) + assert len(ax.lines) == 1 self._check_visible(ax.get_yticklabels(), visible=True) for ax in [axes[0], axes[2]]: self._check_visible(ax.get_xticklabels(), visible=False) @@ -2392,7 +2388,7 @@ def _check(axes): gs.tight_layout(plt.gcf()) for ax in axes: - self.assertEqual(len(ax.lines), 1) + assert len(ax.lines) == 1 self._check_visible(ax.get_yticklabels(), visible=True) self._check_visible(ax.get_xticklabels(), visible=True) self._check_visible(ax.get_xticklabels(minor=True), visible=True) @@ -2414,7 +2410,7 @@ def test_sharey_and_ax(self): def _check(axes): for ax in axes: - self.assertEqual(len(ax.lines), 1) + assert len(ax.lines) == 1 self._check_visible(ax.get_xticklabels(), visible=True) self._check_visible( ax.get_xticklabels(minor=True), visible=True) @@ -2444,7 +2440,7 @@ def _check(axes): gs.tight_layout(plt.gcf()) for ax in axes: - self.assertEqual(len(ax.lines), 1) + assert len(ax.lines) == 1 self._check_visible(ax.get_yticklabels(), visible=True) self._check_visible(ax.get_xticklabels(), visible=True) self._check_visible(ax.get_xticklabels(minor=True), visible=True) @@ -2494,7 +2490,7 @@ def test_df_subplots_patterns_minorticks(self): fig, axes = plt.subplots(2, 1, sharex=True) axes = df.plot(subplots=True, ax=axes) for ax in axes: - self.assertEqual(len(ax.lines), 1) + assert len(ax.lines) == 1 self._check_visible(ax.get_yticklabels(), visible=True) # xaxis of 1st ax must be hidden self._check_visible(axes[0].get_xticklabels(), visible=False) @@ -2507,7 +2503,7 @@ def test_df_subplots_patterns_minorticks(self): with tm.assert_produces_warning(UserWarning): axes = df.plot(subplots=True, ax=axes, sharex=True) for ax in axes: - self.assertEqual(len(ax.lines), 1) + assert len(ax.lines) == 1 self._check_visible(ax.get_yticklabels(), visible=True) # xaxis of 1st ax must be hidden self._check_visible(axes[0].get_xticklabels(), visible=False) @@ -2520,7 +2516,7 @@ def test_df_subplots_patterns_minorticks(self): fig, axes = plt.subplots(2, 1) axes = df.plot(subplots=True, ax=axes) for ax in axes: - self.assertEqual(len(ax.lines), 1) + assert len(ax.lines) == 1 self._check_visible(ax.get_yticklabels(), visible=True) self._check_visible(ax.get_xticklabels(), visible=True) self._check_visible(ax.get_xticklabels(minor=True), visible=True) @@ -2554,9 +2550,9 @@ def _get_horizontal_grid(): for ax1, ax2 in [_get_vertical_grid(), _get_horizontal_grid()]: ax1 = ts.plot(ax=ax1) - self.assertEqual(len(ax1.lines), 1) + assert len(ax1.lines) == 1 ax2 = df.plot(ax=ax2) - self.assertEqual(len(ax2.lines), 2) + assert len(ax2.lines) == 2 for ax in [ax1, ax2]: self._check_visible(ax.get_yticklabels(), visible=True) self._check_visible(ax.get_xticklabels(), visible=True) @@ -2567,8 +2563,8 @@ def _get_horizontal_grid(): # subplots=True for ax1, ax2 in [_get_vertical_grid(), _get_horizontal_grid()]: axes = df.plot(subplots=True, ax=[ax1, ax2]) - self.assertEqual(len(ax1.lines), 1) - self.assertEqual(len(ax2.lines), 1) + assert len(ax1.lines) == 1 + assert len(ax2.lines) == 1 for ax in axes: self._check_visible(ax.get_yticklabels(), visible=True) self._check_visible(ax.get_xticklabels(), visible=True) @@ -2581,8 +2577,8 @@ def _get_horizontal_grid(): with tm.assert_produces_warning(UserWarning): axes = df.plot(subplots=True, ax=[ax1, ax2], sharex=True, sharey=True) - self.assertEqual(len(axes[0].lines), 1) - 
self.assertEqual(len(axes[1].lines), 1) + assert len(axes[0].lines) == 1 + assert len(axes[1].lines) == 1 for ax in [ax1, ax2]: # yaxis are visible because there is only one column self._check_visible(ax.get_yticklabels(), visible=True) @@ -2598,8 +2594,8 @@ def _get_horizontal_grid(): with tm.assert_produces_warning(UserWarning): axes = df.plot(subplots=True, ax=[ax1, ax2], sharex=True, sharey=True) - self.assertEqual(len(axes[0].lines), 1) - self.assertEqual(len(axes[1].lines), 1) + assert len(axes[0].lines) == 1 + assert len(axes[1].lines) == 1 self._check_visible(axes[0].get_yticklabels(), visible=True) # yaxis of axes1 (right) are hidden self._check_visible(axes[1].get_yticklabels(), visible=False) @@ -2624,7 +2620,7 @@ def _get_boxed_grid(): index=ts.index, columns=list('ABCD')) axes = df.plot(subplots=True, ax=axes) for ax in axes: - self.assertEqual(len(ax.lines), 1) + assert len(ax.lines) == 1 # axis are visible because these are not shared self._check_visible(ax.get_yticklabels(), visible=True) self._check_visible(ax.get_xticklabels(), visible=True) @@ -2636,7 +2632,7 @@ def _get_boxed_grid(): with tm.assert_produces_warning(UserWarning): axes = df.plot(subplots=True, ax=axes, sharex=True, sharey=True) for ax in axes: - self.assertEqual(len(ax.lines), 1) + assert len(ax.lines) == 1 for ax in [axes[0], axes[2]]: # left column self._check_visible(ax.get_yticklabels(), visible=True) for ax in [axes[1], axes[3]]: # right column @@ -2710,8 +2706,7 @@ def test_passed_bar_colors(self): color_tuples = [(0.9, 0, 0, 1), (0, 0.9, 0, 1), (0, 0, 0.9, 1)] colormap = mpl.colors.ListedColormap(color_tuples) barplot = pd.DataFrame([[1, 2, 3]]).plot(kind="bar", cmap=colormap) - self.assertEqual(color_tuples, [c.get_facecolor() - for c in barplot.patches]) + assert color_tuples == [c.get_facecolor() for c in barplot.patches] def test_rcParams_bar_colors(self): import matplotlib as mpl @@ -2723,8 +2718,7 @@ def test_rcParams_bar_colors(self): except (AttributeError, KeyError): # mpl 1.4 with mpl.rc_context(rc={'axes.color_cycle': color_tuples}): barplot = pd.DataFrame([[1, 2, 3]]).plot(kind="bar") - self.assertEqual(color_tuples, [c.get_facecolor() - for c in barplot.patches]) + assert color_tuples == [c.get_facecolor() for c in barplot.patches] def _generate_4_axes_via_gridspec(): diff --git a/pandas/tests/plotting/test_groupby.py b/pandas/tests/plotting/test_groupby.py index 93efb3f994c38..121f2f9b75698 100644 --- a/pandas/tests/plotting/test_groupby.py +++ b/pandas/tests/plotting/test_groupby.py @@ -68,7 +68,7 @@ def test_plot_kwargs(self): res = df.groupby('z').plot(kind='scatter', x='x', y='y') # check that a scatter plot is effectively plotted: the axes should # contain a PathCollection from the scatter plot (GH11805) - self.assertEqual(len(res['a'].collections), 1) + assert len(res['a'].collections) == 1 res = df.groupby('z').plot.scatter(x='x', y='y') - self.assertEqual(len(res['a'].collections), 1) + assert len(res['a'].collections) == 1 diff --git a/pandas/tests/plotting/test_hist_method.py b/pandas/tests/plotting/test_hist_method.py index 7002321908ef0..39bab59242c22 100644 --- a/pandas/tests/plotting/test_hist_method.py +++ b/pandas/tests/plotting/test_hist_method.py @@ -54,7 +54,7 @@ def test_hist_legacy(self): def test_hist_bins_legacy(self): df = DataFrame(np.random.randn(10, 2)) ax = df.hist(bins=2)[0][0] - self.assertEqual(len(ax.patches), 2) + assert len(ax.patches) == 2 @slow def test_hist_layout(self): @@ -122,13 +122,13 @@ def test_hist_no_overlap(self): y.hist() fig = gcf() 
axes = fig.axes if self.mpl_ge_1_5_0 else fig.get_axes() - self.assertEqual(len(axes), 2) + assert len(axes) == 2 @slow def test_hist_by_no_extra_plots(self): df = self.hist_df axes = df.height.hist(by=df.gender) # noqa - self.assertEqual(len(self.plt.get_fignums()), 1) + assert len(self.plt.get_fignums()) == 1 @slow def test_plot_fails_when_ax_differs_from_figure(self): @@ -314,8 +314,8 @@ def test_grouped_hist_legacy2(self): 'gender': gender_int}) gb = df_int.groupby('gender') axes = gb.hist() - self.assertEqual(len(axes), 2) - self.assertEqual(len(self.plt.get_fignums()), 2) + assert len(axes) == 2 + assert len(self.plt.get_fignums()) == 2 tm.close() @slow diff --git a/pandas/tests/plotting/test_misc.py b/pandas/tests/plotting/test_misc.py index 9b8569e8680e4..3a9cb309db707 100644 --- a/pandas/tests/plotting/test_misc.py +++ b/pandas/tests/plotting/test_misc.py @@ -309,7 +309,7 @@ def test_subplot_titles(self): # Case len(title) == len(df) plot = df.plot(subplots=True, title=title) - self.assertEqual([p.get_title() for p in plot], title) + assert [p.get_title() for p in plot] == title # Case len(title) > len(df) pytest.raises(ValueError, df.plot, subplots=True, @@ -325,4 +325,4 @@ def test_subplot_titles(self): plot = df.drop('SepalWidth', axis=1).plot(subplots=True, layout=(2, 2), title=title[:-1]) title_list = [ax.get_title() for sublist in plot for ax in sublist] - self.assertEqual(title_list, title[:3] + ['']) + assert title_list == title[:3] + [''] diff --git a/pandas/tests/plotting/test_series.py b/pandas/tests/plotting/test_series.py index 8ae301a0b7b4c..d1325c7130d04 100644 --- a/pandas/tests/plotting/test_series.py +++ b/pandas/tests/plotting/test_series.py @@ -93,36 +93,36 @@ def test_dont_modify_rcParams(self): key = 'axes.color_cycle' colors = self.plt.rcParams[key] Series([1, 2, 3]).plot() - self.assertEqual(colors, self.plt.rcParams[key]) + assert colors == self.plt.rcParams[key] def test_ts_line_lim(self): ax = self.ts.plot() xmin, xmax = ax.get_xlim() lines = ax.get_lines() - self.assertEqual(xmin, lines[0].get_data(orig=False)[0][0]) - self.assertEqual(xmax, lines[0].get_data(orig=False)[0][-1]) + assert xmin == lines[0].get_data(orig=False)[0][0] + assert xmax == lines[0].get_data(orig=False)[0][-1] tm.close() ax = self.ts.plot(secondary_y=True) xmin, xmax = ax.get_xlim() lines = ax.get_lines() - self.assertEqual(xmin, lines[0].get_data(orig=False)[0][0]) - self.assertEqual(xmax, lines[0].get_data(orig=False)[0][-1]) + assert xmin == lines[0].get_data(orig=False)[0][0] + assert xmax == lines[0].get_data(orig=False)[0][-1] def test_ts_area_lim(self): ax = self.ts.plot.area(stacked=False) xmin, xmax = ax.get_xlim() line = ax.get_lines()[0].get_data(orig=False)[0] - self.assertEqual(xmin, line[0]) - self.assertEqual(xmax, line[-1]) + assert xmin == line[0] + assert xmax == line[-1] tm.close() # GH 7471 ax = self.ts.plot.area(stacked=False, x_compat=True) xmin, xmax = ax.get_xlim() line = ax.get_lines()[0].get_data(orig=False)[0] - self.assertEqual(xmin, line[0]) - self.assertEqual(xmax, line[-1]) + assert xmin == line[0] + assert xmax == line[-1] tm.close() tz_ts = self.ts.copy() @@ -130,15 +130,15 @@ def test_ts_area_lim(self): ax = tz_ts.plot.area(stacked=False, x_compat=True) xmin, xmax = ax.get_xlim() line = ax.get_lines()[0].get_data(orig=False)[0] - self.assertEqual(xmin, line[0]) - self.assertEqual(xmax, line[-1]) + assert xmin == line[0] + assert xmax == line[-1] tm.close() ax = tz_ts.plot.area(stacked=False, secondary_y=True) xmin, xmax = ax.get_xlim() line = 
ax.get_lines()[0].get_data(orig=False)[0] - self.assertEqual(xmin, line[0]) - self.assertEqual(xmax, line[-1]) + assert xmin == line[0] + assert xmax == line[-1] def test_label(self): s = Series([1, 2]) @@ -159,7 +159,7 @@ def test_label(self): self.plt.close() # Add lebel info, but don't draw ax = s.plot(legend=False, label='LABEL') - self.assertEqual(ax.get_legend(), None) # Hasn't been drawn + assert ax.get_legend() is None # Hasn't been drawn ax.legend() # draw it self._check_legend_labels(ax, labels=['LABEL']) @@ -190,10 +190,10 @@ def test_line_use_index_false(self): s.index.name = 'The Index' ax = s.plot(use_index=False) label = ax.get_xlabel() - self.assertEqual(label, '') + assert label == '' ax2 = s.plot.bar(use_index=False) label2 = ax2.get_xlabel() - self.assertEqual(label2, '') + assert label2 == '' @slow def test_bar_log(self): @@ -255,7 +255,7 @@ def test_irregular_datetime(self): ax = ser.plot() xp = datetime(1999, 1, 1).toordinal() ax.set_xlim('1/1/1999', '1/1/2001') - self.assertEqual(xp, ax.get_xlim()[0]) + assert xp == ax.get_xlim()[0] @slow def test_pie_series(self): @@ -265,7 +265,7 @@ def test_pie_series(self): index=['a', 'b', 'c', 'd', 'e'], name='YLABEL') ax = _check_plot_works(series.plot.pie) self._check_text_labels(ax.texts, series.index) - self.assertEqual(ax.get_ylabel(), 'YLABEL') + assert ax.get_ylabel() == 'YLABEL' # without wedge labels ax = _check_plot_works(series.plot.pie, labels=None) @@ -295,7 +295,7 @@ def test_pie_series(self): expected_texts = list(next(it) for it in itertools.cycle(iters)) self._check_text_labels(ax.texts, expected_texts) for t in ax.texts: - self.assertEqual(t.get_fontsize(), 7) + assert t.get_fontsize() == 7 # includes negative value with pytest.raises(ValueError): @@ -313,13 +313,13 @@ def test_pie_nan(self): ax = s.plot.pie(legend=True) expected = ['0', '', '2', '3'] result = [x.get_text() for x in ax.texts] - self.assertEqual(result, expected) + assert result == expected @slow def test_hist_df_kwargs(self): df = DataFrame(np.random.randn(10, 2)) ax = df.plot.hist(bins=5) - self.assertEqual(len(ax.patches), 10) + assert len(ax.patches) == 10 @slow def test_hist_df_with_nonnumerics(self): @@ -329,10 +329,10 @@ def test_hist_df_with_nonnumerics(self): np.random.randn(10, 4), columns=['A', 'B', 'C', 'D']) df['E'] = ['x', 'y'] * 5 ax = df.plot.hist(bins=5) - self.assertEqual(len(ax.patches), 20) + assert len(ax.patches) == 20 ax = df.plot.hist() # bins=10 - self.assertEqual(len(ax.patches), 40) + assert len(ax.patches) == 40 @slow def test_hist_legacy(self): @@ -364,7 +364,7 @@ def test_hist_legacy(self): def test_hist_bins_legacy(self): df = DataFrame(np.random.randn(10, 2)) ax = df.hist(bins=2)[0][0] - self.assertEqual(len(ax.patches), 2) + assert len(ax.patches) == 2 @slow def test_hist_layout(self): @@ -430,7 +430,7 @@ def test_hist_no_overlap(self): y.hist() fig = gcf() axes = fig.axes if self.mpl_ge_1_5_0 else fig.get_axes() - self.assertEqual(len(axes), 2) + assert len(axes) == 2 @slow def test_hist_secondary_legend(self): @@ -583,7 +583,7 @@ def test_kde_missing_vals(self): @slow def test_hist_kwargs(self): ax = self.ts.plot.hist(bins=5) - self.assertEqual(len(ax.patches), 5) + assert len(ax.patches) == 5 self._check_text_labels(ax.yaxis.get_label(), 'Frequency') tm.close() @@ -599,7 +599,7 @@ def test_hist_kwargs(self): def test_hist_kde_color(self): ax = self.ts.plot.hist(logy=True, bins=10, color='b') self._check_ax_scales(ax, yaxis='log') - self.assertEqual(len(ax.patches), 10) + assert len(ax.patches) == 10 
self._check_colors(ax.patches, facecolors=['b'] * 10) tm._skip_if_no_scipy() @@ -607,7 +607,7 @@ def test_hist_kde_color(self): ax = self.ts.plot.kde(logy=True, color='r') self._check_ax_scales(ax, yaxis='log') lines = ax.get_lines() - self.assertEqual(len(lines), 1) + assert len(lines) == 1 self._check_colors(lines, ['r']) @slow @@ -729,16 +729,16 @@ def test_standard_colors(self): for c in ['r', 'red', 'green', '#FF0000']: result = _get_standard_colors(1, color=c) - self.assertEqual(result, [c]) + assert result == [c] result = _get_standard_colors(1, color=[c]) - self.assertEqual(result, [c]) + assert result == [c] result = _get_standard_colors(3, color=c) - self.assertEqual(result, [c] * 3) + assert result == [c] * 3 result = _get_standard_colors(3, color=[c]) - self.assertEqual(result, [c] * 3) + assert result == [c] * 3 @slow def test_standard_colors_all(self): @@ -748,30 +748,30 @@ def test_standard_colors_all(self): # multiple colors like mediumaquamarine for c in colors.cnames: result = _get_standard_colors(num_colors=1, color=c) - self.assertEqual(result, [c]) + assert result == [c] result = _get_standard_colors(num_colors=1, color=[c]) - self.assertEqual(result, [c]) + assert result == [c] result = _get_standard_colors(num_colors=3, color=c) - self.assertEqual(result, [c] * 3) + assert result == [c] * 3 result = _get_standard_colors(num_colors=3, color=[c]) - self.assertEqual(result, [c] * 3) + assert result == [c] * 3 # single letter colors like k for c in colors.ColorConverter.colors: result = _get_standard_colors(num_colors=1, color=c) - self.assertEqual(result, [c]) + assert result == [c] result = _get_standard_colors(num_colors=1, color=[c]) - self.assertEqual(result, [c]) + assert result == [c] result = _get_standard_colors(num_colors=3, color=c) - self.assertEqual(result, [c] * 3) + assert result == [c] * 3 result = _get_standard_colors(num_colors=3, color=[c]) - self.assertEqual(result, [c] * 3) + assert result == [c] * 3 def test_series_plot_color_kwargs(self): # GH1890 diff --git a/pandas/tests/reshape/test_concat.py b/pandas/tests/reshape/test_concat.py index 9854245cf1abd..2d4d0a09060de 100644 --- a/pandas/tests/reshape/test_concat.py +++ b/pandas/tests/reshape/test_concat.py @@ -65,14 +65,14 @@ def _check_expected_dtype(self, obj, label): """ if isinstance(obj, pd.Index): if label == 'bool': - self.assertEqual(obj.dtype, 'object') + assert obj.dtype == 'object' else: - self.assertEqual(obj.dtype, label) + assert obj.dtype == label elif isinstance(obj, pd.Series): if label.startswith('period'): - self.assertEqual(obj.dtype, 'object') + assert obj.dtype == 'object' else: - self.assertEqual(obj.dtype, label) + assert obj.dtype == label else: raise ValueError @@ -814,7 +814,7 @@ def test_append_preserve_index_name(self): df2 = df2.set_index(['A']) result = df1.append(df2) - self.assertEqual(result.index.name, 'A') + assert result.index.name == 'A' def test_append_dtype_coerce(self): @@ -849,8 +849,8 @@ def test_append_missing_column_proper_upcast(self): dtype=bool)}) appended = df1.append(df2, ignore_index=True) - self.assertEqual(appended['A'].dtype, 'f8') - self.assertEqual(appended['B'].dtype, 'O') + assert appended['A'].dtype == 'f8' + assert appended['B'].dtype == 'O' class TestConcatenate(ConcatenateBase): @@ -934,7 +934,7 @@ def test_concat_keys_specific_levels(self): tm.assert_index_equal(result.columns.levels[0], Index(level, name='group_key')) - self.assertEqual(result.columns.names[0], 'group_key') + assert result.columns.names[0] == 'group_key' def 
test_concat_dataframe_keys_bug(self): t1 = DataFrame({ @@ -945,8 +945,7 @@ def test_concat_dataframe_keys_bug(self): # it works result = concat([t1, t2], axis=1, keys=['t1', 't2']) - self.assertEqual(list(result.columns), [('t1', 'value'), - ('t2', 'value')]) + assert list(result.columns) == [('t1', 'value'), ('t2', 'value')] def test_concat_series_partial_columns_names(self): # GH10698 @@ -1020,10 +1019,10 @@ def test_concat_multiindex_with_keys(self): columns=Index(['A', 'B', 'C'], name='exp')) result = concat([frame, frame], keys=[0, 1], names=['iteration']) - self.assertEqual(result.index.names, ('iteration',) + index.names) + assert result.index.names == ('iteration',) + index.names tm.assert_frame_equal(result.loc[0], frame) tm.assert_frame_equal(result.loc[1], frame) - self.assertEqual(result.index.nlevels, 3) + assert result.index.nlevels == 3 def test_concat_multiindex_with_tz(self): # GH 6606 @@ -1088,22 +1087,21 @@ def test_concat_keys_and_levels(self): names=names + [None]) expected.index = exp_index - assert_frame_equal(result, expected) + tm.assert_frame_equal(result, expected) # no names - result = concat([df, df2, df, df2], keys=[('foo', 'one'), ('foo', 'two'), ('baz', 'one'), ('baz', 'two')], levels=levels) - self.assertEqual(result.index.names, (None,) * 3) + assert result.index.names == (None,) * 3 # no levels result = concat([df, df2, df, df2], keys=[('foo', 'one'), ('foo', 'two'), ('baz', 'one'), ('baz', 'two')], names=['first', 'second']) - self.assertEqual(result.index.names, ('first', 'second') + (None,)) + assert result.index.names == ('first', 'second') + (None,) tm.assert_index_equal(result.index.levels[0], Index(['baz', 'foo'], name='first')) @@ -1135,7 +1133,7 @@ def test_concat_rename_index(self): exp.index.set_names(names, inplace=True) tm.assert_frame_equal(result, exp) - self.assertEqual(result.index.names, exp.index.names) + assert result.index.names == exp.index.names def test_crossed_dtypes_weird_corner(self): columns = ['A', 'B', 'C', 'D'] @@ -1160,7 +1158,7 @@ def test_crossed_dtypes_weird_corner(self): df2 = DataFrame(np.random.randn(1, 4), index=['b']) result = concat( [df, df2], keys=['one', 'two'], names=['first', 'second']) - self.assertEqual(result.index.names, ('first', 'second')) + assert result.index.names == ('first', 'second') def test_dups_index(self): # GH 4771 @@ -1442,7 +1440,7 @@ def test_concat_series(self): result = concat(pieces) tm.assert_series_equal(result, ts) - self.assertEqual(result.name, ts.name) + assert result.name == ts.name result = concat(pieces, keys=[0, 1, 2]) expected = ts.copy() @@ -1549,7 +1547,7 @@ def test_concat_bug_1719(self): left = concat([ts1, ts2], join='outer', axis=1) right = concat([ts2, ts1], join='outer', axis=1) - self.assertEqual(len(left), len(right)) + assert len(left) == len(right) def test_concat_bug_2972(self): ts0 = Series(np.zeros(5)) @@ -1706,8 +1704,7 @@ def test_concat_tz_frame(self): assert_frame_equal(df2, df3) def test_concat_tz_series(self): - # GH 11755 - # tz and no tz + # gh-11755: tz and no tz x = Series(date_range('20151124 08:00', '20151124 09:00', freq='1h', tz='UTC')) @@ -1717,8 +1714,7 @@ def test_concat_tz_series(self): result = concat([x, y], ignore_index=True) tm.assert_series_equal(result, expected) - # GH 11887 - # concat tz and object + # gh-11887: concat tz and object x = Series(date_range('20151124 08:00', '20151124 09:00', freq='1h', tz='UTC')) @@ -1728,10 +1724,8 @@ def test_concat_tz_series(self): result = concat([x, y], ignore_index=True) 
tm.assert_series_equal(result, expected) - # 12217 - # 12306 fixed I think - - # Concat'ing two UTC times + # see gh-12217 and gh-12306 + # Concatenating two UTC times first = pd.DataFrame([[datetime(2016, 1, 1)]]) first[0] = first[0].dt.tz_localize('UTC') @@ -1739,9 +1733,9 @@ def test_concat_tz_series(self): second[0] = second[0].dt.tz_localize('UTC') result = pd.concat([first, second]) - self.assertEqual(result[0].dtype, 'datetime64[ns, UTC]') + assert result[0].dtype == 'datetime64[ns, UTC]' - # Concat'ing two London times + # Concatenating two London times first = pd.DataFrame([[datetime(2016, 1, 1)]]) first[0] = first[0].dt.tz_localize('Europe/London') @@ -1749,9 +1743,9 @@ def test_concat_tz_series(self): second[0] = second[0].dt.tz_localize('Europe/London') result = pd.concat([first, second]) - self.assertEqual(result[0].dtype, 'datetime64[ns, Europe/London]') + assert result[0].dtype == 'datetime64[ns, Europe/London]' - # Concat'ing 2+1 London times + # Concatenating 2+1 London times first = pd.DataFrame([[datetime(2016, 1, 1)], [datetime(2016, 1, 2)]]) first[0] = first[0].dt.tz_localize('Europe/London') @@ -1759,7 +1753,7 @@ def test_concat_tz_series(self): second[0] = second[0].dt.tz_localize('Europe/London') result = pd.concat([first, second]) - self.assertEqual(result[0].dtype, 'datetime64[ns, Europe/London]') + assert result[0].dtype == 'datetime64[ns, Europe/London]' # Concat'ing 1+2 London times first = pd.DataFrame([[datetime(2016, 1, 1)]]) @@ -1769,11 +1763,10 @@ def test_concat_tz_series(self): second[0] = second[0].dt.tz_localize('Europe/London') result = pd.concat([first, second]) - self.assertEqual(result[0].dtype, 'datetime64[ns, Europe/London]') + assert result[0].dtype == 'datetime64[ns, Europe/London]' def test_concat_tz_series_with_datetimelike(self): - # GH 12620 - # tz and timedelta + # see gh-12620: tz and timedelta x = [pd.Timestamp('2011-01-01', tz='US/Eastern'), pd.Timestamp('2011-02-01', tz='US/Eastern')] y = [pd.Timedelta('1 day'), pd.Timedelta('2 day')] @@ -1786,16 +1779,18 @@ def test_concat_tz_series_with_datetimelike(self): tm.assert_series_equal(result, pd.Series(x + y, dtype='object')) def test_concat_tz_series_tzlocal(self): - # GH 13583 + # see gh-13583 tm._skip_if_no_dateutil() import dateutil + x = [pd.Timestamp('2011-01-01', tz=dateutil.tz.tzlocal()), pd.Timestamp('2011-02-01', tz=dateutil.tz.tzlocal())] y = [pd.Timestamp('2012-01-01', tz=dateutil.tz.tzlocal()), pd.Timestamp('2012-02-01', tz=dateutil.tz.tzlocal())] + result = concat([pd.Series(x), pd.Series(y)], ignore_index=True) tm.assert_series_equal(result, pd.Series(x + y)) - self.assertEqual(result.dtype, 'datetime64[ns, tzlocal()]') + assert result.dtype == 'datetime64[ns, tzlocal()]' def test_concat_period_series(self): x = Series(pd.PeriodIndex(['2015-11-01', '2015-12-01'], freq='D')) @@ -1803,7 +1798,7 @@ def test_concat_period_series(self): expected = Series([x[0], x[1], y[0], y[1]], dtype='object') result = concat([x, y], ignore_index=True) tm.assert_series_equal(result, expected) - self.assertEqual(result.dtype, 'object') + assert result.dtype == 'object' # different freq x = Series(pd.PeriodIndex(['2015-11-01', '2015-12-01'], freq='D')) @@ -1811,14 +1806,14 @@ def test_concat_period_series(self): expected = Series([x[0], x[1], y[0], y[1]], dtype='object') result = concat([x, y], ignore_index=True) tm.assert_series_equal(result, expected) - self.assertEqual(result.dtype, 'object') + assert result.dtype == 'object' x = Series(pd.PeriodIndex(['2015-11-01', '2015-12-01'], freq='D')) y 
= Series(pd.PeriodIndex(['2015-11-01', '2015-12-01'], freq='M')) expected = Series([x[0], x[1], y[0], y[1]], dtype='object') result = concat([x, y], ignore_index=True) tm.assert_series_equal(result, expected) - self.assertEqual(result.dtype, 'object') + assert result.dtype == 'object' # non-period x = Series(pd.PeriodIndex(['2015-11-01', '2015-12-01'], freq='D')) @@ -1826,14 +1821,14 @@ def test_concat_period_series(self): expected = Series([x[0], x[1], y[0], y[1]], dtype='object') result = concat([x, y], ignore_index=True) tm.assert_series_equal(result, expected) - self.assertEqual(result.dtype, 'object') + assert result.dtype == 'object' x = Series(pd.PeriodIndex(['2015-11-01', '2015-12-01'], freq='D')) y = Series(['A', 'B']) expected = Series([x[0], x[1], y[0], y[1]], dtype='object') result = concat([x, y], ignore_index=True) tm.assert_series_equal(result, expected) - self.assertEqual(result.dtype, 'object') + assert result.dtype == 'object' def test_concat_empty_series(self): # GH 11082 diff --git a/pandas/tests/reshape/test_hashing.py b/pandas/tests/reshape/test_hashing.py index f19f6b1374978..85807da33e38d 100644 --- a/pandas/tests/reshape/test_hashing.py +++ b/pandas/tests/reshape/test_hashing.py @@ -76,7 +76,7 @@ def test_hash_tuples(self): tm.assert_numpy_array_equal(result, expected) result = hash_tuples(tups[0]) - self.assertEqual(result, expected[0]) + assert result == expected[0] def test_hash_tuples_err(self): diff --git a/pandas/tests/reshape/test_join.py b/pandas/tests/reshape/test_join.py index 1da187788e99d..cda343175fd0a 100644 --- a/pandas/tests/reshape/test_join.py +++ b/pandas/tests/reshape/test_join.py @@ -257,7 +257,7 @@ def test_join_with_len0(self): merged2 = self.target.join(self.source.reindex([]), on='C', how='inner') tm.assert_index_equal(merged2.columns, merged.columns) - self.assertEqual(len(merged2), 0) + assert len(merged2) == 0 def test_join_on_inner(self): df = DataFrame({'key': ['a', 'a', 'd', 'b', 'b', 'c']}) @@ -301,8 +301,8 @@ def test_join_index_mixed(self): df1 = DataFrame({'A': 1., 'B': 2, 'C': 'foo', 'D': True}, index=np.arange(10), columns=['A', 'B', 'C', 'D']) - self.assertEqual(df1['B'].dtype, np.int64) - self.assertEqual(df1['D'].dtype, np.bool_) + assert df1['B'].dtype == np.int64 + assert df1['D'].dtype == np.bool_ df2 = DataFrame({'A': 1., 'B': 2, 'C': 'foo', 'D': True}, index=np.arange(0, 10, 2), @@ -374,7 +374,7 @@ def test_join_multiindex(self): expected = df1.reindex(ex_index).join(df2.reindex(ex_index)) expected.index.names = index1.names assert_frame_equal(joined, expected) - self.assertEqual(joined.index.names, index1.names) + assert joined.index.names == index1.names df1 = df1.sort_index(level=1) df2 = df2.sort_index(level=1) @@ -385,7 +385,7 @@ def test_join_multiindex(self): expected.index.names = index1.names assert_frame_equal(joined, expected) - self.assertEqual(joined.index.names, index1.names) + assert joined.index.names == index1.names def test_join_inner_multiindex(self): key1 = ['bar', 'bar', 'bar', 'foo', 'foo', 'baz', 'baz', 'qux', @@ -445,9 +445,9 @@ def test_join_float64_float32(self): a = DataFrame(randn(10, 2), columns=['a', 'b'], dtype=np.float64) b = DataFrame(randn(10, 1), columns=['c'], dtype=np.float32) joined = a.join(b) - self.assertEqual(joined.dtypes['a'], 'float64') - self.assertEqual(joined.dtypes['b'], 'float64') - self.assertEqual(joined.dtypes['c'], 'float32') + assert joined.dtypes['a'] == 'float64' + assert joined.dtypes['b'] == 'float64' + assert joined.dtypes['c'] == 'float32' a = 
np.random.randint(0, 5, 100).astype('int64') b = np.random.random(100).astype('float64') @@ -456,10 +456,10 @@ def test_join_float64_float32(self): xpdf = DataFrame({'a': a, 'b': b, 'c': c}) s = DataFrame(np.random.random(5).astype('float32'), columns=['md']) rs = df.merge(s, left_on='a', right_index=True) - self.assertEqual(rs.dtypes['a'], 'int64') - self.assertEqual(rs.dtypes['b'], 'float64') - self.assertEqual(rs.dtypes['c'], 'float32') - self.assertEqual(rs.dtypes['md'], 'float32') + assert rs.dtypes['a'] == 'int64' + assert rs.dtypes['b'] == 'float64' + assert rs.dtypes['c'] == 'float32' + assert rs.dtypes['md'] == 'float32' xp = xpdf.merge(s, left_on='a', right_index=True) assert_frame_equal(rs, xp) diff --git a/pandas/tests/reshape/test_merge.py b/pandas/tests/reshape/test_merge.py index 86580e5a84d92..db0e4631381f1 100644 --- a/pandas/tests/reshape/test_merge.py +++ b/pandas/tests/reshape/test_merge.py @@ -127,7 +127,7 @@ def test_index_and_on_parameters_confusion(self): def test_merge_overlap(self): merged = merge(self.left, self.left, on='key') exp_len = (self.left['key'].value_counts() ** 2).sum() - self.assertEqual(len(merged), exp_len) + assert len(merged) == exp_len assert 'v1_x' in merged assert 'v1_y' in merged @@ -202,7 +202,7 @@ def test_merge_join_key_dtype_cast(self): df1 = DataFrame({'key': [1], 'v1': [10]}) df2 = DataFrame({'key': [2], 'v1': [20]}) df = merge(df1, df2, how='outer') - self.assertEqual(df['key'].dtype, 'int64') + assert df['key'].dtype == 'int64' df1 = DataFrame({'key': [True], 'v1': [1]}) df2 = DataFrame({'key': [False], 'v1': [0]}) @@ -210,14 +210,14 @@ def test_merge_join_key_dtype_cast(self): # GH13169 # this really should be bool - self.assertEqual(df['key'].dtype, 'object') + assert df['key'].dtype == 'object' df1 = DataFrame({'val': [1]}) df2 = DataFrame({'val': [2]}) lkey = np.array([1]) rkey = np.array([2]) df = merge(df1, df2, left_on=lkey, right_on=rkey, how='outer') - self.assertEqual(df['key_0'].dtype, 'int64') + assert df['key_0'].dtype == 'int64' def test_handle_join_key_pass_array(self): left = DataFrame({'key': [1, 1, 2, 2, 3], @@ -499,7 +499,7 @@ def test_other_datetime_unit(self): df2 = s.astype(dtype).to_frame('days') # coerces to datetime64[ns], thus sholuld not be affected - self.assertEqual(df2['days'].dtype, 'datetime64[ns]') + assert df2['days'].dtype == 'datetime64[ns]' result = df1.merge(df2, left_on='entity_id', right_index=True) @@ -519,7 +519,7 @@ def test_other_timedelta_unit(self): 'timedelta64[ns]']: df2 = s.astype(dtype).to_frame('days') - self.assertEqual(df2['days'].dtype, dtype) + assert df2['days'].dtype == dtype result = df1.merge(df2, left_on='entity_id', right_index=True) @@ -582,8 +582,8 @@ def test_merge_on_datetime64tz(self): 'key': [1, 2, 3]}) result = pd.merge(left, right, on='key', how='outer') assert_frame_equal(result, expected) - self.assertEqual(result['value_x'].dtype, 'datetime64[ns, US/Eastern]') - self.assertEqual(result['value_y'].dtype, 'datetime64[ns, US/Eastern]') + assert result['value_x'].dtype == 'datetime64[ns, US/Eastern]' + assert result['value_y'].dtype == 'datetime64[ns, US/Eastern]' def test_merge_on_periods(self): left = pd.DataFrame({'key': pd.period_range('20151010', periods=2, @@ -614,8 +614,8 @@ def test_merge_on_periods(self): 'key': [1, 2, 3]}) result = pd.merge(left, right, on='key', how='outer') assert_frame_equal(result, expected) - self.assertEqual(result['value_x'].dtype, 'object') - self.assertEqual(result['value_y'].dtype, 'object') + assert result['value_x'].dtype == 
'object' + assert result['value_y'].dtype == 'object' def test_indicator(self): # PR #10054. xref #7412 and closes #8790. diff --git a/pandas/tests/reshape/test_pivot.py b/pandas/tests/reshape/test_pivot.py index 3b3b4fe247b72..df679966e0002 100644 --- a/pandas/tests/reshape/test_pivot.py +++ b/pandas/tests/reshape/test_pivot.py @@ -45,14 +45,14 @@ def test_pivot_table(self): pivot_table(self.data, values='D', index=index) if len(index) > 1: - self.assertEqual(table.index.names, tuple(index)) + assert table.index.names == tuple(index) else: - self.assertEqual(table.index.name, index[0]) + assert table.index.name == index[0] if len(columns) > 1: - self.assertEqual(table.columns.names, columns) + assert table.columns.names == columns else: - self.assertEqual(table.columns.name, columns[0]) + assert table.columns.name == columns[0] expected = self.data.groupby( index + [columns])['D'].agg(np.mean).unstack() @@ -148,7 +148,7 @@ def test_pivot_dtypes(self): # can convert dtypes f = DataFrame({'a': ['cat', 'bat', 'cat', 'bat'], 'v': [ 1, 2, 3, 4], 'i': ['a', 'b', 'a', 'b']}) - self.assertEqual(f.dtypes['v'], 'int64') + assert f.dtypes['v'] == 'int64' z = pivot_table(f, values='v', index=['a'], columns=[ 'i'], fill_value=0, aggfunc=np.sum) @@ -159,7 +159,7 @@ def test_pivot_dtypes(self): # cannot convert dtypes f = DataFrame({'a': ['cat', 'bat', 'cat', 'bat'], 'v': [ 1.5, 2.5, 3.5, 4.5], 'i': ['a', 'b', 'a', 'b']}) - self.assertEqual(f.dtypes['v'], 'float64') + assert f.dtypes['v'] == 'float64' z = pivot_table(f, values='v', index=['a'], columns=[ 'i'], fill_value=0, aggfunc=np.mean) @@ -249,10 +249,10 @@ def test_pivot_index_with_nan(self): df.loc[1, 'b'] = df.loc[4, 'b'] = nan pv = df.pivot('a', 'b', 'c') - self.assertEqual(pv.notnull().values.sum(), len(df)) + assert pv.notnull().values.sum() == len(df) for _, row in df.iterrows(): - self.assertEqual(pv.loc[row['a'], row['b']], row['c']) + assert pv.loc[row['a'], row['b']] == row['c'] tm.assert_frame_equal(df.pivot('b', 'a', 'c'), pv.T) @@ -341,7 +341,7 @@ def _check_output(result, values_col, index=['A', 'B'], expected_col_margins = self.data.groupby(index)[values_col].mean() tm.assert_series_equal(col_margins, expected_col_margins, check_names=False) - self.assertEqual(col_margins.name, margins_col) + assert col_margins.name == margins_col result = result.sort_index() index_margins = result.loc[(margins_col, '')].iloc[:-1] @@ -349,11 +349,11 @@ def _check_output(result, values_col, index=['A', 'B'], expected_ix_margins = self.data.groupby(columns)[values_col].mean() tm.assert_series_equal(index_margins, expected_ix_margins, check_names=False) - self.assertEqual(index_margins.name, (margins_col, '')) + assert index_margins.name == (margins_col, '') grand_total_margins = result.loc[(margins_col, ''), margins_col] expected_total_margins = self.data[values_col].mean() - self.assertEqual(grand_total_margins, expected_total_margins) + assert grand_total_margins == expected_total_margins # column specified result = self.data.pivot_table(values='D', index=['A', 'B'], @@ -382,7 +382,7 @@ def _check_output(result, values_col, index=['A', 'B'], aggfunc=np.mean) for value_col in table.columns: totals = table.loc[('All', ''), value_col] - self.assertEqual(totals, self.data[value_col].mean()) + assert totals == self.data[value_col].mean() # no rows rtable = self.data.pivot_table(columns=['AA', 'BB'], margins=True, @@ -393,7 +393,7 @@ def _check_output(result, values_col, index=['A', 'B'], aggfunc='mean') for item in ['DD', 'EE', 'FF']: totals = 
table.loc[('All', ''), item] - self.assertEqual(totals, self.data[item].mean()) + assert totals == self.data[item].mean() # issue number #8349: pivot_table with margins and dictionary aggfunc data = [ @@ -528,21 +528,21 @@ def test_margins_no_values_no_cols(self): result = self.data[['A', 'B']].pivot_table( index=['A', 'B'], aggfunc=len, margins=True) result_list = result.tolist() - self.assertEqual(sum(result_list[:-1]), result_list[-1]) + assert sum(result_list[:-1]) == result_list[-1] def test_margins_no_values_two_rows(self): # Regression test on pivot table: no values passed but rows are a # multi-index result = self.data[['A', 'B', 'C']].pivot_table( index=['A', 'B'], columns='C', aggfunc=len, margins=True) - self.assertEqual(result.All.tolist(), [3.0, 1.0, 4.0, 3.0, 11.0]) + assert result.All.tolist() == [3.0, 1.0, 4.0, 3.0, 11.0] def test_margins_no_values_one_row_one_col(self): # Regression test on pivot table: no values passed but row and col # defined result = self.data[['A', 'B']].pivot_table( index='A', columns='B', aggfunc=len, margins=True) - self.assertEqual(result.All.tolist(), [4.0, 7.0, 11.0]) + assert result.All.tolist() == [4.0, 7.0, 11.0] def test_margins_no_values_two_row_two_cols(self): # Regression test on pivot table: no values passed but rows and cols @@ -551,10 +551,10 @@ def test_margins_no_values_two_row_two_cols(self): 'e', 'f', 'g', 'h', 'i', 'j', 'k'] result = self.data[['A', 'B', 'C', 'D']].pivot_table( index=['A', 'B'], columns=['C', 'D'], aggfunc=len, margins=True) - self.assertEqual(result.All.tolist(), [3.0, 1.0, 4.0, 3.0, 11.0]) + assert result.All.tolist() == [3.0, 1.0, 4.0, 3.0, 11.0] def test_pivot_table_with_margins_set_margin_name(self): - # GH 3335 + # see gh-3335 for margin_name in ['foo', 'one', 666, None, ['a', 'b']]: with pytest.raises(ValueError): # multi-index index @@ -1037,8 +1037,8 @@ def test_crosstab_ndarray(self): # assign arbitrary names result = crosstab(self.df['A'].values, self.df['C'].values) - self.assertEqual(result.index.name, 'row_0') - self.assertEqual(result.columns.name, 'col_0') + assert result.index.name == 'row_0' + assert result.columns.name == 'col_0' def test_crosstab_margins(self): a = np.random.randint(0, 7, size=100) @@ -1050,8 +1050,8 @@ def test_crosstab_margins(self): result = crosstab(a, [b, c], rownames=['a'], colnames=('b', 'c'), margins=True) - self.assertEqual(result.index.names, ('a',)) - self.assertEqual(result.columns.names, ['b', 'c']) + assert result.index.names == ('a',) + assert result.columns.names == ['b', 'c'] all_cols = result['All', ''] exp_cols = df.groupby(['a']).size().astype('i8') @@ -1420,7 +1420,7 @@ def test_daily(self): result = annual[i].dropna() tm.assert_series_equal(result, subset, check_names=False) - self.assertEqual(result.name, i) + assert result.name == i # check leap days leaps = ts[(ts.index.month == 2) & (ts.index.day == 29)] @@ -1453,7 +1453,7 @@ def test_hourly(self): result = annual[i].dropna() tm.assert_series_equal(result, subset, check_names=False) - self.assertEqual(result.name, i) + assert result.name == i leaps = ts_hourly[(ts_hourly.index.month == 2) & ( ts_hourly.index.day == 29) & (ts_hourly.index.hour == 0)] @@ -1478,7 +1478,7 @@ def test_monthly(self): subset.index = [x.year for x in subset.index] result = annual[i].dropna() tm.assert_series_equal(result, subset, check_names=False) - self.assertEqual(result.name, i) + assert result.name == i def test_period_monthly(self): pass diff --git a/pandas/tests/reshape/test_reshape.py 
b/pandas/tests/reshape/test_reshape.py index 87f16cfaf31ec..87cd0637f1125 100644 --- a/pandas/tests/reshape/test_reshape.py +++ b/pandas/tests/reshape/test_reshape.py @@ -35,7 +35,7 @@ def setUp(self): def test_top_level_method(self): result = melt(self.df) - self.assertEqual(result.columns.tolist(), ['variable', 'value']) + assert result.columns.tolist() == ['variable', 'value'] def test_method_signatures(self): tm.assert_frame_equal(self.df.melt(), @@ -58,19 +58,17 @@ def test_method_signatures(self): def test_default_col_names(self): result = self.df.melt() - self.assertEqual(result.columns.tolist(), ['variable', 'value']) + assert result.columns.tolist() == ['variable', 'value'] result1 = self.df.melt(id_vars=['id1']) - self.assertEqual(result1.columns.tolist(), ['id1', 'variable', 'value' - ]) + assert result1.columns.tolist() == ['id1', 'variable', 'value'] result2 = self.df.melt(id_vars=['id1', 'id2']) - self.assertEqual(result2.columns.tolist(), ['id1', 'id2', 'variable', - 'value']) + assert result2.columns.tolist() == ['id1', 'id2', 'variable', 'value'] def test_value_vars(self): result3 = self.df.melt(id_vars=['id1', 'id2'], value_vars='A') - self.assertEqual(len(result3), 10) + assert len(result3) == 10 result4 = self.df.melt(id_vars=['id1', 'id2'], value_vars=['A', 'B']) expected4 = DataFrame({'id1': self.df['id1'].tolist() * 2, @@ -122,19 +120,17 @@ def test_tuple_vars_fail_with_multiindex(self): def test_custom_var_name(self): result5 = self.df.melt(var_name=self.var_name) - self.assertEqual(result5.columns.tolist(), ['var', 'value']) + assert result5.columns.tolist() == ['var', 'value'] result6 = self.df.melt(id_vars=['id1'], var_name=self.var_name) - self.assertEqual(result6.columns.tolist(), ['id1', 'var', 'value']) + assert result6.columns.tolist() == ['id1', 'var', 'value'] result7 = self.df.melt(id_vars=['id1', 'id2'], var_name=self.var_name) - self.assertEqual(result7.columns.tolist(), ['id1', 'id2', 'var', - 'value']) + assert result7.columns.tolist() == ['id1', 'id2', 'var', 'value'] result8 = self.df.melt(id_vars=['id1', 'id2'], value_vars='A', var_name=self.var_name) - self.assertEqual(result8.columns.tolist(), ['id1', 'id2', 'var', - 'value']) + assert result8.columns.tolist() == ['id1', 'id2', 'var', 'value'] result9 = self.df.melt(id_vars=['id1', 'id2'], value_vars=['A', 'B'], var_name=self.var_name) @@ -148,20 +144,18 @@ def test_custom_var_name(self): def test_custom_value_name(self): result10 = self.df.melt(value_name=self.value_name) - self.assertEqual(result10.columns.tolist(), ['variable', 'val']) + assert result10.columns.tolist() == ['variable', 'val'] result11 = self.df.melt(id_vars=['id1'], value_name=self.value_name) - self.assertEqual(result11.columns.tolist(), ['id1', 'variable', 'val']) + assert result11.columns.tolist() == ['id1', 'variable', 'val'] result12 = self.df.melt(id_vars=['id1', 'id2'], value_name=self.value_name) - self.assertEqual(result12.columns.tolist(), ['id1', 'id2', 'variable', - 'val']) + assert result12.columns.tolist() == ['id1', 'id2', 'variable', 'val'] result13 = self.df.melt(id_vars=['id1', 'id2'], value_vars='A', value_name=self.value_name) - self.assertEqual(result13.columns.tolist(), ['id1', 'id2', 'variable', - 'val']) + assert result13.columns.tolist() == ['id1', 'id2', 'variable', 'val'] result14 = self.df.melt(id_vars=['id1', 'id2'], value_vars=['A', 'B'], value_name=self.value_name) @@ -178,23 +172,21 @@ def test_custom_var_and_value_name(self): result15 = self.df.melt(var_name=self.var_name, 
value_name=self.value_name) - self.assertEqual(result15.columns.tolist(), ['var', 'val']) + assert result15.columns.tolist() == ['var', 'val'] result16 = self.df.melt(id_vars=['id1'], var_name=self.var_name, value_name=self.value_name) - self.assertEqual(result16.columns.tolist(), ['id1', 'var', 'val']) + assert result16.columns.tolist() == ['id1', 'var', 'val'] result17 = self.df.melt(id_vars=['id1', 'id2'], var_name=self.var_name, value_name=self.value_name) - self.assertEqual(result17.columns.tolist(), ['id1', 'id2', 'var', 'val' - ]) + assert result17.columns.tolist() == ['id1', 'id2', 'var', 'val'] result18 = self.df.melt(id_vars=['id1', 'id2'], value_vars='A', var_name=self.var_name, value_name=self.value_name) - self.assertEqual(result18.columns.tolist(), ['id1', 'id2', 'var', 'val' - ]) + assert result18.columns.tolist() == ['id1', 'id2', 'var', 'val'] result19 = self.df.melt(id_vars=['id1', 'id2'], value_vars=['A', 'B'], var_name=self.var_name, @@ -211,17 +203,17 @@ def test_custom_var_and_value_name(self): df20 = self.df.copy() df20.columns.name = 'foo' result20 = df20.melt() - self.assertEqual(result20.columns.tolist(), ['foo', 'value']) + assert result20.columns.tolist() == ['foo', 'value'] def test_col_level(self): res1 = self.df1.melt(col_level=0) res2 = self.df1.melt(col_level='CAP') - self.assertEqual(res1.columns.tolist(), ['CAP', 'value']) - self.assertEqual(res2.columns.tolist(), ['CAP', 'value']) + assert res1.columns.tolist() == ['CAP', 'value'] + assert res2.columns.tolist() == ['CAP', 'value'] def test_multiindex(self): res = self.df1.melt() - self.assertEqual(res.columns.tolist(), ['CAP', 'low', 'value']) + assert res.columns.tolist() == ['CAP', 'low', 'value'] class TestGetDummies(tm.TestCase): @@ -298,13 +290,13 @@ def test_just_na(self): res_series_index = get_dummies(just_na_series_index, sparse=self.sparse) - self.assertEqual(res_list.empty, True) - self.assertEqual(res_series.empty, True) - self.assertEqual(res_series_index.empty, True) + assert res_list.empty + assert res_series.empty + assert res_series_index.empty - self.assertEqual(res_list.index.tolist(), [0]) - self.assertEqual(res_series.index.tolist(), [0]) - self.assertEqual(res_series_index.index.tolist(), ['A']) + assert res_list.index.tolist() == [0] + assert res_series.index.tolist() == [0] + assert res_series_index.index.tolist() == ['A'] def test_include_na(self): s = ['a', 'b', np.nan] @@ -784,7 +776,7 @@ def test_stubs(self): # TODO: unused? 
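For reference, a minimal standalone sketch of the rewrite pattern applied throughout this file: `self.assertEqual(x, y)` becomes a bare `assert x == y`, and comparisons against True/False collapse to plain truthiness checks. The small frame and Series below are hypothetical stand-ins, not fixtures from the test classes.

import numpy as np
import pandas as pd

df = pd.DataFrame({'id1': [1, 2], 'A': [3.0, 4.0]})
melted = df.melt(id_vars=['id1'])

# was: self.assertEqual(melted.columns.tolist(), ['id1', 'variable', 'value'])
assert melted.columns.tolist() == ['id1', 'variable', 'value']

# was: self.assertEqual(dummies.empty, True)
dummies = pd.get_dummies(pd.Series([np.nan]))  # all-NaN input yields no dummy columns
assert dummies.empty
assert dummies.index.tolist() == [0]  # the original index is preserved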
df_long = pd.wide_to_long(df, stubs, i='id', j='age') # noqa - self.assertEqual(stubs, ['inc', 'edu']) + assert stubs == ['inc', 'edu'] def test_separating_character(self): # GH14779 diff --git a/pandas/tests/reshape/test_tile.py b/pandas/tests/reshape/test_tile.py index 923615c93d98b..2291030a2735c 100644 --- a/pandas/tests/reshape/test_tile.py +++ b/pandas/tests/reshape/test_tile.py @@ -122,7 +122,7 @@ def test_cut_pass_series_name_to_factor(self): s = Series(np.random.randn(100), name='foo') factor = cut(s, 4) - self.assertEqual(factor.name, 'foo') + assert factor.name == 'foo' def test_label_precision(self): arr = np.arange(0, 0.73, 0.01) @@ -158,16 +158,16 @@ def test_inf_handling(self): ex_uniques = IntervalIndex.from_breaks(bins) tm.assert_index_equal(result.categories, ex_uniques) - self.assertEqual(result[5], Interval(4, np.inf)) - self.assertEqual(result[0], Interval(-np.inf, 2)) - self.assertEqual(result_ser[5], Interval(4, np.inf)) - self.assertEqual(result_ser[0], Interval(-np.inf, 2)) + assert result[5] == Interval(4, np.inf) + assert result[0] == Interval(-np.inf, 2) + assert result_ser[5] == Interval(4, np.inf) + assert result_ser[0] == Interval(-np.inf, 2) def test_qcut(self): arr = np.random.randn(1000) - # we store the bins as Index that have been rounded - # to comparisions are a bit tricky + # We store the bins as Index that have been rounded + # to comparisons are a bit tricky. labels, bins = qcut(arr, 4, retbins=True) ex_bins = quantile(arr, [0, .25, .5, .75, 1.]) result = labels.categories.left.values @@ -182,7 +182,7 @@ def test_qcut_bounds(self): arr = np.random.randn(1000) factor = qcut(arr, 10, labels=False) - self.assertEqual(len(np.unique(factor)), 10) + assert len(np.unique(factor)) == 10 def test_qcut_specify_quantiles(self): arr = np.random.randn(100) @@ -253,14 +253,14 @@ def test_round_frac(self): # #1979, negative numbers result = tmod._round_frac(-117.9998, precision=3) - self.assertEqual(result, -118) + assert result == -118 result = tmod._round_frac(117.9998, precision=3) - self.assertEqual(result, 118) + assert result == 118 result = tmod._round_frac(117.9998, precision=2) - self.assertEqual(result, 118) + assert result == 118 result = tmod._round_frac(0.000123456, precision=2) - self.assertEqual(result, 0.00012) + assert result == 0.00012 def test_qcut_binning_issues(self): # #1978, 1979 diff --git a/pandas/tests/scalar/test_interval.py b/pandas/tests/scalar/test_interval.py index d77deabee58d4..079c41657bec6 100644 --- a/pandas/tests/scalar/test_interval.py +++ b/pandas/tests/scalar/test_interval.py @@ -10,20 +10,18 @@ def setUp(self): self.interval = Interval(0, 1) def test_properties(self): - self.assertEqual(self.interval.closed, 'right') - self.assertEqual(self.interval.left, 0) - self.assertEqual(self.interval.right, 1) - self.assertEqual(self.interval.mid, 0.5) + assert self.interval.closed == 'right' + assert self.interval.left == 0 + assert self.interval.right == 1 + assert self.interval.mid == 0.5 def test_repr(self): - self.assertEqual(repr(self.interval), - "Interval(0, 1, closed='right')") - self.assertEqual(str(self.interval), "(0, 1]") + assert repr(self.interval) == "Interval(0, 1, closed='right')" + assert str(self.interval) == "(0, 1]" interval_left = Interval(0, 1, closed='left') - self.assertEqual(repr(interval_left), - "Interval(0, 1, closed='left')") - self.assertEqual(str(interval_left), "[0, 1)") + assert repr(interval_left) == "Interval(0, 1, closed='left')" + assert str(interval_left) == "[0, 1)" def test_contains(self): 
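A similar standalone sketch for the scalar Interval assertions below, assuming only the public `pandas.Interval` constructor; the values mirror the tests, but the snippet itself is illustrative rather than part of the patch.

from pandas import Interval

iv = Interval(0, 1)  # closed='right' by default
assert iv.closed == 'right'
assert iv.mid == 0.5
assert 0.5 in iv  # membership respects which endpoints are closed
assert iv == Interval(0, 1, closed='right')
assert iv != Interval(0, 1, closed='left')
assert iv + 1 == Interval(1, 2)  # arithmetic shifts both endpoints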
assert 0.5 in self.interval @@ -41,9 +39,9 @@ def test_contains(self): assert 1 not in interval def test_equal(self): - self.assertEqual(Interval(0, 1), Interval(0, 1, closed='right')) - self.assertNotEqual(Interval(0, 1), Interval(0, 1, closed='left')) - self.assertNotEqual(Interval(0, 1), 0) + assert Interval(0, 1) == Interval(0, 1, closed='right') + assert Interval(0, 1) != Interval(0, 1, closed='left') + assert Interval(0, 1) != 0 def test_comparison(self): with tm.assert_raises_regex(TypeError, 'unorderable types'): @@ -63,15 +61,15 @@ def test_hash(self): def test_math_add(self): expected = Interval(1, 2) actual = self.interval + 1 - self.assertEqual(expected, actual) + assert expected == actual expected = Interval(1, 2) actual = 1 + self.interval - self.assertEqual(expected, actual) + assert expected == actual actual = self.interval actual += 1 - self.assertEqual(expected, actual) + assert expected == actual with pytest.raises(TypeError): self.interval + Interval(1, 2) @@ -82,11 +80,11 @@ def test_math_add(self): def test_math_sub(self): expected = Interval(-1, 0) actual = self.interval - 1 - self.assertEqual(expected, actual) + assert expected == actual actual = self.interval actual -= 1 - self.assertEqual(expected, actual) + assert expected == actual with pytest.raises(TypeError): self.interval - Interval(1, 2) @@ -97,15 +95,15 @@ def test_math_sub(self): def test_math_mult(self): expected = Interval(0, 2) actual = self.interval * 2 - self.assertEqual(expected, actual) + assert expected == actual expected = Interval(0, 2) actual = 2 * self.interval - self.assertEqual(expected, actual) + assert expected == actual actual = self.interval actual *= 2 - self.assertEqual(expected, actual) + assert expected == actual with pytest.raises(TypeError): self.interval * Interval(1, 2) @@ -116,11 +114,11 @@ def test_math_mult(self): def test_math_div(self): expected = Interval(0, 0.5) actual = self.interval / 2.0 - self.assertEqual(expected, actual) + assert expected == actual actual = self.interval actual /= 2.0 - self.assertEqual(expected, actual) + assert expected == actual with pytest.raises(TypeError): self.interval / Interval(1, 2) diff --git a/pandas/tests/scalar/test_period.py b/pandas/tests/scalar/test_period.py index fc0921451c133..00a1fa1b507b6 100644 --- a/pandas/tests/scalar/test_period.py +++ b/pandas/tests/scalar/test_period.py @@ -35,18 +35,18 @@ def test_is_leap_year(self): def test_quarterly_negative_ordinals(self): p = Period(ordinal=-1, freq='Q-DEC') - self.assertEqual(p.year, 1969) - self.assertEqual(p.quarter, 4) + assert p.year == 1969 + assert p.quarter == 4 assert isinstance(p, Period) p = Period(ordinal=-2, freq='Q-DEC') - self.assertEqual(p.year, 1969) - self.assertEqual(p.quarter, 3) + assert p.year == 1969 + assert p.quarter == 3 assert isinstance(p, Period) p = Period(ordinal=-2, freq='M') - self.assertEqual(p.year, 1969) - self.assertEqual(p.month, 11) + assert p.year == 1969 + assert p.month == 11 assert isinstance(p, Period) def test_period_cons_quarterly(self): @@ -57,11 +57,11 @@ def test_period_cons_quarterly(self): assert '1989Q3' in str(exp) stamp = exp.to_timestamp('D', how='end') p = Period(stamp, freq=freq) - self.assertEqual(p, exp) + assert p == exp stamp = exp.to_timestamp('3D', how='end') p = Period(stamp, freq=freq) - self.assertEqual(p, exp) + assert p == exp def test_period_cons_annual(self): # bugs in scikits.timeseries @@ -70,7 +70,7 @@ def test_period_cons_annual(self): exp = Period('1989', freq=freq) stamp = exp.to_timestamp('D', how='end') + 
timedelta(days=30) p = Period(stamp, freq=freq) - self.assertEqual(p, exp + 1) + assert p == exp + 1 assert isinstance(p, Period) def test_period_cons_weekly(self): @@ -81,13 +81,13 @@ def test_period_cons_weekly(self): result = Period(daystr, freq=freq) expected = Period(daystr, freq='D').asfreq(freq) - self.assertEqual(result, expected) + assert result == expected assert isinstance(result, Period) def test_period_from_ordinal(self): p = pd.Period('2011-01', freq='M') res = pd.Period._from_ordinal(p.ordinal, freq='M') - self.assertEqual(p, res) + assert p == res assert isinstance(res, Period) def test_period_cons_nat(self): @@ -115,23 +115,23 @@ def test_period_cons_nat(self): def test_period_cons_mult(self): p1 = Period('2011-01', freq='3M') p2 = Period('2011-01', freq='M') - self.assertEqual(p1.ordinal, p2.ordinal) + assert p1.ordinal == p2.ordinal - self.assertEqual(p1.freq, offsets.MonthEnd(3)) - self.assertEqual(p1.freqstr, '3M') + assert p1.freq == offsets.MonthEnd(3) + assert p1.freqstr == '3M' - self.assertEqual(p2.freq, offsets.MonthEnd()) - self.assertEqual(p2.freqstr, 'M') + assert p2.freq == offsets.MonthEnd() + assert p2.freqstr == 'M' result = p1 + 1 - self.assertEqual(result.ordinal, (p2 + 3).ordinal) - self.assertEqual(result.freq, p1.freq) - self.assertEqual(result.freqstr, '3M') + assert result.ordinal == (p2 + 3).ordinal + assert result.freq == p1.freq + assert result.freqstr == '3M' result = p1 - 1 - self.assertEqual(result.ordinal, (p2 - 3).ordinal) - self.assertEqual(result.freq, p1.freq) - self.assertEqual(result.freqstr, '3M') + assert result.ordinal == (p2 - 3).ordinal + assert result.freq == p1.freq + assert result.freqstr == '3M' msg = ('Frequency must be positive, because it' ' represents span: -3M') @@ -151,37 +151,37 @@ def test_period_cons_combined(self): Period(ordinal=1, freq='H'))] for p1, p2, p3 in p: - self.assertEqual(p1.ordinal, p3.ordinal) - self.assertEqual(p2.ordinal, p3.ordinal) + assert p1.ordinal == p3.ordinal + assert p2.ordinal == p3.ordinal - self.assertEqual(p1.freq, offsets.Hour(25)) - self.assertEqual(p1.freqstr, '25H') + assert p1.freq == offsets.Hour(25) + assert p1.freqstr == '25H' - self.assertEqual(p2.freq, offsets.Hour(25)) - self.assertEqual(p2.freqstr, '25H') + assert p2.freq == offsets.Hour(25) + assert p2.freqstr == '25H' - self.assertEqual(p3.freq, offsets.Hour()) - self.assertEqual(p3.freqstr, 'H') + assert p3.freq == offsets.Hour() + assert p3.freqstr == 'H' result = p1 + 1 - self.assertEqual(result.ordinal, (p3 + 25).ordinal) - self.assertEqual(result.freq, p1.freq) - self.assertEqual(result.freqstr, '25H') + assert result.ordinal == (p3 + 25).ordinal + assert result.freq == p1.freq + assert result.freqstr == '25H' result = p2 + 1 - self.assertEqual(result.ordinal, (p3 + 25).ordinal) - self.assertEqual(result.freq, p2.freq) - self.assertEqual(result.freqstr, '25H') + assert result.ordinal == (p3 + 25).ordinal + assert result.freq == p2.freq + assert result.freqstr == '25H' result = p1 - 1 - self.assertEqual(result.ordinal, (p3 - 25).ordinal) - self.assertEqual(result.freq, p1.freq) - self.assertEqual(result.freqstr, '25H') + assert result.ordinal == (p3 - 25).ordinal + assert result.freq == p1.freq + assert result.freqstr == '25H' result = p2 - 1 - self.assertEqual(result.ordinal, (p3 - 25).ordinal) - self.assertEqual(result.freq, p2.freq) - self.assertEqual(result.freqstr, '25H') + assert result.ordinal == (p3 - 25).ordinal + assert result.freq == p2.freq + assert result.freqstr == '25H' msg = ('Frequency must be positive, 
because it' ' represents span: -25H') @@ -217,33 +217,33 @@ def test_timestamp_tz_arg(self): exp = Timestamp('1/1/2005', tz='UTC').tz_convert(case) exp_zone = pytz.timezone(case).normalize(p) - self.assertEqual(p, exp) - self.assertEqual(p.tz, exp_zone.tzinfo) - self.assertEqual(p.tz, exp.tz) + assert p == exp + assert p.tz == exp_zone.tzinfo + assert p.tz == exp.tz p = Period('1/1/2005', freq='3H').to_timestamp(tz=case) exp = Timestamp('1/1/2005', tz='UTC').tz_convert(case) exp_zone = pytz.timezone(case).normalize(p) - self.assertEqual(p, exp) - self.assertEqual(p.tz, exp_zone.tzinfo) - self.assertEqual(p.tz, exp.tz) + assert p == exp + assert p.tz == exp_zone.tzinfo + assert p.tz == exp.tz p = Period('1/1/2005', freq='A').to_timestamp(freq='A', tz=case) exp = Timestamp('31/12/2005', tz='UTC').tz_convert(case) exp_zone = pytz.timezone(case).normalize(p) - self.assertEqual(p, exp) - self.assertEqual(p.tz, exp_zone.tzinfo) - self.assertEqual(p.tz, exp.tz) + assert p == exp + assert p.tz == exp_zone.tzinfo + assert p.tz == exp.tz p = Period('1/1/2005', freq='A').to_timestamp(freq='3H', tz=case) exp = Timestamp('1/1/2005', tz='UTC').tz_convert(case) exp_zone = pytz.timezone(case).normalize(p) - self.assertEqual(p, exp) - self.assertEqual(p.tz, exp_zone.tzinfo) - self.assertEqual(p.tz, exp.tz) + assert p == exp + assert p.tz == exp_zone.tzinfo + assert p.tz == exp.tz def test_timestamp_tz_arg_dateutil(self): from pandas._libs.tslib import _dateutil_gettz as gettz @@ -253,86 +253,86 @@ def test_timestamp_tz_arg_dateutil(self): p = Period('1/1/2005', freq='M').to_timestamp( tz=maybe_get_tz(case)) exp = Timestamp('1/1/2005', tz='UTC').tz_convert(case) - self.assertEqual(p, exp) - self.assertEqual(p.tz, gettz(case.split('/', 1)[1])) - self.assertEqual(p.tz, exp.tz) + assert p == exp + assert p.tz == gettz(case.split('/', 1)[1]) + assert p.tz == exp.tz p = Period('1/1/2005', freq='M').to_timestamp(freq='3H', tz=maybe_get_tz(case)) exp = Timestamp('1/1/2005', tz='UTC').tz_convert(case) - self.assertEqual(p, exp) - self.assertEqual(p.tz, gettz(case.split('/', 1)[1])) - self.assertEqual(p.tz, exp.tz) + assert p == exp + assert p.tz == gettz(case.split('/', 1)[1]) + assert p.tz == exp.tz def test_timestamp_tz_arg_dateutil_from_string(self): from pandas._libs.tslib import _dateutil_gettz as gettz p = Period('1/1/2005', freq='M').to_timestamp(tz='dateutil/Europe/Brussels') - self.assertEqual(p.tz, gettz('Europe/Brussels')) + assert p.tz == gettz('Europe/Brussels') def test_timestamp_mult(self): p = pd.Period('2011-01', freq='M') - self.assertEqual(p.to_timestamp(how='S'), pd.Timestamp('2011-01-01')) - self.assertEqual(p.to_timestamp(how='E'), pd.Timestamp('2011-01-31')) + assert p.to_timestamp(how='S') == pd.Timestamp('2011-01-01') + assert p.to_timestamp(how='E') == pd.Timestamp('2011-01-31') p = pd.Period('2011-01', freq='3M') - self.assertEqual(p.to_timestamp(how='S'), pd.Timestamp('2011-01-01')) - self.assertEqual(p.to_timestamp(how='E'), pd.Timestamp('2011-03-31')) + assert p.to_timestamp(how='S') == pd.Timestamp('2011-01-01') + assert p.to_timestamp(how='E') == pd.Timestamp('2011-03-31') def test_construction(self): i1 = Period('1/1/2005', freq='M') i2 = Period('Jan 2005') - self.assertEqual(i1, i2) + assert i1 == i2 i1 = Period('2005', freq='A') i2 = Period('2005') i3 = Period('2005', freq='a') - self.assertEqual(i1, i2) - self.assertEqual(i1, i3) + assert i1 == i2 + assert i1 == i3 i4 = Period('2005', freq='M') i5 = Period('2005', freq='m') pytest.raises(ValueError, i1.__ne__, i4) - 
self.assertEqual(i4, i5) + assert i4 == i5 i1 = Period.now('Q') i2 = Period(datetime.now(), freq='Q') i3 = Period.now('q') - self.assertEqual(i1, i2) - self.assertEqual(i1, i3) + assert i1 == i2 + assert i1 == i3 i1 = Period('1982', freq='min') i2 = Period('1982', freq='MIN') - self.assertEqual(i1, i2) + assert i1 == i2 i2 = Period('1982', freq=('Min', 1)) - self.assertEqual(i1, i2) + assert i1 == i2 i1 = Period(year=2005, month=3, day=1, freq='D') i2 = Period('3/1/2005', freq='D') - self.assertEqual(i1, i2) + assert i1 == i2 i3 = Period(year=2005, month=3, day=1, freq='d') - self.assertEqual(i1, i3) + assert i1 == i3 i1 = Period('2007-01-01 09:00:00.001') expected = Period(datetime(2007, 1, 1, 9, 0, 0, 1000), freq='L') - self.assertEqual(i1, expected) + assert i1 == expected expected = Period(np_datetime64_compat( '2007-01-01 09:00:00.001Z'), freq='L') - self.assertEqual(i1, expected) + assert i1 == expected i1 = Period('2007-01-01 09:00:00.00101') expected = Period(datetime(2007, 1, 1, 9, 0, 0, 1010), freq='U') - self.assertEqual(i1, expected) + assert i1 == expected expected = Period(np_datetime64_compat('2007-01-01 09:00:00.00101Z'), freq='U') - self.assertEqual(i1, expected) + assert i1 == expected pytest.raises(ValueError, Period, ordinal=200701) @@ -343,157 +343,155 @@ def test_construction_bday(self): # Biz day construction, roll forward if non-weekday i1 = Period('3/10/12', freq='B') i2 = Period('3/10/12', freq='D') - self.assertEqual(i1, i2.asfreq('B')) + assert i1 == i2.asfreq('B') i2 = Period('3/11/12', freq='D') - self.assertEqual(i1, i2.asfreq('B')) + assert i1 == i2.asfreq('B') i2 = Period('3/12/12', freq='D') - self.assertEqual(i1, i2.asfreq('B')) + assert i1 == i2.asfreq('B') i3 = Period('3/10/12', freq='b') - self.assertEqual(i1, i3) + assert i1 == i3 i1 = Period(year=2012, month=3, day=10, freq='B') i2 = Period('3/12/12', freq='B') - self.assertEqual(i1, i2) + assert i1 == i2 def test_construction_quarter(self): i1 = Period(year=2005, quarter=1, freq='Q') i2 = Period('1/1/2005', freq='Q') - self.assertEqual(i1, i2) + assert i1 == i2 i1 = Period(year=2005, quarter=3, freq='Q') i2 = Period('9/1/2005', freq='Q') - self.assertEqual(i1, i2) + assert i1 == i2 i1 = Period('2005Q1') i2 = Period(year=2005, quarter=1, freq='Q') i3 = Period('2005q1') - self.assertEqual(i1, i2) - self.assertEqual(i1, i3) + assert i1 == i2 + assert i1 == i3 i1 = Period('05Q1') - self.assertEqual(i1, i2) + assert i1 == i2 lower = Period('05q1') - self.assertEqual(i1, lower) + assert i1 == lower i1 = Period('1Q2005') - self.assertEqual(i1, i2) + assert i1 == i2 lower = Period('1q2005') - self.assertEqual(i1, lower) + assert i1 == lower i1 = Period('1Q05') - self.assertEqual(i1, i2) + assert i1 == i2 lower = Period('1q05') - self.assertEqual(i1, lower) + assert i1 == lower i1 = Period('4Q1984') - self.assertEqual(i1.year, 1984) + assert i1.year == 1984 lower = Period('4q1984') - self.assertEqual(i1, lower) + assert i1 == lower def test_construction_month(self): expected = Period('2007-01', freq='M') i1 = Period('200701', freq='M') - self.assertEqual(i1, expected) + assert i1 == expected i1 = Period('200701', freq='M') - self.assertEqual(i1, expected) + assert i1 == expected i1 = Period(200701, freq='M') - self.assertEqual(i1, expected) + assert i1 == expected i1 = Period(ordinal=200701, freq='M') - self.assertEqual(i1.year, 18695) + assert i1.year == 18695 i1 = Period(datetime(2007, 1, 1), freq='M') i2 = Period('200701', freq='M') - self.assertEqual(i1, i2) + assert i1 == i2 i1 = Period(date(2007, 1, 
1), freq='M') i2 = Period(datetime(2007, 1, 1), freq='M') i3 = Period(np.datetime64('2007-01-01'), freq='M') i4 = Period(np_datetime64_compat('2007-01-01 00:00:00Z'), freq='M') i5 = Period(np_datetime64_compat('2007-01-01 00:00:00.000Z'), freq='M') - self.assertEqual(i1, i2) - self.assertEqual(i1, i3) - self.assertEqual(i1, i4) - self.assertEqual(i1, i5) + assert i1 == i2 + assert i1 == i3 + assert i1 == i4 + assert i1 == i5 def test_period_constructor_offsets(self): - self.assertEqual(Period('1/1/2005', freq=offsets.MonthEnd()), - Period('1/1/2005', freq='M')) - self.assertEqual(Period('2005', freq=offsets.YearEnd()), - Period('2005', freq='A')) - self.assertEqual(Period('2005', freq=offsets.MonthEnd()), - Period('2005', freq='M')) - self.assertEqual(Period('3/10/12', freq=offsets.BusinessDay()), - Period('3/10/12', freq='B')) - self.assertEqual(Period('3/10/12', freq=offsets.Day()), - Period('3/10/12', freq='D')) - - self.assertEqual(Period(year=2005, quarter=1, - freq=offsets.QuarterEnd(startingMonth=12)), - Period(year=2005, quarter=1, freq='Q')) - self.assertEqual(Period(year=2005, quarter=2, - freq=offsets.QuarterEnd(startingMonth=12)), - Period(year=2005, quarter=2, freq='Q')) - - self.assertEqual(Period(year=2005, month=3, day=1, freq=offsets.Day()), - Period(year=2005, month=3, day=1, freq='D')) - self.assertEqual(Period(year=2012, month=3, day=10, - freq=offsets.BDay()), - Period(year=2012, month=3, day=10, freq='B')) + assert (Period('1/1/2005', freq=offsets.MonthEnd()) == + Period('1/1/2005', freq='M')) + assert (Period('2005', freq=offsets.YearEnd()) == + Period('2005', freq='A')) + assert (Period('2005', freq=offsets.MonthEnd()) == + Period('2005', freq='M')) + assert (Period('3/10/12', freq=offsets.BusinessDay()) == + Period('3/10/12', freq='B')) + assert (Period('3/10/12', freq=offsets.Day()) == + Period('3/10/12', freq='D')) + + assert (Period(year=2005, quarter=1, + freq=offsets.QuarterEnd(startingMonth=12)) == + Period(year=2005, quarter=1, freq='Q')) + assert (Period(year=2005, quarter=2, + freq=offsets.QuarterEnd(startingMonth=12)) == + Period(year=2005, quarter=2, freq='Q')) + + assert (Period(year=2005, month=3, day=1, freq=offsets.Day()) == + Period(year=2005, month=3, day=1, freq='D')) + assert (Period(year=2012, month=3, day=10, freq=offsets.BDay()) == + Period(year=2012, month=3, day=10, freq='B')) expected = Period('2005-03-01', freq='3D') - self.assertEqual(Period(year=2005, month=3, day=1, - freq=offsets.Day(3)), expected) - self.assertEqual(Period(year=2005, month=3, day=1, freq='3D'), - expected) + assert (Period(year=2005, month=3, day=1, + freq=offsets.Day(3)) == expected) + assert Period(year=2005, month=3, day=1, freq='3D') == expected - self.assertEqual(Period(year=2012, month=3, day=10, - freq=offsets.BDay(3)), - Period(year=2012, month=3, day=10, freq='3B')) + assert (Period(year=2012, month=3, day=10, + freq=offsets.BDay(3)) == + Period(year=2012, month=3, day=10, freq='3B')) - self.assertEqual(Period(200701, freq=offsets.MonthEnd()), - Period(200701, freq='M')) + assert (Period(200701, freq=offsets.MonthEnd()) == + Period(200701, freq='M')) i1 = Period(ordinal=200701, freq=offsets.MonthEnd()) i2 = Period(ordinal=200701, freq='M') - self.assertEqual(i1, i2) - self.assertEqual(i1.year, 18695) - self.assertEqual(i2.year, 18695) + assert i1 == i2 + assert i1.year == 18695 + assert i2.year == 18695 i1 = Period(datetime(2007, 1, 1), freq='M') i2 = Period('200701', freq='M') - self.assertEqual(i1, i2) + assert i1 == i2 i1 = Period(date(2007, 1, 1), 
freq='M') i2 = Period(datetime(2007, 1, 1), freq='M') i3 = Period(np.datetime64('2007-01-01'), freq='M') i4 = Period(np_datetime64_compat('2007-01-01 00:00:00Z'), freq='M') i5 = Period(np_datetime64_compat('2007-01-01 00:00:00.000Z'), freq='M') - self.assertEqual(i1, i2) - self.assertEqual(i1, i3) - self.assertEqual(i1, i4) - self.assertEqual(i1, i5) + assert i1 == i2 + assert i1 == i3 + assert i1 == i4 + assert i1 == i5 i1 = Period('2007-01-01 09:00:00.001') expected = Period(datetime(2007, 1, 1, 9, 0, 0, 1000), freq='L') - self.assertEqual(i1, expected) + assert i1 == expected expected = Period(np_datetime64_compat( '2007-01-01 09:00:00.001Z'), freq='L') - self.assertEqual(i1, expected) + assert i1 == expected i1 = Period('2007-01-01 09:00:00.00101') expected = Period(datetime(2007, 1, 1, 9, 0, 0, 1010), freq='U') - self.assertEqual(i1, expected) + assert i1 == expected expected = Period(np_datetime64_compat('2007-01-01 09:00:00.00101Z'), freq='U') - self.assertEqual(i1, expected) + assert i1 == expected pytest.raises(ValueError, Period, ordinal=200701) @@ -501,8 +499,8 @@ def test_period_constructor_offsets(self): def test_freq_str(self): i1 = Period('1982', freq='Min') - self.assertEqual(i1.freq, offsets.Minute()) - self.assertEqual(i1.freqstr, 'T') + assert i1.freq == offsets.Minute() + assert i1.freqstr == 'T' def test_period_deprecated_freq(self): cases = {"M": ["MTH", "MONTH", "MONTHLY", "Mth", "month", "monthly"], @@ -530,17 +528,17 @@ def test_period_deprecated_freq(self): assert isinstance(p2, Period) def test_hash(self): - self.assertEqual(hash(Period('2011-01', freq='M')), - hash(Period('2011-01', freq='M'))) + assert (hash(Period('2011-01', freq='M')) == + hash(Period('2011-01', freq='M'))) - self.assertNotEqual(hash(Period('2011-01-01', freq='D')), - hash(Period('2011-01', freq='M'))) + assert (hash(Period('2011-01-01', freq='D')) != + hash(Period('2011-01', freq='M'))) - self.assertNotEqual(hash(Period('2011-01', freq='3M')), - hash(Period('2011-01', freq='2M'))) + assert (hash(Period('2011-01', freq='3M')) != + hash(Period('2011-01', freq='2M'))) - self.assertNotEqual(hash(Period('2011-01', freq='M')), - hash(Period('2011-02', freq='M'))) + assert (hash(Period('2011-01', freq='M')) != + hash(Period('2011-02', freq='M'))) def test_repr(self): p = Period('Jan-2000') @@ -556,23 +554,23 @@ def test_repr_nat(self): def test_millisecond_repr(self): p = Period('2000-01-01 12:15:02.123') - self.assertEqual("Period('2000-01-01 12:15:02.123', 'L')", repr(p)) + assert repr(p) == "Period('2000-01-01 12:15:02.123', 'L')" def test_microsecond_repr(self): p = Period('2000-01-01 12:15:02.123567') - self.assertEqual("Period('2000-01-01 12:15:02.123567', 'U')", repr(p)) + assert repr(p) == "Period('2000-01-01 12:15:02.123567', 'U')" def test_strftime(self): p = Period('2000-1-1 12:34:12', freq='S') res = p.strftime('%Y-%m-%d %H:%M:%S') - self.assertEqual(res, '2000-01-01 12:34:12') + assert res == '2000-01-01 12:34:12' assert isinstance(res, text_type) # GH3363 def test_sub_delta(self): left, right = Period('2011', freq='A'), Period('2007', freq='A') result = left - right - self.assertEqual(result, 4) + assert result == 4 with pytest.raises(period.IncompatibleFrequency): left - Period('2007-01', freq='M') @@ -582,15 +580,15 @@ def test_to_timestamp(self): start_ts = p.to_timestamp(how='S') aliases = ['s', 'StarT', 'BEGIn'] for a in aliases: - self.assertEqual(start_ts, p.to_timestamp('D', how=a)) + assert start_ts == p.to_timestamp('D', how=a) # freq with mult should not affect to the 
result - self.assertEqual(start_ts, p.to_timestamp('3D', how=a)) + assert start_ts == p.to_timestamp('3D', how=a) end_ts = p.to_timestamp(how='E') aliases = ['e', 'end', 'FINIsH'] for a in aliases: - self.assertEqual(end_ts, p.to_timestamp('D', how=a)) - self.assertEqual(end_ts, p.to_timestamp('3D', how=a)) + assert end_ts == p.to_timestamp('D', how=a) + assert end_ts == p.to_timestamp('3D', how=a) from_lst = ['A', 'Q', 'M', 'W', 'B', 'D', 'H', 'Min', 'S'] @@ -600,11 +598,11 @@ def _ex(p): for i, fcode in enumerate(from_lst): p = Period('1982', freq=fcode) result = p.to_timestamp().to_period(fcode) - self.assertEqual(result, p) + assert result == p - self.assertEqual(p.start_time, p.to_timestamp(how='S')) + assert p.start_time == p.to_timestamp(how='S') - self.assertEqual(p.end_time, _ex(p)) + assert p.end_time == _ex(p) # Frequency other than daily @@ -612,42 +610,40 @@ def _ex(p): result = p.to_timestamp('H', how='end') expected = datetime(1985, 12, 31, 23) - self.assertEqual(result, expected) + assert result == expected result = p.to_timestamp('3H', how='end') - self.assertEqual(result, expected) + assert result == expected result = p.to_timestamp('T', how='end') expected = datetime(1985, 12, 31, 23, 59) - self.assertEqual(result, expected) + assert result == expected result = p.to_timestamp('2T', how='end') - self.assertEqual(result, expected) + assert result == expected result = p.to_timestamp(how='end') expected = datetime(1985, 12, 31) - self.assertEqual(result, expected) + assert result == expected expected = datetime(1985, 1, 1) result = p.to_timestamp('H', how='start') - self.assertEqual(result, expected) + assert result == expected result = p.to_timestamp('T', how='start') - self.assertEqual(result, expected) + assert result == expected result = p.to_timestamp('S', how='start') - self.assertEqual(result, expected) + assert result == expected result = p.to_timestamp('3H', how='start') - self.assertEqual(result, expected) + assert result == expected result = p.to_timestamp('5S', how='start') - self.assertEqual(result, expected) + assert result == expected def test_start_time(self): freq_lst = ['A', 'Q', 'M', 'D', 'H', 'T', 'S'] xp = datetime(2012, 1, 1) for f in freq_lst: p = Period('2012', freq=f) - self.assertEqual(p.start_time, xp) - self.assertEqual(Period('2012', freq='B').start_time, - datetime(2012, 1, 2)) - self.assertEqual(Period('2012', freq='W').start_time, - datetime(2011, 12, 26)) + assert p.start_time == xp + assert Period('2012', freq='B').start_time == datetime(2012, 1, 2) + assert Period('2012', freq='W').start_time == datetime(2011, 12, 26) def test_end_time(self): p = Period('2012', freq='A') @@ -656,44 +652,44 @@ def _ex(*args): return Timestamp(Timestamp(datetime(*args)).value - 1) xp = _ex(2013, 1, 1) - self.assertEqual(xp, p.end_time) + assert xp == p.end_time p = Period('2012', freq='Q') xp = _ex(2012, 4, 1) - self.assertEqual(xp, p.end_time) + assert xp == p.end_time p = Period('2012', freq='M') xp = _ex(2012, 2, 1) - self.assertEqual(xp, p.end_time) + assert xp == p.end_time p = Period('2012', freq='D') xp = _ex(2012, 1, 2) - self.assertEqual(xp, p.end_time) + assert xp == p.end_time p = Period('2012', freq='H') xp = _ex(2012, 1, 1, 1) - self.assertEqual(xp, p.end_time) + assert xp == p.end_time p = Period('2012', freq='B') xp = _ex(2012, 1, 3) - self.assertEqual(xp, p.end_time) + assert xp == p.end_time p = Period('2012', freq='W') xp = _ex(2012, 1, 2) - self.assertEqual(xp, p.end_time) + assert xp == p.end_time # Test for GH 11738 p = Period('2012', 
freq='15D') xp = _ex(2012, 1, 16) - self.assertEqual(xp, p.end_time) + assert xp == p.end_time p = Period('2012', freq='1D1H') xp = _ex(2012, 1, 2, 1) - self.assertEqual(xp, p.end_time) + assert xp == p.end_time p = Period('2012', freq='1H1D') xp = _ex(2012, 1, 2, 1) - self.assertEqual(xp, p.end_time) + assert xp == p.end_time def test_anchor_week_end_time(self): def _ex(*args): @@ -701,12 +697,12 @@ def _ex(*args): p = Period('2013-1-1', 'W-SAT') xp = _ex(2013, 1, 6) - self.assertEqual(p.end_time, xp) + assert p.end_time == xp def test_properties_annually(self): # Test properties on Periods with annually frequency. a_date = Period(freq='A', year=2007) - self.assertEqual(a_date.year, 2007) + assert a_date.year == 2007 def test_properties_quarterly(self): # Test properties on Periods with daily frequency. @@ -716,50 +712,50 @@ def test_properties_quarterly(self): # for x in range(3): for qd in (qedec_date, qejan_date, qejun_date): - self.assertEqual((qd + x).qyear, 2007) - self.assertEqual((qd + x).quarter, x + 1) + assert (qd + x).qyear == 2007 + assert (qd + x).quarter == x + 1 def test_properties_monthly(self): # Test properties on Periods with daily frequency. m_date = Period(freq='M', year=2007, month=1) for x in range(11): m_ival_x = m_date + x - self.assertEqual(m_ival_x.year, 2007) + assert m_ival_x.year == 2007 if 1 <= x + 1 <= 3: - self.assertEqual(m_ival_x.quarter, 1) + assert m_ival_x.quarter == 1 elif 4 <= x + 1 <= 6: - self.assertEqual(m_ival_x.quarter, 2) + assert m_ival_x.quarter == 2 elif 7 <= x + 1 <= 9: - self.assertEqual(m_ival_x.quarter, 3) + assert m_ival_x.quarter == 3 elif 10 <= x + 1 <= 12: - self.assertEqual(m_ival_x.quarter, 4) - self.assertEqual(m_ival_x.month, x + 1) + assert m_ival_x.quarter == 4 + assert m_ival_x.month == x + 1 def test_properties_weekly(self): # Test properties on Periods with daily frequency. w_date = Period(freq='W', year=2007, month=1, day=7) # - self.assertEqual(w_date.year, 2007) - self.assertEqual(w_date.quarter, 1) - self.assertEqual(w_date.month, 1) - self.assertEqual(w_date.week, 1) - self.assertEqual((w_date - 1).week, 52) - self.assertEqual(w_date.days_in_month, 31) - self.assertEqual(Period(freq='W', year=2012, - month=2, day=1).days_in_month, 29) + assert w_date.year == 2007 + assert w_date.quarter == 1 + assert w_date.month == 1 + assert w_date.week == 1 + assert (w_date - 1).week == 52 + assert w_date.days_in_month == 31 + assert Period(freq='W', year=2012, + month=2, day=1).days_in_month == 29 def test_properties_weekly_legacy(self): # Test properties on Periods with daily frequency. w_date = Period(freq='W', year=2007, month=1, day=7) - self.assertEqual(w_date.year, 2007) - self.assertEqual(w_date.quarter, 1) - self.assertEqual(w_date.month, 1) - self.assertEqual(w_date.week, 1) - self.assertEqual((w_date - 1).week, 52) - self.assertEqual(w_date.days_in_month, 31) + assert w_date.year == 2007 + assert w_date.quarter == 1 + assert w_date.month == 1 + assert w_date.week == 1 + assert (w_date - 1).week == 52 + assert w_date.days_in_month == 31 exp = Period(freq='W', year=2012, month=2, day=1) - self.assertEqual(exp.days_in_month, 29) + assert exp.days_in_month == 29 msg = pd.tseries.frequencies._INVALID_FREQ_ERROR with tm.assert_raises_regex(ValueError, msg): @@ -769,27 +765,27 @@ def test_properties_daily(self): # Test properties on Periods with daily frequency. 
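The Period property checks that follow use the same bare-assert style; here is a small standalone illustration of the weekly case from the tests above (the variable name `w` is arbitrary).

from pandas import Period

w = Period(freq='W', year=2007, month=1, day=7)
assert (w.year, w.quarter, w.month, w.week) == (2007, 1, 1, 1)
assert (w - 1).week == 52  # the preceding weekly period falls in 2006
assert w.days_in_month == 31
assert Period(freq='W', year=2012, month=2, day=1).days_in_month == 29  # leap year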
b_date = Period(freq='B', year=2007, month=1, day=1) # - self.assertEqual(b_date.year, 2007) - self.assertEqual(b_date.quarter, 1) - self.assertEqual(b_date.month, 1) - self.assertEqual(b_date.day, 1) - self.assertEqual(b_date.weekday, 0) - self.assertEqual(b_date.dayofyear, 1) - self.assertEqual(b_date.days_in_month, 31) - self.assertEqual(Period(freq='B', year=2012, - month=2, day=1).days_in_month, 29) - # + assert b_date.year == 2007 + assert b_date.quarter == 1 + assert b_date.month == 1 + assert b_date.day == 1 + assert b_date.weekday == 0 + assert b_date.dayofyear == 1 + assert b_date.days_in_month == 31 + assert Period(freq='B', year=2012, + month=2, day=1).days_in_month == 29 + d_date = Period(freq='D', year=2007, month=1, day=1) - # - self.assertEqual(d_date.year, 2007) - self.assertEqual(d_date.quarter, 1) - self.assertEqual(d_date.month, 1) - self.assertEqual(d_date.day, 1) - self.assertEqual(d_date.weekday, 0) - self.assertEqual(d_date.dayofyear, 1) - self.assertEqual(d_date.days_in_month, 31) - self.assertEqual(Period(freq='D', year=2012, month=2, - day=1).days_in_month, 29) + + assert d_date.year == 2007 + assert d_date.quarter == 1 + assert d_date.month == 1 + assert d_date.day == 1 + assert d_date.weekday == 0 + assert d_date.dayofyear == 1 + assert d_date.days_in_month == 31 + assert Period(freq='D', year=2012, month=2, + day=1).days_in_month == 29 def test_properties_hourly(self): # Test properties on Periods with hourly frequency. @@ -797,50 +793,50 @@ def test_properties_hourly(self): h_date2 = Period(freq='2H', year=2007, month=1, day=1, hour=0) for h_date in [h_date1, h_date2]: - self.assertEqual(h_date.year, 2007) - self.assertEqual(h_date.quarter, 1) - self.assertEqual(h_date.month, 1) - self.assertEqual(h_date.day, 1) - self.assertEqual(h_date.weekday, 0) - self.assertEqual(h_date.dayofyear, 1) - self.assertEqual(h_date.hour, 0) - self.assertEqual(h_date.days_in_month, 31) - self.assertEqual(Period(freq='H', year=2012, month=2, day=1, - hour=0).days_in_month, 29) + assert h_date.year == 2007 + assert h_date.quarter == 1 + assert h_date.month == 1 + assert h_date.day == 1 + assert h_date.weekday == 0 + assert h_date.dayofyear == 1 + assert h_date.hour == 0 + assert h_date.days_in_month == 31 + assert Period(freq='H', year=2012, month=2, day=1, + hour=0).days_in_month == 29 def test_properties_minutely(self): # Test properties on Periods with minutely frequency. t_date = Period(freq='Min', year=2007, month=1, day=1, hour=0, minute=0) # - self.assertEqual(t_date.quarter, 1) - self.assertEqual(t_date.month, 1) - self.assertEqual(t_date.day, 1) - self.assertEqual(t_date.weekday, 0) - self.assertEqual(t_date.dayofyear, 1) - self.assertEqual(t_date.hour, 0) - self.assertEqual(t_date.minute, 0) - self.assertEqual(t_date.days_in_month, 31) - self.assertEqual(Period(freq='D', year=2012, month=2, day=1, hour=0, - minute=0).days_in_month, 29) + assert t_date.quarter == 1 + assert t_date.month == 1 + assert t_date.day == 1 + assert t_date.weekday == 0 + assert t_date.dayofyear == 1 + assert t_date.hour == 0 + assert t_date.minute == 0 + assert t_date.days_in_month == 31 + assert Period(freq='D', year=2012, month=2, day=1, hour=0, + minute=0).days_in_month == 29 def test_properties_secondly(self): # Test properties on Periods with secondly frequency. 
s_date = Period(freq='Min', year=2007, month=1, day=1, hour=0, minute=0, second=0) # - self.assertEqual(s_date.year, 2007) - self.assertEqual(s_date.quarter, 1) - self.assertEqual(s_date.month, 1) - self.assertEqual(s_date.day, 1) - self.assertEqual(s_date.weekday, 0) - self.assertEqual(s_date.dayofyear, 1) - self.assertEqual(s_date.hour, 0) - self.assertEqual(s_date.minute, 0) - self.assertEqual(s_date.second, 0) - self.assertEqual(s_date.days_in_month, 31) - self.assertEqual(Period(freq='Min', year=2012, month=2, day=1, hour=0, - minute=0, second=0).days_in_month, 29) + assert s_date.year == 2007 + assert s_date.quarter == 1 + assert s_date.month == 1 + assert s_date.day == 1 + assert s_date.weekday == 0 + assert s_date.dayofyear == 1 + assert s_date.hour == 0 + assert s_date.minute == 0 + assert s_date.second == 0 + assert s_date.days_in_month == 31 + assert Period(freq='Min', year=2012, month=2, day=1, hour=0, + minute=0, second=0).days_in_month == 29 def test_pnow(self): @@ -851,7 +847,7 @@ def test_pnow(self): def test_constructor_corner(self): expected = Period('2007-01', freq='2M') - self.assertEqual(Period(year=2007, month=1, freq='2M'), expected) + assert Period(year=2007, month=1, freq='2M') == expected pytest.raises(ValueError, Period, datetime.now()) pytest.raises(ValueError, Period, datetime.now().date()) @@ -865,29 +861,29 @@ def test_constructor_corner(self): result = Period(p, freq='A') exp = Period('2007', freq='A') - self.assertEqual(result, exp) + assert result == exp def test_constructor_infer_freq(self): p = Period('2007-01-01') - self.assertEqual(p.freq, 'D') + assert p.freq == 'D' p = Period('2007-01-01 07') - self.assertEqual(p.freq, 'H') + assert p.freq == 'H' p = Period('2007-01-01 07:10') - self.assertEqual(p.freq, 'T') + assert p.freq == 'T' p = Period('2007-01-01 07:10:15') - self.assertEqual(p.freq, 'S') + assert p.freq == 'S' p = Period('2007-01-01 07:10:15.123') - self.assertEqual(p.freq, 'L') + assert p.freq == 'L' p = Period('2007-01-01 07:10:15.123000') - self.assertEqual(p.freq, 'L') + assert p.freq == 'L' p = Period('2007-01-01 07:10:15.123400') - self.assertEqual(p.freq, 'U') + assert p.freq == 'U' def test_badinput(self): pytest.raises(ValueError, Period, '-2000', 'A') @@ -897,22 +893,22 @@ def test_badinput(self): def test_multiples(self): result1 = Period('1989', freq='2A') result2 = Period('1989', freq='A') - self.assertEqual(result1.ordinal, result2.ordinal) - self.assertEqual(result1.freqstr, '2A-DEC') - self.assertEqual(result2.freqstr, 'A-DEC') - self.assertEqual(result1.freq, offsets.YearEnd(2)) - self.assertEqual(result2.freq, offsets.YearEnd()) + assert result1.ordinal == result2.ordinal + assert result1.freqstr == '2A-DEC' + assert result2.freqstr == 'A-DEC' + assert result1.freq == offsets.YearEnd(2) + assert result2.freq == offsets.YearEnd() - self.assertEqual((result1 + 1).ordinal, result1.ordinal + 2) - self.assertEqual((1 + result1).ordinal, result1.ordinal + 2) - self.assertEqual((result1 - 1).ordinal, result2.ordinal - 2) - self.assertEqual((-1 + result1).ordinal, result2.ordinal - 2) + assert (result1 + 1).ordinal == result1.ordinal + 2 + assert (1 + result1).ordinal == result1.ordinal + 2 + assert (result1 - 1).ordinal == result2.ordinal - 2 + assert (-1 + result1).ordinal == result2.ordinal - 2 def test_round_trip(self): p = Period('2000Q1') new_p = tm.round_trip_pickle(p) - self.assertEqual(new_p, p) + assert new_p == p class TestPeriodField(tm.TestCase): @@ -935,7 +931,7 @@ def setUp(self): self.day = Period('2012-01-01', 'D') 
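The comparison tests that follow rewrite `assertEqual(expr, False)` as `assert not expr`; below is a standalone sketch of the NaT case, using throwaway names rather than the class fixtures.

from pandas import Period

p = Period('2011-01-01', freq='D')
p_nat = Period('NaT', freq='D')  # evaluates to NaT

# Ordering and equality against NaT are always False; only != is True,
# mirroring Timestamp('NaT') behaviour.
assert not p_nat < p
assert not p_nat > p
assert not p_nat == p
assert p_nat != p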
def test_equal(self): - self.assertEqual(self.january1, self.january2) + assert self.january1 == self.january2 def test_equal_Raises_Value(self): with pytest.raises(period.IncompatibleFrequency): @@ -991,7 +987,7 @@ def test_smaller_Raises_Type(self): def test_sort(self): periods = [self.march, self.january1, self.february] correctPeriods = [self.january1, self.february, self.march] - self.assertEqual(sorted(periods), correctPeriods) + assert sorted(periods) == correctPeriods def test_period_nat_comp(self): p_nat = Period('NaT', freq='D') @@ -1002,12 +998,12 @@ def test_period_nat_comp(self): # confirm Period('NaT') work identical with Timestamp('NaT') for left, right in [(p_nat, p), (p, p_nat), (p_nat, p_nat), (nat, t), (t, nat), (nat, nat)]: - self.assertEqual(left < right, False) - self.assertEqual(left > right, False) - self.assertEqual(left == right, False) - self.assertEqual(left != right, True) - self.assertEqual(left <= right, False) - self.assertEqual(left >= right, False) + assert not left < right + assert not left > right + assert not left == right + assert left != right + assert not left <= right + assert not left >= right class TestMethods(tm.TestCase): @@ -1015,8 +1011,8 @@ class TestMethods(tm.TestCase): def test_add(self): dt1 = Period(freq='D', year=2008, month=1, day=1) dt2 = Period(freq='D', year=2008, month=1, day=2) - self.assertEqual(dt1 + 1, dt2) - self.assertEqual(1 + dt1, dt2) + assert dt1 + 1 == dt2 + assert 1 + dt1 == dt2 def test_add_pdnat(self): p = pd.Period('2011-01', freq='M') @@ -1046,8 +1042,8 @@ def test_sub(self): dt1 = Period('2011-01-01', freq='D') dt2 = Period('2011-01-15', freq='D') - self.assertEqual(dt1 - dt2, -14) - self.assertEqual(dt2 - dt1, 14) + assert dt1 - dt2 == -14 + assert dt2 - dt1 == 14 msg = r"Input has different freq=M from Period\(freq=D\)" with tm.assert_raises_regex(period.IncompatibleFrequency, msg): @@ -1058,8 +1054,8 @@ def test_add_offset(self): for freq in ['A', '2A', '3A']: p = Period('2011', freq=freq) exp = Period('2013', freq=freq) - self.assertEqual(p + offsets.YearEnd(2), exp) - self.assertEqual(offsets.YearEnd(2) + p, exp) + assert p + offsets.YearEnd(2) == exp + assert offsets.YearEnd(2) + p == exp for o in [offsets.YearBegin(2), offsets.MonthBegin(1), offsets.Minute(), np.timedelta64(365, 'D'), @@ -1077,12 +1073,12 @@ def test_add_offset(self): for freq in ['M', '2M', '3M']: p = Period('2011-03', freq=freq) exp = Period('2011-05', freq=freq) - self.assertEqual(p + offsets.MonthEnd(2), exp) - self.assertEqual(offsets.MonthEnd(2) + p, exp) + assert p + offsets.MonthEnd(2) == exp + assert offsets.MonthEnd(2) + p == exp exp = Period('2012-03', freq=freq) - self.assertEqual(p + offsets.MonthEnd(12), exp) - self.assertEqual(offsets.MonthEnd(12) + p, exp) + assert p + offsets.MonthEnd(12) == exp + assert offsets.MonthEnd(12) + p == exp for o in [offsets.YearBegin(2), offsets.MonthBegin(1), offsets.Minute(), np.timedelta64(365, 'D'), @@ -1102,30 +1098,30 @@ def test_add_offset(self): p = Period('2011-04-01', freq=freq) exp = Period('2011-04-06', freq=freq) - self.assertEqual(p + offsets.Day(5), exp) - self.assertEqual(offsets.Day(5) + p, exp) + assert p + offsets.Day(5) == exp + assert offsets.Day(5) + p == exp exp = Period('2011-04-02', freq=freq) - self.assertEqual(p + offsets.Hour(24), exp) - self.assertEqual(offsets.Hour(24) + p, exp) + assert p + offsets.Hour(24) == exp + assert offsets.Hour(24) + p == exp exp = Period('2011-04-03', freq=freq) - self.assertEqual(p + np.timedelta64(2, 'D'), exp) + assert p + 
np.timedelta64(2, 'D') == exp with pytest.raises(TypeError): np.timedelta64(2, 'D') + p exp = Period('2011-04-02', freq=freq) - self.assertEqual(p + np.timedelta64(3600 * 24, 's'), exp) + assert p + np.timedelta64(3600 * 24, 's') == exp with pytest.raises(TypeError): np.timedelta64(3600 * 24, 's') + p exp = Period('2011-03-30', freq=freq) - self.assertEqual(p + timedelta(-2), exp) - self.assertEqual(timedelta(-2) + p, exp) + assert p + timedelta(-2) == exp + assert timedelta(-2) + p == exp exp = Period('2011-04-03', freq=freq) - self.assertEqual(p + timedelta(hours=48), exp) - self.assertEqual(timedelta(hours=48) + p, exp) + assert p + timedelta(hours=48) == exp + assert timedelta(hours=48) + p == exp for o in [offsets.YearBegin(2), offsets.MonthBegin(1), offsets.Minute(), np.timedelta64(4, 'h'), @@ -1144,30 +1140,30 @@ def test_add_offset(self): p = Period('2011-04-01 09:00', freq=freq) exp = Period('2011-04-03 09:00', freq=freq) - self.assertEqual(p + offsets.Day(2), exp) - self.assertEqual(offsets.Day(2) + p, exp) + assert p + offsets.Day(2) == exp + assert offsets.Day(2) + p == exp exp = Period('2011-04-01 12:00', freq=freq) - self.assertEqual(p + offsets.Hour(3), exp) - self.assertEqual(offsets.Hour(3) + p, exp) + assert p + offsets.Hour(3) == exp + assert offsets.Hour(3) + p == exp exp = Period('2011-04-01 12:00', freq=freq) - self.assertEqual(p + np.timedelta64(3, 'h'), exp) + assert p + np.timedelta64(3, 'h') == exp with pytest.raises(TypeError): np.timedelta64(3, 'h') + p exp = Period('2011-04-01 10:00', freq=freq) - self.assertEqual(p + np.timedelta64(3600, 's'), exp) + assert p + np.timedelta64(3600, 's') == exp with pytest.raises(TypeError): np.timedelta64(3600, 's') + p exp = Period('2011-04-01 11:00', freq=freq) - self.assertEqual(p + timedelta(minutes=120), exp) - self.assertEqual(timedelta(minutes=120) + p, exp) + assert p + timedelta(minutes=120) == exp + assert timedelta(minutes=120) + p == exp exp = Period('2011-04-05 12:00', freq=freq) - self.assertEqual(p + timedelta(days=4, minutes=180), exp) - self.assertEqual(timedelta(days=4, minutes=180) + p, exp) + assert p + timedelta(days=4, minutes=180) == exp + assert timedelta(days=4, minutes=180) + p == exp for o in [offsets.YearBegin(2), offsets.MonthBegin(1), offsets.Minute(), np.timedelta64(3200, 's'), @@ -1283,7 +1279,7 @@ def test_sub_offset(self): # freq is DateOffset for freq in ['A', '2A', '3A']: p = Period('2011', freq=freq) - self.assertEqual(p - offsets.YearEnd(2), Period('2009', freq=freq)) + assert p - offsets.YearEnd(2) == Period('2009', freq=freq) for o in [offsets.YearBegin(2), offsets.MonthBegin(1), offsets.Minute(), np.timedelta64(365, 'D'), @@ -1293,10 +1289,8 @@ def test_sub_offset(self): for freq in ['M', '2M', '3M']: p = Period('2011-03', freq=freq) - self.assertEqual(p - offsets.MonthEnd(2), - Period('2011-01', freq=freq)) - self.assertEqual(p - offsets.MonthEnd(12), - Period('2010-03', freq=freq)) + assert p - offsets.MonthEnd(2) == Period('2011-01', freq=freq) + assert p - offsets.MonthEnd(12) == Period('2010-03', freq=freq) for o in [offsets.YearBegin(2), offsets.MonthBegin(1), offsets.Minute(), np.timedelta64(365, 'D'), @@ -1307,18 +1301,14 @@ def test_sub_offset(self): # freq is Tick for freq in ['D', '2D', '3D']: p = Period('2011-04-01', freq=freq) - self.assertEqual(p - offsets.Day(5), - Period('2011-03-27', freq=freq)) - self.assertEqual(p - offsets.Hour(24), - Period('2011-03-31', freq=freq)) - self.assertEqual(p - np.timedelta64(2, 'D'), - Period('2011-03-30', freq=freq)) - 
self.assertEqual(p - np.timedelta64(3600 * 24, 's'), - Period('2011-03-31', freq=freq)) - self.assertEqual(p - timedelta(-2), - Period('2011-04-03', freq=freq)) - self.assertEqual(p - timedelta(hours=48), - Period('2011-03-30', freq=freq)) + assert p - offsets.Day(5) == Period('2011-03-27', freq=freq) + assert p - offsets.Hour(24) == Period('2011-03-31', freq=freq) + assert p - np.timedelta64(2, 'D') == Period( + '2011-03-30', freq=freq) + assert p - np.timedelta64(3600 * 24, 's') == Period( + '2011-03-31', freq=freq) + assert p - timedelta(-2) == Period('2011-04-03', freq=freq) + assert p - timedelta(hours=48) == Period('2011-03-30', freq=freq) for o in [offsets.YearBegin(2), offsets.MonthBegin(1), offsets.Minute(), np.timedelta64(4, 'h'), @@ -1328,18 +1318,16 @@ def test_sub_offset(self): for freq in ['H', '2H', '3H']: p = Period('2011-04-01 09:00', freq=freq) - self.assertEqual(p - offsets.Day(2), - Period('2011-03-30 09:00', freq=freq)) - self.assertEqual(p - offsets.Hour(3), - Period('2011-04-01 06:00', freq=freq)) - self.assertEqual(p - np.timedelta64(3, 'h'), - Period('2011-04-01 06:00', freq=freq)) - self.assertEqual(p - np.timedelta64(3600, 's'), - Period('2011-04-01 08:00', freq=freq)) - self.assertEqual(p - timedelta(minutes=120), - Period('2011-04-01 07:00', freq=freq)) - self.assertEqual(p - timedelta(days=4, minutes=180), - Period('2011-03-28 06:00', freq=freq)) + assert p - offsets.Day(2) == Period('2011-03-30 09:00', freq=freq) + assert p - offsets.Hour(3) == Period('2011-04-01 06:00', freq=freq) + assert p - np.timedelta64(3, 'h') == Period( + '2011-04-01 06:00', freq=freq) + assert p - np.timedelta64(3600, 's') == Period( + '2011-04-01 08:00', freq=freq) + assert p - timedelta(minutes=120) == Period( + '2011-04-01 07:00', freq=freq) + assert p - timedelta(days=4, minutes=180) == Period( + '2011-03-28 06:00', freq=freq) for o in [offsets.YearBegin(2), offsets.MonthBegin(1), offsets.Minute(), np.timedelta64(3200, 's'), @@ -1407,11 +1395,11 @@ def test_period_ops_offset(self): p = Period('2011-04-01', freq='D') result = p + offsets.Day() exp = pd.Period('2011-04-02', freq='D') - self.assertEqual(result, exp) + assert result == exp result = p - offsets.Day(2) exp = pd.Period('2011-03-30', freq='D') - self.assertEqual(result, exp) + assert result == exp msg = r"Input cannot be converted to Period\(freq=D\)" with tm.assert_raises_regex(period.IncompatibleFrequency, msg): diff --git a/pandas/tests/scalar/test_period_asfreq.py b/pandas/tests/scalar/test_period_asfreq.py index d31eeda5c8e3c..7011cfeef90ae 100644 --- a/pandas/tests/scalar/test_period_asfreq.py +++ b/pandas/tests/scalar/test_period_asfreq.py @@ -5,17 +5,17 @@ class TestFreqConversion(tm.TestCase): - "Test frequency conversion of date objects" + """Test frequency conversion of date objects""" def test_asfreq_corner(self): val = Period(freq='A', year=2007) result1 = val.asfreq('5t') result2 = val.asfreq('t') expected = Period('2007-12-31 23:59', freq='t') - self.assertEqual(result1.ordinal, expected.ordinal) - self.assertEqual(result1.freqstr, '5T') - self.assertEqual(result2.ordinal, expected.ordinal) - self.assertEqual(result2.freqstr, 'T') + assert result1.ordinal == expected.ordinal + assert result1.freqstr == '5T' + assert result2.ordinal == expected.ordinal + assert result2.freqstr == 'T' def test_conv_annual(self): # frequency conversion tests: from Annual Frequency @@ -55,35 +55,35 @@ def test_conv_annual(self): ival_ANOV_to_D_end = Period(freq='D', year=2007, month=11, day=30) ival_ANOV_to_D_start = 
Period(freq='D', year=2006, month=12, day=1) - self.assertEqual(ival_A.asfreq('Q', 'S'), ival_A_to_Q_start) - self.assertEqual(ival_A.asfreq('Q', 'e'), ival_A_to_Q_end) - self.assertEqual(ival_A.asfreq('M', 's'), ival_A_to_M_start) - self.assertEqual(ival_A.asfreq('M', 'E'), ival_A_to_M_end) - self.assertEqual(ival_A.asfreq('W', 'S'), ival_A_to_W_start) - self.assertEqual(ival_A.asfreq('W', 'E'), ival_A_to_W_end) - self.assertEqual(ival_A.asfreq('B', 'S'), ival_A_to_B_start) - self.assertEqual(ival_A.asfreq('B', 'E'), ival_A_to_B_end) - self.assertEqual(ival_A.asfreq('D', 'S'), ival_A_to_D_start) - self.assertEqual(ival_A.asfreq('D', 'E'), ival_A_to_D_end) - self.assertEqual(ival_A.asfreq('H', 'S'), ival_A_to_H_start) - self.assertEqual(ival_A.asfreq('H', 'E'), ival_A_to_H_end) - self.assertEqual(ival_A.asfreq('min', 'S'), ival_A_to_T_start) - self.assertEqual(ival_A.asfreq('min', 'E'), ival_A_to_T_end) - self.assertEqual(ival_A.asfreq('T', 'S'), ival_A_to_T_start) - self.assertEqual(ival_A.asfreq('T', 'E'), ival_A_to_T_end) - self.assertEqual(ival_A.asfreq('S', 'S'), ival_A_to_S_start) - self.assertEqual(ival_A.asfreq('S', 'E'), ival_A_to_S_end) - - self.assertEqual(ival_AJAN.asfreq('D', 'S'), ival_AJAN_to_D_start) - self.assertEqual(ival_AJAN.asfreq('D', 'E'), ival_AJAN_to_D_end) - - self.assertEqual(ival_AJUN.asfreq('D', 'S'), ival_AJUN_to_D_start) - self.assertEqual(ival_AJUN.asfreq('D', 'E'), ival_AJUN_to_D_end) - - self.assertEqual(ival_ANOV.asfreq('D', 'S'), ival_ANOV_to_D_start) - self.assertEqual(ival_ANOV.asfreq('D', 'E'), ival_ANOV_to_D_end) - - self.assertEqual(ival_A.asfreq('A'), ival_A) + assert ival_A.asfreq('Q', 'S') == ival_A_to_Q_start + assert ival_A.asfreq('Q', 'e') == ival_A_to_Q_end + assert ival_A.asfreq('M', 's') == ival_A_to_M_start + assert ival_A.asfreq('M', 'E') == ival_A_to_M_end + assert ival_A.asfreq('W', 'S') == ival_A_to_W_start + assert ival_A.asfreq('W', 'E') == ival_A_to_W_end + assert ival_A.asfreq('B', 'S') == ival_A_to_B_start + assert ival_A.asfreq('B', 'E') == ival_A_to_B_end + assert ival_A.asfreq('D', 'S') == ival_A_to_D_start + assert ival_A.asfreq('D', 'E') == ival_A_to_D_end + assert ival_A.asfreq('H', 'S') == ival_A_to_H_start + assert ival_A.asfreq('H', 'E') == ival_A_to_H_end + assert ival_A.asfreq('min', 'S') == ival_A_to_T_start + assert ival_A.asfreq('min', 'E') == ival_A_to_T_end + assert ival_A.asfreq('T', 'S') == ival_A_to_T_start + assert ival_A.asfreq('T', 'E') == ival_A_to_T_end + assert ival_A.asfreq('S', 'S') == ival_A_to_S_start + assert ival_A.asfreq('S', 'E') == ival_A_to_S_end + + assert ival_AJAN.asfreq('D', 'S') == ival_AJAN_to_D_start + assert ival_AJAN.asfreq('D', 'E') == ival_AJAN_to_D_end + + assert ival_AJUN.asfreq('D', 'S') == ival_AJUN_to_D_start + assert ival_AJUN.asfreq('D', 'E') == ival_AJUN_to_D_end + + assert ival_ANOV.asfreq('D', 'S') == ival_ANOV_to_D_start + assert ival_ANOV.asfreq('D', 'E') == ival_ANOV_to_D_end + + assert ival_A.asfreq('A') == ival_A def test_conv_quarterly(self): # frequency conversion tests: from Quarterly Frequency @@ -120,30 +120,30 @@ def test_conv_quarterly(self): ival_QEJUN_to_D_start = Period(freq='D', year=2006, month=7, day=1) ival_QEJUN_to_D_end = Period(freq='D', year=2006, month=9, day=30) - self.assertEqual(ival_Q.asfreq('A'), ival_Q_to_A) - self.assertEqual(ival_Q_end_of_year.asfreq('A'), ival_Q_to_A) - - self.assertEqual(ival_Q.asfreq('M', 'S'), ival_Q_to_M_start) - self.assertEqual(ival_Q.asfreq('M', 'E'), ival_Q_to_M_end) - self.assertEqual(ival_Q.asfreq('W', 'S'), 
ival_Q_to_W_start) - self.assertEqual(ival_Q.asfreq('W', 'E'), ival_Q_to_W_end) - self.assertEqual(ival_Q.asfreq('B', 'S'), ival_Q_to_B_start) - self.assertEqual(ival_Q.asfreq('B', 'E'), ival_Q_to_B_end) - self.assertEqual(ival_Q.asfreq('D', 'S'), ival_Q_to_D_start) - self.assertEqual(ival_Q.asfreq('D', 'E'), ival_Q_to_D_end) - self.assertEqual(ival_Q.asfreq('H', 'S'), ival_Q_to_H_start) - self.assertEqual(ival_Q.asfreq('H', 'E'), ival_Q_to_H_end) - self.assertEqual(ival_Q.asfreq('Min', 'S'), ival_Q_to_T_start) - self.assertEqual(ival_Q.asfreq('Min', 'E'), ival_Q_to_T_end) - self.assertEqual(ival_Q.asfreq('S', 'S'), ival_Q_to_S_start) - self.assertEqual(ival_Q.asfreq('S', 'E'), ival_Q_to_S_end) - - self.assertEqual(ival_QEJAN.asfreq('D', 'S'), ival_QEJAN_to_D_start) - self.assertEqual(ival_QEJAN.asfreq('D', 'E'), ival_QEJAN_to_D_end) - self.assertEqual(ival_QEJUN.asfreq('D', 'S'), ival_QEJUN_to_D_start) - self.assertEqual(ival_QEJUN.asfreq('D', 'E'), ival_QEJUN_to_D_end) - - self.assertEqual(ival_Q.asfreq('Q'), ival_Q) + assert ival_Q.asfreq('A') == ival_Q_to_A + assert ival_Q_end_of_year.asfreq('A') == ival_Q_to_A + + assert ival_Q.asfreq('M', 'S') == ival_Q_to_M_start + assert ival_Q.asfreq('M', 'E') == ival_Q_to_M_end + assert ival_Q.asfreq('W', 'S') == ival_Q_to_W_start + assert ival_Q.asfreq('W', 'E') == ival_Q_to_W_end + assert ival_Q.asfreq('B', 'S') == ival_Q_to_B_start + assert ival_Q.asfreq('B', 'E') == ival_Q_to_B_end + assert ival_Q.asfreq('D', 'S') == ival_Q_to_D_start + assert ival_Q.asfreq('D', 'E') == ival_Q_to_D_end + assert ival_Q.asfreq('H', 'S') == ival_Q_to_H_start + assert ival_Q.asfreq('H', 'E') == ival_Q_to_H_end + assert ival_Q.asfreq('Min', 'S') == ival_Q_to_T_start + assert ival_Q.asfreq('Min', 'E') == ival_Q_to_T_end + assert ival_Q.asfreq('S', 'S') == ival_Q_to_S_start + assert ival_Q.asfreq('S', 'E') == ival_Q_to_S_end + + assert ival_QEJAN.asfreq('D', 'S') == ival_QEJAN_to_D_start + assert ival_QEJAN.asfreq('D', 'E') == ival_QEJAN_to_D_end + assert ival_QEJUN.asfreq('D', 'S') == ival_QEJUN_to_D_start + assert ival_QEJUN.asfreq('D', 'E') == ival_QEJUN_to_D_end + + assert ival_Q.asfreq('Q') == ival_Q def test_conv_monthly(self): # frequency conversion tests: from Monthly Frequency @@ -170,25 +170,25 @@ def test_conv_monthly(self): ival_M_to_S_end = Period(freq='S', year=2007, month=1, day=31, hour=23, minute=59, second=59) - self.assertEqual(ival_M.asfreq('A'), ival_M_to_A) - self.assertEqual(ival_M_end_of_year.asfreq('A'), ival_M_to_A) - self.assertEqual(ival_M.asfreq('Q'), ival_M_to_Q) - self.assertEqual(ival_M_end_of_quarter.asfreq('Q'), ival_M_to_Q) - - self.assertEqual(ival_M.asfreq('W', 'S'), ival_M_to_W_start) - self.assertEqual(ival_M.asfreq('W', 'E'), ival_M_to_W_end) - self.assertEqual(ival_M.asfreq('B', 'S'), ival_M_to_B_start) - self.assertEqual(ival_M.asfreq('B', 'E'), ival_M_to_B_end) - self.assertEqual(ival_M.asfreq('D', 'S'), ival_M_to_D_start) - self.assertEqual(ival_M.asfreq('D', 'E'), ival_M_to_D_end) - self.assertEqual(ival_M.asfreq('H', 'S'), ival_M_to_H_start) - self.assertEqual(ival_M.asfreq('H', 'E'), ival_M_to_H_end) - self.assertEqual(ival_M.asfreq('Min', 'S'), ival_M_to_T_start) - self.assertEqual(ival_M.asfreq('Min', 'E'), ival_M_to_T_end) - self.assertEqual(ival_M.asfreq('S', 'S'), ival_M_to_S_start) - self.assertEqual(ival_M.asfreq('S', 'E'), ival_M_to_S_end) - - self.assertEqual(ival_M.asfreq('M'), ival_M) + assert ival_M.asfreq('A') == ival_M_to_A + assert ival_M_end_of_year.asfreq('A') == ival_M_to_A + assert ival_M.asfreq('Q') 
== ival_M_to_Q + assert ival_M_end_of_quarter.asfreq('Q') == ival_M_to_Q + + assert ival_M.asfreq('W', 'S') == ival_M_to_W_start + assert ival_M.asfreq('W', 'E') == ival_M_to_W_end + assert ival_M.asfreq('B', 'S') == ival_M_to_B_start + assert ival_M.asfreq('B', 'E') == ival_M_to_B_end + assert ival_M.asfreq('D', 'S') == ival_M_to_D_start + assert ival_M.asfreq('D', 'E') == ival_M_to_D_end + assert ival_M.asfreq('H', 'S') == ival_M_to_H_start + assert ival_M.asfreq('H', 'E') == ival_M_to_H_end + assert ival_M.asfreq('Min', 'S') == ival_M_to_T_start + assert ival_M.asfreq('Min', 'E') == ival_M_to_T_end + assert ival_M.asfreq('S', 'S') == ival_M_to_S_start + assert ival_M.asfreq('S', 'E') == ival_M_to_S_end + + assert ival_M.asfreq('M') == ival_M def test_conv_weekly(self): # frequency conversion tests: from Weekly Frequency @@ -254,45 +254,44 @@ def test_conv_weekly(self): ival_W_to_S_end = Period(freq='S', year=2007, month=1, day=7, hour=23, minute=59, second=59) - self.assertEqual(ival_W.asfreq('A'), ival_W_to_A) - self.assertEqual(ival_W_end_of_year.asfreq('A'), - ival_W_to_A_end_of_year) - self.assertEqual(ival_W.asfreq('Q'), ival_W_to_Q) - self.assertEqual(ival_W_end_of_quarter.asfreq('Q'), - ival_W_to_Q_end_of_quarter) - self.assertEqual(ival_W.asfreq('M'), ival_W_to_M) - self.assertEqual(ival_W_end_of_month.asfreq('M'), - ival_W_to_M_end_of_month) - - self.assertEqual(ival_W.asfreq('B', 'S'), ival_W_to_B_start) - self.assertEqual(ival_W.asfreq('B', 'E'), ival_W_to_B_end) - - self.assertEqual(ival_W.asfreq('D', 'S'), ival_W_to_D_start) - self.assertEqual(ival_W.asfreq('D', 'E'), ival_W_to_D_end) - - self.assertEqual(ival_WSUN.asfreq('D', 'S'), ival_WSUN_to_D_start) - self.assertEqual(ival_WSUN.asfreq('D', 'E'), ival_WSUN_to_D_end) - self.assertEqual(ival_WSAT.asfreq('D', 'S'), ival_WSAT_to_D_start) - self.assertEqual(ival_WSAT.asfreq('D', 'E'), ival_WSAT_to_D_end) - self.assertEqual(ival_WFRI.asfreq('D', 'S'), ival_WFRI_to_D_start) - self.assertEqual(ival_WFRI.asfreq('D', 'E'), ival_WFRI_to_D_end) - self.assertEqual(ival_WTHU.asfreq('D', 'S'), ival_WTHU_to_D_start) - self.assertEqual(ival_WTHU.asfreq('D', 'E'), ival_WTHU_to_D_end) - self.assertEqual(ival_WWED.asfreq('D', 'S'), ival_WWED_to_D_start) - self.assertEqual(ival_WWED.asfreq('D', 'E'), ival_WWED_to_D_end) - self.assertEqual(ival_WTUE.asfreq('D', 'S'), ival_WTUE_to_D_start) - self.assertEqual(ival_WTUE.asfreq('D', 'E'), ival_WTUE_to_D_end) - self.assertEqual(ival_WMON.asfreq('D', 'S'), ival_WMON_to_D_start) - self.assertEqual(ival_WMON.asfreq('D', 'E'), ival_WMON_to_D_end) - - self.assertEqual(ival_W.asfreq('H', 'S'), ival_W_to_H_start) - self.assertEqual(ival_W.asfreq('H', 'E'), ival_W_to_H_end) - self.assertEqual(ival_W.asfreq('Min', 'S'), ival_W_to_T_start) - self.assertEqual(ival_W.asfreq('Min', 'E'), ival_W_to_T_end) - self.assertEqual(ival_W.asfreq('S', 'S'), ival_W_to_S_start) - self.assertEqual(ival_W.asfreq('S', 'E'), ival_W_to_S_end) - - self.assertEqual(ival_W.asfreq('W'), ival_W) + assert ival_W.asfreq('A') == ival_W_to_A + assert ival_W_end_of_year.asfreq('A') == ival_W_to_A_end_of_year + + assert ival_W.asfreq('Q') == ival_W_to_Q + assert ival_W_end_of_quarter.asfreq('Q') == ival_W_to_Q_end_of_quarter + + assert ival_W.asfreq('M') == ival_W_to_M + assert ival_W_end_of_month.asfreq('M') == ival_W_to_M_end_of_month + + assert ival_W.asfreq('B', 'S') == ival_W_to_B_start + assert ival_W.asfreq('B', 'E') == ival_W_to_B_end + + assert ival_W.asfreq('D', 'S') == ival_W_to_D_start + assert ival_W.asfreq('D', 'E') == 
ival_W_to_D_end + + assert ival_WSUN.asfreq('D', 'S') == ival_WSUN_to_D_start + assert ival_WSUN.asfreq('D', 'E') == ival_WSUN_to_D_end + assert ival_WSAT.asfreq('D', 'S') == ival_WSAT_to_D_start + assert ival_WSAT.asfreq('D', 'E') == ival_WSAT_to_D_end + assert ival_WFRI.asfreq('D', 'S') == ival_WFRI_to_D_start + assert ival_WFRI.asfreq('D', 'E') == ival_WFRI_to_D_end + assert ival_WTHU.asfreq('D', 'S') == ival_WTHU_to_D_start + assert ival_WTHU.asfreq('D', 'E') == ival_WTHU_to_D_end + assert ival_WWED.asfreq('D', 'S') == ival_WWED_to_D_start + assert ival_WWED.asfreq('D', 'E') == ival_WWED_to_D_end + assert ival_WTUE.asfreq('D', 'S') == ival_WTUE_to_D_start + assert ival_WTUE.asfreq('D', 'E') == ival_WTUE_to_D_end + assert ival_WMON.asfreq('D', 'S') == ival_WMON_to_D_start + assert ival_WMON.asfreq('D', 'E') == ival_WMON_to_D_end + + assert ival_W.asfreq('H', 'S') == ival_W_to_H_start + assert ival_W.asfreq('H', 'E') == ival_W_to_H_end + assert ival_W.asfreq('Min', 'S') == ival_W_to_T_start + assert ival_W.asfreq('Min', 'E') == ival_W_to_T_end + assert ival_W.asfreq('S', 'S') == ival_W_to_S_start + assert ival_W.asfreq('S', 'E') == ival_W_to_S_end + + assert ival_W.asfreq('W') == ival_W msg = pd.tseries.frequencies._INVALID_FREQ_ERROR with tm.assert_raises_regex(ValueError, msg): @@ -342,25 +341,25 @@ def test_conv_business(self): ival_B_to_S_end = Period(freq='S', year=2007, month=1, day=1, hour=23, minute=59, second=59) - self.assertEqual(ival_B.asfreq('A'), ival_B_to_A) - self.assertEqual(ival_B_end_of_year.asfreq('A'), ival_B_to_A) - self.assertEqual(ival_B.asfreq('Q'), ival_B_to_Q) - self.assertEqual(ival_B_end_of_quarter.asfreq('Q'), ival_B_to_Q) - self.assertEqual(ival_B.asfreq('M'), ival_B_to_M) - self.assertEqual(ival_B_end_of_month.asfreq('M'), ival_B_to_M) - self.assertEqual(ival_B.asfreq('W'), ival_B_to_W) - self.assertEqual(ival_B_end_of_week.asfreq('W'), ival_B_to_W) + assert ival_B.asfreq('A') == ival_B_to_A + assert ival_B_end_of_year.asfreq('A') == ival_B_to_A + assert ival_B.asfreq('Q') == ival_B_to_Q + assert ival_B_end_of_quarter.asfreq('Q') == ival_B_to_Q + assert ival_B.asfreq('M') == ival_B_to_M + assert ival_B_end_of_month.asfreq('M') == ival_B_to_M + assert ival_B.asfreq('W') == ival_B_to_W + assert ival_B_end_of_week.asfreq('W') == ival_B_to_W - self.assertEqual(ival_B.asfreq('D'), ival_B_to_D) + assert ival_B.asfreq('D') == ival_B_to_D - self.assertEqual(ival_B.asfreq('H', 'S'), ival_B_to_H_start) - self.assertEqual(ival_B.asfreq('H', 'E'), ival_B_to_H_end) - self.assertEqual(ival_B.asfreq('Min', 'S'), ival_B_to_T_start) - self.assertEqual(ival_B.asfreq('Min', 'E'), ival_B_to_T_end) - self.assertEqual(ival_B.asfreq('S', 'S'), ival_B_to_S_start) - self.assertEqual(ival_B.asfreq('S', 'E'), ival_B_to_S_end) + assert ival_B.asfreq('H', 'S') == ival_B_to_H_start + assert ival_B.asfreq('H', 'E') == ival_B_to_H_end + assert ival_B.asfreq('Min', 'S') == ival_B_to_T_start + assert ival_B.asfreq('Min', 'E') == ival_B_to_T_end + assert ival_B.asfreq('S', 'S') == ival_B_to_S_start + assert ival_B.asfreq('S', 'E') == ival_B_to_S_end - self.assertEqual(ival_B.asfreq('B'), ival_B) + assert ival_B.asfreq('B') == ival_B def test_conv_daily(self): # frequency conversion tests: from Business Frequency" @@ -405,39 +404,36 @@ def test_conv_daily(self): ival_D_to_S_end = Period(freq='S', year=2007, month=1, day=1, hour=23, minute=59, second=59) - self.assertEqual(ival_D.asfreq('A'), ival_D_to_A) - - self.assertEqual(ival_D_end_of_quarter.asfreq('A-JAN'), - ival_Deoq_to_AJAN) - 
self.assertEqual(ival_D_end_of_quarter.asfreq('A-JUN'), - ival_Deoq_to_AJUN) - self.assertEqual(ival_D_end_of_quarter.asfreq('A-DEC'), - ival_Deoq_to_ADEC) - - self.assertEqual(ival_D_end_of_year.asfreq('A'), ival_D_to_A) - self.assertEqual(ival_D_end_of_quarter.asfreq('Q'), ival_D_to_QEDEC) - self.assertEqual(ival_D.asfreq("Q-JAN"), ival_D_to_QEJAN) - self.assertEqual(ival_D.asfreq("Q-JUN"), ival_D_to_QEJUN) - self.assertEqual(ival_D.asfreq("Q-DEC"), ival_D_to_QEDEC) - self.assertEqual(ival_D.asfreq('M'), ival_D_to_M) - self.assertEqual(ival_D_end_of_month.asfreq('M'), ival_D_to_M) - self.assertEqual(ival_D.asfreq('W'), ival_D_to_W) - self.assertEqual(ival_D_end_of_week.asfreq('W'), ival_D_to_W) - - self.assertEqual(ival_D_friday.asfreq('B'), ival_B_friday) - self.assertEqual(ival_D_saturday.asfreq('B', 'S'), ival_B_friday) - self.assertEqual(ival_D_saturday.asfreq('B', 'E'), ival_B_monday) - self.assertEqual(ival_D_sunday.asfreq('B', 'S'), ival_B_friday) - self.assertEqual(ival_D_sunday.asfreq('B', 'E'), ival_B_monday) - - self.assertEqual(ival_D.asfreq('H', 'S'), ival_D_to_H_start) - self.assertEqual(ival_D.asfreq('H', 'E'), ival_D_to_H_end) - self.assertEqual(ival_D.asfreq('Min', 'S'), ival_D_to_T_start) - self.assertEqual(ival_D.asfreq('Min', 'E'), ival_D_to_T_end) - self.assertEqual(ival_D.asfreq('S', 'S'), ival_D_to_S_start) - self.assertEqual(ival_D.asfreq('S', 'E'), ival_D_to_S_end) - - self.assertEqual(ival_D.asfreq('D'), ival_D) + assert ival_D.asfreq('A') == ival_D_to_A + + assert ival_D_end_of_quarter.asfreq('A-JAN') == ival_Deoq_to_AJAN + assert ival_D_end_of_quarter.asfreq('A-JUN') == ival_Deoq_to_AJUN + assert ival_D_end_of_quarter.asfreq('A-DEC') == ival_Deoq_to_ADEC + + assert ival_D_end_of_year.asfreq('A') == ival_D_to_A + assert ival_D_end_of_quarter.asfreq('Q') == ival_D_to_QEDEC + assert ival_D.asfreq("Q-JAN") == ival_D_to_QEJAN + assert ival_D.asfreq("Q-JUN") == ival_D_to_QEJUN + assert ival_D.asfreq("Q-DEC") == ival_D_to_QEDEC + assert ival_D.asfreq('M') == ival_D_to_M + assert ival_D_end_of_month.asfreq('M') == ival_D_to_M + assert ival_D.asfreq('W') == ival_D_to_W + assert ival_D_end_of_week.asfreq('W') == ival_D_to_W + + assert ival_D_friday.asfreq('B') == ival_B_friday + assert ival_D_saturday.asfreq('B', 'S') == ival_B_friday + assert ival_D_saturday.asfreq('B', 'E') == ival_B_monday + assert ival_D_sunday.asfreq('B', 'S') == ival_B_friday + assert ival_D_sunday.asfreq('B', 'E') == ival_B_monday + + assert ival_D.asfreq('H', 'S') == ival_D_to_H_start + assert ival_D.asfreq('H', 'E') == ival_D_to_H_end + assert ival_D.asfreq('Min', 'S') == ival_D_to_T_start + assert ival_D.asfreq('Min', 'E') == ival_D_to_T_end + assert ival_D.asfreq('S', 'S') == ival_D_to_S_start + assert ival_D.asfreq('S', 'E') == ival_D_to_S_end + + assert ival_D.asfreq('D') == ival_D def test_conv_hourly(self): # frequency conversion tests: from Hourly Frequency" @@ -472,25 +468,25 @@ def test_conv_hourly(self): ival_H_to_S_end = Period(freq='S', year=2007, month=1, day=1, hour=0, minute=59, second=59) - self.assertEqual(ival_H.asfreq('A'), ival_H_to_A) - self.assertEqual(ival_H_end_of_year.asfreq('A'), ival_H_to_A) - self.assertEqual(ival_H.asfreq('Q'), ival_H_to_Q) - self.assertEqual(ival_H_end_of_quarter.asfreq('Q'), ival_H_to_Q) - self.assertEqual(ival_H.asfreq('M'), ival_H_to_M) - self.assertEqual(ival_H_end_of_month.asfreq('M'), ival_H_to_M) - self.assertEqual(ival_H.asfreq('W'), ival_H_to_W) - self.assertEqual(ival_H_end_of_week.asfreq('W'), ival_H_to_W) - 
self.assertEqual(ival_H.asfreq('D'), ival_H_to_D) - self.assertEqual(ival_H_end_of_day.asfreq('D'), ival_H_to_D) - self.assertEqual(ival_H.asfreq('B'), ival_H_to_B) - self.assertEqual(ival_H_end_of_bus.asfreq('B'), ival_H_to_B) - - self.assertEqual(ival_H.asfreq('Min', 'S'), ival_H_to_T_start) - self.assertEqual(ival_H.asfreq('Min', 'E'), ival_H_to_T_end) - self.assertEqual(ival_H.asfreq('S', 'S'), ival_H_to_S_start) - self.assertEqual(ival_H.asfreq('S', 'E'), ival_H_to_S_end) - - self.assertEqual(ival_H.asfreq('H'), ival_H) + assert ival_H.asfreq('A') == ival_H_to_A + assert ival_H_end_of_year.asfreq('A') == ival_H_to_A + assert ival_H.asfreq('Q') == ival_H_to_Q + assert ival_H_end_of_quarter.asfreq('Q') == ival_H_to_Q + assert ival_H.asfreq('M') == ival_H_to_M + assert ival_H_end_of_month.asfreq('M') == ival_H_to_M + assert ival_H.asfreq('W') == ival_H_to_W + assert ival_H_end_of_week.asfreq('W') == ival_H_to_W + assert ival_H.asfreq('D') == ival_H_to_D + assert ival_H_end_of_day.asfreq('D') == ival_H_to_D + assert ival_H.asfreq('B') == ival_H_to_B + assert ival_H_end_of_bus.asfreq('B') == ival_H_to_B + + assert ival_H.asfreq('Min', 'S') == ival_H_to_T_start + assert ival_H.asfreq('Min', 'E') == ival_H_to_T_end + assert ival_H.asfreq('S', 'S') == ival_H_to_S_start + assert ival_H.asfreq('S', 'E') == ival_H_to_S_end + + assert ival_H.asfreq('H') == ival_H def test_conv_minutely(self): # frequency conversion tests: from Minutely Frequency" @@ -525,25 +521,25 @@ def test_conv_minutely(self): ival_T_to_S_end = Period(freq='S', year=2007, month=1, day=1, hour=0, minute=0, second=59) - self.assertEqual(ival_T.asfreq('A'), ival_T_to_A) - self.assertEqual(ival_T_end_of_year.asfreq('A'), ival_T_to_A) - self.assertEqual(ival_T.asfreq('Q'), ival_T_to_Q) - self.assertEqual(ival_T_end_of_quarter.asfreq('Q'), ival_T_to_Q) - self.assertEqual(ival_T.asfreq('M'), ival_T_to_M) - self.assertEqual(ival_T_end_of_month.asfreq('M'), ival_T_to_M) - self.assertEqual(ival_T.asfreq('W'), ival_T_to_W) - self.assertEqual(ival_T_end_of_week.asfreq('W'), ival_T_to_W) - self.assertEqual(ival_T.asfreq('D'), ival_T_to_D) - self.assertEqual(ival_T_end_of_day.asfreq('D'), ival_T_to_D) - self.assertEqual(ival_T.asfreq('B'), ival_T_to_B) - self.assertEqual(ival_T_end_of_bus.asfreq('B'), ival_T_to_B) - self.assertEqual(ival_T.asfreq('H'), ival_T_to_H) - self.assertEqual(ival_T_end_of_hour.asfreq('H'), ival_T_to_H) - - self.assertEqual(ival_T.asfreq('S', 'S'), ival_T_to_S_start) - self.assertEqual(ival_T.asfreq('S', 'E'), ival_T_to_S_end) - - self.assertEqual(ival_T.asfreq('Min'), ival_T) + assert ival_T.asfreq('A') == ival_T_to_A + assert ival_T_end_of_year.asfreq('A') == ival_T_to_A + assert ival_T.asfreq('Q') == ival_T_to_Q + assert ival_T_end_of_quarter.asfreq('Q') == ival_T_to_Q + assert ival_T.asfreq('M') == ival_T_to_M + assert ival_T_end_of_month.asfreq('M') == ival_T_to_M + assert ival_T.asfreq('W') == ival_T_to_W + assert ival_T_end_of_week.asfreq('W') == ival_T_to_W + assert ival_T.asfreq('D') == ival_T_to_D + assert ival_T_end_of_day.asfreq('D') == ival_T_to_D + assert ival_T.asfreq('B') == ival_T_to_B + assert ival_T_end_of_bus.asfreq('B') == ival_T_to_B + assert ival_T.asfreq('H') == ival_T_to_H + assert ival_T_end_of_hour.asfreq('H') == ival_T_to_H + + assert ival_T.asfreq('S', 'S') == ival_T_to_S_start + assert ival_T.asfreq('S', 'E') == ival_T_to_S_end + + assert ival_T.asfreq('Min') == ival_T def test_conv_secondly(self): # frequency conversion tests: from Secondly Frequency" @@ -577,24 +573,24 @@ def 
test_conv_secondly(self): ival_S_to_T = Period(freq='Min', year=2007, month=1, day=1, hour=0, minute=0) - self.assertEqual(ival_S.asfreq('A'), ival_S_to_A) - self.assertEqual(ival_S_end_of_year.asfreq('A'), ival_S_to_A) - self.assertEqual(ival_S.asfreq('Q'), ival_S_to_Q) - self.assertEqual(ival_S_end_of_quarter.asfreq('Q'), ival_S_to_Q) - self.assertEqual(ival_S.asfreq('M'), ival_S_to_M) - self.assertEqual(ival_S_end_of_month.asfreq('M'), ival_S_to_M) - self.assertEqual(ival_S.asfreq('W'), ival_S_to_W) - self.assertEqual(ival_S_end_of_week.asfreq('W'), ival_S_to_W) - self.assertEqual(ival_S.asfreq('D'), ival_S_to_D) - self.assertEqual(ival_S_end_of_day.asfreq('D'), ival_S_to_D) - self.assertEqual(ival_S.asfreq('B'), ival_S_to_B) - self.assertEqual(ival_S_end_of_bus.asfreq('B'), ival_S_to_B) - self.assertEqual(ival_S.asfreq('H'), ival_S_to_H) - self.assertEqual(ival_S_end_of_hour.asfreq('H'), ival_S_to_H) - self.assertEqual(ival_S.asfreq('Min'), ival_S_to_T) - self.assertEqual(ival_S_end_of_minute.asfreq('Min'), ival_S_to_T) - - self.assertEqual(ival_S.asfreq('S'), ival_S) + assert ival_S.asfreq('A') == ival_S_to_A + assert ival_S_end_of_year.asfreq('A') == ival_S_to_A + assert ival_S.asfreq('Q') == ival_S_to_Q + assert ival_S_end_of_quarter.asfreq('Q') == ival_S_to_Q + assert ival_S.asfreq('M') == ival_S_to_M + assert ival_S_end_of_month.asfreq('M') == ival_S_to_M + assert ival_S.asfreq('W') == ival_S_to_W + assert ival_S_end_of_week.asfreq('W') == ival_S_to_W + assert ival_S.asfreq('D') == ival_S_to_D + assert ival_S_end_of_day.asfreq('D') == ival_S_to_D + assert ival_S.asfreq('B') == ival_S_to_B + assert ival_S_end_of_bus.asfreq('B') == ival_S_to_B + assert ival_S.asfreq('H') == ival_S_to_H + assert ival_S_end_of_hour.asfreq('H') == ival_S_to_H + assert ival_S.asfreq('Min') == ival_S_to_T + assert ival_S_end_of_minute.asfreq('Min') == ival_S_to_T + + assert ival_S.asfreq('S') == ival_S def test_asfreq_mult(self): # normal freq to mult freq @@ -604,17 +600,17 @@ def test_asfreq_mult(self): result = p.asfreq(freq) expected = Period('2007', freq='3A') - self.assertEqual(result, expected) - self.assertEqual(result.ordinal, expected.ordinal) - self.assertEqual(result.freq, expected.freq) + assert result == expected + assert result.ordinal == expected.ordinal + assert result.freq == expected.freq # ordinal will not change for freq in ['3A', offsets.YearEnd(3)]: result = p.asfreq(freq, how='S') expected = Period('2007', freq='3A') - self.assertEqual(result, expected) - self.assertEqual(result.ordinal, expected.ordinal) - self.assertEqual(result.freq, expected.freq) + assert result == expected + assert result.ordinal == expected.ordinal + assert result.freq == expected.freq # mult freq to normal freq p = Period(freq='3A', year=2007) @@ -623,49 +619,49 @@ def test_asfreq_mult(self): result = p.asfreq(freq) expected = Period('2009', freq='A') - self.assertEqual(result, expected) - self.assertEqual(result.ordinal, expected.ordinal) - self.assertEqual(result.freq, expected.freq) + assert result == expected + assert result.ordinal == expected.ordinal + assert result.freq == expected.freq # ordinal will not change for freq in ['A', offsets.YearEnd()]: result = p.asfreq(freq, how='S') expected = Period('2007', freq='A') - self.assertEqual(result, expected) - self.assertEqual(result.ordinal, expected.ordinal) - self.assertEqual(result.freq, expected.freq) + assert result == expected + assert result.ordinal == expected.ordinal + assert result.freq == expected.freq p = Period(freq='A', year=2007) for freq 
in ['2M', offsets.MonthEnd(2)]: result = p.asfreq(freq) expected = Period('2007-12', freq='2M') - self.assertEqual(result, expected) - self.assertEqual(result.ordinal, expected.ordinal) - self.assertEqual(result.freq, expected.freq) + assert result == expected + assert result.ordinal == expected.ordinal + assert result.freq == expected.freq for freq in ['2M', offsets.MonthEnd(2)]: result = p.asfreq(freq, how='S') expected = Period('2007-01', freq='2M') - self.assertEqual(result, expected) - self.assertEqual(result.ordinal, expected.ordinal) - self.assertEqual(result.freq, expected.freq) + assert result == expected + assert result.ordinal == expected.ordinal + assert result.freq == expected.freq p = Period(freq='3A', year=2007) for freq in ['2M', offsets.MonthEnd(2)]: result = p.asfreq(freq) expected = Period('2009-12', freq='2M') - self.assertEqual(result, expected) - self.assertEqual(result.ordinal, expected.ordinal) - self.assertEqual(result.freq, expected.freq) + assert result == expected + assert result.ordinal == expected.ordinal + assert result.freq == expected.freq for freq in ['2M', offsets.MonthEnd(2)]: result = p.asfreq(freq, how='S') expected = Period('2007-01', freq='2M') - self.assertEqual(result, expected) - self.assertEqual(result.ordinal, expected.ordinal) - self.assertEqual(result.freq, expected.freq) + assert result == expected + assert result.ordinal == expected.ordinal + assert result.freq == expected.freq def test_asfreq_combined(self): # normal freq to combined freq @@ -675,9 +671,9 @@ def test_asfreq_combined(self): expected = Period('2007', freq='25H') for freq, how in zip(['1D1H', '1H1D'], ['E', 'S']): result = p.asfreq(freq, how=how) - self.assertEqual(result, expected) - self.assertEqual(result.ordinal, expected.ordinal) - self.assertEqual(result.freq, expected.freq) + assert result == expected + assert result.ordinal == expected.ordinal + assert result.freq == expected.freq # combined freq to normal freq p1 = Period(freq='1D1H', year=2007) @@ -687,29 +683,28 @@ def test_asfreq_combined(self): result1 = p1.asfreq('H') result2 = p2.asfreq('H') expected = Period('2007-01-02', freq='H') - self.assertEqual(result1, expected) - self.assertEqual(result1.ordinal, expected.ordinal) - self.assertEqual(result1.freq, expected.freq) - self.assertEqual(result2, expected) - self.assertEqual(result2.ordinal, expected.ordinal) - self.assertEqual(result2.freq, expected.freq) + assert result1 == expected + assert result1.ordinal == expected.ordinal + assert result1.freq == expected.freq + assert result2 == expected + assert result2.ordinal == expected.ordinal + assert result2.freq == expected.freq # ordinal will not change result1 = p1.asfreq('H', how='S') result2 = p2.asfreq('H', how='S') expected = Period('2007-01-01', freq='H') - self.assertEqual(result1, expected) - self.assertEqual(result1.ordinal, expected.ordinal) - self.assertEqual(result1.freq, expected.freq) - self.assertEqual(result2, expected) - self.assertEqual(result2.ordinal, expected.ordinal) - self.assertEqual(result2.freq, expected.freq) + assert result1 == expected + assert result1.ordinal == expected.ordinal + assert result1.freq == expected.freq + assert result2 == expected + assert result2.ordinal == expected.ordinal + assert result2.freq == expected.freq def test_asfreq_MS(self): initial = Period("2013") - self.assertEqual(initial.asfreq(freq="M", how="S"), - Period('2013-01', 'M')) + assert initial.asfreq(freq="M", how="S") == Period('2013-01', 'M') msg = pd.tseries.frequencies._INVALID_FREQ_ERROR with 
tm.assert_raises_regex(ValueError, msg): diff --git a/pandas/tests/scalar/test_timedelta.py b/pandas/tests/scalar/test_timedelta.py index 9efd180afc2da..faddbcc84109f 100644 --- a/pandas/tests/scalar/test_timedelta.py +++ b/pandas/tests/scalar/test_timedelta.py @@ -21,22 +21,20 @@ def setUp(self): def test_construction(self): expected = np.timedelta64(10, 'D').astype('m8[ns]').view('i8') - self.assertEqual(Timedelta(10, unit='d').value, expected) - self.assertEqual(Timedelta(10.0, unit='d').value, expected) - self.assertEqual(Timedelta('10 days').value, expected) - self.assertEqual(Timedelta(days=10).value, expected) - self.assertEqual(Timedelta(days=10.0).value, expected) + assert Timedelta(10, unit='d').value == expected + assert Timedelta(10.0, unit='d').value == expected + assert Timedelta('10 days').value == expected + assert Timedelta(days=10).value == expected + assert Timedelta(days=10.0).value == expected expected += np.timedelta64(10, 's').astype('m8[ns]').view('i8') - self.assertEqual(Timedelta('10 days 00:00:10').value, expected) - self.assertEqual(Timedelta(days=10, seconds=10).value, expected) - self.assertEqual( - Timedelta(days=10, milliseconds=10 * 1000).value, expected) - self.assertEqual( - Timedelta(days=10, microseconds=10 * 1000 * 1000).value, expected) - - # test construction with np dtypes - # GH 8757 + assert Timedelta('10 days 00:00:10').value == expected + assert Timedelta(days=10, seconds=10).value == expected + assert Timedelta(days=10, milliseconds=10 * 1000).value == expected + assert (Timedelta(days=10, microseconds=10 * 1000 * 1000) + .value == expected) + + # gh-8757: test construction with np dtypes timedelta_kwargs = {'days': 'D', 'seconds': 's', 'microseconds': 'us', @@ -48,70 +46,64 @@ def test_construction(self): np.float16] for npdtype in npdtypes: for pykwarg, npkwarg in timedelta_kwargs.items(): - expected = np.timedelta64(1, - npkwarg).astype('m8[ns]').view('i8') - self.assertEqual( - Timedelta(**{pykwarg: npdtype(1)}).value, expected) + expected = np.timedelta64(1, npkwarg).astype( + 'm8[ns]').view('i8') + assert Timedelta(**{pykwarg: npdtype(1)}).value == expected # rounding cases - self.assertEqual(Timedelta(82739999850000).value, 82739999850000) + assert Timedelta(82739999850000).value == 82739999850000 assert ('0 days 22:58:59.999850' in str(Timedelta(82739999850000))) - self.assertEqual(Timedelta(123072001000000).value, 123072001000000) + assert Timedelta(123072001000000).value == 123072001000000 assert ('1 days 10:11:12.001' in str(Timedelta(123072001000000))) # string conversion with/without leading zero # GH 9570 - self.assertEqual(Timedelta('0:00:00'), timedelta(hours=0)) - self.assertEqual(Timedelta('00:00:00'), timedelta(hours=0)) - self.assertEqual(Timedelta('-1:00:00'), -timedelta(hours=1)) - self.assertEqual(Timedelta('-01:00:00'), -timedelta(hours=1)) + assert Timedelta('0:00:00') == timedelta(hours=0) + assert Timedelta('00:00:00') == timedelta(hours=0) + assert Timedelta('-1:00:00') == -timedelta(hours=1) + assert Timedelta('-01:00:00') == -timedelta(hours=1) # more strings & abbrevs # GH 8190 - self.assertEqual(Timedelta('1 h'), timedelta(hours=1)) - self.assertEqual(Timedelta('1 hour'), timedelta(hours=1)) - self.assertEqual(Timedelta('1 hr'), timedelta(hours=1)) - self.assertEqual(Timedelta('1 hours'), timedelta(hours=1)) - self.assertEqual(Timedelta('-1 hours'), -timedelta(hours=1)) - self.assertEqual(Timedelta('1 m'), timedelta(minutes=1)) - self.assertEqual(Timedelta('1.5 m'), timedelta(seconds=90)) - 
self.assertEqual(Timedelta('1 minute'), timedelta(minutes=1)) - self.assertEqual(Timedelta('1 minutes'), timedelta(minutes=1)) - self.assertEqual(Timedelta('1 s'), timedelta(seconds=1)) - self.assertEqual(Timedelta('1 second'), timedelta(seconds=1)) - self.assertEqual(Timedelta('1 seconds'), timedelta(seconds=1)) - self.assertEqual(Timedelta('1 ms'), timedelta(milliseconds=1)) - self.assertEqual(Timedelta('1 milli'), timedelta(milliseconds=1)) - self.assertEqual(Timedelta('1 millisecond'), timedelta(milliseconds=1)) - self.assertEqual(Timedelta('1 us'), timedelta(microseconds=1)) - self.assertEqual(Timedelta('1 micros'), timedelta(microseconds=1)) - self.assertEqual(Timedelta('1 microsecond'), timedelta(microseconds=1)) - self.assertEqual(Timedelta('1.5 microsecond'), - Timedelta('00:00:00.000001500')) - self.assertEqual(Timedelta('1 ns'), Timedelta('00:00:00.000000001')) - self.assertEqual(Timedelta('1 nano'), Timedelta('00:00:00.000000001')) - self.assertEqual(Timedelta('1 nanosecond'), - Timedelta('00:00:00.000000001')) + assert Timedelta('1 h') == timedelta(hours=1) + assert Timedelta('1 hour') == timedelta(hours=1) + assert Timedelta('1 hr') == timedelta(hours=1) + assert Timedelta('1 hours') == timedelta(hours=1) + assert Timedelta('-1 hours') == -timedelta(hours=1) + assert Timedelta('1 m') == timedelta(minutes=1) + assert Timedelta('1.5 m') == timedelta(seconds=90) + assert Timedelta('1 minute') == timedelta(minutes=1) + assert Timedelta('1 minutes') == timedelta(minutes=1) + assert Timedelta('1 s') == timedelta(seconds=1) + assert Timedelta('1 second') == timedelta(seconds=1) + assert Timedelta('1 seconds') == timedelta(seconds=1) + assert Timedelta('1 ms') == timedelta(milliseconds=1) + assert Timedelta('1 milli') == timedelta(milliseconds=1) + assert Timedelta('1 millisecond') == timedelta(milliseconds=1) + assert Timedelta('1 us') == timedelta(microseconds=1) + assert Timedelta('1 micros') == timedelta(microseconds=1) + assert Timedelta('1 microsecond') == timedelta(microseconds=1) + assert Timedelta('1.5 microsecond') == Timedelta('00:00:00.000001500') + assert Timedelta('1 ns') == Timedelta('00:00:00.000000001') + assert Timedelta('1 nano') == Timedelta('00:00:00.000000001') + assert Timedelta('1 nanosecond') == Timedelta('00:00:00.000000001') # combos - self.assertEqual(Timedelta('10 days 1 hour'), - timedelta(days=10, hours=1)) - self.assertEqual(Timedelta('10 days 1 h'), timedelta(days=10, hours=1)) - self.assertEqual(Timedelta('10 days 1 h 1m 1s'), timedelta( - days=10, hours=1, minutes=1, seconds=1)) - self.assertEqual(Timedelta('-10 days 1 h 1m 1s'), - - timedelta(days=10, hours=1, minutes=1, seconds=1)) - self.assertEqual(Timedelta('-10 days 1 h 1m 1s'), - - timedelta(days=10, hours=1, minutes=1, seconds=1)) - self.assertEqual(Timedelta('-10 days 1 h 1m 1s 3us'), - - timedelta(days=10, hours=1, minutes=1, - seconds=1, microseconds=3)) - self.assertEqual(Timedelta('-10 days 1 h 1.5m 1s 3us'), - - timedelta(days=10, hours=1, minutes=1, - seconds=31, microseconds=3)) - - # currently invalid as it has a - on the hhmmdd part (only allowed on - # the days) + assert Timedelta('10 days 1 hour') == timedelta(days=10, hours=1) + assert Timedelta('10 days 1 h') == timedelta(days=10, hours=1) + assert Timedelta('10 days 1 h 1m 1s') == timedelta( + days=10, hours=1, minutes=1, seconds=1) + assert Timedelta('-10 days 1 h 1m 1s') == -timedelta( + days=10, hours=1, minutes=1, seconds=1) + assert Timedelta('-10 days 1 h 1m 1s') == -timedelta( + days=10, hours=1, minutes=1, seconds=1) + 
assert Timedelta('-10 days 1 h 1m 1s 3us') == -timedelta( + days=10, hours=1, minutes=1, seconds=1, microseconds=3) + assert Timedelta('-10 days 1 h 1.5m 1s 3us') == -timedelta( + days=10, hours=1, minutes=1, seconds=31, microseconds=3) + + # Currently invalid as it has a - on the hh:mm:ss part + # (only allowed on the days) pytest.raises(ValueError, lambda: Timedelta('-10 days -1 h 1.5m 1s 3us')) @@ -139,34 +131,33 @@ def test_construction(self): '1ns', '-23:59:59.999999999']: td = Timedelta(v) - self.assertEqual(Timedelta(td.value), td) + assert Timedelta(td.value) == td # str does not normally display nanos if not td.nanoseconds: - self.assertEqual(Timedelta(str(td)), td) - self.assertEqual(Timedelta(td._repr_base(format='all')), td) + assert Timedelta(str(td)) == td + assert Timedelta(td._repr_base(format='all')) == td # floats expected = np.timedelta64( 10, 's').astype('m8[ns]').view('i8') + np.timedelta64( 500, 'ms').astype('m8[ns]').view('i8') - self.assertEqual(Timedelta(10.5, unit='s').value, expected) + assert Timedelta(10.5, unit='s').value == expected # offset - self.assertEqual(to_timedelta(pd.offsets.Hour(2)), - Timedelta('0 days, 02:00:00')) - self.assertEqual(Timedelta(pd.offsets.Hour(2)), - Timedelta('0 days, 02:00:00')) - self.assertEqual(Timedelta(pd.offsets.Second(2)), - Timedelta('0 days, 00:00:02')) - - # unicode - # GH 11995 + assert (to_timedelta(pd.offsets.Hour(2)) == + Timedelta('0 days, 02:00:00')) + assert (Timedelta(pd.offsets.Hour(2)) == + Timedelta('0 days, 02:00:00')) + assert (Timedelta(pd.offsets.Second(2)) == + Timedelta('0 days, 00:00:02')) + + # gh-11995: unicode expected = Timedelta('1H') result = pd.Timedelta(u'1H') - self.assertEqual(result, expected) - self.assertEqual(to_timedelta(pd.offsets.Hour(2)), - Timedelta(u'0 days, 02:00:00')) + assert result == expected + assert (to_timedelta(pd.offsets.Hour(2)) == + Timedelta(u'0 days, 02:00:00')) pytest.raises(ValueError, lambda: Timedelta(u'foo bar')) @@ -176,7 +167,7 @@ def test_overflow_on_construction(self): pytest.raises(OverflowError, pd.Timedelta, value) def test_total_seconds_scalar(self): - # GH 10939 + # see gh-10939 rng = Timedelta('1 days, 10:11:12.100123456') expt = 1 * 86400 + 10 * 3600 + 11 * 60 + 12 + 100123456.
/ 1e9 tm.assert_almost_equal(rng.total_seconds(), expt) @@ -186,14 +177,14 @@ def test_total_seconds_scalar(self): def test_repr(self): - self.assertEqual(repr(Timedelta(10, unit='d')), - "Timedelta('10 days 00:00:00')") - self.assertEqual(repr(Timedelta(10, unit='s')), - "Timedelta('0 days 00:00:10')") - self.assertEqual(repr(Timedelta(10, unit='ms')), - "Timedelta('0 days 00:00:00.010000')") - self.assertEqual(repr(Timedelta(-10, unit='ms')), - "Timedelta('-1 days +23:59:59.990000')") + assert (repr(Timedelta(10, unit='d')) == + "Timedelta('10 days 00:00:00')") + assert (repr(Timedelta(10, unit='s')) == + "Timedelta('0 days 00:00:10')") + assert (repr(Timedelta(10, unit='ms')) == + "Timedelta('0 days 00:00:00.010000')") + assert (repr(Timedelta(-10, unit='ms')) == + "Timedelta('-1 days +23:59:59.990000')") def test_conversion(self): @@ -201,14 +192,16 @@ def test_conversion(self): Timedelta('1 days, 10:11:12.012345')]: pydt = td.to_pytimedelta() assert td == Timedelta(pydt) - self.assertEqual(td, pydt) + assert td == pydt assert (isinstance(pydt, timedelta) and not isinstance( pydt, Timedelta)) - self.assertEqual(td, np.timedelta64(td.value, 'ns')) + assert td == np.timedelta64(td.value, 'ns') td64 = td.to_timedelta64() - self.assertEqual(td64, np.timedelta64(td.value, 'ns')) - self.assertEqual(td, td64) + + assert td64 == np.timedelta64(td.value, 'ns') + assert td == td64 + assert isinstance(td64, np.timedelta64) # this is NOT equal and cannot be roundtriped (because of the nanos) @@ -220,20 +213,20 @@ def test_freq_conversion(self): # truediv td = Timedelta('1 days 2 hours 3 ns') result = td / np.timedelta64(1, 'D') - self.assertEqual(result, td.value / float(86400 * 1e9)) + assert result == td.value / float(86400 * 1e9) result = td / np.timedelta64(1, 's') - self.assertEqual(result, td.value / float(1e9)) + assert result == td.value / float(1e9) result = td / np.timedelta64(1, 'ns') - self.assertEqual(result, td.value) + assert result == td.value # floordiv td = Timedelta('1 days 2 hours 3 ns') result = td // np.timedelta64(1, 'D') - self.assertEqual(result, 1) + assert result == 1 result = td // np.timedelta64(1, 's') - self.assertEqual(result, 93600) + assert result == 93600 result = td // np.timedelta64(1, 'ns') - self.assertEqual(result, td.value) + assert result == td.value def test_fields(self): def check(value): @@ -242,10 +235,10 @@ def check(value): # compat to datetime.timedelta rng = to_timedelta('1 days, 10:11:12') - self.assertEqual(rng.days, 1) - self.assertEqual(rng.seconds, 10 * 3600 + 11 * 60 + 12) - self.assertEqual(rng.microseconds, 0) - self.assertEqual(rng.nanoseconds, 0) + assert rng.days == 1 + assert rng.seconds == 10 * 3600 + 11 * 60 + 12 + assert rng.microseconds == 0 + assert rng.nanoseconds == 0 pytest.raises(AttributeError, lambda: rng.hours) pytest.raises(AttributeError, lambda: rng.minutes) @@ -258,30 +251,30 @@ def check(value): check(rng.nanoseconds) td = Timedelta('-1 days, 10:11:12') - self.assertEqual(abs(td), Timedelta('13:48:48')) + assert abs(td) == Timedelta('13:48:48') assert str(td) == "-1 days +10:11:12" - self.assertEqual(-td, Timedelta('0 days 13:48:48')) - self.assertEqual(-Timedelta('-1 days, 10:11:12').value, 49728000000000) - self.assertEqual(Timedelta('-1 days, 10:11:12').value, -49728000000000) + assert -td == Timedelta('0 days 13:48:48') + assert -Timedelta('-1 days, 10:11:12').value == 49728000000000 + assert Timedelta('-1 days, 10:11:12').value == -49728000000000 rng = to_timedelta('-1 days, 10:11:12.100123456') - 
self.assertEqual(rng.days, -1) - self.assertEqual(rng.seconds, 10 * 3600 + 11 * 60 + 12) - self.assertEqual(rng.microseconds, 100 * 1000 + 123) - self.assertEqual(rng.nanoseconds, 456) + assert rng.days == -1 + assert rng.seconds == 10 * 3600 + 11 * 60 + 12 + assert rng.microseconds == 100 * 1000 + 123 + assert rng.nanoseconds == 456 pytest.raises(AttributeError, lambda: rng.hours) pytest.raises(AttributeError, lambda: rng.minutes) pytest.raises(AttributeError, lambda: rng.milliseconds) # components tup = pd.to_timedelta(-1, 'us').components - self.assertEqual(tup.days, -1) - self.assertEqual(tup.hours, 23) - self.assertEqual(tup.minutes, 59) - self.assertEqual(tup.seconds, 59) - self.assertEqual(tup.milliseconds, 999) - self.assertEqual(tup.microseconds, 999) - self.assertEqual(tup.nanoseconds, 0) + assert tup.days == -1 + assert tup.hours == 23 + assert tup.minutes == 59 + assert tup.seconds == 59 + assert tup.milliseconds == 999 + assert tup.microseconds == 999 + assert tup.nanoseconds == 0 # GH 10050 check(tup.days) @@ -293,19 +286,17 @@ def check(value): check(tup.nanoseconds) tup = Timedelta('-1 days 1 us').components - self.assertEqual(tup.days, -2) - self.assertEqual(tup.hours, 23) - self.assertEqual(tup.minutes, 59) - self.assertEqual(tup.seconds, 59) - self.assertEqual(tup.milliseconds, 999) - self.assertEqual(tup.microseconds, 999) - self.assertEqual(tup.nanoseconds, 0) + assert tup.days == -2 + assert tup.hours == 23 + assert tup.minutes == 59 + assert tup.seconds == 59 + assert tup.milliseconds == 999 + assert tup.microseconds == 999 + assert tup.nanoseconds == 0 def test_nat_converters(self): - self.assertEqual(to_timedelta( - 'nat', box=False).astype('int64'), iNaT) - self.assertEqual(to_timedelta( - 'nan', box=False).astype('int64'), iNaT) + assert to_timedelta('nat', box=False).astype('int64') == iNaT + assert to_timedelta('nan', box=False).astype('int64') == iNaT def testit(unit, transform): @@ -319,7 +310,7 @@ def testit(unit, transform): result = to_timedelta(2, unit=unit) expected = Timedelta(np.timedelta64(2, transform(unit)).astype( 'timedelta64[ns]')) - self.assertEqual(result, expected) + assert result == expected # validate all units # GH 6855 @@ -340,27 +331,22 @@ def testit(unit, transform): testit('L', lambda x: 'ms') def test_numeric_conversions(self): - self.assertEqual(ct(0), np.timedelta64(0, 'ns')) - self.assertEqual(ct(10), np.timedelta64(10, 'ns')) - self.assertEqual(ct(10, unit='ns'), np.timedelta64( - 10, 'ns').astype('m8[ns]')) - - self.assertEqual(ct(10, unit='us'), np.timedelta64( - 10, 'us').astype('m8[ns]')) - self.assertEqual(ct(10, unit='ms'), np.timedelta64( - 10, 'ms').astype('m8[ns]')) - self.assertEqual(ct(10, unit='s'), np.timedelta64( - 10, 's').astype('m8[ns]')) - self.assertEqual(ct(10, unit='d'), np.timedelta64( - 10, 'D').astype('m8[ns]')) + assert ct(0) == np.timedelta64(0, 'ns') + assert ct(10) == np.timedelta64(10, 'ns') + assert ct(10, unit='ns') == np.timedelta64(10, 'ns').astype('m8[ns]') + + assert ct(10, unit='us') == np.timedelta64(10, 'us').astype('m8[ns]') + assert ct(10, unit='ms') == np.timedelta64(10, 'ms').astype('m8[ns]') + assert ct(10, unit='s') == np.timedelta64(10, 's').astype('m8[ns]') + assert ct(10, unit='d') == np.timedelta64(10, 'D').astype('m8[ns]') def test_timedelta_conversions(self): - self.assertEqual(ct(timedelta(seconds=1)), - np.timedelta64(1, 's').astype('m8[ns]')) - self.assertEqual(ct(timedelta(microseconds=1)), - np.timedelta64(1, 'us').astype('m8[ns]')) - self.assertEqual(ct(timedelta(days=1)), - 
np.timedelta64(1, 'D').astype('m8[ns]')) + assert (ct(timedelta(seconds=1)) == + np.timedelta64(1, 's').astype('m8[ns]')) + assert (ct(timedelta(microseconds=1)) == + np.timedelta64(1, 'us').astype('m8[ns]')) + assert (ct(timedelta(days=1)) == + np.timedelta64(1, 'D').astype('m8[ns]')) def test_round(self): @@ -387,9 +373,9 @@ def test_round(self): ('d', Timedelta('1 days'), Timedelta('-1 days'))]: r1 = t1.round(freq) - self.assertEqual(r1, s1) + assert r1 == s1 r2 = t2.round(freq) - self.assertEqual(r2, s2) + assert r2 == s2 # invalid for freq in ['Y', 'M', 'foobar']: @@ -465,43 +451,43 @@ def test_short_format_converters(self): def conv(v): return v.astype('m8[ns]') - self.assertEqual(ct('10'), np.timedelta64(10, 'ns')) - self.assertEqual(ct('10ns'), np.timedelta64(10, 'ns')) - self.assertEqual(ct('100'), np.timedelta64(100, 'ns')) - self.assertEqual(ct('100ns'), np.timedelta64(100, 'ns')) - - self.assertEqual(ct('1000'), np.timedelta64(1000, 'ns')) - self.assertEqual(ct('1000ns'), np.timedelta64(1000, 'ns')) - self.assertEqual(ct('1000NS'), np.timedelta64(1000, 'ns')) - - self.assertEqual(ct('10us'), np.timedelta64(10000, 'ns')) - self.assertEqual(ct('100us'), np.timedelta64(100000, 'ns')) - self.assertEqual(ct('1000us'), np.timedelta64(1000000, 'ns')) - self.assertEqual(ct('1000Us'), np.timedelta64(1000000, 'ns')) - self.assertEqual(ct('1000uS'), np.timedelta64(1000000, 'ns')) - - self.assertEqual(ct('1ms'), np.timedelta64(1000000, 'ns')) - self.assertEqual(ct('10ms'), np.timedelta64(10000000, 'ns')) - self.assertEqual(ct('100ms'), np.timedelta64(100000000, 'ns')) - self.assertEqual(ct('1000ms'), np.timedelta64(1000000000, 'ns')) - - self.assertEqual(ct('-1s'), -np.timedelta64(1000000000, 'ns')) - self.assertEqual(ct('1s'), np.timedelta64(1000000000, 'ns')) - self.assertEqual(ct('10s'), np.timedelta64(10000000000, 'ns')) - self.assertEqual(ct('100s'), np.timedelta64(100000000000, 'ns')) - self.assertEqual(ct('1000s'), np.timedelta64(1000000000000, 'ns')) - - self.assertEqual(ct('1d'), conv(np.timedelta64(1, 'D'))) - self.assertEqual(ct('-1d'), -conv(np.timedelta64(1, 'D'))) - self.assertEqual(ct('1D'), conv(np.timedelta64(1, 'D'))) - self.assertEqual(ct('10D'), conv(np.timedelta64(10, 'D'))) - self.assertEqual(ct('100D'), conv(np.timedelta64(100, 'D'))) - self.assertEqual(ct('1000D'), conv(np.timedelta64(1000, 'D'))) - self.assertEqual(ct('10000D'), conv(np.timedelta64(10000, 'D'))) + assert ct('10') == np.timedelta64(10, 'ns') + assert ct('10ns') == np.timedelta64(10, 'ns') + assert ct('100') == np.timedelta64(100, 'ns') + assert ct('100ns') == np.timedelta64(100, 'ns') + + assert ct('1000') == np.timedelta64(1000, 'ns') + assert ct('1000ns') == np.timedelta64(1000, 'ns') + assert ct('1000NS') == np.timedelta64(1000, 'ns') + + assert ct('10us') == np.timedelta64(10000, 'ns') + assert ct('100us') == np.timedelta64(100000, 'ns') + assert ct('1000us') == np.timedelta64(1000000, 'ns') + assert ct('1000Us') == np.timedelta64(1000000, 'ns') + assert ct('1000uS') == np.timedelta64(1000000, 'ns') + + assert ct('1ms') == np.timedelta64(1000000, 'ns') + assert ct('10ms') == np.timedelta64(10000000, 'ns') + assert ct('100ms') == np.timedelta64(100000000, 'ns') + assert ct('1000ms') == np.timedelta64(1000000000, 'ns') + + assert ct('-1s') == -np.timedelta64(1000000000, 'ns') + assert ct('1s') == np.timedelta64(1000000000, 'ns') + assert ct('10s') == np.timedelta64(10000000000, 'ns') + assert ct('100s') == np.timedelta64(100000000000, 'ns') + assert ct('1000s') == np.timedelta64(1000000000000, 
'ns') + + assert ct('1d') == conv(np.timedelta64(1, 'D')) + assert ct('-1d') == -conv(np.timedelta64(1, 'D')) + assert ct('1D') == conv(np.timedelta64(1, 'D')) + assert ct('10D') == conv(np.timedelta64(10, 'D')) + assert ct('100D') == conv(np.timedelta64(100, 'D')) + assert ct('1000D') == conv(np.timedelta64(1000, 'D')) + assert ct('10000D') == conv(np.timedelta64(10000, 'D')) # space - self.assertEqual(ct(' 10000D '), conv(np.timedelta64(10000, 'D'))) - self.assertEqual(ct(' - 10000D '), -conv(np.timedelta64(10000, 'D'))) + assert ct(' 10000D ') == conv(np.timedelta64(10000, 'D')) + assert ct(' - 10000D ') == -conv(np.timedelta64(10000, 'D')) # invalid pytest.raises(ValueError, ct, '1foo') @@ -513,24 +499,22 @@ def conv(v): d1 = np.timedelta64(1, 'D') - self.assertEqual(ct('1days'), conv(d1)) - self.assertEqual(ct('1days,'), conv(d1)) - self.assertEqual(ct('- 1days,'), -conv(d1)) - - self.assertEqual(ct('00:00:01'), conv(np.timedelta64(1, 's'))) - self.assertEqual(ct('06:00:01'), conv( - np.timedelta64(6 * 3600 + 1, 's'))) - self.assertEqual(ct('06:00:01.0'), conv( - np.timedelta64(6 * 3600 + 1, 's'))) - self.assertEqual(ct('06:00:01.01'), conv( - np.timedelta64(1000 * (6 * 3600 + 1) + 10, 'ms'))) - - self.assertEqual(ct('- 1days, 00:00:01'), - conv(-d1 + np.timedelta64(1, 's'))) - self.assertEqual(ct('1days, 06:00:01'), conv( - d1 + np.timedelta64(6 * 3600 + 1, 's'))) - self.assertEqual(ct('1days, 06:00:01.01'), conv( - d1 + np.timedelta64(1000 * (6 * 3600 + 1) + 10, 'ms'))) + assert ct('1days') == conv(d1) + assert ct('1days,') == conv(d1) + assert ct('- 1days,') == -conv(d1) + + assert ct('00:00:01') == conv(np.timedelta64(1, 's')) + assert ct('06:00:01') == conv(np.timedelta64(6 * 3600 + 1, 's')) + assert ct('06:00:01.0') == conv(np.timedelta64(6 * 3600 + 1, 's')) + assert ct('06:00:01.01') == conv(np.timedelta64( + 1000 * (6 * 3600 + 1) + 10, 'ms')) + + assert (ct('- 1days, 00:00:01') == + conv(-d1 + np.timedelta64(1, 's'))) + assert (ct('1days, 06:00:01') == + conv(d1 + np.timedelta64(6 * 3600 + 1, 's'))) + assert (ct('1days, 06:00:01.01') == + conv(d1 + np.timedelta64(1000 * (6 * 3600 + 1) + 10, 'ms'))) # invalid pytest.raises(ValueError, ct, '- 1days, 00') @@ -560,16 +544,16 @@ def test_pickle(self): v = Timedelta('1 days 10:11:12.0123456') v_p = tm.round_trip_pickle(v) - self.assertEqual(v, v_p) + assert v == v_p def test_timedelta_hash_equality(self): # GH 11129 v = Timedelta(1, 'D') td = timedelta(days=1) - self.assertEqual(hash(v), hash(td)) + assert hash(v) == hash(td) d = {td: 2} - self.assertEqual(d[v], 2) + assert d[v] == 2 tds = timedelta_range('1 second', periods=20) assert all(hash(td) == hash(td.to_pytimedelta()) for td in tds) @@ -662,34 +646,34 @@ def test_isoformat(self): milliseconds=10, microseconds=10, nanoseconds=12) expected = 'P6DT0H50M3.010010012S' result = td.isoformat() - self.assertEqual(result, expected) + assert result == expected td = Timedelta(days=4, hours=12, minutes=30, seconds=5) result = td.isoformat() expected = 'P4DT12H30M5S' - self.assertEqual(result, expected) + assert result == expected td = Timedelta(nanoseconds=123) result = td.isoformat() expected = 'P0DT0H0M0.000000123S' - self.assertEqual(result, expected) + assert result == expected # trim nano td = Timedelta(microseconds=10) result = td.isoformat() expected = 'P0DT0H0M0.00001S' - self.assertEqual(result, expected) + assert result == expected # trim micro td = Timedelta(milliseconds=1) result = td.isoformat() expected = 'P0DT0H0M0.001S' - self.assertEqual(result, expected) + assert 
result == expected # don't strip every 0 result = Timedelta(minutes=1).isoformat() expected = 'P0DT0H1M0S' - self.assertEqual(result, expected) + assert result == expected def test_ops_error_str(self): # GH 13624 diff --git a/pandas/tests/scalar/test_timestamp.py b/pandas/tests/scalar/test_timestamp.py index 72b1e4d450b84..8a28a9a4bedd0 100644 --- a/pandas/tests/scalar/test_timestamp.py +++ b/pandas/tests/scalar/test_timestamp.py @@ -31,8 +31,8 @@ def test_constructor(self): # confirm base representation is correct import calendar - self.assertEqual(calendar.timegm(base_dt.timetuple()) * 1000000000, - base_expected) + assert (calendar.timegm(base_dt.timetuple()) * 1000000000 == + base_expected) tests = [(base_str, base_dt, base_expected), ('2014-07-01 10:00', datetime(2014, 7, 1, 10), @@ -56,32 +56,32 @@ def test_constructor(self): for date_str, date, expected in tests: for result in [Timestamp(date_str), Timestamp(date)]: # only with timestring - self.assertEqual(result.value, expected) - self.assertEqual(tslib.pydt_to_i8(result), expected) + assert result.value == expected + assert tslib.pydt_to_i8(result) == expected # re-creation shouldn't affect to internal value result = Timestamp(result) - self.assertEqual(result.value, expected) - self.assertEqual(tslib.pydt_to_i8(result), expected) + assert result.value == expected + assert tslib.pydt_to_i8(result) == expected # with timezone for tz, offset in timezones: for result in [Timestamp(date_str, tz=tz), Timestamp(date, tz=tz)]: expected_tz = expected - offset * 3600 * 1000000000 - self.assertEqual(result.value, expected_tz) - self.assertEqual(tslib.pydt_to_i8(result), expected_tz) + assert result.value == expected_tz + assert tslib.pydt_to_i8(result) == expected_tz # should preserve tz result = Timestamp(result) - self.assertEqual(result.value, expected_tz) - self.assertEqual(tslib.pydt_to_i8(result), expected_tz) + assert result.value == expected_tz + assert tslib.pydt_to_i8(result) == expected_tz # should convert to UTC result = Timestamp(result, tz='UTC') expected_utc = expected - offset * 3600 * 1000000000 - self.assertEqual(result.value, expected_utc) - self.assertEqual(tslib.pydt_to_i8(result), expected_utc) + assert result.value == expected_utc + assert tslib.pydt_to_i8(result) == expected_utc def test_constructor_with_stringoffset(self): # GH 7833 @@ -91,8 +91,8 @@ def test_constructor_with_stringoffset(self): # confirm base representation is correct import calendar - self.assertEqual(calendar.timegm(base_dt.timetuple()) * 1000000000, - base_expected) + assert (calendar.timegm(base_dt.timetuple()) * 1000000000 == + base_expected) tests = [(base_str, base_expected), ('2014-07-01 12:00:00+02:00', @@ -112,64 +112,64 @@ def test_constructor_with_stringoffset(self): for date_str, expected in tests: for result in [Timestamp(date_str)]: # only with timestring - self.assertEqual(result.value, expected) - self.assertEqual(tslib.pydt_to_i8(result), expected) + assert result.value == expected + assert tslib.pydt_to_i8(result) == expected # re-creation shouldn't affect to internal value result = Timestamp(result) - self.assertEqual(result.value, expected) - self.assertEqual(tslib.pydt_to_i8(result), expected) + assert result.value == expected + assert tslib.pydt_to_i8(result) == expected # with timezone for tz, offset in timezones: result = Timestamp(date_str, tz=tz) expected_tz = expected - self.assertEqual(result.value, expected_tz) - self.assertEqual(tslib.pydt_to_i8(result), expected_tz) + assert result.value == expected_tz + assert 
tslib.pydt_to_i8(result) == expected_tz # should preserve tz result = Timestamp(result) - self.assertEqual(result.value, expected_tz) - self.assertEqual(tslib.pydt_to_i8(result), expected_tz) + assert result.value == expected_tz + assert tslib.pydt_to_i8(result) == expected_tz # should convert to UTC result = Timestamp(result, tz='UTC') expected_utc = expected - self.assertEqual(result.value, expected_utc) - self.assertEqual(tslib.pydt_to_i8(result), expected_utc) + assert result.value == expected_utc + assert tslib.pydt_to_i8(result) == expected_utc # This should be 2013-11-01 05:00 in UTC # converted to Chicago tz result = Timestamp('2013-11-01 00:00:00-0500', tz='America/Chicago') - self.assertEqual(result.value, Timestamp('2013-11-01 05:00').value) + assert result.value == Timestamp('2013-11-01 05:00').value expected = "Timestamp('2013-11-01 00:00:00-0500', tz='America/Chicago')" # noqa - self.assertEqual(repr(result), expected) - self.assertEqual(result, eval(repr(result))) + assert repr(result) == expected + assert result == eval(repr(result)) # This should be 2013-11-01 05:00 in UTC # converted to Tokyo tz (+09:00) result = Timestamp('2013-11-01 00:00:00-0500', tz='Asia/Tokyo') - self.assertEqual(result.value, Timestamp('2013-11-01 05:00').value) + assert result.value == Timestamp('2013-11-01 05:00').value expected = "Timestamp('2013-11-01 14:00:00+0900', tz='Asia/Tokyo')" - self.assertEqual(repr(result), expected) - self.assertEqual(result, eval(repr(result))) + assert repr(result) == expected + assert result == eval(repr(result)) # GH11708 # This should be 2015-11-18 10:00 in UTC # converted to Asia/Katmandu result = Timestamp("2015-11-18 15:45:00+05:45", tz="Asia/Katmandu") - self.assertEqual(result.value, Timestamp("2015-11-18 10:00").value) + assert result.value == Timestamp("2015-11-18 10:00").value expected = "Timestamp('2015-11-18 15:45:00+0545', tz='Asia/Katmandu')" - self.assertEqual(repr(result), expected) - self.assertEqual(result, eval(repr(result))) + assert repr(result) == expected + assert result == eval(repr(result)) # This should be 2015-11-18 10:00 in UTC # converted to Asia/Kolkata result = Timestamp("2015-11-18 15:30:00+05:30", tz="Asia/Kolkata") - self.assertEqual(result.value, Timestamp("2015-11-18 10:00").value) + assert result.value == Timestamp("2015-11-18 10:00").value expected = "Timestamp('2015-11-18 15:30:00+0530', tz='Asia/Kolkata')" - self.assertEqual(repr(result), expected) - self.assertEqual(result, eval(repr(result))) + assert repr(result) == expected + assert result == eval(repr(result)) def test_constructor_invalid(self): with tm.assert_raises_regex(TypeError, 'Cannot convert input'): @@ -178,7 +178,7 @@ def test_constructor_invalid(self): Timestamp(Period('1000-01-01')) def test_constructor_positional(self): - # GH 10758 + # see gh-10758 with pytest.raises(TypeError): Timestamp(2000, 1) with pytest.raises(ValueError): @@ -190,14 +190,11 @@ def test_constructor_positional(self): with pytest.raises(ValueError): Timestamp(2000, 1, 32) - # GH 11630 - self.assertEqual( - repr(Timestamp(2015, 11, 12)), - repr(Timestamp('20151112'))) - - self.assertEqual( - repr(Timestamp(2015, 11, 12, 1, 2, 3, 999999)), - repr(Timestamp('2015-11-12 01:02:03.999999'))) + # see gh-11630 + assert (repr(Timestamp(2015, 11, 12)) == + repr(Timestamp('20151112'))) + assert (repr(Timestamp(2015, 11, 12, 1, 2, 3, 999999)) == + repr(Timestamp('2015-11-12 01:02:03.999999'))) def test_constructor_keyword(self): # GH 10758 @@ -212,37 +209,35 @@ def test_constructor_keyword(self): 
with pytest.raises(ValueError): Timestamp(year=2000, month=1, day=32) - self.assertEqual( - repr(Timestamp(year=2015, month=11, day=12)), - repr(Timestamp('20151112'))) + assert (repr(Timestamp(year=2015, month=11, day=12)) == + repr(Timestamp('20151112'))) - self.assertEqual( - repr(Timestamp(year=2015, month=11, day=12, - hour=1, minute=2, second=3, microsecond=999999)), - repr(Timestamp('2015-11-12 01:02:03.999999'))) + assert (repr(Timestamp(year=2015, month=11, day=12, hour=1, minute=2, + second=3, microsecond=999999)) == + repr(Timestamp('2015-11-12 01:02:03.999999'))) def test_constructor_fromordinal(self): base = datetime(2000, 1, 1) ts = Timestamp.fromordinal(base.toordinal(), freq='D') - self.assertEqual(base, ts) - self.assertEqual(ts.freq, 'D') - self.assertEqual(base.toordinal(), ts.toordinal()) + assert base == ts + assert ts.freq == 'D' + assert base.toordinal() == ts.toordinal() ts = Timestamp.fromordinal(base.toordinal(), tz='US/Eastern') - self.assertEqual(Timestamp('2000-01-01', tz='US/Eastern'), ts) - self.assertEqual(base.toordinal(), ts.toordinal()) + assert Timestamp('2000-01-01', tz='US/Eastern') == ts + assert base.toordinal() == ts.toordinal() def test_constructor_offset_depr(self): - # GH 12160 + # see gh-12160 with tm.assert_produces_warning(FutureWarning, check_stacklevel=False): ts = Timestamp('2011-01-01', offset='D') - self.assertEqual(ts.freq, 'D') + assert ts.freq == 'D' with tm.assert_produces_warning(FutureWarning, check_stacklevel=False): - self.assertEqual(ts.offset, 'D') + assert ts.offset == 'D' msg = "Can only specify freq or offset, not both" with tm.assert_raises_regex(TypeError, msg): @@ -255,9 +250,9 @@ def test_constructor_offset_depr_fromordinal(self): with tm.assert_produces_warning(FutureWarning, check_stacklevel=False): ts = Timestamp.fromordinal(base.toordinal(), offset='D') - self.assertEqual(Timestamp('2000-01-01'), ts) - self.assertEqual(ts.freq, 'D') - self.assertEqual(base.toordinal(), ts.toordinal()) + assert Timestamp('2000-01-01') == ts + assert ts.freq == 'D' + assert base.toordinal() == ts.toordinal() msg = "Can only specify freq or offset, not both" with tm.assert_raises_regex(TypeError, msg): @@ -269,14 +264,14 @@ def test_conversion(self): result = ts.to_pydatetime() expected = datetime(2000, 1, 1) - self.assertEqual(result, expected) - self.assertEqual(type(result), type(expected)) + assert result == expected + assert type(result) == type(expected) result = ts.to_datetime64() expected = np.datetime64(ts.value, 'ns') - self.assertEqual(result, expected) - self.assertEqual(type(result), type(expected)) - self.assertEqual(result.dtype, expected.dtype) + assert result == expected + assert type(result) == type(expected) + assert result.dtype == expected.dtype def test_repr(self): tm._skip_if_no_pytz() @@ -365,20 +360,20 @@ def test_tz(self): t = '2014-02-01 09:00' ts = Timestamp(t) local = ts.tz_localize('Asia/Tokyo') - self.assertEqual(local.hour, 9) - self.assertEqual(local, Timestamp(t, tz='Asia/Tokyo')) + assert local.hour == 9 + assert local == Timestamp(t, tz='Asia/Tokyo') conv = local.tz_convert('US/Eastern') - self.assertEqual(conv, Timestamp('2014-01-31 19:00', tz='US/Eastern')) - self.assertEqual(conv.hour, 19) + assert conv == Timestamp('2014-01-31 19:00', tz='US/Eastern') + assert conv.hour == 19 # preserves nanosecond ts = Timestamp(t) + offsets.Nano(5) local = ts.tz_localize('Asia/Tokyo') - self.assertEqual(local.hour, 9) - self.assertEqual(local.nanosecond, 5) + assert local.hour == 9 + assert local.nanosecond == 5 
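[Editorial aside, not part of the patch: the tz_localize/tz_convert assertions in the test_tz hunk below amount to a small round trip. A minimal standalone sketch of the same pattern, assuming pandas and its bundled timezone data are available:]

    # Hypothetical standalone version of the round trip checked in test_tz:
    # localizing a naive Timestamp attaches a zone; converting shifts the
    # wall clock while preserving the instant (and the nanosecond field).
    import pandas as pd

    ts = pd.Timestamp('2014-02-01 09:00') + pd.offsets.Nano(5)
    local = ts.tz_localize('Asia/Tokyo')
    conv = local.tz_convert('US/Eastern')

    assert local.hour == 9 and local.nanosecond == 5
    assert conv.hour == 19 and conv.nanosecond == 5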
conv = local.tz_convert('US/Eastern') - self.assertEqual(conv.nanosecond, 5) - self.assertEqual(conv.hour, 19) + assert conv.nanosecond == 5 + assert conv.hour == 19 def test_tz_localize_ambiguous(self): @@ -387,8 +382,8 @@ def test_tz_localize_ambiguous(self): ts_no_dst = ts.tz_localize('US/Eastern', ambiguous=False) rng = date_range('2014-11-02', periods=3, freq='H', tz='US/Eastern') - self.assertEqual(rng[1], ts_dst) - self.assertEqual(rng[2], ts_no_dst) + assert rng[1] == ts_dst + assert rng[2] == ts_no_dst pytest.raises(ValueError, ts.tz_localize, 'US/Eastern', ambiguous='infer') @@ -431,13 +426,13 @@ def test_tz_localize_roundtrip(self): '2014-11-01 17:00', '2014-11-05 00:00']: ts = Timestamp(t) localized = ts.tz_localize(tz) - self.assertEqual(localized, Timestamp(t, tz=tz)) + assert localized == Timestamp(t, tz=tz) with pytest.raises(TypeError): localized.tz_localize(tz) reset = localized.tz_localize(None) - self.assertEqual(reset, ts) + assert reset == ts assert reset.tzinfo is None def test_tz_convert_roundtrip(self): @@ -448,10 +443,9 @@ def test_tz_convert_roundtrip(self): converted = ts.tz_convert(tz) reset = converted.tz_convert(None) - self.assertEqual(reset, Timestamp(t)) + assert reset == Timestamp(t) assert reset.tzinfo is None - self.assertEqual(reset, - converted.tz_convert('UTC').tz_localize(None)) + assert reset == converted.tz_convert('UTC').tz_localize(None) def test_barely_oob_dts(self): one_us = np.timedelta64(1).astype('timedelta64[us]') @@ -472,8 +466,7 @@ def test_barely_oob_dts(self): pytest.raises(ValueError, Timestamp, max_ts_us + one_us) def test_utc_z_designator(self): - self.assertEqual(get_timezone( - Timestamp('2014-11-02 01:00Z').tzinfo), 'UTC') + assert get_timezone(Timestamp('2014-11-02 01:00Z').tzinfo) == 'UTC' def test_now(self): # #9000 @@ -513,18 +506,20 @@ def test_today(self): def test_asm8(self): np.random.seed(7960929) - ns = [Timestamp.min.value, Timestamp.max.value, 1000, ] + ns = [Timestamp.min.value, Timestamp.max.value, 1000] + for n in ns: - self.assertEqual(Timestamp(n).asm8.view('i8'), - np.datetime64(n, 'ns').view('i8'), n) - self.assertEqual(Timestamp('nat').asm8.view('i8'), - np.datetime64('nat', 'ns').view('i8')) + assert (Timestamp(n).asm8.view('i8') == + np.datetime64(n, 'ns').view('i8') == n) + + assert (Timestamp('nat').asm8.view('i8') == + np.datetime64('nat', 'ns').view('i8')) def test_fields(self): def check(value, equal): # that we are int/long like assert isinstance(value, (int, compat.long)) - self.assertEqual(value, equal) + assert value == equal # GH 10050 ts = Timestamp('2015-05-10 09:06:03.000100001') @@ -587,7 +582,7 @@ def test_pprint(self): {'w': {'a': Timestamp('2011-01-01 00:00:00')}}, {'w': {'a': Timestamp('2011-01-01 00:00:00')}}], 'foo': 1}""" - self.assertEqual(result, expected) + assert result == expected def to_datetime_depr(self): # see gh-8254 @@ -597,7 +592,7 @@ def to_datetime_depr(self): check_stacklevel=False): expected = datetime(2011, 1, 1) result = ts.to_datetime() - self.assertEqual(result, expected) + assert result == expected def to_pydatetime_nonzero_nano(self): ts = Timestamp('2011-01-01 9:00:00.123456789') @@ -607,7 +602,7 @@ def to_pydatetime_nonzero_nano(self): check_stacklevel=False): expected = datetime(2011, 1, 1, 9, 0, 0, 123456) result = ts.to_pydatetime() - self.assertEqual(result, expected) + assert result == expected def test_round(self): @@ -615,27 +610,27 @@ def test_round(self): dt = Timestamp('20130101 09:10:11') result = dt.round('D') expected = Timestamp('20130101') - 
self.assertEqual(result, expected) + assert result == expected dt = Timestamp('20130101 19:10:11') result = dt.round('D') expected = Timestamp('20130102') - self.assertEqual(result, expected) + assert result == expected dt = Timestamp('20130201 12:00:00') result = dt.round('D') expected = Timestamp('20130202') - self.assertEqual(result, expected) + assert result == expected dt = Timestamp('20130104 12:00:00') result = dt.round('D') expected = Timestamp('20130105') - self.assertEqual(result, expected) + assert result == expected dt = Timestamp('20130104 12:32:00') result = dt.round('30Min') expected = Timestamp('20130104 12:30:00') - self.assertEqual(result, expected) + assert result == expected dti = date_range('20130101 09:10:11', periods=5) result = dti.round('D') @@ -646,23 +641,23 @@ def test_round(self): dt = Timestamp('20130101 09:10:11') result = dt.floor('D') expected = Timestamp('20130101') - self.assertEqual(result, expected) + assert result == expected # ceil dt = Timestamp('20130101 09:10:11') result = dt.ceil('D') expected = Timestamp('20130102') - self.assertEqual(result, expected) + assert result == expected # round with tz dt = Timestamp('20130101 09:10:11', tz='US/Eastern') result = dt.round('D') expected = Timestamp('20130101', tz='US/Eastern') - self.assertEqual(result, expected) + assert result == expected dt = Timestamp('20130101 09:10:11', tz='US/Eastern') result = dt.round('s') - self.assertEqual(result, dt) + assert result == dt dti = date_range('20130101 09:10:11', periods=5).tz_localize('UTC').tz_convert('US/Eastern') @@ -680,19 +675,19 @@ def test_round(self): # GH 14440 & 15578 result = Timestamp('2016-10-17 12:00:00.0015').round('ms') expected = Timestamp('2016-10-17 12:00:00.002000') - self.assertEqual(result, expected) + assert result == expected result = Timestamp('2016-10-17 12:00:00.00149').round('ms') expected = Timestamp('2016-10-17 12:00:00.001000') - self.assertEqual(result, expected) + assert result == expected ts = Timestamp('2016-10-17 12:00:00.0015') for freq in ['us', 'ns']: - self.assertEqual(ts, ts.round(freq)) + assert ts == ts.round(freq) result = Timestamp('2016-10-17 12:00:00.001501031').round('10ns') expected = Timestamp('2016-10-17 12:00:00.001501030') - self.assertEqual(result, expected) + assert result == expected with tm.assert_produces_warning(): Timestamp('2016-10-17 12:00:00.001501031').round('1010ns') @@ -702,7 +697,7 @@ def test_round_misc(self): def _check_round(freq, expected): result = stamp.round(freq=freq) - self.assertEqual(result, expected) + assert result == expected for freq, expected in [('D', Timestamp('2000-01-05 00:00:00')), ('H', Timestamp('2000-01-05 05:00:00')), @@ -718,8 +713,8 @@ def test_class_ops_pytz(self): from pytz import timezone def compare(x, y): - self.assertEqual(int(Timestamp(x).value / 1e9), - int(Timestamp(y).value / 1e9)) + assert (int(Timestamp(x).value / 1e9) == + int(Timestamp(y).value / 1e9)) compare(Timestamp.now(), datetime.now()) compare(Timestamp.now('UTC'), datetime.now(timezone('UTC'))) @@ -741,8 +736,8 @@ def test_class_ops_dateutil(self): from dateutil.tz import tzutc def compare(x, y): - self.assertEqual(int(np.round(Timestamp(x).value / 1e9)), - int(np.round(Timestamp(y).value / 1e9))) + assert (int(np.round(Timestamp(x).value / 1e9)) == + int(np.round(Timestamp(y).value / 1e9))) compare(Timestamp.now(), datetime.now()) compare(Timestamp.now('UTC'), datetime.now(tzutc())) @@ -762,37 +757,37 @@ def compare(x, y): def test_basics_nanos(self): val = 
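# The round/floor/ceil hunks above all follow the same compute-then-compare
# shape; a condensed illustrative sketch, not part of the changeset (test
# name is made up):
from pandas import Timestamp


def test_timestamp_round_sketch():
    dt = Timestamp('20130101 09:10:11')
    assert dt.round('D') == Timestamp('20130101')
    assert dt.floor('D') == Timestamp('20130101')
    assert dt.ceil('D') == Timestamp('20130102')

    # sub-daily frequencies behave the same way
    assert (Timestamp('20130104 12:32:00').round('30min') ==
            Timestamp('20130104 12:30:00'))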
np.int64(946684800000000000).view('M8[ns]') stamp = Timestamp(val.view('i8') + 500) - self.assertEqual(stamp.year, 2000) - self.assertEqual(stamp.month, 1) - self.assertEqual(stamp.microsecond, 0) - self.assertEqual(stamp.nanosecond, 500) + assert stamp.year == 2000 + assert stamp.month == 1 + assert stamp.microsecond == 0 + assert stamp.nanosecond == 500 # GH 14415 val = np.iinfo(np.int64).min + 80000000000000 stamp = Timestamp(val) - self.assertEqual(stamp.year, 1677) - self.assertEqual(stamp.month, 9) - self.assertEqual(stamp.day, 21) - self.assertEqual(stamp.microsecond, 145224) - self.assertEqual(stamp.nanosecond, 192) + assert stamp.year == 1677 + assert stamp.month == 9 + assert stamp.day == 21 + assert stamp.microsecond == 145224 + assert stamp.nanosecond == 192 def test_unit(self): def check(val, unit=None, h=1, s=1, us=0): stamp = Timestamp(val, unit=unit) - self.assertEqual(stamp.year, 2000) - self.assertEqual(stamp.month, 1) - self.assertEqual(stamp.day, 1) - self.assertEqual(stamp.hour, h) + assert stamp.year == 2000 + assert stamp.month == 1 + assert stamp.day == 1 + assert stamp.hour == h if unit != 'D': - self.assertEqual(stamp.minute, 1) - self.assertEqual(stamp.second, s) - self.assertEqual(stamp.microsecond, us) + assert stamp.minute == 1 + assert stamp.second == s + assert stamp.microsecond == us else: - self.assertEqual(stamp.minute, 0) - self.assertEqual(stamp.second, 0) - self.assertEqual(stamp.microsecond, 0) - self.assertEqual(stamp.nanosecond, 0) + assert stamp.minute == 0 + assert stamp.second == 0 + assert stamp.microsecond == 0 + assert stamp.nanosecond == 0 ts = Timestamp('20000101 01:01:01') val = ts.value @@ -835,25 +830,25 @@ def test_roundtrip(self): base = Timestamp('20140101 00:00:00') result = Timestamp(base.value + Timedelta('5ms').value) - self.assertEqual(result, Timestamp(str(base) + ".005000")) - self.assertEqual(result.microsecond, 5000) + assert result == Timestamp(str(base) + ".005000") + assert result.microsecond == 5000 result = Timestamp(base.value + Timedelta('5us').value) - self.assertEqual(result, Timestamp(str(base) + ".000005")) - self.assertEqual(result.microsecond, 5) + assert result == Timestamp(str(base) + ".000005") + assert result.microsecond == 5 result = Timestamp(base.value + Timedelta('5ns').value) - self.assertEqual(result, Timestamp(str(base) + ".000000005")) - self.assertEqual(result.nanosecond, 5) - self.assertEqual(result.microsecond, 0) + assert result == Timestamp(str(base) + ".000000005") + assert result.nanosecond == 5 + assert result.microsecond == 0 result = Timestamp(base.value + Timedelta('6ms 5us').value) - self.assertEqual(result, Timestamp(str(base) + ".006005")) - self.assertEqual(result.microsecond, 5 + 6 * 1000) + assert result == Timestamp(str(base) + ".006005") + assert result.microsecond == 5 + 6 * 1000 result = Timestamp(base.value + Timedelta('200ms 5us').value) - self.assertEqual(result, Timestamp(str(base) + ".200005")) - self.assertEqual(result.microsecond, 5 + 200 * 1000) + assert result == Timestamp(str(base) + ".200005") + assert result.microsecond == 5 + 200 * 1000 def test_comparison(self): # 5-18-2012 00:00:00.000 @@ -861,7 +856,7 @@ def test_comparison(self): val = Timestamp(stamp) - self.assertEqual(val, val) + assert val == val assert not val != val assert not val < val assert val <= val @@ -869,7 +864,7 @@ def test_comparison(self): assert val >= val other = datetime(2012, 5, 18) - self.assertEqual(val, other) + assert val == other assert not val != other assert not val < other assert val <= 
other @@ -986,26 +981,26 @@ def test_cant_compare_tz_naive_w_aware_dateutil(self): def test_delta_preserve_nanos(self): val = Timestamp(long(1337299200000000123)) result = val + timedelta(1) - self.assertEqual(result.nanosecond, val.nanosecond) + assert result.nanosecond == val.nanosecond def test_frequency_misc(self): - self.assertEqual(frequencies.get_freq_group('T'), - frequencies.FreqGroup.FR_MIN) + assert (frequencies.get_freq_group('T') == + frequencies.FreqGroup.FR_MIN) code, stride = frequencies.get_freq_code(offsets.Hour()) - self.assertEqual(code, frequencies.FreqGroup.FR_HR) + assert code == frequencies.FreqGroup.FR_HR code, stride = frequencies.get_freq_code((5, 'T')) - self.assertEqual(code, frequencies.FreqGroup.FR_MIN) - self.assertEqual(stride, 5) + assert code == frequencies.FreqGroup.FR_MIN + assert stride == 5 offset = offsets.Hour() result = frequencies.to_offset(offset) - self.assertEqual(result, offset) + assert result == offset result = frequencies.to_offset((5, 'T')) expected = offsets.Minute(5) - self.assertEqual(result, expected) + assert result == expected pytest.raises(ValueError, frequencies.get_freq_code, (5, 'baz')) @@ -1015,12 +1010,12 @@ def test_frequency_misc(self): with tm.assert_produces_warning(FutureWarning, check_stacklevel=False): result = frequencies.get_standard_freq(offsets.Hour()) - self.assertEqual(result, 'H') + assert result == 'H' def test_hash_equivalent(self): d = {datetime(2011, 1, 1): 5} stamp = Timestamp(datetime(2011, 1, 1)) - self.assertEqual(d[stamp], 5) + assert d[stamp] == 5 def test_timestamp_compare_scalars(self): # case where ndim == 0 @@ -1041,11 +1036,11 @@ def test_timestamp_compare_scalars(self): expected = left_f(lhs, rhs) result = right_f(rhs, lhs) - self.assertEqual(result, expected) + assert result == expected expected = left_f(rhs, nat) result = right_f(nat, rhs) - self.assertEqual(result, expected) + assert result == expected def test_timestamp_compare_series(self): # make sure we can compare Timestamps on the right AND left hand side @@ -1108,7 +1103,7 @@ def assert_ns_timedelta(self, modified_timestamp, expected_value): value = self.timestamp.value modified_value = modified_timestamp.value - self.assertEqual(modified_value - value, expected_value) + assert modified_value - value == expected_value def test_timedelta_ns_arithmetic(self): self.assert_ns_timedelta(self.timestamp + np.timedelta64(-123, 'ns'), @@ -1131,68 +1126,68 @@ def test_nanosecond_string_parsing(self): # GH 7878 expected_repr = '2013-05-01 07:15:45.123456789' expected_value = 1367392545123456789 - self.assertEqual(ts.value, expected_value) + assert ts.value == expected_value assert expected_repr in repr(ts) ts = Timestamp('2013-05-01 07:15:45.123456789+09:00', tz='Asia/Tokyo') - self.assertEqual(ts.value, expected_value - 9 * 3600 * 1000000000) + assert ts.value == expected_value - 9 * 3600 * 1000000000 assert expected_repr in repr(ts) ts = Timestamp('2013-05-01 07:15:45.123456789', tz='UTC') - self.assertEqual(ts.value, expected_value) + assert ts.value == expected_value assert expected_repr in repr(ts) ts = Timestamp('2013-05-01 07:15:45.123456789', tz='US/Eastern') - self.assertEqual(ts.value, expected_value + 4 * 3600 * 1000000000) + assert ts.value == expected_value + 4 * 3600 * 1000000000 assert expected_repr in repr(ts) # GH 10041 ts = Timestamp('20130501T071545.123456789') - self.assertEqual(ts.value, expected_value) + assert ts.value == expected_value assert expected_repr in repr(ts) def test_nanosecond_timestamp(self): # GH 7610 expected = 
1293840000000000005 t = Timestamp('2011-01-01') + offsets.Nano(5) - self.assertEqual(repr(t), "Timestamp('2011-01-01 00:00:00.000000005')") - self.assertEqual(t.value, expected) - self.assertEqual(t.nanosecond, 5) + assert repr(t) == "Timestamp('2011-01-01 00:00:00.000000005')" + assert t.value == expected + assert t.nanosecond == 5 t = Timestamp(t) - self.assertEqual(repr(t), "Timestamp('2011-01-01 00:00:00.000000005')") - self.assertEqual(t.value, expected) - self.assertEqual(t.nanosecond, 5) + assert repr(t) == "Timestamp('2011-01-01 00:00:00.000000005')" + assert t.value == expected + assert t.nanosecond == 5 t = Timestamp(np_datetime64_compat('2011-01-01 00:00:00.000000005Z')) - self.assertEqual(repr(t), "Timestamp('2011-01-01 00:00:00.000000005')") - self.assertEqual(t.value, expected) - self.assertEqual(t.nanosecond, 5) + assert repr(t) == "Timestamp('2011-01-01 00:00:00.000000005')" + assert t.value == expected + assert t.nanosecond == 5 expected = 1293840000000000010 t = t + offsets.Nano(5) - self.assertEqual(repr(t), "Timestamp('2011-01-01 00:00:00.000000010')") - self.assertEqual(t.value, expected) - self.assertEqual(t.nanosecond, 10) + assert repr(t) == "Timestamp('2011-01-01 00:00:00.000000010')" + assert t.value == expected + assert t.nanosecond == 10 t = Timestamp(t) - self.assertEqual(repr(t), "Timestamp('2011-01-01 00:00:00.000000010')") - self.assertEqual(t.value, expected) - self.assertEqual(t.nanosecond, 10) + assert repr(t) == "Timestamp('2011-01-01 00:00:00.000000010')" + assert t.value == expected + assert t.nanosecond == 10 t = Timestamp(np_datetime64_compat('2011-01-01 00:00:00.000000010Z')) - self.assertEqual(repr(t), "Timestamp('2011-01-01 00:00:00.000000010')") - self.assertEqual(t.value, expected) - self.assertEqual(t.nanosecond, 10) + assert repr(t) == "Timestamp('2011-01-01 00:00:00.000000010')" + assert t.value == expected + assert t.nanosecond == 10 class TestTimestampOps(tm.TestCase): def test_timestamp_and_datetime(self): - self.assertEqual((Timestamp(datetime( - 2013, 10, 13)) - datetime(2013, 10, 12)).days, 1) - self.assertEqual((datetime(2013, 10, 12) - - Timestamp(datetime(2013, 10, 13))).days, -1) + assert ((Timestamp(datetime(2013, 10, 13)) - + datetime(2013, 10, 12)).days == 1) + assert ((datetime(2013, 10, 12) - + Timestamp(datetime(2013, 10, 13))).days == -1) def test_timestamp_and_series(self): timestamp_series = Series(date_range('2014-03-17', periods=2, freq='D', @@ -1213,42 +1208,36 @@ def test_addition_subtraction_types(self): timestamp_instance = date_range(datetime_instance, periods=1, freq='D')[0] - self.assertEqual(type(timestamp_instance + 1), Timestamp) - self.assertEqual(type(timestamp_instance - 1), Timestamp) + assert type(timestamp_instance + 1) == Timestamp + assert type(timestamp_instance - 1) == Timestamp # Timestamp + datetime not supported, though subtraction is supported # and yields timedelta more tests in tseries/base/tests/test_base.py - self.assertEqual( - type(timestamp_instance - datetime_instance), Timedelta) - self.assertEqual( - type(timestamp_instance + timedelta_instance), Timestamp) - self.assertEqual( - type(timestamp_instance - timedelta_instance), Timestamp) + assert type(timestamp_instance - datetime_instance) == Timedelta + assert type(timestamp_instance + timedelta_instance) == Timestamp + assert type(timestamp_instance - timedelta_instance) == Timestamp # Timestamp +/- datetime64 not supported, so not tested (could possibly # assert error raised?) 
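# The arithmetic hunks above reduce to "which class comes back from +/- with
# each operand type".  Compact illustrative sketch, not part of the changeset
# (``isinstance`` is used here instead of the patch's ``type(...) ==`` style):
from datetime import datetime, timedelta

from pandas import Timedelta, Timestamp


def test_timestamp_arithmetic_types_sketch():
    ts = Timestamp(datetime(2013, 10, 13))

    assert (ts - datetime(2013, 10, 12)).days == 1
    assert isinstance(ts - datetime(2013, 10, 12), Timedelta)
    assert isinstance(ts + timedelta(days=1), Timestamp)
    assert isinstance(ts - timedelta(days=1), Timestamp)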
timedelta64_instance = np.timedelta64(1, 'D') - self.assertEqual( - type(timestamp_instance + timedelta64_instance), Timestamp) - self.assertEqual( - type(timestamp_instance - timedelta64_instance), Timestamp) + assert type(timestamp_instance + timedelta64_instance) == Timestamp + assert type(timestamp_instance - timedelta64_instance) == Timestamp def test_addition_subtraction_preserve_frequency(self): timestamp_instance = date_range('2014-03-05', periods=1, freq='D')[0] timedelta_instance = timedelta(days=1) original_freq = timestamp_instance.freq - self.assertEqual((timestamp_instance + 1).freq, original_freq) - self.assertEqual((timestamp_instance - 1).freq, original_freq) - self.assertEqual( - (timestamp_instance + timedelta_instance).freq, original_freq) - self.assertEqual( - (timestamp_instance - timedelta_instance).freq, original_freq) + + assert (timestamp_instance + 1).freq == original_freq + assert (timestamp_instance - 1).freq == original_freq + assert (timestamp_instance + timedelta_instance).freq == original_freq + assert (timestamp_instance - timedelta_instance).freq == original_freq timedelta64_instance = np.timedelta64(1, 'D') - self.assertEqual( - (timestamp_instance + timedelta64_instance).freq, original_freq) - self.assertEqual( - (timestamp_instance - timedelta64_instance).freq, original_freq) + assert (timestamp_instance + + timedelta64_instance).freq == original_freq + assert (timestamp_instance - + timedelta64_instance).freq == original_freq def test_resolution(self): @@ -1264,30 +1253,30 @@ def test_resolution(self): idx = date_range(start='2013-04-01', periods=30, freq=freq, tz=tz) result = period.resolution(idx.asi8, idx.tz) - self.assertEqual(result, expected) + assert result == expected class TestTimestampToJulianDate(tm.TestCase): def test_compare_1700(self): r = Timestamp('1700-06-23').to_julian_date() - self.assertEqual(r, 2342145.5) + assert r == 2342145.5 def test_compare_2000(self): r = Timestamp('2000-04-12').to_julian_date() - self.assertEqual(r, 2451646.5) + assert r == 2451646.5 def test_compare_2100(self): r = Timestamp('2100-08-12').to_julian_date() - self.assertEqual(r, 2488292.5) + assert r == 2488292.5 def test_compare_hour01(self): r = Timestamp('2000-08-12T01:00:00').to_julian_date() - self.assertEqual(r, 2451768.5416666666666666) + assert r == 2451768.5416666666666666 def test_compare_hour13(self): r = Timestamp('2000-08-12T13:00:00').to_julian_date() - self.assertEqual(r, 2451769.0416666666666666) + assert r == 2451769.0416666666666666 class TestTimeSeries(tm.TestCase): @@ -1298,8 +1287,8 @@ def test_timestamp_to_datetime(self): stamp = rng[0] dtval = stamp.to_pydatetime() - self.assertEqual(stamp, dtval) - self.assertEqual(stamp.tzinfo, dtval.tzinfo) + assert stamp == dtval + assert stamp.tzinfo == dtval.tzinfo def test_timestamp_to_datetime_dateutil(self): tm._skip_if_no_pytz() @@ -1307,8 +1296,8 @@ def test_timestamp_to_datetime_dateutil(self): stamp = rng[0] dtval = stamp.to_pydatetime() - self.assertEqual(stamp, dtval) - self.assertEqual(stamp.tzinfo, dtval.tzinfo) + assert stamp == dtval + assert stamp.tzinfo == dtval.tzinfo def test_timestamp_to_datetime_explicit_pytz(self): tm._skip_if_no_pytz() @@ -1318,8 +1307,8 @@ def test_timestamp_to_datetime_explicit_pytz(self): stamp = rng[0] dtval = stamp.to_pydatetime() - self.assertEqual(stamp, dtval) - self.assertEqual(stamp.tzinfo, dtval.tzinfo) + assert stamp == dtval + assert stamp.tzinfo == dtval.tzinfo def test_timestamp_to_datetime_explicit_dateutil(self): 
tm._skip_if_windows_python_3() @@ -1329,8 +1318,8 @@ def test_timestamp_to_datetime_explicit_dateutil(self): stamp = rng[0] dtval = stamp.to_pydatetime() - self.assertEqual(stamp, dtval) - self.assertEqual(stamp.tzinfo, dtval.tzinfo) + assert stamp == dtval + assert stamp.tzinfo == dtval.tzinfo def test_timestamp_fields(self): # extra fields from DatetimeIndex like quarter and week @@ -1343,16 +1332,16 @@ def test_timestamp_fields(self): for f in fields: expected = getattr(idx, f)[-1] result = getattr(Timestamp(idx[-1]), f) - self.assertEqual(result, expected) + assert result == expected - self.assertEqual(idx.freq, Timestamp(idx[-1], idx.freq).freq) - self.assertEqual(idx.freqstr, Timestamp(idx[-1], idx.freq).freqstr) + assert idx.freq == Timestamp(idx[-1], idx.freq).freq + assert idx.freqstr == Timestamp(idx[-1], idx.freq).freqstr def test_timestamp_date_out_of_range(self): pytest.raises(ValueError, Timestamp, '1676-01-01') pytest.raises(ValueError, Timestamp, '2263-01-01') - # 1475 + # see gh-1475 pytest.raises(ValueError, DatetimeIndex, ['1400-01-01']) pytest.raises(ValueError, DatetimeIndex, [datetime(1400, 1, 1)]) @@ -1371,13 +1360,13 @@ def test_timestamp_from_ordinal(self): # GH 3042 dt = datetime(2011, 4, 16, 0, 0) ts = Timestamp.fromordinal(dt.toordinal()) - self.assertEqual(ts.to_pydatetime(), dt) + assert ts.to_pydatetime() == dt # with a tzinfo stamp = Timestamp('2011-4-16', tz='US/Eastern') dt_tz = stamp.to_pydatetime() ts = Timestamp.fromordinal(dt_tz.toordinal(), tz='US/Eastern') - self.assertEqual(ts.to_pydatetime(), dt_tz) + assert ts.to_pydatetime() == dt_tz def test_timestamp_compare_with_early_datetime(self): # e.g. datetime.min @@ -1461,9 +1450,9 @@ def test_dti_slicing(self): v2 = dti2[1] v3 = dti2[2] - self.assertEqual(v1, Timestamp('2/28/2005')) - self.assertEqual(v2, Timestamp('4/30/2005')) - self.assertEqual(v3, Timestamp('6/30/2005')) + assert v1 == Timestamp('2/28/2005') + assert v2 == Timestamp('4/30/2005') + assert v3 == Timestamp('6/30/2005') # don't carry freq through irregular slicing assert dti2.freq is None @@ -1473,27 +1462,27 @@ def test_woy_boundary(self): d = datetime(2013, 12, 31) result = Timestamp(d).week expected = 1 # ISO standard - self.assertEqual(result, expected) + assert result == expected d = datetime(2008, 12, 28) result = Timestamp(d).week expected = 52 # ISO standard - self.assertEqual(result, expected) + assert result == expected d = datetime(2009, 12, 31) result = Timestamp(d).week expected = 53 # ISO standard - self.assertEqual(result, expected) + assert result == expected d = datetime(2010, 1, 1) result = Timestamp(d).week expected = 53 # ISO standard - self.assertEqual(result, expected) + assert result == expected d = datetime(2010, 1, 3) result = Timestamp(d).week expected = 53 # ISO standard - self.assertEqual(result, expected) + assert result == expected result = np.array([Timestamp(datetime(*args)).week for args in [(2000, 1, 1), (2000, 1, 2), ( @@ -1516,12 +1505,10 @@ def test_to_datetime_bijective(self): # by going from nanoseconds to microseconds. 
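# The ``to_pydatetime`` round-trips above compare the boxed Timestamp with
# the stdlib datetime it converts to; illustrative sketch, not part of the
# changeset (dates are arbitrary, test name is made up):
from pandas import date_range


def test_timestamp_to_pydatetime_sketch():
    stamp = date_range('2009-04-15', periods=3, tz='US/Eastern')[0]
    dtval = stamp.to_pydatetime()

    assert stamp == dtval
    assert stamp.tzinfo == dtval.tzinfo
    # DatetimeIndex-style fields are available on the boxed scalar too
    assert stamp.quarter == 2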
exp_warning = None if Timestamp.max.nanosecond == 0 else UserWarning with tm.assert_produces_warning(exp_warning, check_stacklevel=False): - self.assertEqual( - Timestamp(Timestamp.max.to_pydatetime()).value / 1000, - Timestamp.max.value / 1000) + assert (Timestamp(Timestamp.max.to_pydatetime()).value / 1000 == + Timestamp.max.value / 1000) exp_warning = None if Timestamp.min.nanosecond == 0 else UserWarning with tm.assert_produces_warning(exp_warning, check_stacklevel=False): - self.assertEqual( - Timestamp(Timestamp.min.to_pydatetime()).value / 1000, - Timestamp.min.value / 1000) + assert (Timestamp(Timestamp.min.to_pydatetime()).value / 1000 == + Timestamp.min.value / 1000) diff --git a/pandas/tests/series/test_alter_axes.py b/pandas/tests/series/test_alter_axes.py index e0964fea95cc9..33a4cdb6e26c4 100644 --- a/pandas/tests/series/test_alter_axes.py +++ b/pandas/tests/series/test_alter_axes.py @@ -38,7 +38,7 @@ def test_setindex(self): def test_rename(self): renamer = lambda x: x.strftime('%Y%m%d') renamed = self.ts.rename(renamer) - self.assertEqual(renamed.index[0], renamer(self.ts.index[0])) + assert renamed.index[0] == renamer(self.ts.index[0]) # dict rename_dict = dict(zip(self.ts.index, renamed.index)) @@ -55,7 +55,7 @@ def test_rename(self): index=Index(['a', 'b', 'c', 'd'], name='name'), dtype='int64') renamed = renamer.rename({}) - self.assertEqual(renamed.index.name, renamer.index.name) + assert renamed.index.name == renamer.index.name def test_rename_by_series(self): s = Series(range(5), name='foo') @@ -68,7 +68,7 @@ def test_rename_set_name(self): s = Series(range(4), index=list('abcd')) for name in ['foo', 123, 123., datetime(2001, 11, 11), ('foo',)]: result = s.rename(name) - self.assertEqual(result.name, name) + assert result.name == name tm.assert_numpy_array_equal(result.index.values, s.index.values) assert s.name is None @@ -76,7 +76,7 @@ def test_rename_set_name_inplace(self): s = Series(range(3), index=list('abc')) for name in ['foo', 123, 123., datetime(2001, 11, 11), ('foo',)]: s.rename(name, inplace=True) - self.assertEqual(s.name, name) + assert s.name == name exp = np.array(['a', 'b', 'c'], dtype=np.object_) tm.assert_numpy_array_equal(s.index.values, exp) @@ -86,14 +86,14 @@ def test_set_name_attribute(self): s2 = Series([1, 2, 3], name='bar') for name in [7, 7., 'name', datetime(2001, 1, 1), (1,), u"\u05D0"]: s.name = name - self.assertEqual(s.name, name) + assert s.name == name s2.name = name - self.assertEqual(s2.name, name) + assert s2.name == name def test_set_name(self): s = Series([1, 2, 3]) s2 = s._set_name('foo') - self.assertEqual(s2.name, 'foo') + assert s2.name == 'foo' assert s.name is None assert s is not s2 @@ -102,7 +102,7 @@ def test_rename_inplace(self): expected = renamer(self.ts.index[0]) self.ts.rename(renamer, inplace=True) - self.assertEqual(self.ts.index[0], expected) + assert self.ts.index[0] == expected def test_set_index_makes_timeseries(self): idx = tm.makeDateIndex(10) @@ -135,7 +135,7 @@ def test_reset_index(self): [0, 1, 0, 1, 0, 1]]) s = Series(np.random.randn(6), index=index) rs = s.reset_index(level=1) - self.assertEqual(len(rs.columns), 2) + assert len(rs.columns) == 2 rs = s.reset_index(level=[0, 2], drop=True) tm.assert_index_equal(rs.index, Index(index.get_level_values(1))) diff --git a/pandas/tests/series/test_analytics.py b/pandas/tests/series/test_analytics.py index 233d71cb1d8a5..73515c47388ea 100644 --- a/pandas/tests/series/test_analytics.py +++ b/pandas/tests/series/test_analytics.py @@ -32,14 +32,14 @@ class 
TestSeriesAnalytics(TestData, tm.TestCase): def test_sum_zero(self): arr = np.array([]) - self.assertEqual(nanops.nansum(arr), 0) + assert nanops.nansum(arr) == 0 arr = np.empty((10, 0)) assert (nanops.nansum(arr, axis=1) == 0).all() # GH #844 s = Series([], index=[]) - self.assertEqual(s.sum(), 0) + assert s.sum() == 0 df = DataFrame(np.empty((10, 0))) assert (df.sum(1) == 0).all() @@ -58,19 +58,19 @@ def test_overflow(self): # no bottleneck result = s.sum(skipna=False) - self.assertEqual(int(result), v.sum(dtype='int64')) + assert int(result) == v.sum(dtype='int64') result = s.min(skipna=False) - self.assertEqual(int(result), 0) + assert int(result) == 0 result = s.max(skipna=False) - self.assertEqual(int(result), v[-1]) + assert int(result) == v[-1] # use bottleneck if available result = s.sum() - self.assertEqual(int(result), v.sum(dtype='int64')) + assert int(result) == v.sum(dtype='int64') result = s.min() - self.assertEqual(int(result), 0) + assert int(result) == 0 result = s.max() - self.assertEqual(int(result), v[-1]) + assert int(result) == v[-1] for dtype in ['float32', 'float64']: v = np.arange(5000000, dtype=dtype) @@ -78,7 +78,7 @@ def test_overflow(self): # no bottleneck result = s.sum(skipna=False) - self.assertEqual(result, v.sum(dtype=dtype)) + assert result == v.sum(dtype=dtype) result = s.min(skipna=False) assert np.allclose(float(result), 0.0) result = s.max(skipna=False) @@ -86,7 +86,7 @@ def test_overflow(self): # use bottleneck if available result = s.sum() - self.assertEqual(result, v.sum(dtype=dtype)) + assert result == v.sum(dtype=dtype) result = s.min() assert np.allclose(float(result), 0.0) result = s.max() @@ -284,7 +284,7 @@ def test_skew(self): assert np.isnan(s.skew()) assert np.isnan(df.skew()).all() else: - self.assertEqual(0, s.skew()) + assert 0 == s.skew() assert (df.skew() == 0).all() def test_kurt(self): @@ -310,7 +310,7 @@ def test_kurt(self): assert np.isnan(s.kurt()) assert np.isnan(df.kurt()).all() else: - self.assertEqual(0, s.kurt()) + assert 0 == s.kurt() assert (df.kurt() == 0).all() def test_describe(self): @@ -341,9 +341,9 @@ def test_argsort(self): # GH 2967 (introduced bug in 0.11-dev I think) s = Series([Timestamp('201301%02d' % (i + 1)) for i in range(5)]) - self.assertEqual(s.dtype, 'datetime64[ns]') + assert s.dtype == 'datetime64[ns]' shifted = s.shift(-1) - self.assertEqual(shifted.dtype, 'datetime64[ns]') + assert shifted.dtype == 'datetime64[ns]' assert isnull(shifted[4]) result = s.argsort() @@ -520,7 +520,7 @@ def testit(): assert nanops._USE_BOTTLENECK import bottleneck as bn # noqa assert bn.__version__ >= LooseVersion('1.0') - self.assertEqual(f(allna), 0.0) + assert f(allna) == 0.0 except: assert np.isnan(f(allna)) @@ -539,7 +539,7 @@ def testit(): s = Series(bdate_range('1/1/2000', periods=10)) res = f(s) exp = alternate(s) - self.assertEqual(res, exp) + assert res == exp # check on string data if name not in ['sum', 'min', 'max']: @@ -609,7 +609,7 @@ def test_round(self): expected = Series(np.round(self.ts.values, 2), index=self.ts.index, name='ts') assert_series_equal(result, expected) - self.assertEqual(result.name, self.ts.name) + assert result.name == self.ts.name def test_numpy_round(self): # See gh-12600 @@ -651,7 +651,7 @@ def test_all_any(self): # Alternative types, with implicit 'object' dtype. s = Series(['abc', True]) - self.assertEqual('abc', s.any()) # 'abc' || True => 'abc' + assert 'abc' == s.any() # 'abc' || True => 'abc' def test_all_any_params(self): # Check skipna, with implicit 'object' dtype. 
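# The reduction tests above check scalar results with plain comparisons;
# small illustrative sketch of the same shape, not part of the changeset
# (values are arbitrary):
import numpy as np
from pandas import Series


def test_series_reductions_sketch():
    s = Series(np.arange(5, dtype='int64'))

    assert s.sum() == 10
    assert s.min() == 0
    assert s.max() == 4

    # an empty float Series sums to 0 by convention
    assert Series(dtype=float).sum() == 0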
@@ -719,7 +719,7 @@ def test_ops_consistency_on_empty(self): # float result = Series(dtype=float).sum() - self.assertEqual(result, 0) + assert result == 0 result = Series(dtype=float).mean() assert isnull(result) @@ -729,7 +729,7 @@ def test_ops_consistency_on_empty(self): # timedelta64[ns] result = Series(dtype='m8[ns]').sum() - self.assertEqual(result, Timedelta(0)) + assert result == Timedelta(0) result = Series(dtype='m8[ns]').mean() assert result is pd.NaT @@ -827,11 +827,11 @@ def test_cov(self): assert isnull(ts1.cov(ts2, min_periods=12)) def test_count(self): - self.assertEqual(self.ts.count(), len(self.ts)) + assert self.ts.count() == len(self.ts) self.ts[::2] = np.NaN - self.assertEqual(self.ts.count(), np.isfinite(self.ts).sum()) + assert self.ts.count() == np.isfinite(self.ts).sum() mi = MultiIndex.from_arrays([list('aabbcc'), [1, 2, 2, nan, 1, 2]]) ts = Series(np.arange(len(mi)), index=mi) @@ -876,7 +876,7 @@ def test_value_counts_nunique(self): series[20:500] = np.nan series[10:20] = 5000 result = series.nunique() - self.assertEqual(result, 11) + assert result == 11 def test_unique(self): @@ -884,18 +884,18 @@ def test_unique(self): s = Series([1.2345] * 100) s[::2] = np.nan result = s.unique() - self.assertEqual(len(result), 2) + assert len(result) == 2 s = Series([1.2345] * 100, dtype='f4') s[::2] = np.nan result = s.unique() - self.assertEqual(len(result), 2) + assert len(result) == 2 # NAs in object arrays #714 s = Series(['foo'] * 100, dtype='O') s[::2] = np.nan result = s.unique() - self.assertEqual(len(result), 2) + assert len(result) == 2 # decision about None s = Series([1, 2, 3, None, None, None], dtype=object) @@ -953,11 +953,11 @@ def test_drop_duplicates(self): def test_clip(self): val = self.ts.median() - self.assertEqual(self.ts.clip_lower(val).min(), val) - self.assertEqual(self.ts.clip_upper(val).max(), val) + assert self.ts.clip_lower(val).min() == val + assert self.ts.clip_upper(val).max() == val - self.assertEqual(self.ts.clip(lower=val).min(), val) - self.assertEqual(self.ts.clip(upper=val).max(), val) + assert self.ts.clip(lower=val).min() == val + assert self.ts.clip(upper=val).max() == val result = self.ts.clip(-0.5, 0.5) expected = np.clip(self.ts, -0.5, 0.5) @@ -974,10 +974,10 @@ def test_clip_types_and_nulls(self): thresh = s[2] l = s.clip_lower(thresh) u = s.clip_upper(thresh) - self.assertEqual(l[notnull(l)].min(), thresh) - self.assertEqual(u[notnull(u)].max(), thresh) - self.assertEqual(list(isnull(s)), list(isnull(l))) - self.assertEqual(list(isnull(s)), list(isnull(u))) + assert l[notnull(l)].min() == thresh + assert u[notnull(u)].max() == thresh + assert list(isnull(s)) == list(isnull(l)) + assert list(isnull(s)) == list(isnull(u)) def test_clip_against_series(self): # GH #6966 @@ -1109,20 +1109,20 @@ def test_timedelta64_analytics(self): Timestamp('20120101') result = td.idxmin() - self.assertEqual(result, 0) + assert result == 0 result = td.idxmax() - self.assertEqual(result, 2) + assert result == 2 # GH 2982 # with NaT td[0] = np.nan result = td.idxmin() - self.assertEqual(result, 1) + assert result == 1 result = td.idxmax() - self.assertEqual(result, 2) + assert result == 2 # abs s1 = Series(date_range('20120101', periods=3)) @@ -1139,11 +1139,11 @@ def test_timedelta64_analytics(self): # max/min result = td.max() expected = Timedelta('2 days') - self.assertEqual(result, expected) + assert result == expected result = td.min() expected = Timedelta('1 days') - self.assertEqual(result, expected) + assert result == expected def 
test_idxmin(self): # test idxmin @@ -1153,14 +1153,14 @@ def test_idxmin(self): self.series[5:15] = np.NaN # skipna or no - self.assertEqual(self.series[self.series.idxmin()], self.series.min()) + assert self.series[self.series.idxmin()] == self.series.min() assert isnull(self.series.idxmin(skipna=False)) # no NaNs nona = self.series.dropna() - self.assertEqual(nona[nona.idxmin()], nona.min()) - self.assertEqual(nona.index.values.tolist().index(nona.idxmin()), - nona.values.argmin()) + assert nona[nona.idxmin()] == nona.min() + assert (nona.index.values.tolist().index(nona.idxmin()) == + nona.values.argmin()) # all NaNs allna = self.series * nan @@ -1170,17 +1170,17 @@ def test_idxmin(self): from pandas import date_range s = Series(date_range('20130102', periods=6)) result = s.idxmin() - self.assertEqual(result, 0) + assert result == 0 s[0] = np.nan result = s.idxmin() - self.assertEqual(result, 1) + assert result == 1 def test_numpy_argmin(self): # argmin is aliased to idxmin data = np.random.randint(0, 11, size=10) result = np.argmin(Series(data)) - self.assertEqual(result, np.argmin(data)) + assert result == np.argmin(data) if not _np_version_under1p10: msg = "the 'out' parameter is not supported" @@ -1195,14 +1195,14 @@ def test_idxmax(self): self.series[5:15] = np.NaN # skipna or no - self.assertEqual(self.series[self.series.idxmax()], self.series.max()) + assert self.series[self.series.idxmax()] == self.series.max() assert isnull(self.series.idxmax(skipna=False)) # no NaNs nona = self.series.dropna() - self.assertEqual(nona[nona.idxmax()], nona.max()) - self.assertEqual(nona.index.values.tolist().index(nona.idxmax()), - nona.values.argmax()) + assert nona[nona.idxmax()] == nona.max() + assert (nona.index.values.tolist().index(nona.idxmax()) == + nona.values.argmax()) # all NaNs allna = self.series * nan @@ -1211,32 +1211,32 @@ def test_idxmax(self): from pandas import date_range s = Series(date_range('20130102', periods=6)) result = s.idxmax() - self.assertEqual(result, 5) + assert result == 5 s[5] = np.nan result = s.idxmax() - self.assertEqual(result, 4) + assert result == 4 # Float64Index # GH 5914 s = pd.Series([1, 2, 3], [1.1, 2.1, 3.1]) result = s.idxmax() - self.assertEqual(result, 3.1) + assert result == 3.1 result = s.idxmin() - self.assertEqual(result, 1.1) + assert result == 1.1 s = pd.Series(s.index, s.index) result = s.idxmax() - self.assertEqual(result, 3.1) + assert result == 3.1 result = s.idxmin() - self.assertEqual(result, 1.1) + assert result == 1.1 def test_numpy_argmax(self): # argmax is aliased to idxmax data = np.random.randint(0, 11, size=10) result = np.argmax(Series(data)) - self.assertEqual(result, np.argmax(data)) + assert result == np.argmax(data) if not _np_version_under1p10: msg = "the 'out' parameter is not supported" @@ -1247,11 +1247,11 @@ def test_ptp(self): N = 1000 arr = np.random.randn(N) ser = Series(arr) - self.assertEqual(np.ptp(ser), np.ptp(arr)) + assert np.ptp(ser) == np.ptp(arr) # GH11163 s = Series([3, 5, np.nan, -3, 10]) - self.assertEqual(s.ptp(), 13) + assert s.ptp() == 13 assert pd.isnull(s.ptp(skipna=False)) mi = pd.MultiIndex.from_product([['a', 'b'], [1, 2, 3]]) @@ -1326,7 +1326,7 @@ def test_searchsorted_numeric_dtypes_scalar(self): s = Series([1, 2, 90, 1000, 3e9]) r = s.searchsorted(30) e = 2 - self.assertEqual(r, e) + assert r == e r = s.searchsorted([30]) e = np.array([2], dtype=np.intp) @@ -1343,7 +1343,7 @@ def test_search_sorted_datetime64_scalar(self): v = pd.Timestamp('20120102') r = s.searchsorted(v) e = 1 - 
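# ``idxmin``/``idxmax`` above are verified by indexing the Series with the
# returned label; illustrative sketch, not part of the changeset:
import numpy as np
from pandas import Series


def test_series_idx_extrema_sketch():
    s = Series([3.0, 1.0, np.nan, 7.0])

    assert s.idxmin() == 1
    assert s.idxmax() == 3
    assert s[s.idxmin()] == s.min()
    assert s[s.idxmax()] == s.max()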
self.assertEqual(r, e) + assert r == e def test_search_sorted_datetime64_list(self): s = Series(pd.date_range('20120101', periods=10, freq='2D')) @@ -1417,7 +1417,7 @@ def test_apply_categorical(self): result = s.apply(lambda x: 'A') exp = pd.Series(['A'] * 7, name='XX', index=list('abcdefg')) tm.assert_series_equal(result, exp) - self.assertEqual(result.dtype, np.object) + assert result.dtype == np.object def test_shift_int(self): ts = self.ts.astype(int) diff --git a/pandas/tests/series/test_api.py b/pandas/tests/series/test_api.py index 7d331f0643b18..5bb463c7a2ebe 100644 --- a/pandas/tests/series/test_api.py +++ b/pandas/tests/series/test_api.py @@ -23,11 +23,11 @@ class SharedWithSparse(object): def test_scalarop_preserve_name(self): result = self.ts * 2 - self.assertEqual(result.name, self.ts.name) + assert result.name == self.ts.name def test_copy_name(self): result = self.ts.copy() - self.assertEqual(result.name, self.ts.name) + assert result.name == self.ts.name def test_copy_index_name_checking(self): # don't want to be able to modify the index stored elsewhere after @@ -44,17 +44,17 @@ def test_copy_index_name_checking(self): def test_append_preserve_name(self): result = self.ts[:5].append(self.ts[5:]) - self.assertEqual(result.name, self.ts.name) + assert result.name == self.ts.name def test_binop_maybe_preserve_name(self): # names match, preserve result = self.ts * self.ts - self.assertEqual(result.name, self.ts.name) + assert result.name == self.ts.name result = self.ts.mul(self.ts) - self.assertEqual(result.name, self.ts.name) + assert result.name == self.ts.name result = self.ts * self.ts[:-2] - self.assertEqual(result.name, self.ts.name) + assert result.name == self.ts.name # names don't match, don't preserve cp = self.ts.copy() @@ -70,7 +70,7 @@ def test_binop_maybe_preserve_name(self): # names match, preserve s = self.ts.copy() result = getattr(s, op)(s) - self.assertEqual(result.name, self.ts.name) + assert result.name == self.ts.name # names don't match, don't preserve cp = self.ts.copy() @@ -80,17 +80,17 @@ def test_binop_maybe_preserve_name(self): def test_combine_first_name(self): result = self.ts.combine_first(self.ts[:5]) - self.assertEqual(result.name, self.ts.name) + assert result.name == self.ts.name def test_getitem_preserve_name(self): result = self.ts[self.ts > 0] - self.assertEqual(result.name, self.ts.name) + assert result.name == self.ts.name result = self.ts[[0, 2, 4]] - self.assertEqual(result.name, self.ts.name) + assert result.name == self.ts.name result = self.ts[5:10] - self.assertEqual(result.name, self.ts.name) + assert result.name == self.ts.name def test_pickle(self): unp_series = self._pickle_roundtrip(self.series) @@ -107,15 +107,15 @@ def _pickle_roundtrip(self, obj): def test_argsort_preserve_name(self): result = self.ts.argsort() - self.assertEqual(result.name, self.ts.name) + assert result.name == self.ts.name def test_sort_index_name(self): result = self.ts.sort_index(ascending=False) - self.assertEqual(result.name, self.ts.name) + assert result.name == self.ts.name def test_to_sparse_pass_name(self): result = self.ts.to_sparse() - self.assertEqual(result.name, self.ts.name) + assert result.name == self.ts.name class TestSeriesMisc(TestData, SharedWithSparse, tm.TestCase): @@ -158,46 +158,47 @@ def test_contains(self): def test_iter(self): for i, val in enumerate(self.series): - self.assertEqual(val, self.series[i]) + assert val == self.series[i] for i, val in enumerate(self.ts): - self.assertEqual(val, self.ts[i]) + assert val == 
self.ts[i] def test_iter_box(self): vals = [pd.Timestamp('2011-01-01'), pd.Timestamp('2011-01-02')] s = pd.Series(vals) - self.assertEqual(s.dtype, 'datetime64[ns]') + assert s.dtype == 'datetime64[ns]' for res, exp in zip(s, vals): assert isinstance(res, pd.Timestamp) - self.assertEqual(res, exp) assert res.tz is None + assert res == exp vals = [pd.Timestamp('2011-01-01', tz='US/Eastern'), pd.Timestamp('2011-01-02', tz='US/Eastern')] s = pd.Series(vals) - self.assertEqual(s.dtype, 'datetime64[ns, US/Eastern]') + + assert s.dtype == 'datetime64[ns, US/Eastern]' for res, exp in zip(s, vals): assert isinstance(res, pd.Timestamp) - self.assertEqual(res, exp) - self.assertEqual(res.tz, exp.tz) + assert res.tz == exp.tz + assert res == exp # timedelta vals = [pd.Timedelta('1 days'), pd.Timedelta('2 days')] s = pd.Series(vals) - self.assertEqual(s.dtype, 'timedelta64[ns]') + assert s.dtype == 'timedelta64[ns]' for res, exp in zip(s, vals): assert isinstance(res, pd.Timedelta) - self.assertEqual(res, exp) + assert res == exp # period (object dtype, not boxed) vals = [pd.Period('2011-01-01', freq='M'), pd.Period('2011-01-02', freq='M')] s = pd.Series(vals) - self.assertEqual(s.dtype, 'object') + assert s.dtype == 'object' for res, exp in zip(s, vals): assert isinstance(res, pd.Period) - self.assertEqual(res, exp) - self.assertEqual(res.freq, 'M') + assert res.freq == 'M' + assert res == exp def test_keys(self): # HACK: By doing this in two stages, we avoid 2to3 wrapping the call @@ -210,10 +211,10 @@ def test_values(self): def test_iteritems(self): for idx, val in compat.iteritems(self.series): - self.assertEqual(val, self.series[idx]) + assert val == self.series[idx] for idx, val in compat.iteritems(self.ts): - self.assertEqual(val, self.ts[idx]) + assert val == self.ts[idx] # assert is lazy (genrators don't define reverse, lists do) assert not hasattr(self.series.iteritems(), 'reverse') @@ -274,9 +275,9 @@ def test_copy(self): def test_axis_alias(self): s = Series([1, 2, np.nan]) assert_series_equal(s.dropna(axis='rows'), s.dropna(axis='index')) - self.assertEqual(s.dropna().sum('rows'), 3) - self.assertEqual(s._get_axis_number('rows'), 0) - self.assertEqual(s._get_axis_name('rows'), 'index') + assert s.dropna().sum('rows') == 3 + assert s._get_axis_number('rows') == 0 + assert s._get_axis_name('rows') == 'index' def test_numpy_unique(self): # it works! 
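# Iterating a datetime64/timedelta64 Series yields boxed pandas scalars,
# which is what ``test_iter_box`` above asserts; illustrative sketch, not
# part of the changeset:
import pandas as pd


def test_series_iter_box_sketch():
    s = pd.Series([pd.Timestamp('2011-01-01'), pd.Timestamp('2011-01-02')])
    assert s.dtype == 'datetime64[ns]'
    for res in s:
        assert isinstance(res, pd.Timestamp)

    s = pd.Series([pd.Timedelta('1 days'), pd.Timedelta('2 days')])
    assert s.dtype == 'timedelta64[ns]'
    for res in s:
        assert isinstance(res, pd.Timedelta)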
@@ -293,19 +294,19 @@ def f(x): result = tsdf.apply(f) expected = tsdf.max() - assert_series_equal(result, expected) + tm.assert_series_equal(result, expected) # .item() s = Series([1]) result = s.item() - self.assertEqual(result, 1) - self.assertEqual(s.item(), s.iloc[0]) + assert result == 1 + assert s.item() == s.iloc[0] # using an ndarray like function s = Series(np.random.randn(10)) - result = np.ones_like(s) + result = Series(np.ones_like(s)) expected = Series(1, index=range(10), dtype='float64') - # assert_series_equal(result,expected) + tm.assert_series_equal(result, expected) # ravel s = Series(np.random.randn(10)) @@ -315,21 +316,21 @@ def f(x): # GH 6658 s = Series([0, 1., -1], index=list('abc')) result = np.compress(s > 0, s) - assert_series_equal(result, Series([1.], index=['b'])) + tm.assert_series_equal(result, Series([1.], index=['b'])) result = np.compress(s < -1, s) # result empty Index(dtype=object) as the same as original exp = Series([], dtype='float64', index=Index([], dtype='object')) - assert_series_equal(result, exp) + tm.assert_series_equal(result, exp) s = Series([0, 1., -1], index=[.1, .2, .3]) result = np.compress(s > 0, s) - assert_series_equal(result, Series([1.], index=[.2])) + tm.assert_series_equal(result, Series([1.], index=[.2])) result = np.compress(s < -1, s) # result empty Float64Index as the same as original exp = Series([], dtype='float64', index=Index([], dtype='float64')) - assert_series_equal(result, exp) + tm.assert_series_equal(result, exp) def test_str_attribute(self): # GH9068 diff --git a/pandas/tests/series/test_apply.py b/pandas/tests/series/test_apply.py index c764d7b856bb8..089a2c36a5574 100644 --- a/pandas/tests/series/test_apply.py +++ b/pandas/tests/series/test_apply.py @@ -61,27 +61,27 @@ def test_apply_dont_convert_dtype(self): f = lambda x: x if x > 0 else np.nan result = s.apply(f, convert_dtype=False) - self.assertEqual(result.dtype, object) + assert result.dtype == object def test_with_string_args(self): for arg in ['sum', 'mean', 'min', 'max', 'std']: result = self.ts.apply(arg) expected = getattr(self.ts, arg)() - self.assertEqual(result, expected) + assert result == expected def test_apply_args(self): s = Series(['foo,bar']) result = s.apply(str.split, args=(',', )) - self.assertEqual(result[0], ['foo', 'bar']) + assert result[0] == ['foo', 'bar'] assert isinstance(result[0], list) def test_apply_box(self): # ufunc will not be boxed. 
Same test cases as the test_map_box vals = [pd.Timestamp('2011-01-01'), pd.Timestamp('2011-01-02')] s = pd.Series(vals) - self.assertEqual(s.dtype, 'datetime64[ns]') + assert s.dtype == 'datetime64[ns]' # boxed value must be Timestamp instance res = s.apply(lambda x: '{0}_{1}_{2}'.format(x.__class__.__name__, x.day, x.tz)) @@ -91,7 +91,7 @@ def test_apply_box(self): vals = [pd.Timestamp('2011-01-01', tz='US/Eastern'), pd.Timestamp('2011-01-02', tz='US/Eastern')] s = pd.Series(vals) - self.assertEqual(s.dtype, 'datetime64[ns, US/Eastern]') + assert s.dtype == 'datetime64[ns, US/Eastern]' res = s.apply(lambda x: '{0}_{1}_{2}'.format(x.__class__.__name__, x.day, x.tz)) exp = pd.Series(['Timestamp_1_US/Eastern', 'Timestamp_2_US/Eastern']) @@ -100,7 +100,7 @@ def test_apply_box(self): # timedelta vals = [pd.Timedelta('1 days'), pd.Timedelta('2 days')] s = pd.Series(vals) - self.assertEqual(s.dtype, 'timedelta64[ns]') + assert s.dtype == 'timedelta64[ns]' res = s.apply(lambda x: '{0}_{1}'.format(x.__class__.__name__, x.days)) exp = pd.Series(['Timedelta_1', 'Timedelta_2']) tm.assert_series_equal(res, exp) @@ -109,7 +109,7 @@ def test_apply_box(self): vals = [pd.Period('2011-01-01', freq='M'), pd.Period('2011-01-02', freq='M')] s = pd.Series(vals) - self.assertEqual(s.dtype, 'object') + assert s.dtype == 'object' res = s.apply(lambda x: '{0}_{1}'.format(x.__class__.__name__, x.freqstr)) exp = pd.Series(['Period_M', 'Period_M']) @@ -318,13 +318,13 @@ def test_map(self): merged = target.map(source) for k, v in compat.iteritems(merged): - self.assertEqual(v, source[target[k]]) + assert v == source[target[k]] # input could be a dict merged = target.map(source.to_dict()) for k, v in compat.iteritems(merged): - self.assertEqual(v, source[target[k]]) + assert v == source[target[k]] # function result = self.ts.map(lambda x: x * 2) @@ -372,11 +372,11 @@ def test_map_int(self): left = Series({'a': 1., 'b': 2., 'c': 3., 'd': 4}) right = Series({1: 11, 2: 22, 3: 33}) - self.assertEqual(left.dtype, np.float_) + assert left.dtype == np.float_ assert issubclass(right.dtype.type, np.integer) merged = left.map(right) - self.assertEqual(merged.dtype, np.float_) + assert merged.dtype == np.float_ assert isnull(merged['d']) assert not isnull(merged['c']) @@ -389,7 +389,7 @@ def test_map_decimal(self): from decimal import Decimal result = self.series.map(lambda x: Decimal(str(x))) - self.assertEqual(result.dtype, np.object_) + assert result.dtype == np.object_ assert isinstance(result[0], Decimal) def test_map_na_exclusion(self): @@ -457,7 +457,7 @@ class DictWithoutMissing(dict): def test_map_box(self): vals = [pd.Timestamp('2011-01-01'), pd.Timestamp('2011-01-02')] s = pd.Series(vals) - self.assertEqual(s.dtype, 'datetime64[ns]') + assert s.dtype == 'datetime64[ns]' # boxed value must be Timestamp instance res = s.map(lambda x: '{0}_{1}_{2}'.format(x.__class__.__name__, x.day, x.tz)) @@ -467,7 +467,7 @@ def test_map_box(self): vals = [pd.Timestamp('2011-01-01', tz='US/Eastern'), pd.Timestamp('2011-01-02', tz='US/Eastern')] s = pd.Series(vals) - self.assertEqual(s.dtype, 'datetime64[ns, US/Eastern]') + assert s.dtype == 'datetime64[ns, US/Eastern]' res = s.map(lambda x: '{0}_{1}_{2}'.format(x.__class__.__name__, x.day, x.tz)) exp = pd.Series(['Timestamp_1_US/Eastern', 'Timestamp_2_US/Eastern']) @@ -476,7 +476,7 @@ def test_map_box(self): # timedelta vals = [pd.Timedelta('1 days'), pd.Timedelta('2 days')] s = pd.Series(vals) - self.assertEqual(s.dtype, 'timedelta64[ns]') + assert s.dtype == 'timedelta64[ns]' res = 
s.map(lambda x: '{0}_{1}'.format(x.__class__.__name__, x.days)) exp = pd.Series(['Timedelta_1', 'Timedelta_2']) tm.assert_series_equal(res, exp) @@ -485,7 +485,7 @@ def test_map_box(self): vals = [pd.Period('2011-01-01', freq='M'), pd.Period('2011-01-02', freq='M')] s = pd.Series(vals) - self.assertEqual(s.dtype, 'object') + assert s.dtype == 'object' res = s.map(lambda x: '{0}_{1}'.format(x.__class__.__name__, x.freqstr)) exp = pd.Series(['Period_M', 'Period_M']) @@ -506,7 +506,7 @@ def test_map_categorical(self): result = s.map(lambda x: 'A') exp = pd.Series(['A'] * 7, name='XX', index=list('abcdefg')) tm.assert_series_equal(result, exp) - self.assertEqual(result.dtype, np.object) + assert result.dtype == np.object with pytest.raises(NotImplementedError): s.map(lambda x: x, na_action='ignore') diff --git a/pandas/tests/series/test_asof.py b/pandas/tests/series/test_asof.py index 80556a5e5ffdb..a839d571c116c 100644 --- a/pandas/tests/series/test_asof.py +++ b/pandas/tests/series/test_asof.py @@ -37,7 +37,7 @@ def test_basic(self): assert (rs == ts[lb]).all() val = result[result.index[result.index >= ub][0]] - self.assertEqual(ts[ub], val) + assert ts[ub] == val def test_scalar(self): @@ -50,16 +50,16 @@ def test_scalar(self): val1 = ts.asof(ts.index[7]) val2 = ts.asof(ts.index[19]) - self.assertEqual(val1, ts[4]) - self.assertEqual(val2, ts[14]) + assert val1 == ts[4] + assert val2 == ts[14] # accepts strings val1 = ts.asof(str(ts.index[7])) - self.assertEqual(val1, ts[4]) + assert val1 == ts[4] # in there result = ts.asof(ts.index[3]) - self.assertEqual(result, ts[3]) + assert result == ts[3] # no as of value d = ts.index[0] - offsets.BDay() @@ -118,15 +118,15 @@ def test_periodindex(self): val1 = ts.asof(ts.index[7]) val2 = ts.asof(ts.index[19]) - self.assertEqual(val1, ts[4]) - self.assertEqual(val2, ts[14]) + assert val1 == ts[4] + assert val2 == ts[14] # accepts strings val1 = ts.asof(str(ts.index[7])) - self.assertEqual(val1, ts[4]) + assert val1 == ts[4] # in there - self.assertEqual(ts.asof(ts.index[3]), ts[3]) + assert ts.asof(ts.index[3]) == ts[3] # no as of value d = ts.index[0].to_timestamp() - offsets.BDay() diff --git a/pandas/tests/series/test_combine_concat.py b/pandas/tests/series/test_combine_concat.py index 6042a8c0a2e9d..1291449ae7ce9 100644 --- a/pandas/tests/series/test_combine_concat.py +++ b/pandas/tests/series/test_combine_concat.py @@ -24,9 +24,9 @@ def test_append(self): appendedSeries = self.series.append(self.objSeries) for idx, value in compat.iteritems(appendedSeries): if idx in self.series.index: - self.assertEqual(value, self.series[idx]) + assert value == self.series[idx] elif idx in self.objSeries.index: - self.assertEqual(value, self.objSeries[idx]) + assert value == self.objSeries[idx] else: self.fail("orphaned index!") @@ -117,9 +117,9 @@ def test_concat_empty_series_dtypes_roundtrips(self): 'M8[ns]']) for dtype in dtypes: - self.assertEqual(pd.concat([Series(dtype=dtype)]).dtype, dtype) - self.assertEqual(pd.concat([Series(dtype=dtype), - Series(dtype=dtype)]).dtype, dtype) + assert pd.concat([Series(dtype=dtype)]).dtype == dtype + assert pd.concat([Series(dtype=dtype), + Series(dtype=dtype)]).dtype == dtype def int_result_type(dtype, dtype2): typs = set([dtype.kind, dtype2.kind]) @@ -155,55 +155,52 @@ def get_result_type(dtype, dtype2): expected = get_result_type(dtype, dtype2) result = pd.concat([Series(dtype=dtype), Series(dtype=dtype2) ]).dtype - self.assertEqual(result.kind, expected) + assert result.kind == expected def 
test_concat_empty_series_dtypes(self): - # bools - self.assertEqual(pd.concat([Series(dtype=np.bool_), - Series(dtype=np.int32)]).dtype, np.int32) - self.assertEqual(pd.concat([Series(dtype=np.bool_), - Series(dtype=np.float32)]).dtype, - np.object_) - - # datetimelike - self.assertEqual(pd.concat([Series(dtype='m8[ns]'), - Series(dtype=np.bool)]).dtype, np.object_) - self.assertEqual(pd.concat([Series(dtype='m8[ns]'), - Series(dtype=np.int64)]).dtype, np.object_) - self.assertEqual(pd.concat([Series(dtype='M8[ns]'), - Series(dtype=np.bool)]).dtype, np.object_) - self.assertEqual(pd.concat([Series(dtype='M8[ns]'), - Series(dtype=np.int64)]).dtype, np.object_) - self.assertEqual(pd.concat([Series(dtype='M8[ns]'), - Series(dtype=np.bool_), - Series(dtype=np.int64)]).dtype, np.object_) + # booleans + assert pd.concat([Series(dtype=np.bool_), + Series(dtype=np.int32)]).dtype == np.int32 + assert pd.concat([Series(dtype=np.bool_), + Series(dtype=np.float32)]).dtype == np.object_ + + # datetime-like + assert pd.concat([Series(dtype='m8[ns]'), + Series(dtype=np.bool)]).dtype == np.object_ + assert pd.concat([Series(dtype='m8[ns]'), + Series(dtype=np.int64)]).dtype == np.object_ + assert pd.concat([Series(dtype='M8[ns]'), + Series(dtype=np.bool)]).dtype == np.object_ + assert pd.concat([Series(dtype='M8[ns]'), + Series(dtype=np.int64)]).dtype == np.object_ + assert pd.concat([Series(dtype='M8[ns]'), + Series(dtype=np.bool_), + Series(dtype=np.int64)]).dtype == np.object_ # categorical - self.assertEqual(pd.concat([Series(dtype='category'), - Series(dtype='category')]).dtype, - 'category') - self.assertEqual(pd.concat([Series(dtype='category'), - Series(dtype='float64')]).dtype, - 'float64') - self.assertEqual(pd.concat([Series(dtype='category'), - Series(dtype='object')]).dtype, 'object') + assert pd.concat([Series(dtype='category'), + Series(dtype='category')]).dtype == 'category' + assert pd.concat([Series(dtype='category'), + Series(dtype='float64')]).dtype == 'float64' + assert pd.concat([Series(dtype='category'), + Series(dtype='object')]).dtype == 'object' # sparse result = pd.concat([Series(dtype='float64').to_sparse(), Series( dtype='float64').to_sparse()]) - self.assertEqual(result.dtype, np.float64) - self.assertEqual(result.ftype, 'float64:sparse') + assert result.dtype == np.float64 + assert result.ftype == 'float64:sparse' result = pd.concat([Series(dtype='float64').to_sparse(), Series( dtype='float64')]) - self.assertEqual(result.dtype, np.float64) - self.assertEqual(result.ftype, 'float64:sparse') + assert result.dtype == np.float64 + assert result.ftype == 'float64:sparse' result = pd.concat([Series(dtype='float64').to_sparse(), Series( dtype='object')]) - self.assertEqual(result.dtype, np.object_) - self.assertEqual(result.ftype, 'object:dense') + assert result.dtype == np.object_ + assert result.ftype == 'object:dense' def test_combine_first_dt64(self): from pandas.core.tools.datetimes import to_datetime @@ -245,7 +242,7 @@ def test_append_concat(self): rng2 = rng.copy() rng1.name = 'foo' rng2.name = 'bar' - self.assertEqual(rng1.append(rng1).name, 'foo') + assert rng1.append(rng1).name == 'foo' assert rng1.append(rng2).name is None def test_append_concat_tz(self): diff --git a/pandas/tests/series/test_constructors.py b/pandas/tests/series/test_constructors.py index 966861fe3c1e4..a0a68a332f735 100644 --- a/pandas/tests/series/test_constructors.py +++ b/pandas/tests/series/test_constructors.py @@ -58,11 +58,11 @@ def test_constructor(self): assert tm.equalContents(derived.index, 
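# The empty-concat checks above compare the resulting dtype directly; a
# trimmed illustrative sketch, not part of the changeset, restricted to the
# least version-sensitive cases:
import numpy as np
import pandas as pd
from pandas import Series


def test_concat_empty_dtypes_sketch():
    assert pd.concat([Series(dtype=np.float64)]).dtype == np.float64
    assert pd.concat([Series(dtype='float64'),
                      Series(dtype='float64')]).dtype == 'float64'
    assert pd.concat([Series(dtype='category'),
                      Series(dtype='category')]).dtype == 'category'
    assert pd.concat([Series(dtype='category'),
                      Series(dtype='object')]).dtype == 'object'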
self.ts.index) # Ensure new index is not created - self.assertEqual(id(self.ts.index), id(derived.index)) + assert id(self.ts.index) == id(derived.index) # Mixed type Series mixed = Series(['hello', np.NaN], index=[0, 1]) - self.assertEqual(mixed.dtype, np.object_) + assert mixed.dtype == np.object_ assert mixed[1] is np.NaN assert not self.empty.index.is_all_dates @@ -73,7 +73,7 @@ def test_constructor(self): mixed.name = 'Series' rs = Series(mixed).name xp = 'Series' - self.assertEqual(rs, xp) + assert rs == xp # raise on MultiIndex GH4187 m = MultiIndex.from_arrays([[1, 2], [3, 4]]) @@ -248,10 +248,10 @@ def test_constructor_corner(self): def test_constructor_sanitize(self): s = Series(np.array([1., 1., 8.]), dtype='i8') - self.assertEqual(s.dtype, np.dtype('i8')) + assert s.dtype == np.dtype('i8') s = Series(np.array([1., 1., np.nan]), copy=True, dtype='i8') - self.assertEqual(s.dtype, np.dtype('f8')) + assert s.dtype == np.dtype('f8') def test_constructor_copy(self): # GH15125 @@ -266,15 +266,15 @@ def test_constructor_copy(self): # changes to origin of copy does not affect the copy x[0] = 2. assert not x.equals(y) - self.assertEqual(x[0], 2.) - self.assertEqual(y[0], 1.) + assert x[0] == 2. + assert y[0] == 1. def test_constructor_pass_none(self): s = Series(None, index=lrange(5)) - self.assertEqual(s.dtype, np.float64) + assert s.dtype == np.float64 s = Series(None, index=lrange(5), dtype=object) - self.assertEqual(s.dtype, np.object_) + assert s.dtype == np.object_ # GH 7431 # inference on the index @@ -285,12 +285,12 @@ def test_constructor_pass_none(self): def test_constructor_pass_nan_nat(self): # GH 13467 exp = Series([np.nan, np.nan], dtype=np.float64) - self.assertEqual(exp.dtype, np.float64) + assert exp.dtype == np.float64 tm.assert_series_equal(Series([np.nan, np.nan]), exp) tm.assert_series_equal(Series(np.array([np.nan, np.nan])), exp) exp = Series([pd.NaT, pd.NaT]) - self.assertEqual(exp.dtype, 'datetime64[ns]') + assert exp.dtype == 'datetime64[ns]' tm.assert_series_equal(Series([pd.NaT, pd.NaT]), exp) tm.assert_series_equal(Series(np.array([pd.NaT, pd.NaT])), exp) @@ -310,7 +310,7 @@ def test_constructor_dtype_nocast(self): s2 = Series(s, dtype=np.int64) s2[1] = 5 - self.assertEqual(s[1], 5) + assert s[1] == 5 def test_constructor_datelike_coercion(self): @@ -318,8 +318,8 @@ def test_constructor_datelike_coercion(self): # incorrectly infering on dateimelike looking when object dtype is # specified s = Series([Timestamp('20130101'), 'NOV'], dtype=object) - self.assertEqual(s.iloc[0], Timestamp('20130101')) - self.assertEqual(s.iloc[1], 'NOV') + assert s.iloc[0] == Timestamp('20130101') + assert s.iloc[1] == 'NOV' assert s.dtype == object # the dtype was being reset on the slicing and re-inferred to datetime @@ -361,11 +361,11 @@ def test_constructor_dtype_datetime64(self): s = Series([datetime(2001, 1, 2, 0, 0), iNaT], dtype='M8[ns]') assert isnull(s[1]) - self.assertEqual(s.dtype, 'M8[ns]') + assert s.dtype == 'M8[ns]' s = Series([datetime(2001, 1, 2, 0, 0), nan], dtype='M8[ns]') assert isnull(s[1]) - self.assertEqual(s.dtype, 'M8[ns]') + assert s.dtype == 'M8[ns]' # GH3416 dates = [ @@ -375,10 +375,10 @@ def test_constructor_dtype_datetime64(self): ] s = Series(dates) - self.assertEqual(s.dtype, 'M8[ns]') + assert s.dtype == 'M8[ns]' s.iloc[0] = np.nan - self.assertEqual(s.dtype, 'M8[ns]') + assert s.dtype == 'M8[ns]' # invalid astypes for t in ['s', 'D', 'us', 'ms']: @@ -392,15 +392,15 @@ def test_constructor_dtype_datetime64(self): # invalid dates can be help as 
object result = Series([datetime(2, 1, 1)]) - self.assertEqual(result[0], datetime(2, 1, 1, 0, 0)) + assert result[0] == datetime(2, 1, 1, 0, 0) result = Series([datetime(3000, 1, 1)]) - self.assertEqual(result[0], datetime(3000, 1, 1, 0, 0)) + assert result[0] == datetime(3000, 1, 1, 0, 0) # don't mix types result = Series([Timestamp('20130101'), 1], index=['a', 'b']) - self.assertEqual(result['a'], Timestamp('20130101')) - self.assertEqual(result['b'], 1) + assert result['a'] == Timestamp('20130101') + assert result['b'] == 1 # GH6529 # coerce datetime64 non-ns properly @@ -426,17 +426,17 @@ def test_constructor_dtype_datetime64(self): dtype=object) series1 = Series(dates2, dates) tm.assert_numpy_array_equal(series1.values, dates2) - self.assertEqual(series1.dtype, object) + assert series1.dtype == object # these will correctly infer a datetime s = Series([None, pd.NaT, '2013-08-05 15:30:00.000001']) - self.assertEqual(s.dtype, 'datetime64[ns]') + assert s.dtype == 'datetime64[ns]' s = Series([np.nan, pd.NaT, '2013-08-05 15:30:00.000001']) - self.assertEqual(s.dtype, 'datetime64[ns]') + assert s.dtype == 'datetime64[ns]' s = Series([pd.NaT, None, '2013-08-05 15:30:00.000001']) - self.assertEqual(s.dtype, 'datetime64[ns]') + assert s.dtype == 'datetime64[ns]' s = Series([pd.NaT, np.nan, '2013-08-05 15:30:00.000001']) - self.assertEqual(s.dtype, 'datetime64[ns]') + assert s.dtype == 'datetime64[ns]' # tz-aware (UTC and other tz's) # GH 8411 @@ -488,11 +488,11 @@ def test_constructor_with_datetime_tz(self): # indexing result = s.iloc[0] - self.assertEqual(result, Timestamp('2013-01-01 00:00:00-0500', - tz='US/Eastern', freq='D')) + assert result == Timestamp('2013-01-01 00:00:00-0500', + tz='US/Eastern', freq='D') result = s[0] - self.assertEqual(result, Timestamp('2013-01-01 00:00:00-0500', - tz='US/Eastern', freq='D')) + assert result == Timestamp('2013-01-01 00:00:00-0500', + tz='US/Eastern', freq='D') result = s[Series([True, True, False], index=s.index)] assert_series_equal(result, s[0:2]) @@ -589,7 +589,7 @@ def test_constructor_periodindex(self): expected = Series(pi.asobject) assert_series_equal(s, expected) - self.assertEqual(s.dtype, 'object') + assert s.dtype == 'object' def test_constructor_dict(self): d = {'a': 0., 'b': 1., 'c': 2.} @@ -693,12 +693,12 @@ class A(OrderedDict): def test_constructor_list_of_tuples(self): data = [(1, 1), (2, 2), (2, 3)] s = Series(data) - self.assertEqual(list(s), data) + assert list(s) == data def test_constructor_tuple_of_tuples(self): data = ((1, 1), (2, 2), (2, 3)) s = Series(data) - self.assertEqual(tuple(s), data) + assert tuple(s) == data def test_constructor_set(self): values = set([1, 2, 3, 4, 5]) @@ -714,80 +714,80 @@ def test_fromDict(self): data = {'a': 0, 'b': '1', 'c': '2', 'd': datetime.now()} series = Series(data) - self.assertEqual(series.dtype, np.object_) + assert series.dtype == np.object_ data = {'a': 0, 'b': '1', 'c': '2', 'd': '3'} series = Series(data) - self.assertEqual(series.dtype, np.object_) + assert series.dtype == np.object_ data = {'a': '0', 'b': '1'} series = Series(data, dtype=float) - self.assertEqual(series.dtype, np.float64) + assert series.dtype == np.float64 def test_fromValue(self): nans = Series(np.NaN, index=self.ts.index) - self.assertEqual(nans.dtype, np.float_) - self.assertEqual(len(nans), len(self.ts)) + assert nans.dtype == np.float_ + assert len(nans) == len(self.ts) strings = Series('foo', index=self.ts.index) - self.assertEqual(strings.dtype, np.object_) - self.assertEqual(len(strings), 
len(self.ts)) + assert strings.dtype == np.object_ + assert len(strings) == len(self.ts) d = datetime.now() dates = Series(d, index=self.ts.index) - self.assertEqual(dates.dtype, 'M8[ns]') - self.assertEqual(len(dates), len(self.ts)) + assert dates.dtype == 'M8[ns]' + assert len(dates) == len(self.ts) # GH12336 # Test construction of categorical series from value categorical = Series(0, index=self.ts.index, dtype="category") expected = Series(0, index=self.ts.index).astype("category") - self.assertEqual(categorical.dtype, 'category') - self.assertEqual(len(categorical), len(self.ts)) + assert categorical.dtype == 'category' + assert len(categorical) == len(self.ts) tm.assert_series_equal(categorical, expected) def test_constructor_dtype_timedelta64(self): # basic td = Series([timedelta(days=i) for i in range(3)]) - self.assertEqual(td.dtype, 'timedelta64[ns]') + assert td.dtype == 'timedelta64[ns]' td = Series([timedelta(days=1)]) - self.assertEqual(td.dtype, 'timedelta64[ns]') + assert td.dtype == 'timedelta64[ns]' td = Series([timedelta(days=1), timedelta(days=2), np.timedelta64( 1, 's')]) - self.assertEqual(td.dtype, 'timedelta64[ns]') + assert td.dtype == 'timedelta64[ns]' # mixed with NaT td = Series([timedelta(days=1), NaT], dtype='m8[ns]') - self.assertEqual(td.dtype, 'timedelta64[ns]') + assert td.dtype == 'timedelta64[ns]' td = Series([timedelta(days=1), np.nan], dtype='m8[ns]') - self.assertEqual(td.dtype, 'timedelta64[ns]') + assert td.dtype == 'timedelta64[ns]' td = Series([np.timedelta64(300000000), pd.NaT], dtype='m8[ns]') - self.assertEqual(td.dtype, 'timedelta64[ns]') + assert td.dtype == 'timedelta64[ns]' # improved inference # GH5689 td = Series([np.timedelta64(300000000), NaT]) - self.assertEqual(td.dtype, 'timedelta64[ns]') + assert td.dtype == 'timedelta64[ns]' # because iNaT is int, not coerced to timedelta td = Series([np.timedelta64(300000000), iNaT]) - self.assertEqual(td.dtype, 'object') + assert td.dtype == 'object' td = Series([np.timedelta64(300000000), np.nan]) - self.assertEqual(td.dtype, 'timedelta64[ns]') + assert td.dtype == 'timedelta64[ns]' td = Series([pd.NaT, np.timedelta64(300000000)]) - self.assertEqual(td.dtype, 'timedelta64[ns]') + assert td.dtype == 'timedelta64[ns]' td = Series([np.timedelta64(1, 's')]) - self.assertEqual(td.dtype, 'timedelta64[ns]') + assert td.dtype == 'timedelta64[ns]' # these are frequency conversion astypes # for t in ['s', 'D', 'us', 'ms']: @@ -807,17 +807,17 @@ def f(): # leave as object here td = Series([timedelta(days=i) for i in range(3)] + ['foo']) - self.assertEqual(td.dtype, 'object') + assert td.dtype == 'object' # these will correctly infer a timedelta s = Series([None, pd.NaT, '1 Day']) - self.assertEqual(s.dtype, 'timedelta64[ns]') + assert s.dtype == 'timedelta64[ns]' s = Series([np.nan, pd.NaT, '1 Day']) - self.assertEqual(s.dtype, 'timedelta64[ns]') + assert s.dtype == 'timedelta64[ns]' s = Series([pd.NaT, None, '1 Day']) - self.assertEqual(s.dtype, 'timedelta64[ns]') + assert s.dtype == 'timedelta64[ns]' s = Series([pd.NaT, np.nan, '1 Day']) - self.assertEqual(s.dtype, 'timedelta64[ns]') + assert s.dtype == 'timedelta64[ns]' def test_NaT_scalar(self): series = Series([0, 1000, 2000, iNaT], dtype='M8[ns]') @@ -838,7 +838,7 @@ def test_constructor_name_hashable(self): for n in [777, 777., 'name', datetime(2001, 11, 11), (1, ), u"\u05D0"]: for data in [[1, 2, 3], np.ones(3), {'a': 0, 'b': 1}]: s = Series(data, name=n) - self.assertEqual(s.name, n) + assert s.name == n def test_constructor_name_unhashable(self): 
for n in [['name_list'], np.ones(2), {1: 2}]: @@ -847,7 +847,7 @@ def test_constructor_name_unhashable(self): def test_auto_conversion(self): series = Series(list(date_range('1/1/2000', periods=10))) - self.assertEqual(series.dtype, 'M8[ns]') + assert series.dtype == 'M8[ns]' def test_constructor_cant_cast_datetime64(self): msg = "Cannot cast datetime64 to " diff --git a/pandas/tests/series/test_datetime_values.py b/pandas/tests/series/test_datetime_values.py index 13fa3bc782f89..50914eef1abc8 100644 --- a/pandas/tests/series/test_datetime_values.py +++ b/pandas/tests/series/test_datetime_values.py @@ -50,7 +50,7 @@ def compare(s, name): a = getattr(s.dt, prop) b = get_expected(s, prop) if not (is_list_like(a) and is_list_like(b)): - self.assertEqual(a, b) + assert a == b else: tm.assert_series_equal(a, b) @@ -79,10 +79,9 @@ def compare(s, name): tm.assert_series_equal(result, expected) tz_result = result.dt.tz - self.assertEqual(str(tz_result), 'US/Eastern') + assert str(tz_result) == 'US/Eastern' freq_result = s.dt.freq - self.assertEqual(freq_result, DatetimeIndex(s.values, - freq='infer').freq) + assert freq_result == DatetimeIndex(s.values, freq='infer').freq # let's localize, then convert result = s.dt.tz_localize('UTC').dt.tz_convert('US/Eastern') @@ -149,12 +148,11 @@ def compare(s, name): tm.assert_series_equal(result, expected) tz_result = result.dt.tz - self.assertEqual(str(tz_result), 'CET') + assert str(tz_result) == 'CET' freq_result = s.dt.freq - self.assertEqual(freq_result, DatetimeIndex(s.values, - freq='infer').freq) + assert freq_result == DatetimeIndex(s.values, freq='infer').freq - # timedeltaindex + # timedelta index cases = [Series(timedelta_range('1 day', periods=5), index=list('abcde'), name='xxx'), Series(timedelta_range('1 day 01:23:45', periods=5, @@ -183,8 +181,7 @@ def compare(s, name): assert result.dtype == 'float64' freq_result = s.dt.freq - self.assertEqual(freq_result, TimedeltaIndex(s.values, - freq='infer').freq) + assert freq_result == TimedeltaIndex(s.values, freq='infer').freq # both index = date_range('20130101', periods=3, freq='D') @@ -218,7 +215,7 @@ def compare(s, name): getattr(s.dt, prop) freq_result = s.dt.freq - self.assertEqual(freq_result, PeriodIndex(s.values).freq) + assert freq_result == PeriodIndex(s.values).freq # test limited display api def get_dir(s): @@ -387,7 +384,7 @@ def test_sub_of_datetime_from_TimeSeries(self): b = datetime(1993, 6, 22, 13, 30) a = Series([a]) result = to_timedelta(np.abs(a - b)) - self.assertEqual(result.dtype, 'timedelta64[ns]') + assert result.dtype == 'timedelta64[ns]' def test_between(self): s = Series(bdate_range('1/1/2000', periods=20).asobject) diff --git a/pandas/tests/series/test_indexing.py b/pandas/tests/series/test_indexing.py index 954e80facf848..9f5d80411ed17 100644 --- a/pandas/tests/series/test_indexing.py +++ b/pandas/tests/series/test_indexing.py @@ -41,7 +41,7 @@ def test_get(self): result = s.get(25, 0) expected = 0 - self.assertEqual(result, expected) + assert result == expected s = Series(np.array([43, 48, 60, 48, 50, 51, 50, 45, 57, 48, 56, 45, 51, 39, 55, 43, 54, 52, 51, 54]), @@ -54,21 +54,21 @@ def test_get(self): result = s.get(25, 0) expected = 43 - self.assertEqual(result, expected) + assert result == expected # GH 7407 # with a boolean accessor df = pd.DataFrame({'i': [0] * 3, 'b': [False] * 3}) vc = df.i.value_counts() result = vc.get(99, default='Missing') - self.assertEqual(result, 'Missing') + assert result == 'Missing' vc = df.b.value_counts() result = vc.get(False, 
default='Missing') - self.assertEqual(result, 3) + assert result == 3 result = vc.get(True, default='Missing') - self.assertEqual(result, 'Missing') + assert result == 'Missing' def test_delitem(self): @@ -137,7 +137,7 @@ def test_pop(self): k = df.iloc[4] result = k.pop('B') - self.assertEqual(result, 4) + assert result == 4 expected = Series([0, 0], index=['A', 'C'], name=4) assert_series_equal(k, expected) @@ -146,15 +146,14 @@ def test_getitem_get(self): idx1 = self.series.index[5] idx2 = self.objSeries.index[5] - self.assertEqual(self.series[idx1], self.series.get(idx1)) - self.assertEqual(self.objSeries[idx2], self.objSeries.get(idx2)) + assert self.series[idx1] == self.series.get(idx1) + assert self.objSeries[idx2] == self.objSeries.get(idx2) - self.assertEqual(self.series[idx1], self.series[5]) - self.assertEqual(self.objSeries[idx2], self.objSeries[5]) + assert self.series[idx1] == self.series[5] + assert self.objSeries[idx2] == self.objSeries[5] - self.assertEqual( - self.series.get(-1), self.series.get(self.series.index[-1])) - self.assertEqual(self.series[5], self.series.get(self.series.index[5])) + assert self.series.get(-1) == self.series.get(self.series.index[-1]) + assert self.series[5] == self.series.get(self.series.index[5]) # missing d = self.ts.index[0] - BDay() @@ -191,7 +190,7 @@ def test_iloc(self): def test_iloc_nonunique(self): s = Series([0, 1, 2], index=[0, 1, 0]) - self.assertEqual(s.iloc[2], 2) + assert s.iloc[2] == 2 def test_getitem_regression(self): s = Series(lrange(5), index=lrange(5)) @@ -218,15 +217,15 @@ def test_getitem_setitem_slice_bug(self): def test_getitem_int64(self): idx = np.int64(5) - self.assertEqual(self.ts[idx], self.ts[5]) + assert self.ts[idx] == self.ts[5] def test_getitem_fancy(self): slice1 = self.series[[1, 2, 3]] slice2 = self.objSeries[[1, 2, 3]] - self.assertEqual(self.series.index[2], slice1.index[1]) - self.assertEqual(self.objSeries.index[2], slice2.index[1]) - self.assertEqual(self.series[2], slice1[1]) - self.assertEqual(self.objSeries[2], slice2[1]) + assert self.series.index[2] == slice1.index[1] + assert self.objSeries.index[2] == slice2.index[1] + assert self.series[2] == slice1[1] + assert self.objSeries[2] == slice2[1] def test_getitem_boolean(self): s = self.series @@ -242,8 +241,8 @@ def test_getitem_boolean_empty(self): s = Series([], dtype=np.int64) s.index.name = 'index_name' s = s[s.isnull()] - self.assertEqual(s.index.name, 'index_name') - self.assertEqual(s.dtype, np.int64) + assert s.index.name == 'index_name' + assert s.dtype == np.int64 # GH5877 # indexing with empty series @@ -421,7 +420,7 @@ def test_getitem_setitem_datetimeindex(self): result = ts["1990-01-01 04:00:00"] expected = ts[4] - self.assertEqual(result, expected) + assert result == expected result = ts.copy() result["1990-01-01 04:00:00"] = 0 @@ -446,7 +445,7 @@ def test_getitem_setitem_datetimeindex(self): # repeat all the above with naive datetimes result = ts[datetime(1990, 1, 1, 4)] expected = ts[4] - self.assertEqual(result, expected) + assert result == expected result = ts.copy() result[datetime(1990, 1, 1, 4)] = 0 @@ -470,7 +469,7 @@ def test_getitem_setitem_datetimeindex(self): result = ts[ts.index[4]] expected = ts[4] - self.assertEqual(result, expected) + assert result == expected result = ts[ts.index[4:8]] expected = ts[4:8] @@ -500,7 +499,7 @@ def test_getitem_setitem_periodindex(self): result = ts["1990-01-01 04"] expected = ts[4] - self.assertEqual(result, expected) + assert result == expected result = ts.copy() result["1990-01-01 
04"] = 0 @@ -525,7 +524,7 @@ def test_getitem_setitem_periodindex(self): # GH 2782 result = ts[ts.index[4]] expected = ts[4] - self.assertEqual(result, expected) + assert result == expected result = ts[ts.index[4:8]] expected = ts[4:8] @@ -557,7 +556,7 @@ def test_getitem_setitem_integers(self): # caused bug without test s = Series([1, 2, 3], ['a', 'b', 'c']) - self.assertEqual(s.iloc[0], s['a']) + assert s.iloc[0] == s['a'] s.iloc[0] = 5 self.assertAlmostEqual(s['a'], 5) @@ -573,7 +572,7 @@ def test_getitem_ambiguous_keyerror(self): def test_getitem_unordered_dup(self): obj = Series(lrange(5), index=['c', 'a', 'a', 'b', 'b']) assert is_scalar(obj['c']) - self.assertEqual(obj['c'], 0) + assert obj['c'] == 0 def test_getitem_dups_with_missing(self): @@ -600,7 +599,7 @@ def test_getitem_callable(self): # GH 12533 s = pd.Series(4, index=list('ABCD')) result = s[lambda x: 'A'] - self.assertEqual(result, s.loc['A']) + assert result == s.loc['A'] result = s[lambda x: ['A', 'B']] tm.assert_series_equal(result, s.loc[['A', 'B']]) @@ -687,14 +686,14 @@ def f(): def test_slice_floats2(self): s = Series(np.random.rand(10), index=np.arange(10, 20, dtype=float)) - self.assertEqual(len(s.loc[12.0:]), 8) - self.assertEqual(len(s.loc[12.5:]), 7) + assert len(s.loc[12.0:]) == 8 + assert len(s.loc[12.5:]) == 7 i = np.arange(10, 20, dtype=float) i[2] = 12.2 s.index = i - self.assertEqual(len(s.loc[12.0:]), 8) - self.assertEqual(len(s.loc[12.5:]), 7) + assert len(s.loc[12.0:]) == 8 + assert len(s.loc[12.5:]) == 7 def test_slice_float64(self): @@ -787,23 +786,23 @@ def test_set_value(self): idx = self.ts.index[10] res = self.ts.set_value(idx, 0) assert res is self.ts - self.assertEqual(self.ts[idx], 0) + assert self.ts[idx] == 0 # equiv s = self.series.copy() res = s.set_value('foobar', 0) assert res is s - self.assertEqual(res.index[-1], 'foobar') - self.assertEqual(res['foobar'], 0) + assert res.index[-1] == 'foobar' + assert res['foobar'] == 0 s = self.series.copy() s.loc['foobar'] = 0 - self.assertEqual(s.index[-1], 'foobar') - self.assertEqual(s['foobar'], 0) + assert s.index[-1] == 'foobar' + assert s['foobar'] == 0 def test_setslice(self): sl = self.ts[5:20] - self.assertEqual(len(sl), len(sl.index)) + assert len(sl) == len(sl.index) assert sl.index.is_unique def test_basic_getitem_setitem_corner(self): @@ -853,11 +852,11 @@ def test_basic_getitem_with_labels(self): index=['a', 'b', 'c']) expected = Timestamp('2011-01-01', tz='US/Eastern') result = s.loc['a'] - self.assertEqual(result, expected) + assert result == expected result = s.iloc[0] - self.assertEqual(result, expected) + assert result == expected result = s['a'] - self.assertEqual(result, expected) + assert result == expected def test_basic_setitem_with_labels(self): indices = self.ts.index[[5, 10, 15]] @@ -904,17 +903,17 @@ def test_basic_setitem_with_labels(self): expected = Timestamp('2011-01-03', tz='US/Eastern') s2.loc['a'] = expected result = s2.loc['a'] - self.assertEqual(result, expected) + assert result == expected s2 = s.copy() s2.iloc[0] = expected result = s2.iloc[0] - self.assertEqual(result, expected) + assert result == expected s2 = s.copy() s2['a'] = expected result = s2['a'] - self.assertEqual(result, expected) + assert result == expected def test_loc_getitem(self): inds = self.series.index[[3, 4, 7]] @@ -932,8 +931,8 @@ def test_loc_getitem(self): assert_series_equal(self.series.loc[mask], self.series[mask]) # ask for index value - self.assertEqual(self.ts.loc[d1], self.ts[d1]) - self.assertEqual(self.ts.loc[d2], self.ts[d2]) 
+ assert self.ts.loc[d1] == self.ts[d1] + assert self.ts.loc[d2] == self.ts[d2] def test_loc_getitem_not_monotonic(self): d1, d2 = self.ts.index[[5, 15]] @@ -977,7 +976,7 @@ def test_setitem_with_tz(self): for tz in ['US/Eastern', 'UTC', 'Asia/Tokyo']: orig = pd.Series(pd.date_range('2016-01-01', freq='H', periods=3, tz=tz)) - self.assertEqual(orig.dtype, 'datetime64[ns, {0}]'.format(tz)) + assert orig.dtype == 'datetime64[ns, {0}]'.format(tz) # scalar s = orig.copy() @@ -998,7 +997,7 @@ def test_setitem_with_tz(self): # vector vals = pd.Series([pd.Timestamp('2011-01-01', tz=tz), pd.Timestamp('2012-01-01', tz=tz)], index=[1, 2]) - self.assertEqual(vals.dtype, 'datetime64[ns, {0}]'.format(tz)) + assert vals.dtype == 'datetime64[ns, {0}]'.format(tz) s[[1, 2]] = vals exp = pd.Series([pd.Timestamp('2016-01-01 00:00', tz=tz), @@ -1019,7 +1018,7 @@ def test_setitem_with_tz_dst(self): tz = 'US/Eastern' orig = pd.Series(pd.date_range('2016-11-06', freq='H', periods=3, tz=tz)) - self.assertEqual(orig.dtype, 'datetime64[ns, {0}]'.format(tz)) + assert orig.dtype == 'datetime64[ns, {0}]'.format(tz) # scalar s = orig.copy() @@ -1040,7 +1039,7 @@ def test_setitem_with_tz_dst(self): # vector vals = pd.Series([pd.Timestamp('2011-01-01', tz=tz), pd.Timestamp('2012-01-01', tz=tz)], index=[1, 2]) - self.assertEqual(vals.dtype, 'datetime64[ns, {0}]'.format(tz)) + assert vals.dtype == 'datetime64[ns, {0}]'.format(tz) s[[1, 2]] = vals exp = pd.Series([pd.Timestamp('2016-11-06 00:00', tz=tz), @@ -1107,7 +1106,7 @@ def test_where(self): s[mask] = lrange(2, 7) expected = Series(lrange(2, 7) + lrange(5, 10), dtype=dtype) assert_series_equal(s, expected) - self.assertEqual(s.dtype, expected.dtype) + assert s.dtype == expected.dtype # these are allowed operations, but are upcasted for dtype in [np.int64, np.float64]: @@ -1117,7 +1116,7 @@ def test_where(self): s[mask] = values expected = Series(values + lrange(5, 10), dtype='float64') assert_series_equal(s, expected) - self.assertEqual(s.dtype, expected.dtype) + assert s.dtype == expected.dtype # GH 9731 s = Series(np.arange(10), dtype='int64') @@ -1141,7 +1140,7 @@ def test_where(self): s[mask] = lrange(2, 7) expected = Series(lrange(2, 7) + lrange(5, 10), dtype='int64') assert_series_equal(s, expected) - self.assertEqual(s.dtype, expected.dtype) + assert s.dtype == expected.dtype s = Series(np.arange(10), dtype='int64') mask = s > 5 @@ -1506,8 +1505,8 @@ def test_ix_setitem(self): # set index value self.series.loc[d1] = 4 self.series.loc[d2] = 6 - self.assertEqual(self.series[d1], 4) - self.assertEqual(self.series[d2], 6) + assert self.series[d1] == 4 + assert self.series[d2] == 6 def test_where_numeric_with_string(self): # GH 9280 @@ -1639,7 +1638,7 @@ def test_datetime_indexing(self): pytest.raises(KeyError, s.__getitem__, stamp) s[stamp] = 0 - self.assertEqual(s[stamp], 0) + assert s[stamp] == 0 # not monotonic s = Series(len(index), index=index) @@ -1647,7 +1646,7 @@ def test_datetime_indexing(self): pytest.raises(KeyError, s.__getitem__, stamp) s[stamp] = 0 - self.assertEqual(s[stamp], 0) + assert s[stamp] == 0 def test_timedelta_assignment(self): # GH 8209 @@ -1702,7 +1701,7 @@ def test_underlying_data_conversion(self): df_tmp = df.iloc[ck] # noqa df["bb"].iloc[0] = .15 - self.assertEqual(df['bb'].iloc[0], 0.15) + assert df['bb'].iloc[0] == 0.15 pd.set_option('chained_assignment', 'raise') # GH 3217 @@ -1788,10 +1787,10 @@ def _check_align(a, b, how='left', fill=None): assert_series_equal(aa, ea) assert_series_equal(ab, eb) - self.assertEqual(aa.name, 'ts') - 
self.assertEqual(ea.name, 'ts') - self.assertEqual(ab.name, 'ts') - self.assertEqual(eb.name, 'ts') + assert aa.name == 'ts' + assert ea.name == 'ts' + assert ab.name == 'ts' + assert eb.name == 'ts' for kind in JOIN_TYPES: _check_align(self.ts[2:], self.ts[:-5], how=kind) @@ -1932,13 +1931,13 @@ def test_reindex(self): subSeries = self.series.reindex(subIndex) for idx, val in compat.iteritems(subSeries): - self.assertEqual(val, self.series[idx]) + assert val == self.series[idx] subIndex2 = self.ts.index[10:20] subTS = self.ts.reindex(subIndex2) for idx, val in compat.iteritems(subTS): - self.assertEqual(val, self.ts[idx]) + assert val == self.ts[idx] stuffSeries = self.ts.reindex(subIndex) assert np.isnan(stuffSeries).all() @@ -1947,7 +1946,7 @@ def test_reindex(self): nonContigIndex = self.ts.index[::2] subNonContig = self.ts.reindex(nonContigIndex) for idx, val in compat.iteritems(subNonContig): - self.assertEqual(val, self.ts[idx]) + assert val == self.ts[idx] # return a copy the same index here result = self.ts.reindex() @@ -2070,11 +2069,11 @@ def test_reindex_int(self): reindexed_int = int_ts.reindex(self.ts.index) # if NaNs introduced - self.assertEqual(reindexed_int.dtype, np.float_) + assert reindexed_int.dtype == np.float_ # NO NaNs introduced reindexed_int = int_ts.reindex(int_ts.index[::2]) - self.assertEqual(reindexed_int.dtype, np.int_) + assert reindexed_int.dtype == np.int_ def test_reindex_bool(self): @@ -2086,11 +2085,11 @@ def test_reindex_bool(self): reindexed_bool = bool_ts.reindex(self.ts.index) # if NaNs introduced - self.assertEqual(reindexed_bool.dtype, np.object_) + assert reindexed_bool.dtype == np.object_ # NO NaNs introduced reindexed_bool = bool_ts.reindex(bool_ts.index[::2]) - self.assertEqual(reindexed_bool.dtype, np.bool_) + assert reindexed_bool.dtype == np.bool_ def test_reindex_bool_pad(self): # fail @@ -2224,8 +2223,8 @@ def test_multilevel_preserve_name(self): result = s['foo'] result2 = s.loc['foo'] - self.assertEqual(result.name, s.name) - self.assertEqual(result2.name, s.name) + assert result.name == s.name + assert result2.name == s.name def test_setitem_scalar_into_readonly_backing_data(self): # GH14359: test that you cannot mutate a read only buffer @@ -2238,12 +2237,7 @@ def test_setitem_scalar_into_readonly_backing_data(self): with pytest.raises(ValueError): series[n] = 1 - self.assertEqual( - array[n], - 0, - msg='even though the ValueError was raised, the underlying' - ' array was still mutated!', - ) + assert array[n] == 0 def test_setitem_slice_into_readonly_backing_data(self): # GH14359: test that you cannot mutate a read only buffer @@ -2280,9 +2274,9 @@ def test_index_unique(self): uniques = self.dups.index.unique() expected = DatetimeIndex([datetime(2000, 1, 2), datetime(2000, 1, 3), datetime(2000, 1, 4), datetime(2000, 1, 5)]) - self.assertEqual(uniques.dtype, 'M8[ns]') # sanity + assert uniques.dtype == 'M8[ns]' # sanity tm.assert_index_equal(uniques, expected) - self.assertEqual(self.dups.index.nunique(), 4) + assert self.dups.index.nunique() == 4 # #2563 assert isinstance(uniques, DatetimeIndex) @@ -2293,22 +2287,22 @@ def test_index_unique(self): expected = DatetimeIndex(expected, name='foo') expected = expected.tz_localize('US/Eastern') assert result.tz is not None - self.assertEqual(result.name, 'foo') + assert result.name == 'foo' tm.assert_index_equal(result, expected) # NaT, note this is excluded arr = [1370745748 + t for t in range(20)] + [tslib.iNaT] idx = DatetimeIndex(arr * 3) tm.assert_index_equal(idx.unique(), 
DatetimeIndex(arr)) - self.assertEqual(idx.nunique(), 20) - self.assertEqual(idx.nunique(dropna=False), 21) + assert idx.nunique() == 20 + assert idx.nunique(dropna=False) == 21 arr = [Timestamp('2013-06-09 02:42:28') + timedelta(seconds=t) for t in range(20)] + [NaT] idx = DatetimeIndex(arr * 3) tm.assert_index_equal(idx.unique(), DatetimeIndex(arr)) - self.assertEqual(idx.nunique(), 20) - self.assertEqual(idx.nunique(dropna=False), 21) + assert idx.nunique() == 20 + assert idx.nunique(dropna=False) == 21 def test_index_dupes_contains(self): d = datetime(2011, 12, 5, 20, 30) @@ -2339,7 +2333,7 @@ def test_duplicate_dates_indexing(self): # new index ts[datetime(2000, 1, 6)] = 0 - self.assertEqual(ts[datetime(2000, 1, 6)], 0) + assert ts[datetime(2000, 1, 6)] == 0 def test_range_slice(self): idx = DatetimeIndex(['1/1/2000', '1/2/2000', '1/2/2000', '1/3/2000', @@ -2516,11 +2510,11 @@ def test_fancy_getitem(self): s = Series(np.arange(len(dti)), index=dti) - self.assertEqual(s[48], 48) - self.assertEqual(s['1/2/2009'], 48) - self.assertEqual(s['2009-1-2'], 48) - self.assertEqual(s[datetime(2009, 1, 2)], 48) - self.assertEqual(s[lib.Timestamp(datetime(2009, 1, 2))], 48) + assert s[48] == 48 + assert s['1/2/2009'] == 48 + assert s['2009-1-2'] == 48 + assert s[datetime(2009, 1, 2)] == 48 + assert s[lib.Timestamp(datetime(2009, 1, 2))] == 48 pytest.raises(KeyError, s.__getitem__, '2009-1-3') assert_series_equal(s['3/6/2009':'2009-06-05'], @@ -2532,9 +2526,9 @@ def test_fancy_setitem(self): s = Series(np.arange(len(dti)), index=dti) s[48] = -1 - self.assertEqual(s[48], -1) + assert s[48] == -1 s['1/2/2009'] = -2 - self.assertEqual(s[48], -2) + assert s[48] == -2 s['1/2/2009':'2009-06-05'] = -3 assert (s[48:54] == -3).all() @@ -2557,7 +2551,7 @@ def test_dti_reset_index_round_trip(self): dti = DatetimeIndex(start='1/1/2001', end='6/1/2001', freq='D') d1 = DataFrame({'v': np.random.rand(len(dti))}, index=dti) d2 = d1.reset_index() - self.assertEqual(d2.dtypes[0], np.dtype('M8[ns]')) + assert d2.dtypes[0] == np.dtype('M8[ns]') d3 = d2.set_index('index') assert_frame_equal(d1, d3, check_names=False) @@ -2566,8 +2560,8 @@ def test_dti_reset_index_round_trip(self): df = DataFrame([[stamp, 12.1]], columns=['Date', 'Value']) df = df.set_index('Date') - self.assertEqual(df.index[0], stamp) - self.assertEqual(df.reset_index()['Date'][0], stamp) + assert df.index[0] == stamp + assert df.reset_index()['Date'][0] == stamp def test_series_set_value(self): # #1561 @@ -2584,7 +2578,7 @@ def test_series_set_value(self): # s = Series(index[:1], index[:1]) # s2 = s.set_value(dates[1], index[1]) - # self.assertEqual(s2.values.dtype, 'M8[ns]') + # assert s2.values.dtype == 'M8[ns]' @slow def test_slice_locs_indexerror(self): @@ -2669,9 +2663,9 @@ def test_nat_operations(self): # GH 8617 s = Series([0, pd.NaT], dtype='m8[ns]') exp = s[0] - self.assertEqual(s.median(), exp) - self.assertEqual(s.min(), exp) - self.assertEqual(s.max(), exp) + assert s.median() == exp + assert s.min() == exp + assert s.max() == exp def test_round_nat(self): # GH14940 diff --git a/pandas/tests/series/test_internals.py b/pandas/tests/series/test_internals.py index 19170c82953ad..31492a4ab214a 100644 --- a/pandas/tests/series/test_internals.py +++ b/pandas/tests/series/test_internals.py @@ -116,7 +116,7 @@ def test_convert_objects(self): # r = s.copy() # r[0] = np.nan # result = r.convert_objects(convert_dates=True,convert_numeric=False) - # self.assertEqual(result.dtype, 'M8[ns]') + # assert result.dtype == 'M8[ns]' # dateutil parses some 
single letters into today's value as a date for x in 'abcdefghijklmnopqrstuvwxyz': @@ -282,7 +282,7 @@ def test_convert(self): # r = s.copy() # r[0] = np.nan # result = r._convert(convert_dates=True,convert_numeric=False) - # self.assertEqual(result.dtype, 'M8[ns]') + # assert result.dtype == 'M8[ns]' # dateutil parses some single letters into today's value as a date expected = Series([lib.NaT]) diff --git a/pandas/tests/series/test_io.py b/pandas/tests/series/test_io.py index 7a9d0390a2cfa..24bb3bbc7fc16 100644 --- a/pandas/tests/series/test_io.py +++ b/pandas/tests/series/test_io.py @@ -135,12 +135,12 @@ def test_timeseries_periodindex(self): prng = period_range('1/1/2011', '1/1/2012', freq='M') ts = Series(np.random.randn(len(prng)), prng) new_ts = tm.round_trip_pickle(ts) - self.assertEqual(new_ts.index.freq, 'M') + assert new_ts.index.freq == 'M' def test_pickle_preserve_name(self): for n in [777, 777., 'name', datetime(2001, 11, 11), (1, 2)]: unpickled = self._pickle_roundtrip_name(tm.makeTimeSeries(name=n)) - self.assertEqual(unpickled.name, n) + assert unpickled.name == n def _pickle_roundtrip_name(self, obj): @@ -178,7 +178,7 @@ def test_tolist(self): # datetime64 s = Series(self.ts.index) rs = s.tolist() - self.assertEqual(self.ts.index[0], rs[0]) + assert self.ts.index[0] == rs[0] def test_tolist_np_int(self): # GH10904 diff --git a/pandas/tests/series/test_missing.py b/pandas/tests/series/test_missing.py index 251954b5da05e..9937f6a34172e 100644 --- a/pandas/tests/series/test_missing.py +++ b/pandas/tests/series/test_missing.py @@ -190,7 +190,7 @@ def test_datetime64_tz_fillna(self): idx = pd.DatetimeIndex(['2011-01-01 10:00', pd.NaT, '2011-01-03 10:00', pd.NaT], tz=tz) s = pd.Series(idx) - self.assertEqual(s.dtype, 'datetime64[ns, {0}]'.format(tz)) + assert s.dtype == 'datetime64[ns, {0}]'.format(tz) tm.assert_series_equal(pd.isnull(s), null_loc) result = s.fillna(pd.Timestamp('2011-01-02 10:00')) @@ -485,19 +485,19 @@ def test_timedelta64_nan(self): td1 = td.copy() td1[0] = np.nan assert isnull(td1[0]) - self.assertEqual(td1[0].value, iNaT) + assert td1[0].value == iNaT td1[0] = td[0] assert not isnull(td1[0]) td1[1] = iNaT assert isnull(td1[1]) - self.assertEqual(td1[1].value, iNaT) + assert td1[1].value == iNaT td1[1] = td[1] assert not isnull(td1[1]) td1[2] = NaT assert isnull(td1[2]) - self.assertEqual(td1[2].value, iNaT) + assert td1[2].value == iNaT td1[2] = td[2] assert not isnull(td1[2]) @@ -505,7 +505,7 @@ def test_timedelta64_nan(self): # this doesn't work, not sure numpy even supports it # result = td[(td>np.timedelta64(timedelta(days=3))) & # td= 0 tm.assert_almost_equal(out[0, 0], 0.0) @@ -1033,7 +1033,7 @@ def test_group_var_large_inputs(self): self.algo(out, counts, values, labels) - self.assertEqual(counts[0], 10 ** 6) + assert counts[0] == 10 ** 6 tm.assert_almost_equal(out[0, 0], 1.0 / 12, check_less_precise=True) diff --git a/pandas/tests/test_base.py b/pandas/tests/test_base.py index cbcc4dc84e6d0..ed0d61cdbbaf9 100644 --- a/pandas/tests/test_base.py +++ b/pandas/tests/test_base.py @@ -83,7 +83,7 @@ def test_slicing_maintains_type(self): def check_result(self, result, expected, klass=None): klass = klass or self.klass assert isinstance(result, klass) - self.assertEqual(result, expected) + assert result == expected class TestPandasDelegate(tm.TestCase): @@ -219,7 +219,7 @@ def check_ops_properties(self, props, filter=None, ignore_failures=False): np.ndarray): tm.assert_numpy_array_equal(result, expected) else: - self.assertEqual(result, expected) + assert 
result == expected # freq raises AttributeError on an Int64Index because its not # defined we mostly care about Series here anyhow @@ -337,12 +337,12 @@ def test_ops(self): expected = pd.Period(ordinal=getattr(o._values, op)(), freq=o.freq) try: - self.assertEqual(result, expected) + assert result == expected except TypeError: # comparing tz-aware series with np.array results in # TypeError expected = expected.astype('M8[ns]').astype('int64') - self.assertEqual(result.value, expected) + assert result.value == expected def test_nanops(self): # GH 7261 @@ -350,7 +350,7 @@ def test_nanops(self): for klass in [Index, Series]: obj = klass([np.nan, 2.0]) - self.assertEqual(getattr(obj, op)(), 2.0) + assert getattr(obj, op)() == 2.0 obj = klass([np.nan]) assert pd.isnull(getattr(obj, op)()) @@ -360,33 +360,33 @@ def test_nanops(self): obj = klass([pd.NaT, datetime(2011, 11, 1)]) # check DatetimeIndex monotonic path - self.assertEqual(getattr(obj, op)(), datetime(2011, 11, 1)) + assert getattr(obj, op)() == datetime(2011, 11, 1) obj = klass([pd.NaT, datetime(2011, 11, 1), pd.NaT]) # check DatetimeIndex non-monotonic path - self.assertEqual(getattr(obj, op)(), datetime(2011, 11, 1)) + assert getattr(obj, op)() == datetime(2011, 11, 1) # argmin/max obj = Index(np.arange(5, dtype='int64')) - self.assertEqual(obj.argmin(), 0) - self.assertEqual(obj.argmax(), 4) + assert obj.argmin() == 0 + assert obj.argmax() == 4 obj = Index([np.nan, 1, np.nan, 2]) - self.assertEqual(obj.argmin(), 1) - self.assertEqual(obj.argmax(), 3) + assert obj.argmin() == 1 + assert obj.argmax() == 3 obj = Index([np.nan]) - self.assertEqual(obj.argmin(), -1) - self.assertEqual(obj.argmax(), -1) + assert obj.argmin() == -1 + assert obj.argmax() == -1 obj = Index([pd.NaT, datetime(2011, 11, 1), datetime(2011, 11, 2), pd.NaT]) - self.assertEqual(obj.argmin(), 1) - self.assertEqual(obj.argmax(), 2) + assert obj.argmin() == 1 + assert obj.argmax() == 2 obj = Index([pd.NaT]) - self.assertEqual(obj.argmin(), -1) - self.assertEqual(obj.argmax(), -1) + assert obj.argmin() == -1 + assert obj.argmax() == -1 def test_value_counts_unique_nunique(self): for orig in self.objs: @@ -414,7 +414,7 @@ def test_value_counts_unique_nunique(self): o = klass(rep, index=idx, name='a') # check values has the same dtype as the original - self.assertEqual(o.dtype, orig.dtype) + assert o.dtype == orig.dtype expected_s = Series(range(10, 0, -1), index=expected_index, dtype='int64', name='a') @@ -422,7 +422,7 @@ def test_value_counts_unique_nunique(self): result = o.value_counts() tm.assert_series_equal(result, expected_s) assert result.index.name is None - self.assertEqual(result.name, 'a') + assert result.name == 'a' result = o.unique() if isinstance(o, Index): @@ -430,7 +430,7 @@ def test_value_counts_unique_nunique(self): tm.assert_index_equal(result, orig) elif is_datetimetz(o): # datetimetz Series returns array of Timestamp - self.assertEqual(result[0], orig[0]) + assert result[0] == orig[0] for r in result: assert isinstance(r, pd.Timestamp) tm.assert_numpy_array_equal(result, @@ -438,7 +438,7 @@ def test_value_counts_unique_nunique(self): else: tm.assert_numpy_array_equal(result, orig.values) - self.assertEqual(o.nunique(), len(np.unique(o.values))) + assert o.nunique() == len(np.unique(o.values)) def test_value_counts_unique_nunique_null(self): @@ -469,7 +469,7 @@ def test_value_counts_unique_nunique_null(self): values[0:2] = null_obj # check values has the same dtype as the original - self.assertEqual(values.dtype, o.dtype) + assert values.dtype ==
o.dtype # create repeated values, 'n'th element is repeated by n+1 # times @@ -490,7 +490,7 @@ def test_value_counts_unique_nunique_null(self): o.name = 'a' # check values has the same dtype as the original - self.assertEqual(o.dtype, orig.dtype) + assert o.dtype == orig.dtype # check values correctly have NaN nanloc = np.zeros(len(o), dtype=np.bool) nanloc[:3] = True @@ -510,11 +510,11 @@ def test_value_counts_unique_nunique_null(self): result_s_na = o.value_counts(dropna=False) tm.assert_series_equal(result_s_na, expected_s_na) assert result_s_na.index.name is None - self.assertEqual(result_s_na.name, 'a') + assert result_s_na.name == 'a' result_s = o.value_counts() tm.assert_series_equal(o.value_counts(), expected_s) assert result_s.index.name is None - self.assertEqual(result_s.name, 'a') + assert result_s.name == 'a' result = o.unique() if isinstance(o, Index): @@ -529,10 +529,10 @@ def test_value_counts_unique_nunique_null(self): tm.assert_numpy_array_equal(result[1:], values[2:]) assert pd.isnull(result[0]) - self.assertEqual(result.dtype, orig.dtype) + assert result.dtype == orig.dtype - self.assertEqual(o.nunique(), 8) - self.assertEqual(o.nunique(dropna=False), 9) + assert o.nunique() == 8 + assert o.nunique(dropna=False) == 9 def test_value_counts_inferred(self): klasses = [Index, Series] @@ -549,7 +549,7 @@ def test_value_counts_inferred(self): exp = np.unique(np.array(s_values, dtype=np.object_)) tm.assert_numpy_array_equal(s.unique(), exp) - self.assertEqual(s.nunique(), 4) + assert s.nunique() == 4 # don't sort, have to sort after the fact as not sorting is # platform-dep hist = s.value_counts(sort=False).sort_values() @@ -666,14 +666,14 @@ def test_value_counts_datetime64(self): else: tm.assert_numpy_array_equal(s.unique(), expected) - self.assertEqual(s.nunique(), 3) + assert s.nunique() == 3 # with NaT s = df['dt'].copy() s = klass([v for v in s.values] + [pd.NaT]) result = s.value_counts() - self.assertEqual(result.index.dtype, 'datetime64[ns]') + assert result.index.dtype == 'datetime64[ns]' tm.assert_series_equal(result, expected_s) result = s.value_counts(dropna=False) @@ -681,7 +681,7 @@ def test_value_counts_datetime64(self): tm.assert_series_equal(result, expected_s) unique = s.unique() - self.assertEqual(unique.dtype, 'datetime64[ns]') + assert unique.dtype == 'datetime64[ns]' # numpy_array_equal cannot compare pd.NaT if isinstance(s, Index): @@ -691,8 +691,8 @@ def test_value_counts_datetime64(self): tm.assert_numpy_array_equal(unique[:3], expected) assert pd.isnull(unique[3]) - self.assertEqual(s.nunique(), 3) - self.assertEqual(s.nunique(dropna=False), 4) + assert s.nunique() == 3 + assert s.nunique(dropna=False) == 4 # timedelta64[ns] td = df.dt - df.dt + timedelta(1) @@ -931,7 +931,7 @@ def test_fillna(self): o = klass(values) # check values has the same dtype as the original - self.assertEqual(o.dtype, orig.dtype) + assert o.dtype == orig.dtype result = o.fillna(fill_value) if isinstance(o, Index): @@ -951,14 +951,12 @@ def test_memory_usage(self): # if there are objects, only deep will pick them up assert res_deep > res else: - self.assertEqual(res, res_deep) + assert res == res_deep if isinstance(o, Series): - self.assertEqual( - (o.memory_usage(index=False) + - o.index.memory_usage()), - o.memory_usage(index=True) - ) + assert ((o.memory_usage(index=False) + + o.index.memory_usage()) == + o.memory_usage(index=True)) # sys.getsizeof will call the .memory_usage with # deep=True, and add on some GC overhead diff --git a/pandas/tests/test_categorical.py 
b/pandas/tests/test_categorical.py index 708ca92c30cac..515ca8d9cedc5 100644 --- a/pandas/tests/test_categorical.py +++ b/pandas/tests/test_categorical.py @@ -35,8 +35,8 @@ def setUp(self): ordered=True) def test_getitem(self): - self.assertEqual(self.factor[0], 'a') - self.assertEqual(self.factor[-1], 'c') + assert self.factor[0] == 'a' + assert self.factor[-1] == 'c' subf = self.factor[[0, 1, 2]] tm.assert_numpy_array_equal(subf._codes, @@ -82,9 +82,9 @@ def test_setitem(self): # int/positional c = self.factor.copy() c[0] = 'b' - self.assertEqual(c[0], 'b') + assert c[0] == 'b' c[-1] = 'a' - self.assertEqual(c[-1], 'a') + assert c[-1] == 'a' # boolean c = self.factor.copy() @@ -110,7 +110,7 @@ def test_setitem_listlike(self): # we are asserting the code result here # which maps to the -1000 category result = c.codes[np.array([100000]).astype(np.int64)] - self.assertEqual(result, np.array([5], dtype='int8')) + tm.assert_numpy_array_equal(result, np.array([5], dtype='int8')) def test_constructor_unsortable(self): @@ -665,7 +665,7 @@ def test_print(self): "Categories (3, object): [a < b < c]"] expected = "\n".join(expected) actual = repr(self.factor) - self.assertEqual(actual, expected) + assert actual == expected def test_big_print(self): factor = Categorical([0, 1, 2, 0, 1, 2] * 100, ['a', 'b', 'c'], @@ -676,24 +676,24 @@ def test_big_print(self): actual = repr(factor) - self.assertEqual(actual, expected) + assert actual == expected def test_empty_print(self): factor = Categorical([], ["a", "b", "c"]) expected = ("[], Categories (3, object): [a, b, c]") # hack because array_repr changed in numpy > 1.6.x actual = repr(factor) - self.assertEqual(actual, expected) + assert actual == expected - self.assertEqual(expected, actual) + assert expected == actual factor = Categorical([], ["a", "b", "c"], ordered=True) expected = ("[], Categories (3, object): [a < b < c]") actual = repr(factor) - self.assertEqual(expected, actual) + assert expected == actual factor = Categorical([], []) expected = ("[], Categories (0, object): []") - self.assertEqual(expected, repr(factor)) + assert expected == repr(factor) def test_print_none_width(self): # GH10087 @@ -702,7 +702,7 @@ def test_print_none_width(self): "dtype: category\nCategories (4, int64): [1, 2, 3, 4]") with option_context("display.width", None): - self.assertEqual(exp, repr(a)) + assert exp == repr(a) def test_unicode_print(self): if PY3: @@ -716,7 +716,7 @@ def test_unicode_print(self): Length: 60 Categories (3, object): [aaaaa, bb, cccc]""" - self.assertEqual(_rep(c), expected) + assert _rep(c) == expected c = pd.Categorical([u'ああああ', u'いいいいい', u'ううううううう'] * 20) expected = u"""\ @@ -724,7 +724,7 @@ def test_unicode_print(self): Length: 60 Categories (3, object): [ああああ, いいいいい, ううううううう]""" # noqa - self.assertEqual(_rep(c), expected) + assert _rep(c) == expected # unicode option should not affect to Categorical, as it doesn't care # the repr width @@ -735,7 +735,7 @@ def test_unicode_print(self): Length: 60 Categories (3, object): [ああああ, いいいいい, ううううううう]""" # noqa - self.assertEqual(_rep(c), expected) + assert _rep(c) == expected def test_periodindex(self): idx1 = PeriodIndex(['2014-01', '2014-01', '2014-02', '2014-02', @@ -1080,7 +1080,7 @@ def test_remove_unused_categories(self): tm.assert_index_equal(out.categories, Index(['B', 'D', 'F'])) exp_codes = np.array([2, -1, 1, 0, 1, 2, -1], dtype=np.int8) tm.assert_numpy_array_equal(out.codes, exp_codes) - self.assertEqual(out.get_values().tolist(), val) + assert out.get_values().tolist() == val 
alpha = list('abcdefghijklmnopqrstuvwxyz') val = np.random.choice(alpha[::2], 10000).astype('object') @@ -1088,7 +1088,7 @@ def test_remove_unused_categories(self): cat = pd.Categorical(values=val, categories=alpha) out = cat.remove_unused_categories() - self.assertEqual(out.get_values().tolist(), val.tolist()) + assert out.get_values().tolist() == val.tolist() def test_nan_handling(self): @@ -1156,37 +1156,37 @@ def test_min_max(self): cat = Categorical(["a", "b", "c", "d"], ordered=True) _min = cat.min() _max = cat.max() - self.assertEqual(_min, "a") - self.assertEqual(_max, "d") + assert _min == "a" + assert _max == "d" cat = Categorical(["a", "b", "c", "d"], categories=['d', 'c', 'b', 'a'], ordered=True) _min = cat.min() _max = cat.max() - self.assertEqual(_min, "d") - self.assertEqual(_max, "a") + assert _min == "d" + assert _max == "a" cat = Categorical([np.nan, "b", "c", np.nan], categories=['d', 'c', 'b', 'a'], ordered=True) _min = cat.min() _max = cat.max() assert np.isnan(_min) - self.assertEqual(_max, "b") + assert _max == "b" _min = cat.min(numeric_only=True) - self.assertEqual(_min, "c") + assert _min == "c" _max = cat.max(numeric_only=True) - self.assertEqual(_max, "b") + assert _max == "b" cat = Categorical([np.nan, 1, 2, np.nan], categories=[5, 4, 3, 2, 1], ordered=True) _min = cat.min() _max = cat.max() assert np.isnan(_min) - self.assertEqual(_max, 1) + assert _max == 1 _min = cat.min(numeric_only=True) - self.assertEqual(_min, 2) + assert _min == 2 _max = cat.max(numeric_only=True) - self.assertEqual(_max, 1) + assert _max == 1 def test_unique(self): # categories are reordered based on value when ordered=False @@ -1391,7 +1391,7 @@ def test_sort_values_na_position(self): def test_slicing_directly(self): cat = Categorical(["a", "b", "c", "d", "a", "b", "c"]) sliced = cat[3] - self.assertEqual(sliced, "d") + assert sliced == "d" sliced = cat[3:5] expected = Categorical(["d", "a"], categories=['a', 'b', 'c', 'd']) tm.assert_numpy_array_equal(sliced._codes, expected._codes) @@ -1427,7 +1427,7 @@ def test_shift(self): def test_nbytes(self): cat = pd.Categorical([1, 2, 3]) exp = cat._codes.nbytes + cat._categories.values.nbytes - self.assertEqual(cat.nbytes, exp) + assert cat.nbytes == exp def test_memory_usage(self): cat = pd.Categorical([1, 2, 3]) @@ -1661,8 +1661,8 @@ def test_basic(self): # test basic creation / coercion of categoricals s = Series(self.factor, name='A') - self.assertEqual(s.dtype, 'category') - self.assertEqual(len(s), len(self.factor)) + assert s.dtype == 'category' + assert len(s) == len(self.factor) str(s.values) str(s) @@ -1672,14 +1672,14 @@ def test_basic(self): tm.assert_series_equal(result, s) result = df.iloc[:, 0] tm.assert_series_equal(result, s) - self.assertEqual(len(df), len(self.factor)) + assert len(df) == len(self.factor) str(df.values) str(df) df = DataFrame({'A': s}) result = df['A'] tm.assert_series_equal(result, s) - self.assertEqual(len(df), len(self.factor)) + assert len(df) == len(self.factor) str(df.values) str(df) @@ -1689,8 +1689,8 @@ def test_basic(self): result2 = df['B'] tm.assert_series_equal(result1, s) tm.assert_series_equal(result2, s, check_names=False) - self.assertEqual(result2.name, 'B') - self.assertEqual(len(df), len(self.factor)) + assert result2.name == 'B' + assert len(df) == len(self.factor) str(df.values) str(df) @@ -1703,13 +1703,13 @@ def test_basic(self): expected = x.iloc[0].person_name result = x.person_name.iloc[0] - self.assertEqual(result, expected) + assert result == expected result = x.person_name[0] - 
self.assertEqual(result, expected) + assert result == expected result = x.person_name.loc[0] - self.assertEqual(result, expected) + assert result == expected def test_creation_astype(self): l = ["a", "b", "c", "a"] @@ -1976,11 +1976,11 @@ def test_series_delegations(self): exp_codes = Series([0, 1, 2, 0], dtype='int8') tm.assert_series_equal(s.cat.codes, exp_codes) - self.assertEqual(s.cat.ordered, True) + assert s.cat.ordered s = s.cat.as_unordered() - self.assertEqual(s.cat.ordered, False) + assert not s.cat.ordered s.cat.as_ordered(inplace=True) - self.assertEqual(s.cat.ordered, True) + assert s.cat.ordered # reorder s = Series(Categorical(["a", "b", "c", "a"], ordered=True)) @@ -2058,7 +2058,7 @@ def test_describe(self): # Categoricals should not show up together with numerical columns result = self.cat.describe() - self.assertEqual(len(result.columns), 1) + assert len(result.columns) == 1 # In a frame, describe() for the cat should be the same as for string # arrays (count, unique, top, freq) @@ -2081,75 +2081,75 @@ def test_repr(self): exp = u("0 1\n1 2\n2 3\n3 4\n" + "dtype: category\nCategories (4, int64): [1, 2, 3, 4]") - self.assertEqual(exp, a.__unicode__()) + assert exp == a.__unicode__() a = pd.Series(pd.Categorical(["a", "b"] * 25)) exp = u("0 a\n1 b\n" + " ..\n" + "48 a\n49 b\n" + "Length: 50, dtype: category\nCategories (2, object): [a, b]") with option_context("display.max_rows", 5): - self.assertEqual(exp, repr(a)) + assert exp == repr(a) levs = list("abcdefghijklmnopqrstuvwxyz") a = pd.Series(pd.Categorical( ["a", "b"], categories=levs, ordered=True)) exp = u("0 a\n1 b\n" + "dtype: category\n" "Categories (26, object): [a < b < c < d ... w < x < y < z]") - self.assertEqual(exp, a.__unicode__()) + assert exp == a.__unicode__() def test_categorical_repr(self): c = pd.Categorical([1, 2, 3]) exp = """[1, 2, 3] Categories (3, int64): [1, 2, 3]""" - self.assertEqual(repr(c), exp) + assert repr(c) == exp c = pd.Categorical([1, 2, 3, 1, 2, 3], categories=[1, 2, 3]) exp = """[1, 2, 3, 1, 2, 3] Categories (3, int64): [1, 2, 3]""" - self.assertEqual(repr(c), exp) + assert repr(c) == exp c = pd.Categorical([1, 2, 3, 4, 5] * 10) exp = """[1, 2, 3, 4, 5, ..., 1, 2, 3, 4, 5] Length: 50 Categories (5, int64): [1, 2, 3, 4, 5]""" - self.assertEqual(repr(c), exp) + assert repr(c) == exp c = pd.Categorical(np.arange(20)) exp = """[0, 1, 2, 3, 4, ..., 15, 16, 17, 18, 19] Length: 20 Categories (20, int64): [0, 1, 2, 3, ..., 16, 17, 18, 19]""" - self.assertEqual(repr(c), exp) + assert repr(c) == exp def test_categorical_repr_ordered(self): c = pd.Categorical([1, 2, 3], ordered=True) exp = """[1, 2, 3] Categories (3, int64): [1 < 2 < 3]""" - self.assertEqual(repr(c), exp) + assert repr(c) == exp c = pd.Categorical([1, 2, 3, 1, 2, 3], categories=[1, 2, 3], ordered=True) exp = """[1, 2, 3, 1, 2, 3] Categories (3, int64): [1 < 2 < 3]""" - self.assertEqual(repr(c), exp) + assert repr(c) == exp c = pd.Categorical([1, 2, 3, 4, 5] * 10, ordered=True) exp = """[1, 2, 3, 4, 5, ..., 1, 2, 3, 4, 5] Length: 50 Categories (5, int64): [1 < 2 < 3 < 4 < 5]""" - self.assertEqual(repr(c), exp) + assert repr(c) == exp c = pd.Categorical(np.arange(20), ordered=True) exp = """[0, 1, 2, 3, 4, ..., 15, 16, 17, 18, 19] Length: 20 Categories (20, int64): [0 < 1 < 2 < 3 ... 
16 < 17 < 18 < 19]""" - self.assertEqual(repr(c), exp) + assert repr(c) == exp def test_categorical_repr_datetime(self): idx = pd.date_range('2011-01-01 09:00', freq='H', periods=5) @@ -2164,7 +2164,7 @@ def test_categorical_repr_datetime(self): "2011-01-01 10:00:00, 2011-01-01 11:00:00,\n" " 2011-01-01 12:00:00, " "2011-01-01 13:00:00]""") - self.assertEqual(repr(c), exp) + assert repr(c) == exp c = pd.Categorical(idx.append(idx), categories=idx) exp = ( @@ -2177,7 +2177,7 @@ def test_categorical_repr_datetime(self): " 2011-01-01 12:00:00, " "2011-01-01 13:00:00]") - self.assertEqual(repr(c), exp) + assert repr(c) == exp idx = pd.date_range('2011-01-01 09:00', freq='H', periods=5, tz='US/Eastern') @@ -2193,7 +2193,7 @@ def test_categorical_repr_datetime(self): " " "2011-01-01 13:00:00-05:00]") - self.assertEqual(repr(c), exp) + assert repr(c) == exp c = pd.Categorical(idx.append(idx), categories=idx) exp = ( @@ -2209,7 +2209,7 @@ def test_categorical_repr_datetime(self): " " "2011-01-01 13:00:00-05:00]") - self.assertEqual(repr(c), exp) + assert repr(c) == exp def test_categorical_repr_datetime_ordered(self): idx = pd.date_range('2011-01-01 09:00', freq='H', periods=5) @@ -2218,14 +2218,14 @@ def test_categorical_repr_datetime_ordered(self): Categories (5, datetime64[ns]): [2011-01-01 09:00:00 < 2011-01-01 10:00:00 < 2011-01-01 11:00:00 < 2011-01-01 12:00:00 < 2011-01-01 13:00:00]""" # noqa - self.assertEqual(repr(c), exp) + assert repr(c) == exp c = pd.Categorical(idx.append(idx), categories=idx, ordered=True) exp = """[2011-01-01 09:00:00, 2011-01-01 10:00:00, 2011-01-01 11:00:00, 2011-01-01 12:00:00, 2011-01-01 13:00:00, 2011-01-01 09:00:00, 2011-01-01 10:00:00, 2011-01-01 11:00:00, 2011-01-01 12:00:00, 2011-01-01 13:00:00] Categories (5, datetime64[ns]): [2011-01-01 09:00:00 < 2011-01-01 10:00:00 < 2011-01-01 11:00:00 < 2011-01-01 12:00:00 < 2011-01-01 13:00:00]""" # noqa - self.assertEqual(repr(c), exp) + assert repr(c) == exp idx = pd.date_range('2011-01-01 09:00', freq='H', periods=5, tz='US/Eastern') @@ -2235,7 +2235,7 @@ def test_categorical_repr_datetime_ordered(self): 2011-01-01 11:00:00-05:00 < 2011-01-01 12:00:00-05:00 < 2011-01-01 13:00:00-05:00]""" # noqa - self.assertEqual(repr(c), exp) + assert repr(c) == exp c = pd.Categorical(idx.append(idx), categories=idx, ordered=True) exp = """[2011-01-01 09:00:00-05:00, 2011-01-01 10:00:00-05:00, 2011-01-01 11:00:00-05:00, 2011-01-01 12:00:00-05:00, 2011-01-01 13:00:00-05:00, 2011-01-01 09:00:00-05:00, 2011-01-01 10:00:00-05:00, 2011-01-01 11:00:00-05:00, 2011-01-01 12:00:00-05:00, 2011-01-01 13:00:00-05:00] @@ -2243,7 +2243,7 @@ def test_categorical_repr_datetime_ordered(self): 2011-01-01 11:00:00-05:00 < 2011-01-01 12:00:00-05:00 < 2011-01-01 13:00:00-05:00]""" # noqa - self.assertEqual(repr(c), exp) + assert repr(c) == exp def test_categorical_repr_period(self): idx = pd.period_range('2011-01-01 09:00', freq='H', periods=5) @@ -2252,27 +2252,27 @@ def test_categorical_repr_period(self): Categories (5, period[H]): [2011-01-01 09:00, 2011-01-01 10:00, 2011-01-01 11:00, 2011-01-01 12:00, 2011-01-01 13:00]""" # noqa - self.assertEqual(repr(c), exp) + assert repr(c) == exp c = pd.Categorical(idx.append(idx), categories=idx) exp = """[2011-01-01 09:00, 2011-01-01 10:00, 2011-01-01 11:00, 2011-01-01 12:00, 2011-01-01 13:00, 2011-01-01 09:00, 2011-01-01 10:00, 2011-01-01 11:00, 2011-01-01 12:00, 2011-01-01 13:00] Categories (5, period[H]): [2011-01-01 09:00, 2011-01-01 10:00, 2011-01-01 11:00, 2011-01-01 12:00, 2011-01-01 13:00]""" # 
noqa - self.assertEqual(repr(c), exp) + assert repr(c) == exp idx = pd.period_range('2011-01', freq='M', periods=5) c = pd.Categorical(idx) exp = """[2011-01, 2011-02, 2011-03, 2011-04, 2011-05] Categories (5, period[M]): [2011-01, 2011-02, 2011-03, 2011-04, 2011-05]""" - self.assertEqual(repr(c), exp) + assert repr(c) == exp c = pd.Categorical(idx.append(idx), categories=idx) exp = """[2011-01, 2011-02, 2011-03, 2011-04, 2011-05, 2011-01, 2011-02, 2011-03, 2011-04, 2011-05] Categories (5, period[M]): [2011-01, 2011-02, 2011-03, 2011-04, 2011-05]""" # noqa - self.assertEqual(repr(c), exp) + assert repr(c) == exp def test_categorical_repr_period_ordered(self): idx = pd.period_range('2011-01-01 09:00', freq='H', periods=5) @@ -2281,27 +2281,27 @@ def test_categorical_repr_period_ordered(self): Categories (5, period[H]): [2011-01-01 09:00 < 2011-01-01 10:00 < 2011-01-01 11:00 < 2011-01-01 12:00 < 2011-01-01 13:00]""" # noqa - self.assertEqual(repr(c), exp) + assert repr(c) == exp c = pd.Categorical(idx.append(idx), categories=idx, ordered=True) exp = """[2011-01-01 09:00, 2011-01-01 10:00, 2011-01-01 11:00, 2011-01-01 12:00, 2011-01-01 13:00, 2011-01-01 09:00, 2011-01-01 10:00, 2011-01-01 11:00, 2011-01-01 12:00, 2011-01-01 13:00] Categories (5, period[H]): [2011-01-01 09:00 < 2011-01-01 10:00 < 2011-01-01 11:00 < 2011-01-01 12:00 < 2011-01-01 13:00]""" # noqa - self.assertEqual(repr(c), exp) + assert repr(c) == exp idx = pd.period_range('2011-01', freq='M', periods=5) c = pd.Categorical(idx, ordered=True) exp = """[2011-01, 2011-02, 2011-03, 2011-04, 2011-05] Categories (5, period[M]): [2011-01 < 2011-02 < 2011-03 < 2011-04 < 2011-05]""" - self.assertEqual(repr(c), exp) + assert repr(c) == exp c = pd.Categorical(idx.append(idx), categories=idx, ordered=True) exp = """[2011-01, 2011-02, 2011-03, 2011-04, 2011-05, 2011-01, 2011-02, 2011-03, 2011-04, 2011-05] Categories (5, period[M]): [2011-01 < 2011-02 < 2011-03 < 2011-04 < 2011-05]""" # noqa - self.assertEqual(repr(c), exp) + assert repr(c) == exp def test_categorical_repr_timedelta(self): idx = pd.timedelta_range('1 days', periods=5) @@ -2309,13 +2309,13 @@ def test_categorical_repr_timedelta(self): exp = """[1 days, 2 days, 3 days, 4 days, 5 days] Categories (5, timedelta64[ns]): [1 days, 2 days, 3 days, 4 days, 5 days]""" - self.assertEqual(repr(c), exp) + assert repr(c) == exp c = pd.Categorical(idx.append(idx), categories=idx) exp = """[1 days, 2 days, 3 days, 4 days, 5 days, 1 days, 2 days, 3 days, 4 days, 5 days] Categories (5, timedelta64[ns]): [1 days, 2 days, 3 days, 4 days, 5 days]""" # noqa - self.assertEqual(repr(c), exp) + assert repr(c) == exp idx = pd.timedelta_range('1 hours', periods=20) c = pd.Categorical(idx) @@ -2325,7 +2325,7 @@ def test_categorical_repr_timedelta(self): 3 days 01:00:00, ..., 16 days 01:00:00, 17 days 01:00:00, 18 days 01:00:00, 19 days 01:00:00]""" # noqa - self.assertEqual(repr(c), exp) + assert repr(c) == exp c = pd.Categorical(idx.append(idx), categories=idx) exp = """[0 days 01:00:00, 1 days 01:00:00, 2 days 01:00:00, 3 days 01:00:00, 4 days 01:00:00, ..., 15 days 01:00:00, 16 days 01:00:00, 17 days 01:00:00, 18 days 01:00:00, 19 days 01:00:00] @@ -2334,7 +2334,7 @@ def test_categorical_repr_timedelta(self): 3 days 01:00:00, ..., 16 days 01:00:00, 17 days 01:00:00, 18 days 01:00:00, 19 days 01:00:00]""" # noqa - self.assertEqual(repr(c), exp) + assert repr(c) == exp def test_categorical_repr_timedelta_ordered(self): idx = pd.timedelta_range('1 days', periods=5) @@ -2342,13 +2342,13 @@ def 
test_categorical_repr_timedelta_ordered(self): exp = """[1 days, 2 days, 3 days, 4 days, 5 days] Categories (5, timedelta64[ns]): [1 days < 2 days < 3 days < 4 days < 5 days]""" # noqa - self.assertEqual(repr(c), exp) + assert repr(c) == exp c = pd.Categorical(idx.append(idx), categories=idx, ordered=True) exp = """[1 days, 2 days, 3 days, 4 days, 5 days, 1 days, 2 days, 3 days, 4 days, 5 days] Categories (5, timedelta64[ns]): [1 days < 2 days < 3 days < 4 days < 5 days]""" # noqa - self.assertEqual(repr(c), exp) + assert repr(c) == exp idx = pd.timedelta_range('1 hours', periods=20) c = pd.Categorical(idx, ordered=True) @@ -2358,7 +2358,7 @@ def test_categorical_repr_timedelta_ordered(self): 3 days 01:00:00 ... 16 days 01:00:00 < 17 days 01:00:00 < 18 days 01:00:00 < 19 days 01:00:00]""" # noqa - self.assertEqual(repr(c), exp) + assert repr(c) == exp c = pd.Categorical(idx.append(idx), categories=idx, ordered=True) exp = """[0 days 01:00:00, 1 days 01:00:00, 2 days 01:00:00, 3 days 01:00:00, 4 days 01:00:00, ..., 15 days 01:00:00, 16 days 01:00:00, 17 days 01:00:00, 18 days 01:00:00, 19 days 01:00:00] @@ -2367,7 +2367,7 @@ def test_categorical_repr_timedelta_ordered(self): 3 days 01:00:00 ... 16 days 01:00:00 < 17 days 01:00:00 < 18 days 01:00:00 < 19 days 01:00:00]""" # noqa - self.assertEqual(repr(c), exp) + assert repr(c) == exp def test_categorical_series_repr(self): s = pd.Series(pd.Categorical([1, 2, 3])) @@ -2377,7 +2377,7 @@ def test_categorical_series_repr(self): dtype: category Categories (3, int64): [1, 2, 3]""" - self.assertEqual(repr(s), exp) + assert repr(s) == exp s = pd.Series(pd.Categorical(np.arange(10))) exp = """0 0 @@ -2393,7 +2393,7 @@ def test_categorical_series_repr(self): dtype: category Categories (10, int64): [0, 1, 2, 3, ..., 6, 7, 8, 9]""" - self.assertEqual(repr(s), exp) + assert repr(s) == exp def test_categorical_series_repr_ordered(self): s = pd.Series(pd.Categorical([1, 2, 3], ordered=True)) @@ -2403,7 +2403,7 @@ def test_categorical_series_repr_ordered(self): dtype: category Categories (3, int64): [1 < 2 < 3]""" - self.assertEqual(repr(s), exp) + assert repr(s) == exp s = pd.Series(pd.Categorical(np.arange(10), ordered=True)) exp = """0 0 @@ -2419,7 +2419,7 @@ def test_categorical_series_repr_ordered(self): dtype: category Categories (10, int64): [0 < 1 < 2 < 3 ... 
6 < 7 < 8 < 9]""" - self.assertEqual(repr(s), exp) + assert repr(s) == exp def test_categorical_series_repr_datetime(self): idx = pd.date_range('2011-01-01 09:00', freq='H', periods=5) @@ -2433,7 +2433,7 @@ def test_categorical_series_repr_datetime(self): Categories (5, datetime64[ns]): [2011-01-01 09:00:00, 2011-01-01 10:00:00, 2011-01-01 11:00:00, 2011-01-01 12:00:00, 2011-01-01 13:00:00]""" # noqa - self.assertEqual(repr(s), exp) + assert repr(s) == exp idx = pd.date_range('2011-01-01 09:00', freq='H', periods=5, tz='US/Eastern') @@ -2448,7 +2448,7 @@ def test_categorical_series_repr_datetime(self): 2011-01-01 11:00:00-05:00, 2011-01-01 12:00:00-05:00, 2011-01-01 13:00:00-05:00]""" # noqa - self.assertEqual(repr(s), exp) + assert repr(s) == exp def test_categorical_series_repr_datetime_ordered(self): idx = pd.date_range('2011-01-01 09:00', freq='H', periods=5) @@ -2462,7 +2462,7 @@ def test_categorical_series_repr_datetime_ordered(self): Categories (5, datetime64[ns]): [2011-01-01 09:00:00 < 2011-01-01 10:00:00 < 2011-01-01 11:00:00 < 2011-01-01 12:00:00 < 2011-01-01 13:00:00]""" # noqa - self.assertEqual(repr(s), exp) + assert repr(s) == exp idx = pd.date_range('2011-01-01 09:00', freq='H', periods=5, tz='US/Eastern') @@ -2477,7 +2477,7 @@ def test_categorical_series_repr_datetime_ordered(self): 2011-01-01 11:00:00-05:00 < 2011-01-01 12:00:00-05:00 < 2011-01-01 13:00:00-05:00]""" # noqa - self.assertEqual(repr(s), exp) + assert repr(s) == exp def test_categorical_series_repr_period(self): idx = pd.period_range('2011-01-01 09:00', freq='H', periods=5) @@ -2491,7 +2491,7 @@ def test_categorical_series_repr_period(self): Categories (5, period[H]): [2011-01-01 09:00, 2011-01-01 10:00, 2011-01-01 11:00, 2011-01-01 12:00, 2011-01-01 13:00]""" # noqa - self.assertEqual(repr(s), exp) + assert repr(s) == exp idx = pd.period_range('2011-01', freq='M', periods=5) s = pd.Series(pd.Categorical(idx)) @@ -2503,7 +2503,7 @@ def test_categorical_series_repr_period(self): dtype: category Categories (5, period[M]): [2011-01, 2011-02, 2011-03, 2011-04, 2011-05]""" - self.assertEqual(repr(s), exp) + assert repr(s) == exp def test_categorical_series_repr_period_ordered(self): idx = pd.period_range('2011-01-01 09:00', freq='H', periods=5) @@ -2517,7 +2517,7 @@ def test_categorical_series_repr_period_ordered(self): Categories (5, period[H]): [2011-01-01 09:00 < 2011-01-01 10:00 < 2011-01-01 11:00 < 2011-01-01 12:00 < 2011-01-01 13:00]""" # noqa - self.assertEqual(repr(s), exp) + assert repr(s) == exp idx = pd.period_range('2011-01', freq='M', periods=5) s = pd.Series(pd.Categorical(idx, ordered=True)) @@ -2529,7 +2529,7 @@ def test_categorical_series_repr_period_ordered(self): dtype: category Categories (5, period[M]): [2011-01 < 2011-02 < 2011-03 < 2011-04 < 2011-05]""" - self.assertEqual(repr(s), exp) + assert repr(s) == exp def test_categorical_series_repr_timedelta(self): idx = pd.timedelta_range('1 days', periods=5) @@ -2542,7 +2542,7 @@ def test_categorical_series_repr_timedelta(self): dtype: category Categories (5, timedelta64[ns]): [1 days, 2 days, 3 days, 4 days, 5 days]""" - self.assertEqual(repr(s), exp) + assert repr(s) == exp idx = pd.timedelta_range('1 hours', periods=10) s = pd.Series(pd.Categorical(idx)) @@ -2561,7 +2561,7 @@ def test_categorical_series_repr_timedelta(self): 3 days 01:00:00, ..., 6 days 01:00:00, 7 days 01:00:00, 8 days 01:00:00, 9 days 01:00:00]""" # noqa - self.assertEqual(repr(s), exp) + assert repr(s) == exp def test_categorical_series_repr_timedelta_ordered(self): idx = 
pd.timedelta_range('1 days', periods=5) @@ -2574,7 +2574,7 @@ def test_categorical_series_repr_timedelta_ordered(self): dtype: category Categories (5, timedelta64[ns]): [1 days < 2 days < 3 days < 4 days < 5 days]""" # noqa - self.assertEqual(repr(s), exp) + assert repr(s) == exp idx = pd.timedelta_range('1 hours', periods=10) s = pd.Series(pd.Categorical(idx, ordered=True)) @@ -2593,25 +2593,25 @@ def test_categorical_series_repr_timedelta_ordered(self): 3 days 01:00:00 ... 6 days 01:00:00 < 7 days 01:00:00 < 8 days 01:00:00 < 9 days 01:00:00]""" # noqa - self.assertEqual(repr(s), exp) + assert repr(s) == exp def test_categorical_index_repr(self): idx = pd.CategoricalIndex(pd.Categorical([1, 2, 3])) exp = """CategoricalIndex([1, 2, 3], categories=[1, 2, 3], ordered=False, dtype='category')""" # noqa - self.assertEqual(repr(idx), exp) + assert repr(idx) == exp i = pd.CategoricalIndex(pd.Categorical(np.arange(10))) exp = """CategoricalIndex([0, 1, 2, 3, 4, 5, 6, 7, 8, 9], categories=[0, 1, 2, 3, 4, 5, 6, 7, ...], ordered=False, dtype='category')""" # noqa - self.assertEqual(repr(i), exp) + assert repr(i) == exp def test_categorical_index_repr_ordered(self): i = pd.CategoricalIndex(pd.Categorical([1, 2, 3], ordered=True)) exp = """CategoricalIndex([1, 2, 3], categories=[1, 2, 3], ordered=True, dtype='category')""" # noqa - self.assertEqual(repr(i), exp) + assert repr(i) == exp i = pd.CategoricalIndex(pd.Categorical(np.arange(10), ordered=True)) exp = """CategoricalIndex([0, 1, 2, 3, 4, 5, 6, 7, 8, 9], categories=[0, 1, 2, 3, 4, 5, 6, 7, ...], ordered=True, dtype='category')""" # noqa - self.assertEqual(repr(i), exp) + assert repr(i) == exp def test_categorical_index_repr_datetime(self): idx = pd.date_range('2011-01-01 09:00', freq='H', periods=5) @@ -2621,7 +2621,7 @@ def test_categorical_index_repr_datetime(self): '2011-01-01 13:00:00'], categories=[2011-01-01 09:00:00, 2011-01-01 10:00:00, 2011-01-01 11:00:00, 2011-01-01 12:00:00, 2011-01-01 13:00:00], ordered=False, dtype='category')""" # noqa - self.assertEqual(repr(i), exp) + assert repr(i) == exp idx = pd.date_range('2011-01-01 09:00', freq='H', periods=5, tz='US/Eastern') @@ -2631,7 +2631,7 @@ def test_categorical_index_repr_datetime(self): '2011-01-01 13:00:00-05:00'], categories=[2011-01-01 09:00:00-05:00, 2011-01-01 10:00:00-05:00, 2011-01-01 11:00:00-05:00, 2011-01-01 12:00:00-05:00, 2011-01-01 13:00:00-05:00], ordered=False, dtype='category')""" # noqa - self.assertEqual(repr(i), exp) + assert repr(i) == exp def test_categorical_index_repr_datetime_ordered(self): idx = pd.date_range('2011-01-01 09:00', freq='H', periods=5) @@ -2641,7 +2641,7 @@ def test_categorical_index_repr_datetime_ordered(self): '2011-01-01 13:00:00'], categories=[2011-01-01 09:00:00, 2011-01-01 10:00:00, 2011-01-01 11:00:00, 2011-01-01 12:00:00, 2011-01-01 13:00:00], ordered=True, dtype='category')""" # noqa - self.assertEqual(repr(i), exp) + assert repr(i) == exp idx = pd.date_range('2011-01-01 09:00', freq='H', periods=5, tz='US/Eastern') @@ -2651,7 +2651,7 @@ def test_categorical_index_repr_datetime_ordered(self): '2011-01-01 13:00:00-05:00'], categories=[2011-01-01 09:00:00-05:00, 2011-01-01 10:00:00-05:00, 2011-01-01 11:00:00-05:00, 2011-01-01 12:00:00-05:00, 2011-01-01 13:00:00-05:00], ordered=True, dtype='category')""" # noqa - self.assertEqual(repr(i), exp) + assert repr(i) == exp i = pd.CategoricalIndex(pd.Categorical(idx.append(idx), ordered=True)) exp = """CategoricalIndex(['2011-01-01 09:00:00-05:00', '2011-01-01 10:00:00-05:00', @@ -2661,24 
+2661,24 @@ def test_categorical_index_repr_datetime_ordered(self): '2011-01-01 12:00:00-05:00', '2011-01-01 13:00:00-05:00'], categories=[2011-01-01 09:00:00-05:00, 2011-01-01 10:00:00-05:00, 2011-01-01 11:00:00-05:00, 2011-01-01 12:00:00-05:00, 2011-01-01 13:00:00-05:00], ordered=True, dtype='category')""" # noqa - self.assertEqual(repr(i), exp) + assert repr(i) == exp def test_categorical_index_repr_period(self): # test all length idx = pd.period_range('2011-01-01 09:00', freq='H', periods=1) i = pd.CategoricalIndex(pd.Categorical(idx)) exp = """CategoricalIndex(['2011-01-01 09:00'], categories=[2011-01-01 09:00], ordered=False, dtype='category')""" # noqa - self.assertEqual(repr(i), exp) + assert repr(i) == exp idx = pd.period_range('2011-01-01 09:00', freq='H', periods=2) i = pd.CategoricalIndex(pd.Categorical(idx)) exp = """CategoricalIndex(['2011-01-01 09:00', '2011-01-01 10:00'], categories=[2011-01-01 09:00, 2011-01-01 10:00], ordered=False, dtype='category')""" # noqa - self.assertEqual(repr(i), exp) + assert repr(i) == exp idx = pd.period_range('2011-01-01 09:00', freq='H', periods=3) i = pd.CategoricalIndex(pd.Categorical(idx)) exp = """CategoricalIndex(['2011-01-01 09:00', '2011-01-01 10:00', '2011-01-01 11:00'], categories=[2011-01-01 09:00, 2011-01-01 10:00, 2011-01-01 11:00], ordered=False, dtype='category')""" # noqa - self.assertEqual(repr(i), exp) + assert repr(i) == exp idx = pd.period_range('2011-01-01 09:00', freq='H', periods=5) i = pd.CategoricalIndex(pd.Categorical(idx)) @@ -2686,7 +2686,7 @@ def test_categorical_index_repr_period(self): '2011-01-01 12:00', '2011-01-01 13:00'], categories=[2011-01-01 09:00, 2011-01-01 10:00, 2011-01-01 11:00, 2011-01-01 12:00, 2011-01-01 13:00], ordered=False, dtype='category')""" # noqa - self.assertEqual(repr(i), exp) + assert repr(i) == exp i = pd.CategoricalIndex(pd.Categorical(idx.append(idx))) exp = """CategoricalIndex(['2011-01-01 09:00', '2011-01-01 10:00', '2011-01-01 11:00', @@ -2695,12 +2695,12 @@ def test_categorical_index_repr_period(self): '2011-01-01 13:00'], categories=[2011-01-01 09:00, 2011-01-01 10:00, 2011-01-01 11:00, 2011-01-01 12:00, 2011-01-01 13:00], ordered=False, dtype='category')""" # noqa - self.assertEqual(repr(i), exp) + assert repr(i) == exp idx = pd.period_range('2011-01', freq='M', periods=5) i = pd.CategoricalIndex(pd.Categorical(idx)) exp = """CategoricalIndex(['2011-01', '2011-02', '2011-03', '2011-04', '2011-05'], categories=[2011-01, 2011-02, 2011-03, 2011-04, 2011-05], ordered=False, dtype='category')""" # noqa - self.assertEqual(repr(i), exp) + assert repr(i) == exp def test_categorical_index_repr_period_ordered(self): idx = pd.period_range('2011-01-01 09:00', freq='H', periods=5) @@ -2709,18 +2709,18 @@ def test_categorical_index_repr_period_ordered(self): '2011-01-01 12:00', '2011-01-01 13:00'], categories=[2011-01-01 09:00, 2011-01-01 10:00, 2011-01-01 11:00, 2011-01-01 12:00, 2011-01-01 13:00], ordered=True, dtype='category')""" # noqa - self.assertEqual(repr(i), exp) + assert repr(i) == exp idx = pd.period_range('2011-01', freq='M', periods=5) i = pd.CategoricalIndex(pd.Categorical(idx, ordered=True)) exp = """CategoricalIndex(['2011-01', '2011-02', '2011-03', '2011-04', '2011-05'], categories=[2011-01, 2011-02, 2011-03, 2011-04, 2011-05], ordered=True, dtype='category')""" # noqa - self.assertEqual(repr(i), exp) + assert repr(i) == exp def test_categorical_index_repr_timedelta(self): idx = pd.timedelta_range('1 days', periods=5) i = pd.CategoricalIndex(pd.Categorical(idx)) exp = 
"""CategoricalIndex(['1 days', '2 days', '3 days', '4 days', '5 days'], categories=[1 days 00:00:00, 2 days 00:00:00, 3 days 00:00:00, 4 days 00:00:00, 5 days 00:00:00], ordered=False, dtype='category')""" # noqa - self.assertEqual(repr(i), exp) + assert repr(i) == exp idx = pd.timedelta_range('1 hours', periods=10) i = pd.CategoricalIndex(pd.Categorical(idx)) @@ -2730,13 +2730,13 @@ def test_categorical_index_repr_timedelta(self): '9 days 01:00:00'], categories=[0 days 01:00:00, 1 days 01:00:00, 2 days 01:00:00, 3 days 01:00:00, 4 days 01:00:00, 5 days 01:00:00, 6 days 01:00:00, 7 days 01:00:00, ...], ordered=False, dtype='category')""" # noqa - self.assertEqual(repr(i), exp) + assert repr(i) == exp def test_categorical_index_repr_timedelta_ordered(self): idx = pd.timedelta_range('1 days', periods=5) i = pd.CategoricalIndex(pd.Categorical(idx, ordered=True)) exp = """CategoricalIndex(['1 days', '2 days', '3 days', '4 days', '5 days'], categories=[1 days 00:00:00, 2 days 00:00:00, 3 days 00:00:00, 4 days 00:00:00, 5 days 00:00:00], ordered=True, dtype='category')""" # noqa - self.assertEqual(repr(i), exp) + assert repr(i) == exp idx = pd.timedelta_range('1 hours', periods=10) i = pd.CategoricalIndex(pd.Categorical(idx, ordered=True)) @@ -2746,7 +2746,7 @@ def test_categorical_index_repr_timedelta_ordered(self): '9 days 01:00:00'], categories=[0 days 01:00:00, 1 days 01:00:00, 2 days 01:00:00, 3 days 01:00:00, 4 days 01:00:00, 5 days 01:00:00, 6 days 01:00:00, 7 days 01:00:00, ...], ordered=True, dtype='category')""" # noqa - self.assertEqual(repr(i), exp) + assert repr(i) == exp def test_categorical_frame(self): # normal DataFrame @@ -2762,7 +2762,7 @@ def test_categorical_frame(self): 4 2011-01-01 13:00:00-05:00 2011-05""" df = pd.DataFrame({'dt': pd.Categorical(dt), 'p': pd.Categorical(p)}) - self.assertEqual(repr(df), exp) + assert repr(df) == exp def test_info(self): @@ -2800,15 +2800,15 @@ def test_min_max(self): cat = Series(Categorical(["a", "b", "c", "d"], ordered=True)) _min = cat.min() _max = cat.max() - self.assertEqual(_min, "a") - self.assertEqual(_max, "d") + assert _min == "a" + assert _max == "d" cat = Series(Categorical(["a", "b", "c", "d"], categories=[ 'd', 'c', 'b', 'a'], ordered=True)) _min = cat.min() _max = cat.max() - self.assertEqual(_min, "d") - self.assertEqual(_max, "a") + assert _min == "d" + assert _max == "a" cat = Series(Categorical( [np.nan, "b", "c", np.nan], categories=['d', 'c', 'b', 'a' @@ -2816,14 +2816,14 @@ def test_min_max(self): _min = cat.min() _max = cat.max() assert np.isnan(_min) - self.assertEqual(_max, "b") + assert _max == "b" cat = Series(Categorical( [np.nan, 1, 2, np.nan], categories=[5, 4, 3, 2, 1], ordered=True)) _min = cat.min() _max = cat.max() assert np.isnan(_min) - self.assertEqual(_max, 1) + assert _max == 1 def test_mode(self): s = Series(Categorical([1, 1, 2, 4, 5, 5, 5], @@ -3050,7 +3050,7 @@ def test_count(self): s = Series(Categorical([np.nan, 1, 2, np.nan], categories=[5, 4, 3, 2, 1], ordered=True)) result = s.count() - self.assertEqual(result, 2) + assert result == 2 def test_sort_values(self): @@ -3099,13 +3099,13 @@ def test_sort_values(self): res = df.sort_values(by=["string"], ascending=False) exp = np.array(["d", "c", "b", "a"], dtype=np.object_) tm.assert_numpy_array_equal(res["sort"].values.__array__(), exp) - self.assertEqual(res["sort"].dtype, "category") + assert res["sort"].dtype == "category" res = df.sort_values(by=["sort"], ascending=False) exp = df.sort_values(by=["string"], ascending=True) 
tm.assert_series_equal(res["values"], exp["values"]) - self.assertEqual(res["sort"].dtype, "category") - self.assertEqual(res["unsort"].dtype, "category") + assert res["sort"].dtype == "category" + assert res["unsort"].dtype == "category" # unordered cat, but we allow this df.sort_values(by=["unsort"], ascending=False) @@ -3201,7 +3201,7 @@ def test_slicing_and_getting_ops(self): # single value res_val = df.iloc[2, 0] - self.assertEqual(res_val, exp_val) + assert res_val == exp_val # loc # frame @@ -3221,7 +3221,7 @@ def test_slicing_and_getting_ops(self): # single value res_val = df.loc["j", "cats"] - self.assertEqual(res_val, exp_val) + assert res_val == exp_val # ix # frame @@ -3242,15 +3242,15 @@ def test_slicing_and_getting_ops(self): # single value res_val = df.loc["j", df.columns[0]] - self.assertEqual(res_val, exp_val) + assert res_val == exp_val # iat res_val = df.iat[2, 0] - self.assertEqual(res_val, exp_val) + assert res_val == exp_val # at res_val = df.at["j", "cats"] - self.assertEqual(res_val, exp_val) + assert res_val == exp_val # fancy indexing exp_fancy = df.iloc[[2]] @@ -3262,7 +3262,7 @@ def test_slicing_and_getting_ops(self): # get_value res_val = df.get_value("j", "cats") - self.assertEqual(res_val, exp_val) + assert res_val == exp_val # i : int, slice, or sequence of integers res_row = df.iloc[2] diff --git a/pandas/tests/test_config.py b/pandas/tests/test_config.py index ad5418f4a4a29..ba055b105dc41 100644 --- a/pandas/tests/test_config.py +++ b/pandas/tests/test_config.py @@ -111,9 +111,9 @@ def test_case_insensitive(self): self.cf.register_option('KanBAN', 1, 'doc') assert 'doc' in self.cf.describe_option('kanbaN', _print_desc=False) - self.assertEqual(self.cf.get_option('kanBaN'), 1) + assert self.cf.get_option('kanBaN') == 1 self.cf.set_option('KanBan', 2) - self.assertEqual(self.cf.get_option('kAnBaN'), 2) + assert self.cf.get_option('kAnBaN') == 2 # gets of non-existent keys fail pytest.raises(KeyError, self.cf.get_option, 'no_such_option') @@ -127,8 +127,8 @@ def test_get_option(self): self.cf.register_option('b.b', None, 'doc2') # gets of existing keys succeed - self.assertEqual(self.cf.get_option('a'), 1) - self.assertEqual(self.cf.get_option('b.c'), 'hullo') + assert self.cf.get_option('a') == 1 + assert self.cf.get_option('b.c') == 'hullo' assert self.cf.get_option('b.b') is None # gets of non-existent keys fail @@ -139,17 +139,17 @@ def test_set_option(self): self.cf.register_option('b.c', 'hullo', 'doc2') self.cf.register_option('b.b', None, 'doc2') - self.assertEqual(self.cf.get_option('a'), 1) - self.assertEqual(self.cf.get_option('b.c'), 'hullo') + assert self.cf.get_option('a') == 1 + assert self.cf.get_option('b.c') == 'hullo' assert self.cf.get_option('b.b') is None self.cf.set_option('a', 2) self.cf.set_option('b.c', 'wurld') self.cf.set_option('b.b', 1.1) - self.assertEqual(self.cf.get_option('a'), 2) - self.assertEqual(self.cf.get_option('b.c'), 'wurld') - self.assertEqual(self.cf.get_option('b.b'), 1.1) + assert self.cf.get_option('a') == 2 + assert self.cf.get_option('b.c') == 'wurld' + assert self.cf.get_option('b.b') == 1.1 pytest.raises(KeyError, self.cf.set_option, 'no.such.key', None) @@ -167,15 +167,15 @@ def test_set_option_multiple(self): self.cf.register_option('b.c', 'hullo', 'doc2') self.cf.register_option('b.b', None, 'doc2') - self.assertEqual(self.cf.get_option('a'), 1) - self.assertEqual(self.cf.get_option('b.c'), 'hullo') + assert self.cf.get_option('a') == 1 + assert self.cf.get_option('b.c') == 'hullo' assert 
self.cf.get_option('b.b') is None self.cf.set_option('a', '2', 'b.c', None, 'b.b', 10.0) - self.assertEqual(self.cf.get_option('a'), '2') + assert self.cf.get_option('a') == '2' assert self.cf.get_option('b.c') is None - self.assertEqual(self.cf.get_option('b.b'), 10.0) + assert self.cf.get_option('b.b') == 10.0 def test_validation(self): self.cf.register_option('a', 1, 'doc', validator=self.cf.is_int) @@ -203,36 +203,36 @@ def test_reset_option(self): self.cf.register_option('a', 1, 'doc', validator=self.cf.is_int) self.cf.register_option('b.c', 'hullo', 'doc2', validator=self.cf.is_str) - self.assertEqual(self.cf.get_option('a'), 1) - self.assertEqual(self.cf.get_option('b.c'), 'hullo') + assert self.cf.get_option('a') == 1 + assert self.cf.get_option('b.c') == 'hullo' self.cf.set_option('a', 2) self.cf.set_option('b.c', 'wurld') - self.assertEqual(self.cf.get_option('a'), 2) - self.assertEqual(self.cf.get_option('b.c'), 'wurld') + assert self.cf.get_option('a') == 2 + assert self.cf.get_option('b.c') == 'wurld' self.cf.reset_option('a') - self.assertEqual(self.cf.get_option('a'), 1) - self.assertEqual(self.cf.get_option('b.c'), 'wurld') + assert self.cf.get_option('a') == 1 + assert self.cf.get_option('b.c') == 'wurld' self.cf.reset_option('b.c') - self.assertEqual(self.cf.get_option('a'), 1) - self.assertEqual(self.cf.get_option('b.c'), 'hullo') + assert self.cf.get_option('a') == 1 + assert self.cf.get_option('b.c') == 'hullo' def test_reset_option_all(self): self.cf.register_option('a', 1, 'doc', validator=self.cf.is_int) self.cf.register_option('b.c', 'hullo', 'doc2', validator=self.cf.is_str) - self.assertEqual(self.cf.get_option('a'), 1) - self.assertEqual(self.cf.get_option('b.c'), 'hullo') + assert self.cf.get_option('a') == 1 + assert self.cf.get_option('b.c') == 'hullo' self.cf.set_option('a', 2) self.cf.set_option('b.c', 'wurld') - self.assertEqual(self.cf.get_option('a'), 2) - self.assertEqual(self.cf.get_option('b.c'), 'wurld') + assert self.cf.get_option('a') == 2 + assert self.cf.get_option('b.c') == 'wurld' self.cf.reset_option("all") - self.assertEqual(self.cf.get_option('a'), 1) - self.assertEqual(self.cf.get_option('b.c'), 'hullo') + assert self.cf.get_option('a') == 1 + assert self.cf.get_option('b.c') == 'hullo' def test_deprecate_option(self): # we can deprecate non-existent options @@ -248,7 +248,7 @@ def test_deprecate_option(self): else: self.fail("Nonexistent option didn't raise KeyError") - self.assertEqual(len(w), 1) # should have raised one warning + assert len(w) == 1 # should have raised one warning assert 'deprecated' in str(w[-1]) # we get the default message self.cf.register_option('a', 1, 'doc', validator=self.cf.is_int) @@ -260,7 +260,7 @@ def test_deprecate_option(self): warnings.simplefilter('always') self.cf.get_option('a') - self.assertEqual(len(w), 1) # should have raised one warning + assert len(w) == 1 # should have raised one warning assert 'eprecated' in str(w[-1]) # we get the default message assert 'nifty_ver' in str(w[-1]) # with the removal_ver quoted @@ -272,51 +272,51 @@ def test_deprecate_option(self): warnings.simplefilter('always') self.cf.get_option('b.c') - self.assertEqual(len(w), 1) # should have raised one warning + assert len(w) == 1 # should have raised one warning assert 'zounds!' 
in str(w[-1]) # we get the custom message # test rerouting keys self.cf.register_option('d.a', 'foo', 'doc2') self.cf.register_option('d.dep', 'bar', 'doc2') - self.assertEqual(self.cf.get_option('d.a'), 'foo') - self.assertEqual(self.cf.get_option('d.dep'), 'bar') + assert self.cf.get_option('d.a') == 'foo' + assert self.cf.get_option('d.dep') == 'bar' self.cf.deprecate_option('d.dep', rkey='d.a') # reroute d.dep to d.a with warnings.catch_warnings(record=True) as w: warnings.simplefilter('always') - self.assertEqual(self.cf.get_option('d.dep'), 'foo') + assert self.cf.get_option('d.dep') == 'foo' - self.assertEqual(len(w), 1) # should have raised one warning + assert len(w) == 1 # should have raised one warning assert 'eprecated' in str(w[-1]) # we get the custom message with warnings.catch_warnings(record=True) as w: warnings.simplefilter('always') self.cf.set_option('d.dep', 'baz') # should overwrite "d.a" - self.assertEqual(len(w), 1) # should have raised one warning + assert len(w) == 1 # should have raised one warning assert 'eprecated' in str(w[-1]) # we get the custom message with warnings.catch_warnings(record=True) as w: warnings.simplefilter('always') - self.assertEqual(self.cf.get_option('d.dep'), 'baz') + assert self.cf.get_option('d.dep') == 'baz' - self.assertEqual(len(w), 1) # should have raised one warning + assert len(w) == 1 # should have raised one warning assert 'eprecated' in str(w[-1]) # we get the custom message def test_config_prefix(self): with self.cf.config_prefix("base"): self.cf.register_option('a', 1, "doc1") self.cf.register_option('b', 2, "doc2") - self.assertEqual(self.cf.get_option('a'), 1) - self.assertEqual(self.cf.get_option('b'), 2) + assert self.cf.get_option('a') == 1 + assert self.cf.get_option('b') == 2 self.cf.set_option('a', 3) self.cf.set_option('b', 4) - self.assertEqual(self.cf.get_option('a'), 3) - self.assertEqual(self.cf.get_option('b'), 4) + assert self.cf.get_option('a') == 3 + assert self.cf.get_option('b') == 4 - self.assertEqual(self.cf.get_option('base.a'), 3) - self.assertEqual(self.cf.get_option('base.b'), 4) + assert self.cf.get_option('base.a') == 3 + assert self.cf.get_option('base.b') == 4 assert 'doc1' in self.cf.describe_option('base.a', _print_desc=False) assert 'doc2' in self.cf.describe_option('base.b', _print_desc=False) @@ -324,8 +324,8 @@ def test_config_prefix(self): self.cf.reset_option('base.b') with self.cf.config_prefix("base"): - self.assertEqual(self.cf.get_option('a'), 1) - self.assertEqual(self.cf.get_option('b'), 2) + assert self.cf.get_option('a') == 1 + assert self.cf.get_option('b') == 2 def test_callback(self): k = [None] @@ -340,21 +340,21 @@ def callback(key): del k[-1], v[-1] self.cf.set_option("d.a", "fooz") - self.assertEqual(k[-1], "d.a") - self.assertEqual(v[-1], "fooz") + assert k[-1] == "d.a" + assert v[-1] == "fooz" del k[-1], v[-1] self.cf.set_option("d.b", "boo") - self.assertEqual(k[-1], "d.b") - self.assertEqual(v[-1], "boo") + assert k[-1] == "d.b" + assert v[-1] == "boo" del k[-1], v[-1] self.cf.reset_option("d.b") - self.assertEqual(k[-1], "d.b") + assert k[-1] == "d.b" def test_set_ContextManager(self): def eq(val): - self.assertEqual(self.cf.get_option("a"), val) + assert self.cf.get_option("a") == val self.cf.register_option('a', 0) eq(0) @@ -384,22 +384,22 @@ def f3(key): self.cf.register_option('c', 0, cb=f3) options = self.cf.options - self.assertEqual(options.a, 0) + assert options.a == 0 with self.cf.option_context("a", 15): - self.assertEqual(options.a, 15) + assert options.a == 
15 options.a = 500 - self.assertEqual(self.cf.get_option("a"), 500) + assert self.cf.get_option("a") == 500 self.cf.reset_option("a") - self.assertEqual(options.a, self.cf.get_option("a", 0)) + assert options.a == self.cf.get_option("a", 0) pytest.raises(KeyError, f) pytest.raises(KeyError, f2) # make sure callback kicks when using this form of setting options.c = 1 - self.assertEqual(len(holder), 1) + assert len(holder) == 1 def test_option_context_scope(self): # Ensure that creating a context does not affect the existing @@ -414,11 +414,11 @@ def test_option_context_scope(self): # Ensure creating contexts didn't affect the current context. ctx = self.cf.option_context(option_name, context_value) - self.assertEqual(self.cf.get_option(option_name), original_value) + assert self.cf.get_option(option_name) == original_value # Ensure the correct value is available inside the context. with ctx: - self.assertEqual(self.cf.get_option(option_name), context_value) + assert self.cf.get_option(option_name) == context_value # Ensure the current context is reset - self.assertEqual(self.cf.get_option(option_name), original_value) + assert self.cf.get_option(option_name) == original_value diff --git a/pandas/tests/test_multilevel.py b/pandas/tests/test_multilevel.py index e4ed194b75bcd..5b2057f830102 100644 --- a/pandas/tests/test_multilevel.py +++ b/pandas/tests/test_multilevel.py @@ -259,7 +259,7 @@ def test_series_getitem(self): result = s[2000, 3, 10] expected = s[49] - self.assertEqual(result, expected) + assert result == expected # fancy expected = s.reindex(s.index[49:51]) @@ -404,9 +404,9 @@ def test_frame_setitem_multi_column(self): sliced_b1 = df['B', '1'] tm.assert_series_equal(sliced_a1, sliced_b1, check_names=False) tm.assert_series_equal(sliced_a2, sliced_b1, check_names=False) - self.assertEqual(sliced_a1.name, ('A', '1')) - self.assertEqual(sliced_a2.name, ('A', '2')) - self.assertEqual(sliced_b1.name, ('B', '1')) + assert sliced_a1.name == ('A', '1') + assert sliced_a2.name == ('A', '2') + assert sliced_b1.name == ('B', '1') def test_getitem_tuple_plus_slice(self): # GH #671 @@ -557,7 +557,7 @@ def test_xs_level0(self): result = df.xs('a', level=0) expected = df.xs('a') - self.assertEqual(len(result), 2) + assert len(result) == 2 tm.assert_frame_equal(result, expected) def test_xs_level_series(self): @@ -667,19 +667,19 @@ def test_setitem_change_dtype(self): def test_frame_setitem_ix(self): self.frame.loc[('bar', 'two'), 'B'] = 5 - self.assertEqual(self.frame.loc[('bar', 'two'), 'B'], 5) + assert self.frame.loc[('bar', 'two'), 'B'] == 5 # with integer labels df = self.frame.copy() df.columns = lrange(3) df.loc[('bar', 'two'), 1] = 7 - self.assertEqual(df.loc[('bar', 'two'), 1], 7) + assert df.loc[('bar', 'two'), 1] == 7 with catch_warnings(record=True): df = self.frame.copy() df.columns = lrange(3) df.ix[('bar', 'two'), 1] = 7 - self.assertEqual(df.loc[('bar', 'two'), 1], 7) + assert df.loc[('bar', 'two'), 1] == 7 def test_fancy_slice_partial(self): result = self.frame.loc['bar':'baz'] @@ -724,12 +724,11 @@ def test_delevel_infer_dtype(self): def test_reset_index_with_drop(self): deleveled = self.ymd.reset_index(drop=True) - self.assertEqual(len(deleveled.columns), len(self.ymd.columns)) + assert len(deleveled.columns) == len(self.ymd.columns) deleveled = self.series.reset_index() assert isinstance(deleveled, DataFrame) - self.assertEqual(len(deleveled.columns), - len(self.series.index.levels) + 1) + assert len(deleveled.columns) == len(self.series.index.levels) + 1 deleveled = 
self.series.reset_index(drop=True) assert isinstance(deleveled, Series) @@ -942,7 +941,7 @@ def test_stack_mixed_dtype(self): result = df['foo'].stack().sort_index() tm.assert_series_equal(stacked['foo'], result, check_names=False) assert result.name is None - self.assertEqual(stacked['bar'].dtype, np.float_) + assert stacked['bar'].dtype == np.float_ def test_unstack_bug(self): df = DataFrame({'state': ['naive', 'naive', 'naive', 'activ', 'activ', @@ -961,11 +960,11 @@ def test_unstack_bug(self): def test_stack_unstack_preserve_names(self): unstacked = self.frame.unstack() - self.assertEqual(unstacked.index.name, 'first') - self.assertEqual(unstacked.columns.names, ['exp', 'second']) + assert unstacked.index.name == 'first' + assert unstacked.columns.names == ['exp', 'second'] restacked = unstacked.stack() - self.assertEqual(restacked.index.names, self.frame.index.names) + assert restacked.index.names == self.frame.index.names def test_unstack_level_name(self): result = self.frame.unstack('second') @@ -986,7 +985,7 @@ def test_stack_unstack_multiple(self): unstacked = self.ymd.unstack(['year', 'month']) expected = self.ymd.unstack('year').unstack('month') tm.assert_frame_equal(unstacked, expected) - self.assertEqual(unstacked.columns.names, expected.columns.names) + assert unstacked.columns.names == expected.columns.names # series s = self.ymd['A'] @@ -998,7 +997,7 @@ def test_stack_unstack_multiple(self): restacked = restacked.sort_index(level=0) tm.assert_frame_equal(restacked, self.ymd) - self.assertEqual(restacked.index.names, self.ymd.index.names) + assert restacked.index.names == self.ymd.index.names # GH #451 unstacked = self.ymd.unstack([1, 2]) @@ -1191,7 +1190,7 @@ def test_unstack_unobserved_keys(self): df = DataFrame(np.random.randn(4, 2), index=index) result = df.unstack() - self.assertEqual(len(result.columns), 4) + assert len(result.columns) == 4 recons = result.stack() tm.assert_frame_equal(recons, df) @@ -1351,12 +1350,12 @@ def test_count(self): result = series.count(level='b') expect = self.series.count(level=1) tm.assert_series_equal(result, expect, check_names=False) - self.assertEqual(result.index.name, 'b') + assert result.index.name == 'b' result = series.count(level='a') expect = self.series.count(level=0) tm.assert_series_equal(result, expect, check_names=False) - self.assertEqual(result.index.name, 'a') + assert result.index.name == 'a' pytest.raises(KeyError, series.count, 'x') pytest.raises(KeyError, frame.count, level='x') @@ -1465,7 +1464,7 @@ def test_groupby_multilevel(self): # TODO groupby with level_values drops names tm.assert_frame_equal(result, expected, check_names=False) - self.assertEqual(result.index.names, self.ymd.index.names[:2]) + assert result.index.names == self.ymd.index.names[:2] result2 = self.ymd.groupby(level=self.ymd.index.names[:2]).mean() tm.assert_frame_equal(result, result2) @@ -1483,13 +1482,13 @@ def test_multilevel_consolidate(self): def test_ix_preserve_names(self): result = self.ymd.loc[2000] result2 = self.ymd['A'].loc[2000] - self.assertEqual(result.index.names, self.ymd.index.names[1:]) - self.assertEqual(result2.index.names, self.ymd.index.names[1:]) + assert result.index.names == self.ymd.index.names[1:] + assert result2.index.names == self.ymd.index.names[1:] result = self.ymd.loc[2000, 2] result2 = self.ymd['A'].loc[2000, 2] - self.assertEqual(result.index.name, self.ymd.index.names[2]) - self.assertEqual(result2.index.name, self.ymd.index.names[2]) + assert result.index.name == self.ymd.index.names[2] + assert 
result2.index.name == self.ymd.index.names[2] def test_partial_set(self): # GH #397 @@ -1509,7 +1508,7 @@ def test_partial_set(self): # this works...for now df['A'].iloc[14] = 5 - self.assertEqual(df['A'][14], 5) + assert df['A'][14] == 5 def test_unstack_preserve_types(self): # GH #403 @@ -1517,9 +1516,9 @@ def test_unstack_preserve_types(self): self.ymd['F'] = 2 unstacked = self.ymd.unstack('month') - self.assertEqual(unstacked['A', 1].dtype, np.float64) - self.assertEqual(unstacked['E', 1].dtype, np.object_) - self.assertEqual(unstacked['F', 1].dtype, np.float64) + assert unstacked['A', 1].dtype == np.float64 + assert unstacked['E', 1].dtype == np.object_ + assert unstacked['F', 1].dtype == np.float64 def test_unstack_group_index_overflow(self): labels = np.tile(np.arange(500), 2) @@ -1530,7 +1529,7 @@ def test_unstack_group_index_overflow(self): s = Series(np.arange(1000), index=index) result = s.unstack() - self.assertEqual(result.shape, (500, 2)) + assert result.shape == (500, 2) # test roundtrip stacked = result.stack() @@ -1542,7 +1541,7 @@ def test_unstack_group_index_overflow(self): s = Series(np.arange(1000), index=index) result = s.unstack(0) - self.assertEqual(result.shape, (500, 2)) + assert result.shape == (500, 2) # put it in middle index = MultiIndex(levels=[level] * 4 + [[0, 1]] + [level] * 4, @@ -1551,7 +1550,7 @@ def test_unstack_group_index_overflow(self): s = Series(np.arange(1000), index=index) result = s.unstack(4) - self.assertEqual(result.shape, (500, 2)) + assert result.shape == (500, 2) def test_getitem_lowerdim_corner(self): pytest.raises(KeyError, self.frame.loc.__getitem__, @@ -1559,7 +1558,7 @@ def test_getitem_lowerdim_corner(self): # in theory should be inserting in a sorted space???? self.frame.loc[('bar', 'three'), 'B'] = 0 - self.assertEqual(self.frame.sort_index().loc[('bar', 'three'), 'B'], 0) + assert self.frame.sort_index().loc[('bar', 'three'), 'B'] == 0 # --------------------------------------------------------------------- # AMBIGUOUS CASES! @@ -1659,12 +1658,12 @@ def test_mixed_depth_get(self): result = df['a'] expected = df['a', '', ''] tm.assert_series_equal(result, expected, check_names=False) - self.assertEqual(result.name, 'a') + assert result.name == 'a' result = df['routine1', 'result1'] expected = df['routine1', 'result1', ''] tm.assert_series_equal(result, expected, check_names=False) - self.assertEqual(result.name, ('routine1', 'result1')) + assert result.name == ('routine1', 'result1') def test_mixed_depth_insert(self): arrays = [['a', 'top', 'top', 'routine1', 'routine1', 'routine2'], @@ -1747,7 +1746,7 @@ def test_mixed_depth_pop(self): expected = df2.pop(('a', '', '')) tm.assert_series_equal(expected, result, check_names=False) tm.assert_frame_equal(df1, df2) - self.assertEqual(result.name, 'a') + assert result.name == 'a' expected = df1['top'] df1 = df1.drop(['top'], axis=1) @@ -1845,7 +1844,7 @@ def test_drop_preserve_names(self): df = DataFrame(np.random.randn(6, 3), index=index) result = df.drop([(0, 2)]) - self.assertEqual(result.index.names, ('one', 'two')) + assert result.index.names == ('one', 'two') def test_unicode_repr_issues(self): levels = [Index([u('a/\u03c3'), u('b/\u03c3'), u('c/\u03c3')]), @@ -1944,9 +1943,9 @@ def test_indexing_over_hashtable_size_cutoff(self): MultiIndex.from_arrays((["a"] * n, np.arange(n)))) # hai it works! 
-        self.assertEqual(s[("a", 5)], 5)
-        self.assertEqual(s[("a", 6)], 6)
-        self.assertEqual(s[("a", 7)], 7)
+        assert s[("a", 5)] == 5
+        assert s[("a", 6)] == 6
+        assert s[("a", 7)] == 7
 
         _index._SIZE_CUTOFF = old_cutoff
 
@@ -1998,7 +1997,7 @@ def test_duplicate_groupby_issues(self):
         s = Series(dt, index=idx)
 
         result = s.groupby(s.index).first()
-        self.assertEqual(len(result), 3)
+        assert len(result) == 3
 
     def test_duplicate_mi(self):
         # GH 4516
@@ -2353,7 +2352,7 @@ class TestSorted(Base, tm.TestCase):
 
     def test_sort_index_preserve_levels(self):
         result = self.frame.sort_index()
-        self.assertEqual(result.index.names, self.frame.index.names)
+        assert result.index.names == self.frame.index.names
 
     def test_sorting_repr_8017(self):
 
@@ -2375,7 +2374,7 @@ def test_sorting_repr_8017(self):
         # check that the repr is good
         # make sure that we have a correct sparsified repr
         # e.g. only 1 header of read
-        self.assertEqual(str(df2).splitlines()[0].split(), ['red'])
+        assert str(df2).splitlines()[0].split() == ['red']
 
         # GH 8017
         # sorting fails after columns added
@@ -2406,7 +2405,7 @@ def test_sort_index_level(self):
         a_sorted = self.frame['A'].sort_index(level=0)
 
         # preserve names
-        self.assertEqual(a_sorted.index.names, self.frame.index.names)
+        assert a_sorted.index.names == self.frame.index.names
 
         # inplace
         rs = self.frame.copy()
@@ -2469,7 +2468,7 @@ def test_is_lexsorted(self):
         index = MultiIndex(levels=levels, labels=[[0, 0, 1, 0, 1, 1],
                                                   [0, 1, 0, 2, 2, 1]])
         assert not index.is_lexsorted()
-        self.assertEqual(index.lexsort_depth, 0)
+        assert index.lexsort_depth == 0
 
     def test_getitem_multilevel_index_tuple_not_sorted(self):
         index_columns = list("abc")
diff --git a/pandas/tests/test_nanops.py b/pandas/tests/test_nanops.py
index 92d7f29366c69..35d0198ae06a9 100644
--- a/pandas/tests/test_nanops.py
+++ b/pandas/tests/test_nanops.py
@@ -346,8 +346,8 @@ def test_nanmean_overflow(self):
             s = Series(a, index=range(500), dtype=np.int64)
             result = s.mean()
             np_result = s.values.mean()
-            self.assertEqual(result, a)
-            self.assertEqual(result, np_result)
+            assert result == a
+            assert result == np_result
             assert result.dtype == np.float64
 
     def test_returned_dtype(self):
@@ -746,12 +746,13 @@ class TestEnsureNumeric(tm.TestCase):
 
     def test_numeric_values(self):
         # Test integer
-        self.assertEqual(nanops._ensure_numeric(1), 1, 'Failed for int')
+        assert nanops._ensure_numeric(1) == 1
+
         # Test float
-        self.assertEqual(nanops._ensure_numeric(1.1), 1.1, 'Failed for float')
+        assert nanops._ensure_numeric(1.1) == 1.1
+
         # Test complex
-        self.assertEqual(nanops._ensure_numeric(1 + 2j), 1 + 2j,
-                         'Failed for complex')
+        assert nanops._ensure_numeric(1 + 2j) == 1 + 2j
 
     def test_ndarray(self):
         # Test numeric ndarray
@@ -887,7 +888,7 @@ def test_nanstd_roundoff(self):
         data = Series(766897346 * np.ones(10))
         for ddof in range(3):
             result = data.std(ddof=ddof)
-            self.assertEqual(result, 0.0)
+            assert result == 0.0
 
     @property
     def prng(self):
@@ -908,7 +909,7 @@ def test_constant_series(self):
         for val in [3075.2, 3075.3, 3075.5]:
             data = val * np.ones(300)
             skew = nanops.nanskew(data)
-            self.assertEqual(skew, 0.0)
+            assert skew == 0.0
 
     def test_all_finite(self):
         alpha, beta = 0.3, 0.1
@@ -958,7 +959,7 @@ def test_constant_series(self):
         for val in [3075.2, 3075.3, 3075.5]:
             data = val * np.ones(300)
             kurt = nanops.nankurt(data)
-            self.assertEqual(kurt, 0.0)
+            assert kurt == 0.0
 
     def test_all_finite(self):
         alpha, beta = 0.3, 0.1
diff --git a/pandas/tests/test_panel.py b/pandas/tests/test_panel.py
index c9894ad9a9acf..a692f6b26c61e 100644
---
a/pandas/tests/test_panel.py +++ b/pandas/tests/test_panel.py @@ -222,9 +222,9 @@ def test_set_axis(self): assert self.panel.minor_axis is new_minor def test_get_axis_number(self): - self.assertEqual(self.panel._get_axis_number('items'), 0) - self.assertEqual(self.panel._get_axis_number('major'), 1) - self.assertEqual(self.panel._get_axis_number('minor'), 2) + assert self.panel._get_axis_number('items') == 0 + assert self.panel._get_axis_number('major') == 1 + assert self.panel._get_axis_number('minor') == 2 with tm.assert_raises_regex(ValueError, "No axis named foo"): self.panel._get_axis_number('foo') @@ -233,9 +233,9 @@ def test_get_axis_number(self): self.panel.__ge__(self.panel, axis='foo') def test_get_axis_name(self): - self.assertEqual(self.panel._get_axis_name(0), 'items') - self.assertEqual(self.panel._get_axis_name(1), 'major_axis') - self.assertEqual(self.panel._get_axis_name(2), 'minor_axis') + assert self.panel._get_axis_name(0) == 'items' + assert self.panel._get_axis_name(1) == 'major_axis' + assert self.panel._get_axis_name(2) == 'minor_axis' def test_get_plane_axes(self): # what to do here? @@ -303,8 +303,7 @@ def test_iteritems(self): for k, v in self.panel.iteritems(): pass - self.assertEqual(len(list(self.panel.iteritems())), - len(self.panel.items)) + assert len(list(self.panel.iteritems())) == len(self.panel.items) def test_combineFrame(self): with catch_warnings(record=True): @@ -432,8 +431,8 @@ def test_abs(self): expected = np.abs(s) assert_series_equal(result, expected) assert_series_equal(result2, expected) - self.assertEqual(result.name, 'A') - self.assertEqual(result2.name, 'A') + assert result.name == 'A' + assert result2.name == 'A' class CheckIndexing(object): @@ -497,16 +496,16 @@ def test_setitem(self): # scalar self.panel['ItemG'] = 1 self.panel['ItemE'] = True - self.assertEqual(self.panel['ItemG'].values.dtype, np.int64) - self.assertEqual(self.panel['ItemE'].values.dtype, np.bool_) + assert self.panel['ItemG'].values.dtype == np.int64 + assert self.panel['ItemE'].values.dtype == np.bool_ # object dtype self.panel['ItemQ'] = 'foo' - self.assertEqual(self.panel['ItemQ'].values.dtype, np.object_) + assert self.panel['ItemQ'].values.dtype == np.object_ # boolean dtype self.panel['ItemP'] = self.panel['ItemA'] > 0 - self.assertEqual(self.panel['ItemP'].values.dtype, np.bool_) + assert self.panel['ItemP'].values.dtype == np.bool_ pytest.raises(TypeError, self.panel.__setitem__, 'foo', self.panel.loc[['ItemP']]) @@ -560,7 +559,7 @@ def test_major_xs(self): result = xs['ItemA'] assert_series_equal(result, ref.xs(idx), check_names=False) - self.assertEqual(result.name, 'ItemA') + assert result.name == 'ItemA' # not contained idx = self.panel.major_axis[0] - BDay() @@ -570,8 +569,8 @@ def test_major_xs_mixed(self): with catch_warnings(record=True): self.panel['ItemD'] = 'foo' xs = self.panel.major_xs(self.panel.major_axis[0]) - self.assertEqual(xs['ItemA'].dtype, np.float64) - self.assertEqual(xs['ItemD'].dtype, np.object_) + assert xs['ItemA'].dtype == np.float64 + assert xs['ItemD'].dtype == np.object_ def test_minor_xs(self): with catch_warnings(record=True): @@ -590,8 +589,8 @@ def test_minor_xs_mixed(self): self.panel['ItemD'] = 'foo' xs = self.panel.minor_xs('D') - self.assertEqual(xs['ItemA'].dtype, np.float64) - self.assertEqual(xs['ItemD'].dtype, np.object_) + assert xs['ItemA'].dtype == np.float64 + assert xs['ItemD'].dtype == np.object_ def test_xs(self): with catch_warnings(record=True): @@ -985,16 +984,16 @@ def test_constructor_cast(self): def 
test_constructor_empty_panel(self): with catch_warnings(record=True): empty = Panel() - self.assertEqual(len(empty.items), 0) - self.assertEqual(len(empty.major_axis), 0) - self.assertEqual(len(empty.minor_axis), 0) + assert len(empty.items) == 0 + assert len(empty.major_axis) == 0 + assert len(empty.minor_axis) == 0 def test_constructor_observe_dtype(self): with catch_warnings(record=True): # GH #411 panel = Panel(items=lrange(3), major_axis=lrange(3), minor_axis=lrange(3), dtype='O') - self.assertEqual(panel.values.dtype, np.object_) + assert panel.values.dtype == np.object_ def test_constructor_dtypes(self): with catch_warnings(record=True): @@ -1002,7 +1001,7 @@ def test_constructor_dtypes(self): def _check_dtype(panel, dtype): for i in panel.items: - self.assertEqual(panel[i].values.dtype.name, dtype) + assert panel[i].values.dtype.name == dtype # only nan holding types allowed here for dtype in ['float64', 'float32', 'object']: @@ -1173,8 +1172,8 @@ def test_from_dict_mixed_orient(self): panel = Panel.from_dict(data, orient='minor') - self.assertEqual(panel['foo'].values.dtype, np.object_) - self.assertEqual(panel['A'].values.dtype, np.float64) + assert panel['foo'].values.dtype == np.object_ + assert panel['A'].values.dtype == np.float64 def test_constructor_error_msgs(self): with catch_warnings(record=True): @@ -1709,7 +1708,7 @@ def test_to_frame(self): assert_panel_equal(unfiltered.to_panel(), self.panel) # names - self.assertEqual(unfiltered.index.names, ('major', 'minor')) + assert unfiltered.index.names == ('major', 'minor') # unsorted, round trip df = self.panel.to_frame(filter_observations=False) @@ -1726,8 +1725,8 @@ def test_to_frame(self): df.columns.name = 'baz' rdf = df.to_panel().to_frame() - self.assertEqual(rdf.index.names, df.index.names) - self.assertEqual(rdf.columns.names, df.columns.names) + assert rdf.index.names == df.index.names + assert rdf.columns.names == df.columns.names def test_to_frame_mixed(self): with catch_warnings(record=True): @@ -1737,7 +1736,7 @@ def test_to_frame_mixed(self): lp = panel.to_frame() wp = lp.to_panel() - self.assertEqual(wp['bool'].values.dtype, np.bool_) + assert wp['bool'].values.dtype == np.bool_ # Previously, this was mutating the underlying # index and changing its name assert_frame_equal(wp['bool'], panel['bool'], check_names=False) @@ -2591,18 +2590,16 @@ def test_axis_dummies(self): from pandas.core.reshape.reshape import make_axis_dummies minor_dummies = make_axis_dummies(self.panel, 'minor').astype(np.uint8) - self.assertEqual(len(minor_dummies.columns), - len(self.panel.index.levels[1])) + assert len(minor_dummies.columns) == len(self.panel.index.levels[1]) major_dummies = make_axis_dummies(self.panel, 'major').astype(np.uint8) - self.assertEqual(len(major_dummies.columns), - len(self.panel.index.levels[0])) + assert len(major_dummies.columns) == len(self.panel.index.levels[0]) mapping = {'A': 'one', 'B': 'one', 'C': 'two', 'D': 'two'} transformed = make_axis_dummies(self.panel, 'minor', transform=mapping.get).astype(np.uint8) - self.assertEqual(len(transformed.columns), 2) + assert len(transformed.columns) == 2 tm.assert_index_equal(transformed.columns, Index(['one', 'two'])) # TODO: test correctness @@ -2638,12 +2635,12 @@ def test_count(self): major_count = self.panel.count(level=0)['ItemA'] labels = index.labels[0] for i, idx in enumerate(index.levels[0]): - self.assertEqual(major_count[i], (labels == i).sum()) + assert major_count[i] == (labels == i).sum() minor_count = self.panel.count(level=1)['ItemA'] labels = 
index.labels[1] for i, idx in enumerate(index.levels[1]): - self.assertEqual(minor_count[i], (labels == i).sum()) + assert minor_count[i] == (labels == i).sum() def test_join(self): with catch_warnings(record=True): @@ -2652,7 +2649,7 @@ def test_join(self): joined = lp1.join(lp2) - self.assertEqual(len(joined.columns), 3) + assert len(joined.columns) == 3 pytest.raises(Exception, lp1.join, self.panel.filter(['ItemB', 'ItemC'])) @@ -2665,11 +2662,11 @@ def test_pivot(self): np.array(['a', 'b', 'c', 'd', 'e']), np.array([1, 2, 3, 5, 4.])) df = pivot(one, two, three) - self.assertEqual(df['a'][1], 1) - self.assertEqual(df['b'][2], 2) - self.assertEqual(df['c'][3], 3) - self.assertEqual(df['d'][4], 5) - self.assertEqual(df['e'][5], 4) + assert df['a'][1] == 1 + assert df['b'][2] == 2 + assert df['c'][3] == 3 + assert df['d'][4] == 5 + assert df['e'][5] == 4 assert_frame_equal(df, _slow_pivot(one, two, three)) # weird overlap, TODO: test? diff --git a/pandas/tests/test_panel4d.py b/pandas/tests/test_panel4d.py index 05ce239b9c5a3..f2a1414957d44 100644 --- a/pandas/tests/test_panel4d.py +++ b/pandas/tests/test_panel4d.py @@ -194,16 +194,16 @@ def test_set_axis(self): assert self.panel4d.minor_axis is new_minor def test_get_axis_number(self): - self.assertEqual(self.panel4d._get_axis_number('labels'), 0) - self.assertEqual(self.panel4d._get_axis_number('items'), 1) - self.assertEqual(self.panel4d._get_axis_number('major'), 2) - self.assertEqual(self.panel4d._get_axis_number('minor'), 3) + assert self.panel4d._get_axis_number('labels') == 0 + assert self.panel4d._get_axis_number('items') == 1 + assert self.panel4d._get_axis_number('major') == 2 + assert self.panel4d._get_axis_number('minor') == 3 def test_get_axis_name(self): - self.assertEqual(self.panel4d._get_axis_name(0), 'labels') - self.assertEqual(self.panel4d._get_axis_name(1), 'items') - self.assertEqual(self.panel4d._get_axis_name(2), 'major_axis') - self.assertEqual(self.panel4d._get_axis_name(3), 'minor_axis') + assert self.panel4d._get_axis_name(0) == 'labels' + assert self.panel4d._get_axis_name(1) == 'items' + assert self.panel4d._get_axis_name(2) == 'major_axis' + assert self.panel4d._get_axis_name(3) == 'minor_axis' def test_arith(self): with catch_warnings(record=True): @@ -234,8 +234,8 @@ def test_keys(self): def test_iteritems(self): """Test panel4d.iteritems()""" - self.assertEqual(len(list(self.panel4d.iteritems())), - len(self.panel4d.labels)) + assert (len(list(self.panel4d.iteritems())) == + len(self.panel4d.labels)) def test_combinePanel4d(self): with catch_warnings(record=True): @@ -374,16 +374,16 @@ def test_setitem(self): # scalar self.panel4d['lG'] = 1 self.panel4d['lE'] = True - self.assertEqual(self.panel4d['lG'].values.dtype, np.int64) - self.assertEqual(self.panel4d['lE'].values.dtype, np.bool_) + assert self.panel4d['lG'].values.dtype == np.int64 + assert self.panel4d['lE'].values.dtype == np.bool_ # object dtype self.panel4d['lQ'] = 'foo' - self.assertEqual(self.panel4d['lQ'].values.dtype, np.object_) + assert self.panel4d['lQ'].values.dtype == np.object_ # boolean dtype self.panel4d['lP'] = self.panel4d['l1'] > 0 - self.assertEqual(self.panel4d['lP'].values.dtype, np.bool_) + assert self.panel4d['lP'].values.dtype == np.bool_ def test_setitem_by_indexer(self): @@ -484,8 +484,8 @@ def test_major_xs_mixed(self): self.panel4d['l4'] = 'foo' with catch_warnings(record=True): xs = self.panel4d.major_xs(self.panel4d.major_axis[0]) - self.assertEqual(xs['l1']['A'].dtype, np.float64) - 
self.assertEqual(xs['l4']['A'].dtype, np.object_) + assert xs['l1']['A'].dtype == np.float64 + assert xs['l4']['A'].dtype == np.object_ def test_minor_xs(self): ref = self.panel4d['l1']['ItemA'] @@ -504,8 +504,8 @@ def test_minor_xs_mixed(self): with catch_warnings(record=True): xs = self.panel4d.minor_xs('D') - self.assertEqual(xs['l1'].T['ItemA'].dtype, np.float64) - self.assertEqual(xs['l4'].T['ItemA'].dtype, np.object_) + assert xs['l1'].T['ItemA'].dtype == np.float64 + assert xs['l4'].T['ItemA'].dtype == np.object_ def test_xs(self): l1 = self.panel4d.xs('l1', axis=0) diff --git a/pandas/tests/test_resample.py b/pandas/tests/test_resample.py index 37e22f101612b..276e9a12c1993 100644 --- a/pandas/tests/test_resample.py +++ b/pandas/tests/test_resample.py @@ -71,12 +71,12 @@ def test_api(self): r = self.series.resample('H') result = r.mean() assert isinstance(result, Series) - self.assertEqual(len(result), 217) + assert len(result) == 217 r = self.series.to_frame().resample('H') result = r.mean() assert isinstance(result, DataFrame) - self.assertEqual(len(result), 217) + assert len(result) == 217 def test_api_changes_v018(self): @@ -186,13 +186,13 @@ def f(): check_stacklevel=False): result = self.series.resample('H')[0] expected = self.series.resample('H').mean()[0] - self.assertEqual(result, expected) + assert result == expected with tm.assert_produces_warning(FutureWarning, check_stacklevel=False): result = self.series.resample('H')['2005-01-09 23:00:00'] expected = self.series.resample('H').mean()['2005-01-09 23:00:00'] - self.assertEqual(result, expected) + assert result == expected def test_groupby_resample_api(self): @@ -254,7 +254,7 @@ def test_getitem(self): tm.assert_index_equal(r._selected_obj.columns, self.frame.columns) r = self.frame.resample('H')['B'] - self.assertEqual(r._selected_obj.name, self.frame.columns[1]) + assert r._selected_obj.name == self.frame.columns[1] # technically this is allowed r = self.frame.resample('H')['A', 'B'] @@ -771,7 +771,7 @@ def test_resample_empty_series(self): expected = s.copy() expected.index = s.index._shallow_copy(freq=freq) assert_index_equal(result.index, expected.index) - self.assertEqual(result.index.freq, expected.index.freq) + assert result.index.freq == expected.index.freq assert_series_equal(result, expected, check_dtype=False) def test_resample_empty_dataframe(self): @@ -788,7 +788,7 @@ def test_resample_empty_dataframe(self): expected = f.copy() expected.index = f.index._shallow_copy(freq=freq) assert_index_equal(result.index, expected.index) - self.assertEqual(result.index.freq, expected.index.freq) + assert result.index.freq == expected.index.freq assert_frame_equal(result, expected, check_dtype=False) # test size for GH13212 (currently stays as df) @@ -884,7 +884,7 @@ def test_custom_grouper(self): for f in funcs: g._cython_agg_general(f) - self.assertEqual(g.ngroups, 2593) + assert g.ngroups == 2593 assert notnull(g.mean()).all() # construct expected val @@ -901,8 +901,8 @@ def test_custom_grouper(self): index=dti, dtype='float64') r = df.groupby(b).agg(np.sum) - self.assertEqual(len(r.columns), 10) - self.assertEqual(len(r.index), 2593) + assert len(r.columns) == 10 + assert len(r.index) == 2593 def test_resample_basic(self): rng = date_range('1/1/2000 00:00:00', '1/1/2000 00:13:00', freq='min', @@ -914,7 +914,7 @@ def test_resample_basic(self): expected = Series([s[0], s[1:6].mean(), s[6:11].mean(), s[11:].mean()], index=exp_idx) assert_series_equal(result, expected) - self.assertEqual(result.index.name, 'index') + 
assert result.index.name == 'index' result = s.resample('5min', closed='left', label='right').mean() @@ -958,7 +958,7 @@ def _ohlc(group): '5min', closed='right', label='right'), arg)() expected = s.groupby(grouplist).agg(func) - self.assertEqual(result.index.name, 'index') + assert result.index.name == 'index' if arg == 'ohlc': expected = DataFrame(expected.values.tolist()) expected.columns = ['open', 'high', 'low', 'close'] @@ -1116,51 +1116,51 @@ def test_resample_basic_from_daily(self): # to weekly result = s.resample('w-sun').last() - self.assertEqual(len(result), 3) + assert len(result) == 3 assert (result.index.dayofweek == [6, 6, 6]).all() - self.assertEqual(result.iloc[0], s['1/2/2005']) - self.assertEqual(result.iloc[1], s['1/9/2005']) - self.assertEqual(result.iloc[2], s.iloc[-1]) + assert result.iloc[0] == s['1/2/2005'] + assert result.iloc[1] == s['1/9/2005'] + assert result.iloc[2] == s.iloc[-1] result = s.resample('W-MON').last() - self.assertEqual(len(result), 2) + assert len(result) == 2 assert (result.index.dayofweek == [0, 0]).all() - self.assertEqual(result.iloc[0], s['1/3/2005']) - self.assertEqual(result.iloc[1], s['1/10/2005']) + assert result.iloc[0] == s['1/3/2005'] + assert result.iloc[1] == s['1/10/2005'] result = s.resample('W-TUE').last() - self.assertEqual(len(result), 2) + assert len(result) == 2 assert (result.index.dayofweek == [1, 1]).all() - self.assertEqual(result.iloc[0], s['1/4/2005']) - self.assertEqual(result.iloc[1], s['1/10/2005']) + assert result.iloc[0] == s['1/4/2005'] + assert result.iloc[1] == s['1/10/2005'] result = s.resample('W-WED').last() - self.assertEqual(len(result), 2) + assert len(result) == 2 assert (result.index.dayofweek == [2, 2]).all() - self.assertEqual(result.iloc[0], s['1/5/2005']) - self.assertEqual(result.iloc[1], s['1/10/2005']) + assert result.iloc[0] == s['1/5/2005'] + assert result.iloc[1] == s['1/10/2005'] result = s.resample('W-THU').last() - self.assertEqual(len(result), 2) + assert len(result) == 2 assert (result.index.dayofweek == [3, 3]).all() - self.assertEqual(result.iloc[0], s['1/6/2005']) - self.assertEqual(result.iloc[1], s['1/10/2005']) + assert result.iloc[0] == s['1/6/2005'] + assert result.iloc[1] == s['1/10/2005'] result = s.resample('W-FRI').last() - self.assertEqual(len(result), 2) + assert len(result) == 2 assert (result.index.dayofweek == [4, 4]).all() - self.assertEqual(result.iloc[0], s['1/7/2005']) - self.assertEqual(result.iloc[1], s['1/10/2005']) + assert result.iloc[0] == s['1/7/2005'] + assert result.iloc[1] == s['1/10/2005'] # to biz day result = s.resample('B').last() - self.assertEqual(len(result), 7) + assert len(result) == 7 assert (result.index.dayofweek == [4, 0, 1, 2, 3, 4, 0]).all() - self.assertEqual(result.iloc[0], s['1/2/2005']) - self.assertEqual(result.iloc[1], s['1/3/2005']) - self.assertEqual(result.iloc[5], s['1/9/2005']) - self.assertEqual(result.index.name, 'index') + assert result.iloc[0] == s['1/2/2005'] + assert result.iloc[1] == s['1/3/2005'] + assert result.iloc[5] == s['1/9/2005'] + assert result.index.name == 'index' def test_resample_upsampling_picked_but_not_correct(self): @@ -1169,7 +1169,7 @@ def test_resample_upsampling_picked_but_not_correct(self): series = Series(1, index=dates) result = series.resample('D').mean() - self.assertEqual(result.index[0], dates[0]) + assert result.index[0] == dates[0] # GH 5955 # incorrect deciding to upsample when the axis frequency matches the @@ -1230,7 +1230,7 @@ def test_resample_loffset(self): loffset=Minute(1)).mean() 
assert_series_equal(result, expected) - self.assertEqual(result.index.freq, Minute(5)) + assert result.index.freq == Minute(5) # from daily dti = DatetimeIndex(start=datetime(2005, 1, 1), @@ -1240,7 +1240,7 @@ def test_resample_loffset(self): # to weekly result = ser.resample('w-sun').last() expected = ser.resample('w-sun', loffset=-bday).last() - self.assertEqual(result.index[0] - bday, expected.index[0]) + assert result.index[0] - bday == expected.index[0] def test_resample_loffset_count(self): # GH 12725 @@ -1273,11 +1273,11 @@ def test_resample_upsample(self): # to minutely, by padding result = s.resample('Min').pad() - self.assertEqual(len(result), 12961) - self.assertEqual(result[0], s[0]) - self.assertEqual(result[-1], s[-1]) + assert len(result) == 12961 + assert result[0] == s[0] + assert result[-1] == s[-1] - self.assertEqual(result.index.name, 'index') + assert result.index.name == 'index' def test_resample_how_method(self): # GH9915 @@ -1320,20 +1320,20 @@ def test_resample_ohlc(self): expect = s.groupby(grouper).agg(lambda x: x[-1]) result = s.resample('5Min').ohlc() - self.assertEqual(len(result), len(expect)) - self.assertEqual(len(result.columns), 4) + assert len(result) == len(expect) + assert len(result.columns) == 4 xs = result.iloc[-2] - self.assertEqual(xs['open'], s[-6]) - self.assertEqual(xs['high'], s[-6:-1].max()) - self.assertEqual(xs['low'], s[-6:-1].min()) - self.assertEqual(xs['close'], s[-2]) + assert xs['open'] == s[-6] + assert xs['high'] == s[-6:-1].max() + assert xs['low'] == s[-6:-1].min() + assert xs['close'] == s[-2] xs = result.iloc[0] - self.assertEqual(xs['open'], s[0]) - self.assertEqual(xs['high'], s[:5].max()) - self.assertEqual(xs['low'], s[:5].min()) - self.assertEqual(xs['close'], s[4]) + assert xs['open'] == s[0] + assert xs['high'] == s[:5].max() + assert xs['low'] == s[:5].min() + assert xs['close'] == s[4] def test_resample_ohlc_result(self): @@ -1410,9 +1410,9 @@ def test_resample_reresample(self): s = Series(np.random.rand(len(dti)), dti) bs = s.resample('B', closed='right', label='right').mean() result = bs.resample('8H').mean() - self.assertEqual(len(result), 22) + assert len(result) == 22 assert isinstance(result.index.freq, offsets.DateOffset) - self.assertEqual(result.index.freq, offsets.Hour(8)) + assert result.index.freq == offsets.Hour(8) def test_resample_timestamp_to_period(self): ts = _simple_ts('1/1/1990', '1/1/2000') @@ -1465,7 +1465,7 @@ def test_downsample_non_unique(self): result = ts.resample('M').mean() expected = ts.groupby(lambda x: x.month).mean() - self.assertEqual(len(result), 2) + assert len(result) == 2 assert_almost_equal(result[0], expected[1]) assert_almost_equal(result[1], expected[2]) @@ -1665,10 +1665,10 @@ def test_resample_dtype_preservation(self): ).set_index('date') result = df.resample('1D').ffill() - self.assertEqual(result.val.dtype, np.int32) + assert result.val.dtype == np.int32 result = df.groupby('group').resample('1D').ffill() - self.assertEqual(result.val.dtype, np.int32) + assert result.val.dtype == np.int32 def test_weekly_resample_buglet(self): # #1327 @@ -1742,7 +1742,7 @@ def test_resample_anchored_intraday(self): ts = _simple_ts('2012-04-29 23:00', '2012-04-30 5:00', freq='h') resampled = ts.resample('M').mean() - self.assertEqual(len(resampled), 1) + assert len(resampled) == 1 def test_resample_anchored_monthstart(self): ts = _simple_ts('1/1/2000', '12/31/2002') @@ -1768,13 +1768,11 @@ def test_resample_anchored_multiday(self): # Ensure left closing works result = 
s.resample('2200L').mean() - self.assertEqual(result.index[-1], - pd.Timestamp('2014-10-15 23:00:02.000')) + assert result.index[-1] == pd.Timestamp('2014-10-15 23:00:02.000') # Ensure right closing works result = s.resample('2200L', label='right').mean() - self.assertEqual(result.index[-1], - pd.Timestamp('2014-10-15 23:00:04.200')) + assert result.index[-1] == pd.Timestamp('2014-10-15 23:00:04.200') def test_corner_cases(self): # miscellaneous test coverage @@ -1789,13 +1787,13 @@ def test_corner_cases(self): len0pts = _simple_pts('2007-01', '2010-05', freq='M')[:0] # it works result = len0pts.resample('A-DEC').mean() - self.assertEqual(len(result), 0) + assert len(result) == 0 # resample to periods ts = _simple_ts('2000-04-28', '2000-04-30 11:00', freq='h') result = ts.resample('M', kind='period').mean() - self.assertEqual(len(result), 1) - self.assertEqual(result.index[0], Period('2000-04', freq='M')) + assert len(result) == 1 + assert result.index[0] == Period('2000-04', freq='M') def test_anchored_lowercase_buglet(self): dates = date_range('4/16/2012 20:00', periods=50000, freq='s') @@ -1941,7 +1939,7 @@ def test_resample_nunique(self): g = df.groupby(pd.Grouper(freq='D')) expected = df.groupby(pd.TimeGrouper('D')).ID.apply(lambda x: x.nunique()) - self.assertEqual(expected.name, 'ID') + assert expected.name == 'ID' for t in [r, g]: result = r.ID.nunique() @@ -2691,8 +2689,8 @@ def test_resample_bms_2752(self): foo = pd.Series(index=pd.bdate_range('20000101', '20000201')) res1 = foo.resample("BMS").mean() res2 = foo.resample("BMS").mean().resample("B").mean() - self.assertEqual(res1.index[0], Timestamp('20000103')) - self.assertEqual(res1.index[0], res2.index[0]) + assert res1.index[0] == Timestamp('20000103') + assert res1.index[0] == res2.index[0] # def test_monthly_convention_span(self): # rng = period_range('2000-01', periods=3, freq='M') @@ -2969,11 +2967,11 @@ def test_consistency_with_window(self): df = self.frame expected = pd.Int64Index([1, 2, 3], name='A') result = df.groupby('A').resample('2s').mean() - self.assertEqual(result.index.nlevels, 2) + assert result.index.nlevels == 2 tm.assert_index_equal(result.index.levels[0], expected) result = df.groupby('A').rolling(20).mean() - self.assertEqual(result.index.nlevels, 2) + assert result.index.nlevels == 2 tm.assert_index_equal(result.index.levels[0], expected) def test_median_duplicate_columns(self): @@ -3219,7 +3217,7 @@ def test_aggregate_with_nat(self): dt_result = getattr(dt_grouped, func)() assert_series_equal(expected, dt_result) # GH 9925 - self.assertEqual(dt_result.index.name, 'key') + assert dt_result.index.name == 'key' # if NaT is included, 'var', 'std', 'mean', 'first','last' # and 'nth' doesn't work yet diff --git a/pandas/tests/test_strings.py b/pandas/tests/test_strings.py index 5b9797ce76a45..412a88e13bb23 100644 --- a/pandas/tests/test_strings.py +++ b/pandas/tests/test_strings.py @@ -54,7 +54,7 @@ def test_iter(self): # desired behavior is to iterate until everything would be nan on the # next iter so make sure the last element of the iterator was 'l' in # this case since 'wikitravel' is the longest string - self.assertEqual(s.dropna().values.item(), 'l') + assert s.dropna().values.item() == 'l' def test_iter_empty(self): ds = Series([], dtype=object) @@ -66,8 +66,8 @@ def test_iter_empty(self): # nothing to iterate over so nothing defined values should remain # unchanged - self.assertEqual(i, 100) - self.assertEqual(s, 1) + assert i == 100 + assert s == 1 def test_iter_single_element(self): ds = 
Series(['a']) @@ -87,8 +87,8 @@ def test_iter_object_try_string(self): for i, s in enumerate(ds.str): pass - self.assertEqual(i, 100) - self.assertEqual(s, 'h') + assert i == 100 + assert s == 'h' def test_cat(self): one = np.array(['a', 'a', 'b', 'b', 'c', NA], dtype=np.object_) @@ -97,23 +97,23 @@ def test_cat(self): # single array result = strings.str_cat(one) exp = 'aabbc' - self.assertEqual(result, exp) + assert result == exp result = strings.str_cat(one, na_rep='NA') exp = 'aabbcNA' - self.assertEqual(result, exp) + assert result == exp result = strings.str_cat(one, na_rep='-') exp = 'aabbc-' - self.assertEqual(result, exp) + assert result == exp result = strings.str_cat(one, sep='_', na_rep='NA') exp = 'a_a_b_b_c_NA' - self.assertEqual(result, exp) + assert result == exp result = strings.str_cat(two, sep='-') exp = 'a-b-d-foo' - self.assertEqual(result, exp) + assert result == exp # Multiple arrays result = strings.str_cat(one, [two], na_rep='NA') @@ -177,7 +177,7 @@ def test_contains(self): values = ['foo', 'xyz', 'fooommm__foo', 'mmm_'] result = strings.str_contains(values, pat) expected = np.array([False, False, True, True]) - self.assertEqual(result.dtype, np.bool_) + assert result.dtype == np.bool_ tm.assert_numpy_array_equal(result, expected) # case insensitive using regex @@ -220,13 +220,13 @@ def test_contains(self): dtype=np.object_) result = strings.str_contains(values, pat) expected = np.array([False, False, True, True]) - self.assertEqual(result.dtype, np.bool_) + assert result.dtype == np.bool_ tm.assert_numpy_array_equal(result, expected) # na values = Series(['om', 'foo', np.nan]) res = values.str.contains('foo', na="foo") - self.assertEqual(res.loc[2], "foo") + assert res.loc[2] == "foo" def test_startswith(self): values = Series(['om', NA, 'foo_nom', 'nom', 'bar_foo', NA, 'foo']) @@ -381,13 +381,11 @@ def test_swapcase(self): def test_casemethods(self): values = ['aaa', 'bbb', 'CCC', 'Dddd', 'eEEE'] s = Series(values) - self.assertEqual(s.str.lower().tolist(), [v.lower() for v in values]) - self.assertEqual(s.str.upper().tolist(), [v.upper() for v in values]) - self.assertEqual(s.str.title().tolist(), [v.title() for v in values]) - self.assertEqual(s.str.capitalize().tolist(), [ - v.capitalize() for v in values]) - self.assertEqual(s.str.swapcase().tolist(), [ - v.swapcase() for v in values]) + assert s.str.lower().tolist() == [v.lower() for v in values] + assert s.str.upper().tolist() == [v.upper() for v in values] + assert s.str.title().tolist() == [v.title() for v in values] + assert s.str.capitalize().tolist() == [v.capitalize() for v in values] + assert s.str.swapcase().tolist() == [v.swapcase() for v in values] def test_replace(self): values = Series(['fooBAD__barBAD', NA]) @@ -668,7 +666,7 @@ def test_extract_expand_False(self): # single group renames series/index properly s_or_idx = klass(['A1', 'A2']) result = s_or_idx.str.extract(r'(?P<uno>A)\d', expand=False) - self.assertEqual(result.name, 'uno') + assert result.name == 'uno' exp = klass(['A', 'A'], name='uno') if klass == Series: @@ -772,7 +770,7 @@ def check_index(index): r = s.str.extract(r'(?P<sue>[a-z])', expand=False) e = Series(['a', 'b', 'c'], name='sue') tm.assert_series_equal(r, e) - self.assertEqual(r.name, e.name) + assert r.name == e.name def test_extract_expand_True(self): # Contains tests like those in test_match and some others.
@@ -1220,7 +1218,7 @@ def test_empty_str_methods(self): # (extract) on empty series tm.assert_series_equal(empty_str, empty.str.cat(empty)) - self.assertEqual('', empty.str.cat()) + assert '' == empty.str.cat() tm.assert_series_equal(empty_str, empty.str.title()) tm.assert_series_equal(empty_int, empty.str.count('a')) tm.assert_series_equal(empty_bool, empty.str.contains('a')) @@ -1322,20 +1320,13 @@ def test_ismethods(self): tm.assert_series_equal(str_s.str.isupper(), Series(upper_e)) tm.assert_series_equal(str_s.str.istitle(), Series(title_e)) - self.assertEqual(str_s.str.isalnum().tolist(), [v.isalnum() - for v in values]) - self.assertEqual(str_s.str.isalpha().tolist(), [v.isalpha() - for v in values]) - self.assertEqual(str_s.str.isdigit().tolist(), [v.isdigit() - for v in values]) - self.assertEqual(str_s.str.isspace().tolist(), [v.isspace() - for v in values]) - self.assertEqual(str_s.str.islower().tolist(), [v.islower() - for v in values]) - self.assertEqual(str_s.str.isupper().tolist(), [v.isupper() - for v in values]) - self.assertEqual(str_s.str.istitle().tolist(), [v.istitle() - for v in values]) + assert str_s.str.isalnum().tolist() == [v.isalnum() for v in values] + assert str_s.str.isalpha().tolist() == [v.isalpha() for v in values] + assert str_s.str.isdigit().tolist() == [v.isdigit() for v in values] + assert str_s.str.isspace().tolist() == [v.isspace() for v in values] + assert str_s.str.islower().tolist() == [v.islower() for v in values] + assert str_s.str.isupper().tolist() == [v.isupper() for v in values] + assert str_s.str.istitle().tolist() == [v.istitle() for v in values] def test_isnumeric(self): # 0x00bc: ¼ VULGAR FRACTION ONE QUARTER @@ -1350,10 +1341,8 @@ def test_isnumeric(self): tm.assert_series_equal(s.str.isdecimal(), Series(decimal_e)) unicodes = [u'A', u'3', u'¼', u'★', u'፸', u'3', u'four'] - self.assertEqual(s.str.isnumeric().tolist(), [ - v.isnumeric() for v in unicodes]) - self.assertEqual(s.str.isdecimal().tolist(), [ - v.isdecimal() for v in unicodes]) + assert s.str.isnumeric().tolist() == [v.isnumeric() for v in unicodes] + assert s.str.isdecimal().tolist() == [v.isdecimal() for v in unicodes] values = ['A', np.nan, u'¼', u'★', np.nan, u'3', 'four'] s = Series(values) @@ -1962,9 +1951,9 @@ def test_split_noargs(self): s = Series(['Wes McKinney', 'Travis Oliphant']) result = s.str.split() expected = ['Travis', 'Oliphant'] - self.assertEqual(result[1], expected) + assert result[1] == expected result = s.str.rsplit() - self.assertEqual(result[1], expected) + assert result[1] == expected def test_split_maxsplit(self): # re.split 0, str.split -1 @@ -2027,14 +2016,14 @@ def test_split_to_multiindex_expand(self): result = idx.str.split('_', expand=True) exp = idx tm.assert_index_equal(result, exp) - self.assertEqual(result.nlevels, 1) + assert result.nlevels == 1 idx = Index(['some_equal_splits', 'with_no_nans']) result = idx.str.split('_', expand=True) exp = MultiIndex.from_tuples([('some', 'equal', 'splits'), ( 'with', 'no', 'nans')]) tm.assert_index_equal(result, exp) - self.assertEqual(result.nlevels, 3) + assert result.nlevels == 3 idx = Index(['some_unequal_splits', 'one_of_these_things_is_not']) result = idx.str.split('_', expand=True) @@ -2042,7 +2031,7 @@ def test_split_to_multiindex_expand(self): ), ('one', 'of', 'these', 'things', 'is', 'not')]) tm.assert_index_equal(result, exp) - self.assertEqual(result.nlevels, 6) + assert result.nlevels == 6 with tm.assert_raises_regex(ValueError, "expand must be"): idx.str.split('_', expand="not_a_boolean") 
@@ -2081,21 +2070,21 @@ def test_rsplit_to_multiindex_expand(self): result = idx.str.rsplit('_', expand=True) exp = idx tm.assert_index_equal(result, exp) - self.assertEqual(result.nlevels, 1) + assert result.nlevels == 1 idx = Index(['some_equal_splits', 'with_no_nans']) result = idx.str.rsplit('_', expand=True) exp = MultiIndex.from_tuples([('some', 'equal', 'splits'), ( 'with', 'no', 'nans')]) tm.assert_index_equal(result, exp) - self.assertEqual(result.nlevels, 3) + assert result.nlevels == 3 idx = Index(['some_equal_splits', 'with_no_nans']) result = idx.str.rsplit('_', expand=True, n=1) exp = MultiIndex.from_tuples([('some_equal', 'splits'), ('with_no', 'nans')]) tm.assert_index_equal(result, exp) - self.assertEqual(result.nlevels, 2) + assert result.nlevels == 2 def test_split_with_name(self): # GH 12617 @@ -2184,9 +2173,9 @@ def test_partition_series(self): # compare to standard lib values = Series(['A_B_C', 'B_C_D', 'E_F_G', 'EFGHEF']) result = values.str.partition('_', expand=False).tolist() - self.assertEqual(result, [v.partition('_') for v in values]) + assert result == [v.partition('_') for v in values] result = values.str.rpartition('_', expand=False).tolist() - self.assertEqual(result, [v.rpartition('_') for v in values]) + assert result == [v.rpartition('_') for v in values] def test_partition_index(self): values = Index(['a_b_c', 'c_d_e', 'f_g_h']) @@ -2195,25 +2184,25 @@ def test_partition_index(self): exp = Index(np.array([('a', '_', 'b_c'), ('c', '_', 'd_e'), ('f', '_', 'g_h')])) tm.assert_index_equal(result, exp) - self.assertEqual(result.nlevels, 1) + assert result.nlevels == 1 result = values.str.rpartition('_', expand=False) exp = Index(np.array([('a_b', '_', 'c'), ('c_d', '_', 'e'), ( 'f_g', '_', 'h')])) tm.assert_index_equal(result, exp) - self.assertEqual(result.nlevels, 1) + assert result.nlevels == 1 result = values.str.partition('_') exp = Index([('a', '_', 'b_c'), ('c', '_', 'd_e'), ('f', '_', 'g_h')]) tm.assert_index_equal(result, exp) assert isinstance(result, MultiIndex) - self.assertEqual(result.nlevels, 3) + assert result.nlevels == 3 result = values.str.rpartition('_') exp = Index([('a_b', '_', 'c'), ('c_d', '_', 'e'), ('f_g', '_', 'h')]) tm.assert_index_equal(result, exp) assert isinstance(result, MultiIndex) - self.assertEqual(result.nlevels, 3) + assert result.nlevels == 3 def test_partition_to_dataframe(self): values = Series(['a_b_c', 'c_d_e', NA, 'f_g_h']) @@ -2604,20 +2593,20 @@ def test_match_findall_flags(self): pat = r'([A-Z0-9._%+-]+)@([A-Z0-9.-]+)\.([A-Z]{2,4})' result = data.str.extract(pat, flags=re.IGNORECASE, expand=True) - self.assertEqual(result.iloc[0].tolist(), ['dave', 'google', 'com']) + assert result.iloc[0].tolist() == ['dave', 'google', 'com'] result = data.str.match(pat, flags=re.IGNORECASE) - self.assertEqual(result[0], True) + assert result[0] result = data.str.findall(pat, flags=re.IGNORECASE) - self.assertEqual(result[0][0], ('dave', 'google', 'com')) + assert result[0][0] == ('dave', 'google', 'com') result = data.str.count(pat, flags=re.IGNORECASE) - self.assertEqual(result[0], 1) + assert result[0] == 1 with tm.assert_produces_warning(UserWarning): result = data.str.contains(pat, flags=re.IGNORECASE) - self.assertEqual(result[0], True) + assert result[0] def test_encode_decode(self): base = Series([u('a'), u('b'), u('a\xe4')]) @@ -2685,11 +2674,11 @@ def test_cat_on_filtered_index(self): str_month = df.month.astype('str') str_both = str_year.str.cat(str_month, sep=' ') - self.assertEqual(str_both.loc[1], '2011 2') + 
assert str_both.loc[1] == '2011 2' str_multiple = str_year.str.cat([str_month, str_month], sep=' ') - self.assertEqual(str_multiple.loc[1], '2011 2 2') + assert str_multiple.loc[1] == '2011 2 2' def test_str_cat_raises_intuitive_error(self): # https://github.com/pandas-dev/pandas/issues/11334 @@ -2721,13 +2710,13 @@ def test_index_str_accessor_visibility(self): idx = Index(values) assert isinstance(Series(values).str, StringMethods) assert isinstance(idx.str, StringMethods) - self.assertEqual(idx.inferred_type, tp) + assert idx.inferred_type == tp for values, tp in cases: idx = Index(values) assert isinstance(Series(values).str, StringMethods) assert isinstance(idx.str, StringMethods) - self.assertEqual(idx.inferred_type, tp) + assert idx.inferred_type == tp cases = [([1, np.nan], 'floating'), ([datetime(2011, 1, 1)], 'datetime64'), @@ -2739,11 +2728,11 @@ def test_index_str_accessor_visibility(self): Series(values).str with tm.assert_raises_regex(AttributeError, message): idx.str - self.assertEqual(idx.inferred_type, tp) + assert idx.inferred_type == tp # MultiIndex has mixed dtype, but not allow to use accessor idx = MultiIndex.from_tuples([('a', 'b'), ('a', 'b')]) - self.assertEqual(idx.inferred_type, 'mixed') + assert idx.inferred_type == 'mixed' message = 'Can only use .str accessor with Index, not MultiIndex' with tm.assert_raises_regex(AttributeError, message): idx.str diff --git a/pandas/tests/test_take.py b/pandas/tests/test_take.py index 9fb61998f6c54..617d268be8f67 100644 --- a/pandas/tests/test_take.py +++ b/pandas/tests/test_take.py @@ -353,7 +353,7 @@ def test_1d_bool(self): tm.assert_numpy_array_equal(result, expected) result = algos.take_1d(arr, [0, 2, -1]) - self.assertEqual(result.dtype, np.object_) + assert result.dtype == np.object_ def test_2d_bool(self): arr = np.array([[0, 1, 0], [1, 0, 1], [0, 1, 1]], dtype=bool) @@ -367,7 +367,7 @@ def test_2d_bool(self): tm.assert_numpy_array_equal(result, expected) result = algos.take_nd(arr, [0, 2, -1]) - self.assertEqual(result.dtype, np.object_) + assert result.dtype == np.object_ def test_2d_float32(self): arr = np.random.randn(4, 3).astype(np.float32) diff --git a/pandas/tests/test_testing.py b/pandas/tests/test_testing.py index 80db5eb49c127..2c0cd55205a5a 100644 --- a/pandas/tests/test_testing.py +++ b/pandas/tests/test_testing.py @@ -726,8 +726,8 @@ def test_RNGContext(self): with RNGContext(0): with RNGContext(1): - self.assertEqual(np.random.randn(), expected1) - self.assertEqual(np.random.randn(), expected0) + assert np.random.randn() == expected1 + assert np.random.randn() == expected0 class TestLocale(tm.TestCase): diff --git a/pandas/tests/test_util.py b/pandas/tests/test_util.py index 6581e7688a32f..80eb5bb9dfe16 100644 --- a/pandas/tests/test_util.py +++ b/pandas/tests/test_util.py @@ -7,6 +7,7 @@ from collections import OrderedDict import pytest +from pandas.compat import intern from pandas.util._move import move_into_mutable_buffer, BadMove, stolenbuf from pandas.util.decorators import deprecate_kwarg from pandas.util.validators import (validate_args, validate_kwargs, @@ -50,19 +51,19 @@ def test_dict_deprecate_kwarg(self): x = 'yes' with tm.assert_produces_warning(FutureWarning): result = self.f2(old=x) - self.assertEqual(result, True) + assert result def test_missing_deprecate_kwarg(self): x = 'bogus' with tm.assert_produces_warning(FutureWarning): result = self.f2(old=x) - self.assertEqual(result, 'bogus') + assert result == 'bogus' def test_callable_deprecate_kwarg(self): x = 5 with 
tm.assert_produces_warning(FutureWarning): result = self.f3(old=x) - self.assertEqual(result, x + 1) + assert result == x + 1 with pytest.raises(TypeError): self.f3(old='hello') @@ -358,7 +359,7 @@ def test_exactly_one_ref(self): as_stolen_buf = move_into_mutable_buffer(b[:-3]) # materialize as bytearray to show that it is mutable - self.assertEqual(bytearray(as_stolen_buf), b'test') + assert bytearray(as_stolen_buf) == b'test' @pytest.mark.skipif( sys.version_info[0] > 2, @@ -393,12 +394,7 @@ def ref_capture(ob): # be the same instance. move_into_mutable_buffer(ref_capture(intern(make_string()))) # noqa - self.assertEqual( - refcount[0], - 1, - msg='The BadMove was probably raised for refcount reasons instead' - ' of interning reasons', - ) + assert refcount[0] == 1 def test_numpy_errstate_is_default(): @@ -468,7 +464,7 @@ def test_set_locale(self): new_lang, new_enc = normalized_locale.split('.') new_enc = codecs.lookup(enc).name normalized_locale = new_lang, new_enc - self.assertEqual(normalized_locale, new_locale) + assert normalized_locale == new_locale current_locale = locale.getlocale() - self.assertEqual(current_locale, CURRENT_LOCALE) + assert current_locale == CURRENT_LOCALE diff --git a/pandas/tests/test_window.py b/pandas/tests/test_window.py index 7979e7d77a49d..55be6302036f1 100644 --- a/pandas/tests/test_window.py +++ b/pandas/tests/test_window.py @@ -57,7 +57,7 @@ def test_getitem(self): tm.assert_index_equal(r._selected_obj.columns, self.frame.columns) r = self.frame.rolling(window=5)[1] - self.assertEqual(r._selected_obj.name, self.frame.columns[1]) + assert r._selected_obj.name == self.frame.columns[1] # technically this is allowed r = self.frame.rolling(window=5)[1, 3] @@ -281,8 +281,8 @@ def test_preserve_metadata(self): s2 = s.rolling(30).sum() s3 = s.rolling(20).sum() - self.assertEqual(s2.name, 'foo') - self.assertEqual(s3.name, 'foo') + assert s2.name == 'foo' + assert s3.name == 'foo' def test_how_compat(self): # in prior versions, we would allow how to be used in the resample @@ -859,14 +859,14 @@ def test_cmov_window_corner(self): vals = np.array([]) with catch_warnings(record=True): rs = mom.rolling_window(vals, 5, 'boxcar', center=True) - self.assertEqual(len(rs), 0) + assert len(rs) == 0 # shorter than window vals = np.random.randn(5) with catch_warnings(record=True): rs = mom.rolling_window(vals, 10, 'boxcar') assert np.isnan(rs).all() - self.assertEqual(len(rs), 5) + assert len(rs) == 5 def test_cmov_window_frame(self): # Gh 8238 @@ -1382,7 +1382,7 @@ def get_result(obj, window, min_periods=None, freq=None, center=False): frame_result = get_result(self.frame, window=50) assert isinstance(series_result, Series) - self.assertEqual(type(frame_result), DataFrame) + assert type(frame_result) == DataFrame # check time_rule works if has_time_rule: @@ -1689,14 +1689,14 @@ def _check_ew_ndarray(self, func, preserve_nan=False, name=None): # pass in ints result2 = func(np.arange(50), span=10) - self.assertEqual(result2.dtype, np.float_) + assert result2.dtype == np.float_ def _check_ew_structures(self, func, name): series_result = getattr(self.series.ewm(com=10), name)() assert isinstance(series_result, Series) frame_result = getattr(self.frame.ewm(com=10), name)() - self.assertEqual(type(frame_result), DataFrame) + assert type(frame_result) == DataFrame class TestPairwise(object): @@ -2911,7 +2911,7 @@ def _check_expanding_structures(self, func): series_result = func(self.series) assert isinstance(series_result, Series) frame_result = func(self.frame) - 
self.assertEqual(type(frame_result), DataFrame) + assert type(frame_result) == DataFrame def _check_expanding(self, func, static_comp, has_min_periods=True, has_time_rule=True, preserve_nan=True): @@ -3031,10 +3031,10 @@ def test_rolling_min_max_numeric_types(self): # correctness result = (DataFrame(np.arange(20, dtype=data_type)) .rolling(window=5).max()) - self.assertEqual(result.dtypes[0], np.dtype("f8")) + assert result.dtypes[0] == np.dtype("f8") result = (DataFrame(np.arange(20, dtype=data_type)) .rolling(window=5).min()) - self.assertEqual(result.dtypes[0], np.dtype("f8")) + assert result.dtypes[0] == np.dtype("f8") class TestGrouperGrouping(tm.TestCase): diff --git a/pandas/tests/tools/test_numeric.py b/pandas/tests/tools/test_numeric.py index 45b736102aa3d..b298df4f4b5d8 100644 --- a/pandas/tests/tools/test_numeric.py +++ b/pandas/tests/tools/test_numeric.py @@ -156,16 +156,16 @@ def test_type_check(self): to_numeric(df, errors=errors) def test_scalar(self): - self.assertEqual(pd.to_numeric(1), 1) - self.assertEqual(pd.to_numeric(1.1), 1.1) + assert pd.to_numeric(1) == 1 + assert pd.to_numeric(1.1) == 1.1 - self.assertEqual(pd.to_numeric('1'), 1) - self.assertEqual(pd.to_numeric('1.1'), 1.1) + assert pd.to_numeric('1') == 1 + assert pd.to_numeric('1.1') == 1.1 with pytest.raises(ValueError): to_numeric('XX', errors='raise') - self.assertEqual(to_numeric('XX', errors='ignore'), 'XX') + assert to_numeric('XX', errors='ignore') == 'XX' assert np.isnan(to_numeric('XX', errors='coerce')) def test_numeric_dtypes(self): diff --git a/pandas/tests/tseries/test_frequencies.py b/pandas/tests/tseries/test_frequencies.py index 894269aaf451a..a78150e9cf728 100644 --- a/pandas/tests/tseries/test_frequencies.py +++ b/pandas/tests/tseries/test_frequencies.py @@ -345,97 +345,92 @@ def _assert_depr(freq, expected, aliases): class TestFrequencyCode(tm.TestCase): def test_freq_code(self): - self.assertEqual(frequencies.get_freq('A'), 1000) - self.assertEqual(frequencies.get_freq('3A'), 1000) - self.assertEqual(frequencies.get_freq('-1A'), 1000) + assert frequencies.get_freq('A') == 1000 + assert frequencies.get_freq('3A') == 1000 + assert frequencies.get_freq('-1A') == 1000 - self.assertEqual(frequencies.get_freq('W'), 4000) - self.assertEqual(frequencies.get_freq('W-MON'), 4001) - self.assertEqual(frequencies.get_freq('W-FRI'), 4005) + assert frequencies.get_freq('W') == 4000 + assert frequencies.get_freq('W-MON') == 4001 + assert frequencies.get_freq('W-FRI') == 4005 for freqstr, code in compat.iteritems(frequencies._period_code_map): result = frequencies.get_freq(freqstr) - self.assertEqual(result, code) + assert result == code result = frequencies.get_freq_group(freqstr) - self.assertEqual(result, code // 1000 * 1000) + assert result == code // 1000 * 1000 result = frequencies.get_freq_group(code) - self.assertEqual(result, code // 1000 * 1000) + assert result == code // 1000 * 1000 def test_freq_group(self): - self.assertEqual(frequencies.get_freq_group('A'), 1000) - self.assertEqual(frequencies.get_freq_group('3A'), 1000) - self.assertEqual(frequencies.get_freq_group('-1A'), 1000) - self.assertEqual(frequencies.get_freq_group('A-JAN'), 1000) - self.assertEqual(frequencies.get_freq_group('A-MAY'), 1000) - self.assertEqual(frequencies.get_freq_group(offsets.YearEnd()), 1000) - self.assertEqual(frequencies.get_freq_group( - offsets.YearEnd(month=1)), 1000) - self.assertEqual(frequencies.get_freq_group( - offsets.YearEnd(month=5)), 1000) - - self.assertEqual(frequencies.get_freq_group('W'), 4000) - 
self.assertEqual(frequencies.get_freq_group('W-MON'), 4000) - self.assertEqual(frequencies.get_freq_group('W-FRI'), 4000) - self.assertEqual(frequencies.get_freq_group(offsets.Week()), 4000) - self.assertEqual(frequencies.get_freq_group( - offsets.Week(weekday=1)), 4000) - self.assertEqual(frequencies.get_freq_group( - offsets.Week(weekday=5)), 4000) + assert frequencies.get_freq_group('A') == 1000 + assert frequencies.get_freq_group('3A') == 1000 + assert frequencies.get_freq_group('-1A') == 1000 + assert frequencies.get_freq_group('A-JAN') == 1000 + assert frequencies.get_freq_group('A-MAY') == 1000 + assert frequencies.get_freq_group(offsets.YearEnd()) == 1000 + assert frequencies.get_freq_group(offsets.YearEnd(month=1)) == 1000 + assert frequencies.get_freq_group(offsets.YearEnd(month=5)) == 1000 + + assert frequencies.get_freq_group('W') == 4000 + assert frequencies.get_freq_group('W-MON') == 4000 + assert frequencies.get_freq_group('W-FRI') == 4000 + assert frequencies.get_freq_group(offsets.Week()) == 4000 + assert frequencies.get_freq_group(offsets.Week(weekday=1)) == 4000 + assert frequencies.get_freq_group(offsets.Week(weekday=5)) == 4000 def test_get_to_timestamp_base(self): tsb = frequencies.get_to_timestamp_base - self.assertEqual(tsb(frequencies.get_freq_code('D')[0]), - frequencies.get_freq_code('D')[0]) - self.assertEqual(tsb(frequencies.get_freq_code('W')[0]), - frequencies.get_freq_code('D')[0]) - self.assertEqual(tsb(frequencies.get_freq_code('M')[0]), - frequencies.get_freq_code('D')[0]) + assert (tsb(frequencies.get_freq_code('D')[0]) == + frequencies.get_freq_code('D')[0]) + assert (tsb(frequencies.get_freq_code('W')[0]) == + frequencies.get_freq_code('D')[0]) + assert (tsb(frequencies.get_freq_code('M')[0]) == + frequencies.get_freq_code('D')[0]) - self.assertEqual(tsb(frequencies.get_freq_code('S')[0]), - frequencies.get_freq_code('S')[0]) - self.assertEqual(tsb(frequencies.get_freq_code('T')[0]), - frequencies.get_freq_code('S')[0]) - self.assertEqual(tsb(frequencies.get_freq_code('H')[0]), - frequencies.get_freq_code('S')[0]) + assert (tsb(frequencies.get_freq_code('S')[0]) == + frequencies.get_freq_code('S')[0]) + assert (tsb(frequencies.get_freq_code('T')[0]) == + frequencies.get_freq_code('S')[0]) + assert (tsb(frequencies.get_freq_code('H')[0]) == + frequencies.get_freq_code('S')[0]) def test_freq_to_reso(self): Reso = frequencies.Resolution - self.assertEqual(Reso.get_str_from_freq('A'), 'year') - self.assertEqual(Reso.get_str_from_freq('Q'), 'quarter') - self.assertEqual(Reso.get_str_from_freq('M'), 'month') - self.assertEqual(Reso.get_str_from_freq('D'), 'day') - self.assertEqual(Reso.get_str_from_freq('H'), 'hour') - self.assertEqual(Reso.get_str_from_freq('T'), 'minute') - self.assertEqual(Reso.get_str_from_freq('S'), 'second') - self.assertEqual(Reso.get_str_from_freq('L'), 'millisecond') - self.assertEqual(Reso.get_str_from_freq('U'), 'microsecond') - self.assertEqual(Reso.get_str_from_freq('N'), 'nanosecond') + assert Reso.get_str_from_freq('A') == 'year' + assert Reso.get_str_from_freq('Q') == 'quarter' + assert Reso.get_str_from_freq('M') == 'month' + assert Reso.get_str_from_freq('D') == 'day' + assert Reso.get_str_from_freq('H') == 'hour' + assert Reso.get_str_from_freq('T') == 'minute' + assert Reso.get_str_from_freq('S') == 'second' + assert Reso.get_str_from_freq('L') == 'millisecond' + assert Reso.get_str_from_freq('U') == 'microsecond' + assert Reso.get_str_from_freq('N') == 'nanosecond' for freq in ['A', 'Q', 'M', 'D', 'H', 'T', 'S', 'L', 
'U', 'N']: # check roundtrip result = Reso.get_freq(Reso.get_str_from_freq(freq)) - self.assertEqual(freq, result) + assert freq == result for freq in ['D', 'H', 'T', 'S', 'L', 'U']: result = Reso.get_freq(Reso.get_str(Reso.get_reso_from_freq(freq))) - self.assertEqual(freq, result) + assert freq == result def test_resolution_bumping(self): - # GH 14378 + # see gh-14378 Reso = frequencies.Resolution - self.assertEqual(Reso.get_stride_from_decimal(1.5, 'T'), (90, 'S')) - self.assertEqual(Reso.get_stride_from_decimal(62.4, 'T'), (3744, 'S')) - self.assertEqual(Reso.get_stride_from_decimal(1.04, 'H'), (3744, 'S')) - self.assertEqual(Reso.get_stride_from_decimal(1, 'D'), (1, 'D')) - self.assertEqual(Reso.get_stride_from_decimal(0.342931, 'H'), - (1234551600, 'U')) - self.assertEqual(Reso.get_stride_from_decimal(1.2345, 'D'), - (106660800, 'L')) + assert Reso.get_stride_from_decimal(1.5, 'T') == (90, 'S') + assert Reso.get_stride_from_decimal(62.4, 'T') == (3744, 'S') + assert Reso.get_stride_from_decimal(1.04, 'H') == (3744, 'S') + assert Reso.get_stride_from_decimal(1, 'D') == (1, 'D') + assert (Reso.get_stride_from_decimal(0.342931, 'H') == + (1234551600, 'U')) + assert Reso.get_stride_from_decimal(1.2345, 'D') == (106660800, 'L') with pytest.raises(ValueError): Reso.get_stride_from_decimal(0.5, 'N') @@ -445,54 +440,54 @@ def test_resolution_bumping(self): Reso.get_stride_from_decimal(0.3429324798798269273987982, 'H') def test_get_freq_code(self): - # freqstr - self.assertEqual(frequencies.get_freq_code('A'), - (frequencies.get_freq('A'), 1)) - self.assertEqual(frequencies.get_freq_code('3D'), - (frequencies.get_freq('D'), 3)) - self.assertEqual(frequencies.get_freq_code('-2M'), - (frequencies.get_freq('M'), -2)) + # frequency str + assert (frequencies.get_freq_code('A') == + (frequencies.get_freq('A'), 1)) + assert (frequencies.get_freq_code('3D') == + (frequencies.get_freq('D'), 3)) + assert (frequencies.get_freq_code('-2M') == + (frequencies.get_freq('M'), -2)) # tuple - self.assertEqual(frequencies.get_freq_code(('D', 1)), - (frequencies.get_freq('D'), 1)) - self.assertEqual(frequencies.get_freq_code(('A', 3)), - (frequencies.get_freq('A'), 3)) - self.assertEqual(frequencies.get_freq_code(('M', -2)), - (frequencies.get_freq('M'), -2)) + assert (frequencies.get_freq_code(('D', 1)) == + (frequencies.get_freq('D'), 1)) + assert (frequencies.get_freq_code(('A', 3)) == + (frequencies.get_freq('A'), 3)) + assert (frequencies.get_freq_code(('M', -2)) == + (frequencies.get_freq('M'), -2)) + # numeric tuple - self.assertEqual(frequencies.get_freq_code((1000, 1)), (1000, 1)) + assert frequencies.get_freq_code((1000, 1)) == (1000, 1) # offsets - self.assertEqual(frequencies.get_freq_code(offsets.Day()), - (frequencies.get_freq('D'), 1)) - self.assertEqual(frequencies.get_freq_code(offsets.Day(3)), - (frequencies.get_freq('D'), 3)) - self.assertEqual(frequencies.get_freq_code(offsets.Day(-2)), - (frequencies.get_freq('D'), -2)) - - self.assertEqual(frequencies.get_freq_code(offsets.MonthEnd()), - (frequencies.get_freq('M'), 1)) - self.assertEqual(frequencies.get_freq_code(offsets.MonthEnd(3)), - (frequencies.get_freq('M'), 3)) - self.assertEqual(frequencies.get_freq_code(offsets.MonthEnd(-2)), - (frequencies.get_freq('M'), -2)) - - self.assertEqual(frequencies.get_freq_code(offsets.Week()), - (frequencies.get_freq('W'), 1)) - self.assertEqual(frequencies.get_freq_code(offsets.Week(3)), - (frequencies.get_freq('W'), 3)) - self.assertEqual(frequencies.get_freq_code(offsets.Week(-2)), - 
(frequencies.get_freq('W'), -2)) - - # monday is weekday=0 - self.assertEqual(frequencies.get_freq_code(offsets.Week(weekday=1)), - (frequencies.get_freq('W-TUE'), 1)) - self.assertEqual(frequencies.get_freq_code(offsets.Week(3, weekday=0)), - (frequencies.get_freq('W-MON'), 3)) - self.assertEqual( - frequencies.get_freq_code(offsets.Week(-2, weekday=4)), - (frequencies.get_freq('W-FRI'), -2)) + assert (frequencies.get_freq_code(offsets.Day()) == + (frequencies.get_freq('D'), 1)) + assert (frequencies.get_freq_code(offsets.Day(3)) == + (frequencies.get_freq('D'), 3)) + assert (frequencies.get_freq_code(offsets.Day(-2)) == + (frequencies.get_freq('D'), -2)) + + assert (frequencies.get_freq_code(offsets.MonthEnd()) == + (frequencies.get_freq('M'), 1)) + assert (frequencies.get_freq_code(offsets.MonthEnd(3)) == + (frequencies.get_freq('M'), 3)) + assert (frequencies.get_freq_code(offsets.MonthEnd(-2)) == + (frequencies.get_freq('M'), -2)) + + assert (frequencies.get_freq_code(offsets.Week()) == + (frequencies.get_freq('W'), 1)) + assert (frequencies.get_freq_code(offsets.Week(3)) == + (frequencies.get_freq('W'), 3)) + assert (frequencies.get_freq_code(offsets.Week(-2)) == + (frequencies.get_freq('W'), -2)) + + # Monday is weekday=0 + assert (frequencies.get_freq_code(offsets.Week(weekday=1)) == + (frequencies.get_freq('W-TUE'), 1)) + assert (frequencies.get_freq_code(offsets.Week(3, weekday=0)) == + (frequencies.get_freq('W-MON'), 3)) + assert (frequencies.get_freq_code(offsets.Week(-2, weekday=4)) == + (frequencies.get_freq('W-FRI'), -2)) _dti = DatetimeIndex @@ -510,18 +505,18 @@ def test_raise_if_too_few(self): def test_business_daily(self): index = _dti(['12/31/1998', '1/3/1999', '1/4/1999']) - self.assertEqual(frequencies.infer_freq(index), 'B') + assert frequencies.infer_freq(index) == 'B' def test_day(self): self._check_tick(timedelta(1), 'D') def test_day_corner(self): index = _dti(['1/1/2000', '1/2/2000', '1/3/2000']) - self.assertEqual(frequencies.infer_freq(index), 'D') + assert frequencies.infer_freq(index) == 'D' def test_non_datetimeindex(self): dates = to_datetime(['1/1/2000', '1/2/2000', '1/3/2000']) - self.assertEqual(frequencies.infer_freq(dates), 'D') + assert frequencies.infer_freq(dates) == 'D' def test_hour(self): self._check_tick(timedelta(hours=1), 'H') @@ -550,7 +545,7 @@ def _check_tick(self, base_delta, code): exp_freq = '%d%s' % (i, code) else: exp_freq = code - self.assertEqual(frequencies.infer_freq(index), exp_freq) + assert frequencies.infer_freq(index) == exp_freq index = _dti([b + base_delta * 7] + [b + base_delta * j for j in range( 3)]) @@ -595,7 +590,7 @@ def test_monthly(self): def test_monthly_ambiguous(self): rng = _dti(['1/31/2000', '2/29/2000', '3/31/2000']) - self.assertEqual(rng.inferred_freq, 'M') + assert rng.inferred_freq == 'M' def test_business_monthly(self): self._check_generated_range('1/1/2000', 'BM') @@ -617,7 +612,7 @@ def test_business_annual(self): def test_annual_ambiguous(self): rng = _dti(['1/31/2000', '1/31/2001', '1/31/2002']) - self.assertEqual(rng.inferred_freq, 'A-JAN') + assert rng.inferred_freq == 'A-JAN' def _check_generated_range(self, start, freq): freq = freq.upper() @@ -625,7 +620,7 @@ def _check_generated_range(self, start, freq): gen = date_range(start, periods=7, freq=freq) index = _dti(gen.values) if not freq.startswith('Q-'): - self.assertEqual(frequencies.infer_freq(index), gen.freqstr) + assert frequencies.infer_freq(index) == gen.freqstr else: inf_freq = frequencies.infer_freq(index) is_dec_range = inf_freq == 
'Q-DEC' and gen.freqstr in ( @@ -640,7 +635,7 @@ def _check_generated_range(self, start, freq): index = _dti(gen.values) if not freq.startswith('Q-'): - self.assertEqual(frequencies.infer_freq(index), gen.freqstr) + assert frequencies.infer_freq(index) == gen.freqstr else: inf_freq = frequencies.infer_freq(index) is_dec_range = inf_freq == 'Q-DEC' and gen.freqstr in ( @@ -655,15 +650,15 @@ def _check_generated_range(self, start, freq): def test_infer_freq(self): rng = period_range('1959Q2', '2009Q3', freq='Q') rng = Index(rng.to_timestamp('D', how='e').asobject) - self.assertEqual(rng.inferred_freq, 'Q-DEC') + assert rng.inferred_freq == 'Q-DEC' rng = period_range('1959Q2', '2009Q3', freq='Q-NOV') rng = Index(rng.to_timestamp('D', how='e').asobject) - self.assertEqual(rng.inferred_freq, 'Q-NOV') + assert rng.inferred_freq == 'Q-NOV' rng = period_range('1959Q2', '2009Q3', freq='Q-OCT') rng = Index(rng.to_timestamp('D', how='e').asobject) - self.assertEqual(rng.inferred_freq, 'Q-OCT') + assert rng.inferred_freq == 'Q-OCT' def test_infer_freq_tz(self): @@ -683,7 +678,7 @@ def test_infer_freq_tz(self): 'US/Pacific', 'US/Eastern']: for expected, dates in compat.iteritems(freqs): idx = DatetimeIndex(dates, tz=tz) - self.assertEqual(idx.inferred_freq, expected) + assert idx.inferred_freq == expected def test_infer_freq_tz_transition(self): # Tests for #8772 @@ -699,7 +694,7 @@ def test_infer_freq_tz_transition(self): for freq in freqs: idx = date_range(date_pair[0], date_pair[ 1], freq=freq, tz=tz) - self.assertEqual(idx.inferred_freq, freq) + assert idx.inferred_freq == freq index = date_range("2013-11-03", periods=5, freq="3H").tz_localize("America/Chicago") @@ -711,21 +706,21 @@ def test_infer_freq_businesshour(self): ['2014-07-01 09:00', '2014-07-01 10:00', '2014-07-01 11:00', '2014-07-01 12:00', '2014-07-01 13:00', '2014-07-01 14:00']) # hourly freq in a day must result in 'H' - self.assertEqual(idx.inferred_freq, 'H') + assert idx.inferred_freq == 'H' idx = DatetimeIndex( ['2014-07-01 09:00', '2014-07-01 10:00', '2014-07-01 11:00', '2014-07-01 12:00', '2014-07-01 13:00', '2014-07-01 14:00', '2014-07-01 15:00', '2014-07-01 16:00', '2014-07-02 09:00', '2014-07-02 10:00', '2014-07-02 11:00']) - self.assertEqual(idx.inferred_freq, 'BH') + assert idx.inferred_freq == 'BH' idx = DatetimeIndex( ['2014-07-04 09:00', '2014-07-04 10:00', '2014-07-04 11:00', '2014-07-04 12:00', '2014-07-04 13:00', '2014-07-04 14:00', '2014-07-04 15:00', '2014-07-04 16:00', '2014-07-07 09:00', '2014-07-07 10:00', '2014-07-07 11:00']) - self.assertEqual(idx.inferred_freq, 'BH') + assert idx.inferred_freq == 'BH' idx = DatetimeIndex( ['2014-07-04 09:00', '2014-07-04 10:00', '2014-07-04 11:00', @@ -736,12 +731,12 @@ def test_infer_freq_businesshour(self): '2014-07-07 16:00', '2014-07-08 09:00', '2014-07-08 10:00', '2014-07-08 11:00', '2014-07-08 12:00', '2014-07-08 13:00', '2014-07-08 14:00', '2014-07-08 15:00', '2014-07-08 16:00']) - self.assertEqual(idx.inferred_freq, 'BH') + assert idx.inferred_freq == 'BH' def test_not_monotonic(self): rng = _dti(['1/31/2000', '1/31/2001', '1/31/2002']) rng = rng[::-1] - self.assertEqual(rng.inferred_freq, '-1A-JAN') + assert rng.inferred_freq == '-1A-JAN' def test_non_datetimeindex2(self): rng = _dti(['1/31/2000', '1/31/2001', '1/31/2002']) @@ -749,7 +744,7 @@ def test_non_datetimeindex2(self): vals = rng.to_pydatetime() result = frequencies.infer_freq(vals) - self.assertEqual(result, rng.inferred_freq) + assert result == rng.inferred_freq def test_invalid_index_types(self): @@ 
-771,7 +766,7 @@ def test_string_datetimelike_compat(self): '2004-04']) result = frequencies.infer_freq(Index(['2004-01', '2004-02', '2004-03', '2004-04'])) - self.assertEqual(result, expected) + assert result == expected def test_series(self): diff --git a/pandas/tests/tseries/test_holiday.py b/pandas/tests/tseries/test_holiday.py index c87f580582335..109adaaa7e0b0 100644 --- a/pandas/tests/tseries/test_holiday.py +++ b/pandas/tests/tseries/test_holiday.py @@ -49,9 +49,9 @@ def test_calendar(self): Timestamp(self.start_date), Timestamp(self.end_date)) - self.assertEqual(list(holidays.to_pydatetime()), self.holiday_list) - self.assertEqual(list(holidays_1.to_pydatetime()), self.holiday_list) - self.assertEqual(list(holidays_2.to_pydatetime()), self.holiday_list) + assert list(holidays.to_pydatetime()) == self.holiday_list + assert list(holidays_1.to_pydatetime()) == self.holiday_list + assert list(holidays_2.to_pydatetime()) == self.holiday_list def test_calendar_caching(self): # Test for issue #9552 @@ -82,8 +82,7 @@ def test_calendar_observance_dates(self): def test_rule_from_name(self): USFedCal = get_calendar('USFederalHolidayCalendar') - self.assertEqual(USFedCal.rule_from_name( - 'Thanksgiving'), USThanksgivingDay) + assert USFedCal.rule_from_name('Thanksgiving') == USThanksgivingDay class TestHoliday(tm.TestCase): @@ -93,17 +92,12 @@ def setUp(self): self.end_date = datetime(2020, 12, 31) def check_results(self, holiday, start, end, expected): - self.assertEqual(list(holiday.dates(start, end)), expected) + assert list(holiday.dates(start, end)) == expected + # Verify that timezone info is preserved. - self.assertEqual( - list( - holiday.dates( - utc.localize(Timestamp(start)), - utc.localize(Timestamp(end)), - ) - ), - [utc.localize(dt) for dt in expected], - ) + assert (list(holiday.dates(utc.localize(Timestamp(start)), + utc.localize(Timestamp(end)))) == + [utc.localize(dt) for dt in expected]) def test_usmemorialday(self): self.check_results(holiday=USMemorialDay, @@ -234,7 +228,7 @@ def test_holidays_within_dates(self): for rule, dates in compat.iteritems(holidays): empty_dates = rule.dates(start_date, end_date) - self.assertEqual(empty_dates.tolist(), []) + assert empty_dates.tolist() == [] if isinstance(dates, tuple): dates = [dates] @@ -266,17 +260,15 @@ def test_special_holidays(self): end_date=datetime(2012, 12, 31), offset=DateOffset(weekday=MO(1))) - self.assertEqual(base_date, - holiday_1.dates(self.start_date, self.end_date)) - self.assertEqual(base_date, - holiday_2.dates(self.start_date, self.end_date)) + assert base_date == holiday_1.dates(self.start_date, self.end_date) + assert base_date == holiday_2.dates(self.start_date, self.end_date) def test_get_calendar(self): class TestCalendar(AbstractHolidayCalendar): rules = [] calendar = get_calendar('TestCalendar') - self.assertEqual(TestCalendar, calendar.__class__) + assert TestCalendar == calendar.__class__ def test_factory(self): class_1 = HolidayCalendarFactory('MemorialDay', @@ -287,9 +279,9 @@ def test_factory(self): USThanksgivingDay) class_3 = HolidayCalendarFactory('Combined', class_1, class_2) - self.assertEqual(len(class_1.rules), 1) - self.assertEqual(len(class_2.rules), 1) - self.assertEqual(len(class_3.rules), 2) + assert len(class_1.rules) == 1 + assert len(class_2.rules) == 1 + assert len(class_3.rules) == 2 class TestObservanceRules(tm.TestCase): @@ -304,64 +296,65 @@ def setUp(self): self.tu = datetime(2014, 4, 15) def test_next_monday(self): - self.assertEqual(next_monday(self.sa), self.mo) - 
self.assertEqual(next_monday(self.su), self.mo) + assert next_monday(self.sa) == self.mo + assert next_monday(self.su) == self.mo def test_next_monday_or_tuesday(self): - self.assertEqual(next_monday_or_tuesday(self.sa), self.mo) - self.assertEqual(next_monday_or_tuesday(self.su), self.tu) - self.assertEqual(next_monday_or_tuesday(self.mo), self.tu) + assert next_monday_or_tuesday(self.sa) == self.mo + assert next_monday_or_tuesday(self.su) == self.tu + assert next_monday_or_tuesday(self.mo) == self.tu def test_previous_friday(self): - self.assertEqual(previous_friday(self.sa), self.fr) - self.assertEqual(previous_friday(self.su), self.fr) + assert previous_friday(self.sa) == self.fr + assert previous_friday(self.su) == self.fr def test_sunday_to_monday(self): - self.assertEqual(sunday_to_monday(self.su), self.mo) + assert sunday_to_monday(self.su) == self.mo def test_nearest_workday(self): - self.assertEqual(nearest_workday(self.sa), self.fr) - self.assertEqual(nearest_workday(self.su), self.mo) - self.assertEqual(nearest_workday(self.mo), self.mo) + assert nearest_workday(self.sa) == self.fr + assert nearest_workday(self.su) == self.mo + assert nearest_workday(self.mo) == self.mo def test_weekend_to_monday(self): - self.assertEqual(weekend_to_monday(self.sa), self.mo) - self.assertEqual(weekend_to_monday(self.su), self.mo) - self.assertEqual(weekend_to_monday(self.mo), self.mo) + assert weekend_to_monday(self.sa) == self.mo + assert weekend_to_monday(self.su) == self.mo + assert weekend_to_monday(self.mo) == self.mo def test_next_workday(self): - self.assertEqual(next_workday(self.sa), self.mo) - self.assertEqual(next_workday(self.su), self.mo) - self.assertEqual(next_workday(self.mo), self.tu) + assert next_workday(self.sa) == self.mo + assert next_workday(self.su) == self.mo + assert next_workday(self.mo) == self.tu def test_previous_workday(self): - self.assertEqual(previous_workday(self.sa), self.fr) - self.assertEqual(previous_workday(self.su), self.fr) - self.assertEqual(previous_workday(self.tu), self.mo) + assert previous_workday(self.sa) == self.fr + assert previous_workday(self.su) == self.fr + assert previous_workday(self.tu) == self.mo def test_before_nearest_workday(self): - self.assertEqual(before_nearest_workday(self.sa), self.th) - self.assertEqual(before_nearest_workday(self.su), self.fr) - self.assertEqual(before_nearest_workday(self.tu), self.mo) + assert before_nearest_workday(self.sa) == self.th + assert before_nearest_workday(self.su) == self.fr + assert before_nearest_workday(self.tu) == self.mo def test_after_nearest_workday(self): - self.assertEqual(after_nearest_workday(self.sa), self.mo) - self.assertEqual(after_nearest_workday(self.su), self.tu) - self.assertEqual(after_nearest_workday(self.fr), self.mo) + assert after_nearest_workday(self.sa) == self.mo + assert after_nearest_workday(self.su) == self.tu + assert after_nearest_workday(self.fr) == self.mo class TestFederalHolidayCalendar(tm.TestCase): - # Test for issue 10278 def test_no_mlk_before_1984(self): + # see gh-10278 class MLKCalendar(AbstractHolidayCalendar): rules = [USMartinLutherKingJr] holidays = MLKCalendar().holidays(start='1984', end='1988').to_pydatetime().tolist() + # Testing to make sure holiday is not incorrectly observed before 1986 - self.assertEqual(holidays, [datetime(1986, 1, 20, 0, 0), datetime( - 1987, 1, 19, 0, 0)]) + assert holidays == [datetime(1986, 1, 20, 0, 0), + datetime(1987, 1, 19, 0, 0)] def test_memorial_day(self): class MemorialDay(AbstractHolidayCalendar): @@ -369,23 
+362,23 @@ class MemorialDay(AbstractHolidayCalendar): holidays = MemorialDay().holidays(start='1971', end='1980').to_pydatetime().tolist() - # Fixes 5/31 error and checked manually against wikipedia - self.assertEqual(holidays, [datetime(1971, 5, 31, 0, 0), - datetime(1972, 5, 29, 0, 0), - datetime(1973, 5, 28, 0, 0), - datetime(1974, 5, 27, 0, - 0), datetime(1975, 5, 26, 0, 0), - datetime(1976, 5, 31, 0, - 0), datetime(1977, 5, 30, 0, 0), - datetime(1978, 5, 29, 0, - 0), datetime(1979, 5, 28, 0, 0)]) + # Fixes 5/31 error and checked manually against Wikipedia + assert holidays == [datetime(1971, 5, 31, 0, 0), + datetime(1972, 5, 29, 0, 0), + datetime(1973, 5, 28, 0, 0), + datetime(1974, 5, 27, 0, 0), + datetime(1975, 5, 26, 0, 0), + datetime(1976, 5, 31, 0, 0), + datetime(1977, 5, 30, 0, 0), + datetime(1978, 5, 29, 0, 0), + datetime(1979, 5, 28, 0, 0)] -class TestHolidayConflictingArguments(tm.TestCase): - # GH 10217 +class TestHolidayConflictingArguments(tm.TestCase): def test_both_offset_observance_raises(self): + # see gh-10217 with pytest.raises(NotImplementedError): Holiday("Cyber Monday", month=11, day=1, offset=[DateOffset(weekday=SA(4))], diff --git a/pandas/tests/tseries/test_offsets.py b/pandas/tests/tseries/test_offsets.py index 08f17fc358a47..ce4208a8cea69 100644 --- a/pandas/tests/tseries/test_offsets.py +++ b/pandas/tests/tseries/test_offsets.py @@ -155,7 +155,7 @@ def test_apply_out_of_range(self): t = Timestamp('20080101', tz=tz) result = t + offset assert isinstance(result, datetime) - self.assertEqual(t.tzinfo, result.tzinfo) + assert t.tzinfo == result.tzinfo except (tslib.OutOfBoundsDatetime): raise @@ -230,13 +230,13 @@ def test_return_type(self): def test_offset_n(self): for offset_klass in self.offset_types: offset = self._get_offset(offset_klass) - self.assertEqual(offset.n, 1) + assert offset.n == 1 neg_offset = offset * -1 - self.assertEqual(neg_offset.n, -1) + assert neg_offset.n == -1 mul_offset = offset * 3 - self.assertEqual(mul_offset.n, 3) + assert mul_offset.n == 3 def test_offset_freqstr(self): for offset_klass in self.offset_types: @@ -247,7 +247,7 @@ def test_offset_freqstr(self): "", 'LWOM-SAT', ): code = get_offset(freqstr) - self.assertEqual(offset.rule_code, code) + assert offset.rule_code == code def _check_offsetfunc_works(self, offset, funcname, dt, expected, normalize=False): @@ -256,11 +256,11 @@ def _check_offsetfunc_works(self, offset, funcname, dt, expected, result = func(dt) assert isinstance(result, Timestamp) - self.assertEqual(result, expected) + assert result == expected result = func(Timestamp(dt)) assert isinstance(result, Timestamp) - self.assertEqual(result, expected) + assert result == expected # see gh-14101 exp_warning = None @@ -277,9 +277,9 @@ def _check_offsetfunc_works(self, offset, funcname, dt, expected, result = func(ts) assert isinstance(result, Timestamp) if normalize is False: - self.assertEqual(result, expected + Nano(5)) + assert result == expected + Nano(5) else: - self.assertEqual(result, expected) + assert result == expected if isinstance(dt, np.datetime64): # test tz when input is datetime or Timestamp @@ -295,11 +295,11 @@ def _check_offsetfunc_works(self, offset, funcname, dt, expected, result = func(dt_tz) assert isinstance(result, Timestamp) - self.assertEqual(result, expected_localize) + assert result == expected_localize result = func(Timestamp(dt, tz=tz)) assert isinstance(result, Timestamp) - self.assertEqual(result, expected_localize) + assert result == expected_localize # see gh-14101 exp_warning = None 
@@ -316,9 +316,9 @@ def _check_offsetfunc_works(self, offset, funcname, dt, expected, result = func(ts) assert isinstance(result, Timestamp) if normalize is False: - self.assertEqual(result, expected_localize + Nano(5)) + assert result == expected_localize + Nano(5) else: - self.assertEqual(result, expected_localize) + assert result == expected_localize def test_apply(self): sdt = datetime(2011, 1, 1, 9, 0) @@ -466,14 +466,14 @@ def test_add(self): result_ts = Timestamp(dt) + offset_s for result in [result_dt, result_ts]: assert isinstance(result, Timestamp) - self.assertEqual(result, expected) + assert result == expected tm._skip_if_no_pytz() for tz in self.timezones: expected_localize = expected.tz_localize(tz) result = Timestamp(dt, tz=tz) + offset_s assert isinstance(result, Timestamp) - self.assertEqual(result, expected_localize) + assert result == expected_localize # normalize=True offset_s = self._get_offset(offset, normalize=True) @@ -483,13 +483,13 @@ def test_add(self): result_ts = Timestamp(dt) + offset_s for result in [result_dt, result_ts]: assert isinstance(result, Timestamp) - self.assertEqual(result, expected) + assert result == expected for tz in self.timezones: expected_localize = expected.tz_localize(tz) result = Timestamp(dt, tz=tz) + offset_s assert isinstance(result, Timestamp) - self.assertEqual(result, expected_localize) + assert result == expected_localize def test_pickle_v0_15_2(self): offsets = {'DateOffset': DateOffset(years=1), @@ -558,10 +558,10 @@ def test_different_normalize_equals(self): offset = BDay() offset2 = BDay() offset2.normalize = True - self.assertEqual(offset, offset2) + assert offset == offset2 def test_repr(self): - self.assertEqual(repr(self.offset), '<BusinessDay>') + assert repr(self.offset) == '<BusinessDay>' assert repr(self.offset2) == '<2 * BusinessDays>' expected = '' @@ -573,49 +573,49 @@ def test_with_offset(self): assert (self.d + offset) == datetime(2008, 1, 2, 2) def testEQ(self): - self.assertEqual(self.offset2, self.offset2) + assert self.offset2 == self.offset2 def test_mul(self): pass def test_hash(self): - self.assertEqual(hash(self.offset2), hash(self.offset2)) + assert hash(self.offset2) == hash(self.offset2) def testCall(self): - self.assertEqual(self.offset2(self.d), datetime(2008, 1, 3)) + assert self.offset2(self.d) == datetime(2008, 1, 3) def testRAdd(self): - self.assertEqual(self.d + self.offset2, self.offset2 + self.d) + assert self.d + self.offset2 == self.offset2 + self.d def testSub(self): off = self.offset2 pytest.raises(Exception, off.__sub__, self.d) - self.assertEqual(2 * off - off, off) + assert 2 * off - off == off - self.assertEqual(self.d - self.offset2, self.d + BDay(-2)) + assert self.d - self.offset2 == self.d + BDay(-2) def testRSub(self): - self.assertEqual(self.d - self.offset2, (-self.offset2).apply(self.d)) + assert self.d - self.offset2 == (-self.offset2).apply(self.d) def testMult1(self): - self.assertEqual(self.d + 10 * self.offset, self.d + BDay(10)) + assert self.d + 10 * self.offset == self.d + BDay(10) def testMult2(self): - self.assertEqual(self.d + (-5 * BDay(-10)), self.d + BDay(50)) + assert self.d + (-5 * BDay(-10)) == self.d + BDay(50) def testRollback1(self): - self.assertEqual(BDay(10).rollback(self.d), self.d) + assert BDay(10).rollback(self.d) == self.d def testRollback2(self): - self.assertEqual( - BDay(10).rollback(datetime(2008, 1, 5)), datetime(2008, 1, 4)) + assert (BDay(10).rollback(datetime(2008, 1, 5)) == + datetime(2008, 1, 4)) def testRollforward1(self): -
self.assertEqual(BDay(10).rollforward(self.d), self.d) + assert BDay(10).rollforward(self.d) == self.d def testRollforward2(self): - self.assertEqual( - BDay(10).rollforward(datetime(2008, 1, 5)), datetime(2008, 1, 7)) + assert (BDay(10).rollforward(datetime(2008, 1, 5)) == + datetime(2008, 1, 7)) def test_roll_date_object(self): offset = BDay() @@ -623,17 +623,17 @@ def test_roll_date_object(self): dt = date(2012, 9, 15) result = offset.rollback(dt) - self.assertEqual(result, datetime(2012, 9, 14)) + assert result == datetime(2012, 9, 14) result = offset.rollforward(dt) - self.assertEqual(result, datetime(2012, 9, 17)) + assert result == datetime(2012, 9, 17) offset = offsets.Day() result = offset.rollback(dt) - self.assertEqual(result, datetime(2012, 9, 15)) + assert result == datetime(2012, 9, 15) result = offset.rollforward(dt) - self.assertEqual(result, datetime(2012, 9, 15)) + assert result == datetime(2012, 9, 15) def test_onOffset(self): tests = [(BDay(), datetime(2008, 1, 1), True), @@ -691,25 +691,25 @@ def test_apply_large_n(self): dt = datetime(2012, 10, 23) result = dt + BDay(10) - self.assertEqual(result, datetime(2012, 11, 6)) + assert result == datetime(2012, 11, 6) result = dt + BDay(100) - BDay(100) - self.assertEqual(result, dt) + assert result == dt off = BDay() * 6 rs = datetime(2012, 1, 1) - off xp = datetime(2011, 12, 23) - self.assertEqual(rs, xp) + assert rs == xp st = datetime(2011, 12, 18) rs = st + off xp = datetime(2011, 12, 26) - self.assertEqual(rs, xp) + assert rs == xp off = BDay() * 10 rs = datetime(2014, 1, 5) + off # see #5890 xp = datetime(2014, 1, 17) - self.assertEqual(rs, xp) + assert rs == xp def test_apply_corner(self): pytest.raises(TypeError, BDay().apply, BMonthEnd()) @@ -753,34 +753,30 @@ def test_different_normalize_equals(self): offset = self._offset() offset2 = self._offset() offset2.normalize = True - self.assertEqual(offset, offset2) + assert offset == offset2 def test_repr(self): - self.assertEqual(repr(self.offset1), '<BusinessHour: BH=09:00-17:00>') - self.assertEqual(repr(self.offset2), - '<3 * BusinessHours: BH=09:00-17:00>') - self.assertEqual(repr(self.offset3), - '<-1 * BusinessHour: BH=09:00-17:00>') - self.assertEqual(repr(self.offset4), - '<-4 * BusinessHours: BH=09:00-17:00>') - - self.assertEqual(repr(self.offset5), '<BusinessHour: BH=11:00-14:30>') - self.assertEqual(repr(self.offset6), '<BusinessHour: BH=20:00-05:00>') - self.assertEqual(repr(self.offset7), - '<-2 * BusinessHours: BH=21:30-06:30>') + assert repr(self.offset1) == '<BusinessHour: BH=09:00-17:00>' + assert repr(self.offset2) == '<3 * BusinessHours: BH=09:00-17:00>' + assert repr(self.offset3) == '<-1 * BusinessHour: BH=09:00-17:00>' + assert repr(self.offset4) == '<-4 * BusinessHours: BH=09:00-17:00>' + + assert repr(self.offset5) == '<BusinessHour: BH=11:00-14:30>' + assert repr(self.offset6) == '<BusinessHour: BH=20:00-05:00>' + assert repr(self.offset7) == '<-2 * BusinessHours: BH=21:30-06:30>' def test_with_offset(self): expected = Timestamp('2014-07-01 13:00') - self.assertEqual(self.d + BusinessHour() * 3, expected) - self.assertEqual(self.d + BusinessHour(n=3), expected) + assert self.d + BusinessHour() * 3 == expected + assert self.d + BusinessHour(n=3) == expected def testEQ(self): for offset in [self.offset1, self.offset2, self.offset3, self.offset4]: - self.assertEqual(offset, offset) + assert offset == offset self.assertNotEqual(BusinessHour(), BusinessHour(-1)) - self.assertEqual(BusinessHour(start='09:00'), BusinessHour()) + assert BusinessHour(start='09:00') == BusinessHour() self.assertNotEqual(BusinessHour(start='09:00'), BusinessHour(start='09:01')) self.assertNotEqual(BusinessHour(start='09:00', end='17:00'), @@
-788,90 +784,83 @@ def testEQ(self): def test_hash(self): for offset in [self.offset1, self.offset2, self.offset3, self.offset4]: - self.assertEqual(hash(offset), hash(offset)) + assert hash(offset) == hash(offset) def testCall(self): - self.assertEqual(self.offset1(self.d), datetime(2014, 7, 1, 11)) - self.assertEqual(self.offset2(self.d), datetime(2014, 7, 1, 13)) - self.assertEqual(self.offset3(self.d), datetime(2014, 6, 30, 17)) - self.assertEqual(self.offset4(self.d), datetime(2014, 6, 30, 14)) + assert self.offset1(self.d) == datetime(2014, 7, 1, 11) + assert self.offset2(self.d) == datetime(2014, 7, 1, 13) + assert self.offset3(self.d) == datetime(2014, 6, 30, 17) + assert self.offset4(self.d) == datetime(2014, 6, 30, 14) def testRAdd(self): - self.assertEqual(self.d + self.offset2, self.offset2 + self.d) + assert self.d + self.offset2 == self.offset2 + self.d def testSub(self): off = self.offset2 pytest.raises(Exception, off.__sub__, self.d) - self.assertEqual(2 * off - off, off) + assert 2 * off - off == off - self.assertEqual(self.d - self.offset2, self.d + self._offset(-3)) + assert self.d - self.offset2 == self.d + self._offset(-3) def testRSub(self): - self.assertEqual(self.d - self.offset2, (-self.offset2).apply(self.d)) + assert self.d - self.offset2 == (-self.offset2).apply(self.d) def testMult1(self): - self.assertEqual(self.d + 5 * self.offset1, self.d + self._offset(5)) + assert self.d + 5 * self.offset1 == self.d + self._offset(5) def testMult2(self): - self.assertEqual(self.d + (-3 * self._offset(-2)), - self.d + self._offset(6)) + assert self.d + (-3 * self._offset(-2)) == self.d + self._offset(6) def testRollback1(self): - self.assertEqual(self.offset1.rollback(self.d), self.d) - self.assertEqual(self.offset2.rollback(self.d), self.d) - self.assertEqual(self.offset3.rollback(self.d), self.d) - self.assertEqual(self.offset4.rollback(self.d), self.d) - self.assertEqual(self.offset5.rollback(self.d), - datetime(2014, 6, 30, 14, 30)) - self.assertEqual(self.offset6.rollback( - self.d), datetime(2014, 7, 1, 5, 0)) - self.assertEqual(self.offset7.rollback( - self.d), datetime(2014, 7, 1, 6, 30)) + assert self.offset1.rollback(self.d) == self.d + assert self.offset2.rollback(self.d) == self.d + assert self.offset3.rollback(self.d) == self.d + assert self.offset4.rollback(self.d) == self.d + assert self.offset5.rollback(self.d) == datetime(2014, 6, 30, 14, 30) + assert self.offset6.rollback(self.d) == datetime(2014, 7, 1, 5, 0) + assert self.offset7.rollback(self.d) == datetime(2014, 7, 1, 6, 30) d = datetime(2014, 7, 1, 0) - self.assertEqual(self.offset1.rollback(d), datetime(2014, 6, 30, 17)) - self.assertEqual(self.offset2.rollback(d), datetime(2014, 6, 30, 17)) - self.assertEqual(self.offset3.rollback(d), datetime(2014, 6, 30, 17)) - self.assertEqual(self.offset4.rollback(d), datetime(2014, 6, 30, 17)) - self.assertEqual(self.offset5.rollback( - d), datetime(2014, 6, 30, 14, 30)) - self.assertEqual(self.offset6.rollback(d), d) - self.assertEqual(self.offset7.rollback(d), d) + assert self.offset1.rollback(d) == datetime(2014, 6, 30, 17) + assert self.offset2.rollback(d) == datetime(2014, 6, 30, 17) + assert self.offset3.rollback(d) == datetime(2014, 6, 30, 17) + assert self.offset4.rollback(d) == datetime(2014, 6, 30, 17) + assert self.offset5.rollback(d) == datetime(2014, 6, 30, 14, 30) + assert self.offset6.rollback(d) == d + assert self.offset7.rollback(d) == d - self.assertEqual(self._offset(5).rollback(self.d), self.d) + assert self._offset(5).rollback(self.d) == 
self.d def testRollback2(self): - self.assertEqual(self._offset(-3) - .rollback(datetime(2014, 7, 5, 15, 0)), - datetime(2014, 7, 4, 17, 0)) + assert (self._offset(-3).rollback(datetime(2014, 7, 5, 15, 0)) == + datetime(2014, 7, 4, 17, 0)) def testRollforward1(self): - self.assertEqual(self.offset1.rollforward(self.d), self.d) - self.assertEqual(self.offset2.rollforward(self.d), self.d) - self.assertEqual(self.offset3.rollforward(self.d), self.d) - self.assertEqual(self.offset4.rollforward(self.d), self.d) - self.assertEqual(self.offset5.rollforward( - self.d), datetime(2014, 7, 1, 11, 0)) - self.assertEqual(self.offset6.rollforward( - self.d), datetime(2014, 7, 1, 20, 0)) - self.assertEqual(self.offset7.rollforward( - self.d), datetime(2014, 7, 1, 21, 30)) + assert self.offset1.rollforward(self.d) == self.d + assert self.offset2.rollforward(self.d) == self.d + assert self.offset3.rollforward(self.d) == self.d + assert self.offset4.rollforward(self.d) == self.d + assert (self.offset5.rollforward(self.d) == + datetime(2014, 7, 1, 11, 0)) + assert (self.offset6.rollforward(self.d) == + datetime(2014, 7, 1, 20, 0)) + assert (self.offset7.rollforward(self.d) == + datetime(2014, 7, 1, 21, 30)) d = datetime(2014, 7, 1, 0) - self.assertEqual(self.offset1.rollforward(d), datetime(2014, 7, 1, 9)) - self.assertEqual(self.offset2.rollforward(d), datetime(2014, 7, 1, 9)) - self.assertEqual(self.offset3.rollforward(d), datetime(2014, 7, 1, 9)) - self.assertEqual(self.offset4.rollforward(d), datetime(2014, 7, 1, 9)) - self.assertEqual(self.offset5.rollforward(d), datetime(2014, 7, 1, 11)) - self.assertEqual(self.offset6.rollforward(d), d) - self.assertEqual(self.offset7.rollforward(d), d) + assert self.offset1.rollforward(d) == datetime(2014, 7, 1, 9) + assert self.offset2.rollforward(d) == datetime(2014, 7, 1, 9) + assert self.offset3.rollforward(d) == datetime(2014, 7, 1, 9) + assert self.offset4.rollforward(d) == datetime(2014, 7, 1, 9) + assert self.offset5.rollforward(d) == datetime(2014, 7, 1, 11) + assert self.offset6.rollforward(d) == d + assert self.offset7.rollforward(d) == d - self.assertEqual(self._offset(5).rollforward(self.d), self.d) + assert self._offset(5).rollforward(self.d) == self.d def testRollforward2(self): - self.assertEqual(self._offset(-3) - .rollforward(datetime(2014, 7, 5, 16, 0)), - datetime(2014, 7, 7, 9)) + assert (self._offset(-3).rollforward(datetime(2014, 7, 5, 16, 0)) == + datetime(2014, 7, 7, 9)) def test_roll_date_object(self): offset = BusinessHour() @@ -879,10 +868,10 @@ def test_roll_date_object(self): dt = datetime(2014, 7, 6, 15, 0) result = offset.rollback(dt) - self.assertEqual(result, datetime(2014, 7, 4, 17)) + assert result == datetime(2014, 7, 4, 17) result = offset.rollforward(dt) - self.assertEqual(result, datetime(2014, 7, 7, 9)) + assert result == datetime(2014, 7, 7, 9) def test_normalize(self): tests = [] @@ -924,7 +913,7 @@ def test_normalize(self): for offset, cases in tests: for dt, expected in compat.iteritems(cases): - self.assertEqual(offset.apply(dt), expected) + assert offset.apply(dt) == expected def test_onOffset(self): tests = [] @@ -963,7 +952,7 @@ def test_onOffset(self): for offset, cases in tests: for dt, expected in compat.iteritems(cases): - self.assertEqual(offset.onOffset(dt), expected) + assert offset.onOffset(dt) == expected def test_opening_time(self): tests = [] @@ -1127,8 +1116,8 @@ def test_opening_time(self): for _offsets, cases in tests: for offset in _offsets: for dt, (exp_next, exp_prev) in compat.iteritems(cases): - 
self.assertEqual(offset._next_opening_time(dt), exp_next) - self.assertEqual(offset._prev_opening_time(dt), exp_prev) + assert offset._next_opening_time(dt) == exp_next + assert offset._prev_opening_time(dt) == exp_prev def test_apply(self): tests = [] @@ -1457,93 +1446,89 @@ def test_different_normalize_equals(self): offset = self._offset() offset2 = self._offset() offset2.normalize = True - self.assertEqual(offset, offset2) + assert offset == offset2 def test_repr(self): - self.assertEqual(repr(self.offset1), - '') - self.assertEqual(repr(self.offset2), - '') + assert repr(self.offset1) == '' + assert repr(self.offset2) == '' def test_with_offset(self): expected = Timestamp('2014-07-01 13:00') - self.assertEqual(self.d + CustomBusinessHour() * 3, expected) - self.assertEqual(self.d + CustomBusinessHour(n=3), expected) + assert self.d + CustomBusinessHour() * 3 == expected + assert self.d + CustomBusinessHour(n=3) == expected def testEQ(self): for offset in [self.offset1, self.offset2]: - self.assertEqual(offset, offset) + assert offset == offset - self.assertNotEqual(CustomBusinessHour(), CustomBusinessHour(-1)) - self.assertEqual(CustomBusinessHour(start='09:00'), - CustomBusinessHour()) - self.assertNotEqual(CustomBusinessHour(start='09:00'), - CustomBusinessHour(start='09:01')) - self.assertNotEqual(CustomBusinessHour(start='09:00', end='17:00'), - CustomBusinessHour(start='17:00', end='09:01')) + assert CustomBusinessHour() != CustomBusinessHour(-1) + assert (CustomBusinessHour(start='09:00') == + CustomBusinessHour()) + assert (CustomBusinessHour(start='09:00') != + CustomBusinessHour(start='09:01')) + assert (CustomBusinessHour(start='09:00', end='17:00') != + CustomBusinessHour(start='17:00', end='09:01')) - self.assertNotEqual(CustomBusinessHour(weekmask='Tue Wed Thu Fri'), - CustomBusinessHour(weekmask='Mon Tue Wed Thu Fri')) - self.assertNotEqual(CustomBusinessHour(holidays=['2014-06-27']), - CustomBusinessHour(holidays=['2014-06-28'])) + assert (CustomBusinessHour(weekmask='Tue Wed Thu Fri') != + CustomBusinessHour(weekmask='Mon Tue Wed Thu Fri')) + assert (CustomBusinessHour(holidays=['2014-06-27']) != + CustomBusinessHour(holidays=['2014-06-28'])) def test_hash(self): - self.assertEqual(hash(self.offset1), hash(self.offset1)) - self.assertEqual(hash(self.offset2), hash(self.offset2)) + assert hash(self.offset1) == hash(self.offset1) + assert hash(self.offset2) == hash(self.offset2) def testCall(self): - self.assertEqual(self.offset1(self.d), datetime(2014, 7, 1, 11)) - self.assertEqual(self.offset2(self.d), datetime(2014, 7, 1, 11)) + assert self.offset1(self.d) == datetime(2014, 7, 1, 11) + assert self.offset2(self.d) == datetime(2014, 7, 1, 11) def testRAdd(self): - self.assertEqual(self.d + self.offset2, self.offset2 + self.d) + assert self.d + self.offset2 == self.offset2 + self.d def testSub(self): off = self.offset2 pytest.raises(Exception, off.__sub__, self.d) - self.assertEqual(2 * off - off, off) + assert 2 * off - off == off - self.assertEqual(self.d - self.offset2, self.d - (2 * off - off)) + assert self.d - self.offset2 == self.d - (2 * off - off) def testRSub(self): - self.assertEqual(self.d - self.offset2, (-self.offset2).apply(self.d)) + assert self.d - self.offset2 == (-self.offset2).apply(self.d) def testMult1(self): - self.assertEqual(self.d + 5 * self.offset1, self.d + self._offset(5)) + assert self.d + 5 * self.offset1 == self.d + self._offset(5) def testMult2(self): - self.assertEqual(self.d + (-3 * self._offset(-2)), - self.d + self._offset(6)) + assert 
self.d + (-3 * self._offset(-2)) == self.d + self._offset(6) def testRollback1(self): - self.assertEqual(self.offset1.rollback(self.d), self.d) - self.assertEqual(self.offset2.rollback(self.d), self.d) + assert self.offset1.rollback(self.d) == self.d + assert self.offset2.rollback(self.d) == self.d d = datetime(2014, 7, 1, 0) + # 2014/07/01 is Tuesday, 06/30 is Monday(holiday) - self.assertEqual(self.offset1.rollback(d), datetime(2014, 6, 27, 17)) + assert self.offset1.rollback(d) == datetime(2014, 6, 27, 17) # 2014/6/30 and 2014/6/27 are holidays - self.assertEqual(self.offset2.rollback(d), datetime(2014, 6, 26, 17)) + assert self.offset2.rollback(d) == datetime(2014, 6, 26, 17) def testRollback2(self): - self.assertEqual(self._offset(-3) - .rollback(datetime(2014, 7, 5, 15, 0)), - datetime(2014, 7, 4, 17, 0)) + assert (self._offset(-3).rollback(datetime(2014, 7, 5, 15, 0)) == + datetime(2014, 7, 4, 17, 0)) def testRollforward1(self): - self.assertEqual(self.offset1.rollforward(self.d), self.d) - self.assertEqual(self.offset2.rollforward(self.d), self.d) + assert self.offset1.rollforward(self.d) == self.d + assert self.offset2.rollforward(self.d) == self.d d = datetime(2014, 7, 1, 0) - self.assertEqual(self.offset1.rollforward(d), datetime(2014, 7, 1, 9)) - self.assertEqual(self.offset2.rollforward(d), datetime(2014, 7, 1, 9)) + assert self.offset1.rollforward(d) == datetime(2014, 7, 1, 9) + assert self.offset2.rollforward(d) == datetime(2014, 7, 1, 9) def testRollforward2(self): - self.assertEqual(self._offset(-3) - .rollforward(datetime(2014, 7, 5, 16, 0)), - datetime(2014, 7, 7, 9)) + assert (self._offset(-3).rollforward(datetime(2014, 7, 5, 16, 0)) == + datetime(2014, 7, 7, 9)) def test_roll_date_object(self): offset = BusinessHour() @@ -1551,10 +1536,10 @@ def test_roll_date_object(self): dt = datetime(2014, 7, 6, 15, 0) result = offset.rollback(dt) - self.assertEqual(result, datetime(2014, 7, 4, 17)) + assert result == datetime(2014, 7, 4, 17) result = offset.rollforward(dt) - self.assertEqual(result, datetime(2014, 7, 7, 9)) + assert result == datetime(2014, 7, 7, 9) def test_normalize(self): tests = [] @@ -1598,7 +1583,7 @@ def test_normalize(self): for offset, cases in tests: for dt, expected in compat.iteritems(cases): - self.assertEqual(offset.apply(dt), expected) + assert offset.apply(dt) == expected def test_onOffset(self): tests = [] @@ -1614,7 +1599,7 @@ def test_onOffset(self): for offset, cases in tests: for dt, expected in compat.iteritems(cases): - self.assertEqual(offset.onOffset(dt), expected) + assert offset.onOffset(dt) == expected def test_apply(self): tests = [] @@ -1702,7 +1687,7 @@ def test_different_normalize_equals(self): offset = CDay() offset2 = CDay() offset2.normalize = True - self.assertEqual(offset, offset2) + assert offset == offset2 def test_repr(self): assert repr(self.offset) == '' @@ -1717,50 +1702,50 @@ def test_with_offset(self): assert (self.d + offset) == datetime(2008, 1, 2, 2) def testEQ(self): - self.assertEqual(self.offset2, self.offset2) + assert self.offset2 == self.offset2 def test_mul(self): pass def test_hash(self): - self.assertEqual(hash(self.offset2), hash(self.offset2)) + assert hash(self.offset2) == hash(self.offset2) def testCall(self): - self.assertEqual(self.offset2(self.d), datetime(2008, 1, 3)) - self.assertEqual(self.offset2(self.nd), datetime(2008, 1, 3)) + assert self.offset2(self.d) == datetime(2008, 1, 3) + assert self.offset2(self.nd) == datetime(2008, 1, 3) def testRAdd(self): - self.assertEqual(self.d + self.offset2, 
self.offset2 + self.d) + assert self.d + self.offset2 == self.offset2 + self.d def testSub(self): off = self.offset2 pytest.raises(Exception, off.__sub__, self.d) - self.assertEqual(2 * off - off, off) + assert 2 * off - off == off - self.assertEqual(self.d - self.offset2, self.d + CDay(-2)) + assert self.d - self.offset2 == self.d + CDay(-2) def testRSub(self): - self.assertEqual(self.d - self.offset2, (-self.offset2).apply(self.d)) + assert self.d - self.offset2 == (-self.offset2).apply(self.d) def testMult1(self): - self.assertEqual(self.d + 10 * self.offset, self.d + CDay(10)) + assert self.d + 10 * self.offset == self.d + CDay(10) def testMult2(self): - self.assertEqual(self.d + (-5 * CDay(-10)), self.d + CDay(50)) + assert self.d + (-5 * CDay(-10)) == self.d + CDay(50) def testRollback1(self): - self.assertEqual(CDay(10).rollback(self.d), self.d) + assert CDay(10).rollback(self.d) == self.d def testRollback2(self): - self.assertEqual( - CDay(10).rollback(datetime(2008, 1, 5)), datetime(2008, 1, 4)) + assert (CDay(10).rollback(datetime(2008, 1, 5)) == + datetime(2008, 1, 4)) def testRollforward1(self): - self.assertEqual(CDay(10).rollforward(self.d), self.d) + assert CDay(10).rollforward(self.d) == self.d def testRollforward2(self): - self.assertEqual( - CDay(10).rollforward(datetime(2008, 1, 5)), datetime(2008, 1, 7)) + assert (CDay(10).rollforward(datetime(2008, 1, 5)) == + datetime(2008, 1, 7)) def test_roll_date_object(self): offset = CDay() @@ -1768,17 +1753,17 @@ def test_roll_date_object(self): dt = date(2012, 9, 15) result = offset.rollback(dt) - self.assertEqual(result, datetime(2012, 9, 14)) + assert result == datetime(2012, 9, 14) result = offset.rollforward(dt) - self.assertEqual(result, datetime(2012, 9, 17)) + assert result == datetime(2012, 9, 17) offset = offsets.Day() result = offset.rollback(dt) - self.assertEqual(result, datetime(2012, 9, 15)) + assert result == datetime(2012, 9, 15) result = offset.rollforward(dt) - self.assertEqual(result, datetime(2012, 9, 15)) + assert result == datetime(2012, 9, 15) def test_onOffset(self): tests = [(CDay(), datetime(2008, 1, 1), True), @@ -1837,20 +1822,20 @@ def test_apply_large_n(self): dt = datetime(2012, 10, 23) result = dt + CDay(10) - self.assertEqual(result, datetime(2012, 11, 6)) + assert result == datetime(2012, 11, 6) result = dt + CDay(100) - CDay(100) - self.assertEqual(result, dt) + assert result == dt off = CDay() * 6 rs = datetime(2012, 1, 1) - off xp = datetime(2011, 12, 23) - self.assertEqual(rs, xp) + assert rs == xp st = datetime(2011, 12, 18) rs = st + off xp = datetime(2011, 12, 26) - self.assertEqual(rs, xp) + assert rs == xp def test_apply_corner(self): pytest.raises(Exception, CDay().apply, BMonthEnd()) @@ -1870,7 +1855,7 @@ def test_holidays(self): dt = datetime(year, 4, 30) xp = datetime(year, 5, 2) rs = dt + tday - self.assertEqual(rs, xp) + assert rs == xp def test_weekmask(self): weekmask_saudi = 'Sat Sun Mon Tue Wed' # Thu-Fri Weekend @@ -1883,13 +1868,13 @@ def test_weekmask(self): xp_saudi = datetime(2013, 5, 4) xp_uae = datetime(2013, 5, 2) xp_egypt = datetime(2013, 5, 2) - self.assertEqual(xp_saudi, dt + bday_saudi) - self.assertEqual(xp_uae, dt + bday_uae) - self.assertEqual(xp_egypt, dt + bday_egypt) + assert xp_saudi == dt + bday_saudi + assert xp_uae == dt + bday_uae + assert xp_egypt == dt + bday_egypt xp2 = datetime(2013, 5, 5) - self.assertEqual(xp2, dt + 2 * bday_saudi) - self.assertEqual(xp2, dt + 2 * bday_uae) - self.assertEqual(xp2, dt + 2 * bday_egypt) + assert xp2 == dt + 2 * 
bday_saudi + assert xp2 == dt + 2 * bday_uae + assert xp2 == dt + 2 * bday_egypt def test_weekmask_and_holidays(self): weekmask_egypt = 'Sun Mon Tue Wed Thu' # Fri-Sat Weekend @@ -1898,7 +1883,7 @@ def test_weekmask_and_holidays(self): bday_egypt = CDay(holidays=holidays, weekmask=weekmask_egypt) dt = datetime(2013, 4, 30) xp_egypt = datetime(2013, 5, 5) - self.assertEqual(xp_egypt, dt + 2 * bday_egypt) + assert xp_egypt == dt + 2 * bday_egypt def test_calendar(self): calendar = USFederalHolidayCalendar() @@ -1908,7 +1893,7 @@ def test_calendar(self): def test_roundtrip_pickle(self): def _check_roundtrip(obj): unpickled = tm.round_trip_pickle(obj) - self.assertEqual(unpickled, obj) + assert unpickled == obj _check_roundtrip(self.offset) _check_roundtrip(self.offset2) @@ -1921,7 +1906,7 @@ def test_pickle_compat_0_14_1(self): cday0_14_1 = read_pickle(os.path.join(pth, 'cday-0.14.1.pickle')) cday = CDay(holidays=hdays) - self.assertEqual(cday, cday0_14_1) + assert cday == cday0_14_1 class CustomBusinessMonthBase(object): @@ -1933,33 +1918,32 @@ def setUp(self): self.offset2 = self._object(2) def testEQ(self): - self.assertEqual(self.offset2, self.offset2) + assert self.offset2 == self.offset2 def test_mul(self): pass def test_hash(self): - self.assertEqual(hash(self.offset2), hash(self.offset2)) + assert hash(self.offset2) == hash(self.offset2) def testRAdd(self): - self.assertEqual(self.d + self.offset2, self.offset2 + self.d) + assert self.d + self.offset2 == self.offset2 + self.d def testSub(self): off = self.offset2 pytest.raises(Exception, off.__sub__, self.d) - self.assertEqual(2 * off - off, off) + assert 2 * off - off == off - self.assertEqual(self.d - self.offset2, self.d + self._object(-2)) + assert self.d - self.offset2 == self.d + self._object(-2) def testRSub(self): - self.assertEqual(self.d - self.offset2, (-self.offset2).apply(self.d)) + assert self.d - self.offset2 == (-self.offset2).apply(self.d) def testMult1(self): - self.assertEqual(self.d + 10 * self.offset, self.d + self._object(10)) + assert self.d + 10 * self.offset == self.d + self._object(10) def testMult2(self): - self.assertEqual(self.d + (-5 * self._object(-10)), - self.d + self._object(50)) + assert self.d + (-5 * self._object(-10)) == self.d + self._object(50) def test_offsets_compare_equal(self): offset1 = self._object() @@ -1969,7 +1953,7 @@ def test_offsets_compare_equal(self): def test_roundtrip_pickle(self): def _check_roundtrip(obj): unpickled = tm.round_trip_pickle(obj) - self.assertEqual(unpickled, obj) + assert unpickled == obj _check_roundtrip(self._object()) _check_roundtrip(self._object(2)) @@ -1984,26 +1968,24 @@ def test_different_normalize_equals(self): offset = CBMonthEnd() offset2 = CBMonthEnd() offset2.normalize = True - self.assertEqual(offset, offset2) + assert offset == offset2 def test_repr(self): assert repr(self.offset) == '' assert repr(self.offset2) == '<2 * CustomBusinessMonthEnds>' def testCall(self): - self.assertEqual(self.offset2(self.d), datetime(2008, 2, 29)) + assert self.offset2(self.d) == datetime(2008, 2, 29) def testRollback1(self): - self.assertEqual( - CDay(10).rollback(datetime(2007, 12, 31)), datetime(2007, 12, 31)) + assert (CDay(10).rollback(datetime(2007, 12, 31)) == + datetime(2007, 12, 31)) def testRollback2(self): - self.assertEqual(CBMonthEnd(10).rollback(self.d), - datetime(2007, 12, 31)) + assert CBMonthEnd(10).rollback(self.d) == datetime(2007, 12, 31) def testRollforward1(self): - self.assertEqual(CBMonthEnd(10).rollforward( - self.d), datetime(2008, 1, 31)) 
+ assert CBMonthEnd(10).rollforward(self.d) == datetime(2008, 1, 31) def test_roll_date_object(self): offset = CBMonthEnd() @@ -2011,17 +1993,17 @@ def test_roll_date_object(self): dt = date(2012, 9, 15) result = offset.rollback(dt) - self.assertEqual(result, datetime(2012, 8, 31)) + assert result == datetime(2012, 8, 31) result = offset.rollforward(dt) - self.assertEqual(result, datetime(2012, 9, 28)) + assert result == datetime(2012, 9, 28) offset = offsets.Day() result = offset.rollback(dt) - self.assertEqual(result, datetime(2012, 9, 15)) + assert result == datetime(2012, 9, 15) result = offset.rollforward(dt) - self.assertEqual(result, datetime(2012, 9, 15)) + assert result == datetime(2012, 9, 15) def test_onOffset(self): tests = [(CBMonthEnd(), datetime(2008, 1, 31), True), @@ -2059,20 +2041,20 @@ def test_apply_large_n(self): dt = datetime(2012, 10, 23) result = dt + CBMonthEnd(10) - self.assertEqual(result, datetime(2013, 7, 31)) + assert result == datetime(2013, 7, 31) result = dt + CDay(100) - CDay(100) - self.assertEqual(result, dt) + assert result == dt off = CBMonthEnd() * 6 rs = datetime(2012, 1, 1) - off xp = datetime(2011, 7, 29) - self.assertEqual(rs, xp) + assert rs == xp st = datetime(2011, 12, 18) rs = st + off xp = datetime(2012, 5, 31) - self.assertEqual(rs, xp) + assert rs == xp def test_holidays(self): # Define a TradingDay offset @@ -2080,17 +2062,16 @@ def test_holidays(self): np.datetime64('2012-02-29')] bm_offset = CBMonthEnd(holidays=holidays) dt = datetime(2012, 1, 1) - self.assertEqual(dt + bm_offset, datetime(2012, 1, 30)) - self.assertEqual(dt + 2 * bm_offset, datetime(2012, 2, 27)) + assert dt + bm_offset == datetime(2012, 1, 30) + assert dt + 2 * bm_offset == datetime(2012, 2, 27) def test_datetimeindex(self): from pandas.tseries.holiday import USFederalHolidayCalendar hcal = USFederalHolidayCalendar() freq = CBMonthEnd(calendar=hcal) - self.assertEqual(DatetimeIndex(start='20120101', end='20130101', - freq=freq).tolist()[0], - datetime(2012, 1, 31)) + assert (DatetimeIndex(start='20120101', end='20130101', + freq=freq).tolist()[0] == datetime(2012, 1, 31)) class TestCustomBusinessMonthBegin(CustomBusinessMonthBase, Base): @@ -2101,26 +2082,24 @@ def test_different_normalize_equals(self): offset = CBMonthBegin() offset2 = CBMonthBegin() offset2.normalize = True - self.assertEqual(offset, offset2) + assert offset == offset2 def test_repr(self): assert repr(self.offset) == '' assert repr(self.offset2) == '<2 * CustomBusinessMonthBegins>' def testCall(self): - self.assertEqual(self.offset2(self.d), datetime(2008, 3, 3)) + assert self.offset2(self.d) == datetime(2008, 3, 3) def testRollback1(self): - self.assertEqual( - CDay(10).rollback(datetime(2007, 12, 31)), datetime(2007, 12, 31)) + assert (CDay(10).rollback(datetime(2007, 12, 31)) == + datetime(2007, 12, 31)) def testRollback2(self): - self.assertEqual(CBMonthBegin(10).rollback(self.d), - datetime(2008, 1, 1)) + assert CBMonthBegin(10).rollback(self.d) == datetime(2008, 1, 1) def testRollforward1(self): - self.assertEqual(CBMonthBegin(10).rollforward( - self.d), datetime(2008, 1, 1)) + assert CBMonthBegin(10).rollforward(self.d) == datetime(2008, 1, 1) def test_roll_date_object(self): offset = CBMonthBegin() @@ -2128,17 +2107,17 @@ def test_roll_date_object(self): dt = date(2012, 9, 15) result = offset.rollback(dt) - self.assertEqual(result, datetime(2012, 9, 3)) + assert result == datetime(2012, 9, 3) result = offset.rollforward(dt) - self.assertEqual(result, datetime(2012, 10, 1)) + assert result == 
datetime(2012, 10, 1) offset = offsets.Day() result = offset.rollback(dt) - self.assertEqual(result, datetime(2012, 9, 15)) + assert result == datetime(2012, 9, 15) result = offset.rollforward(dt) - self.assertEqual(result, datetime(2012, 9, 15)) + assert result == datetime(2012, 9, 15) def test_onOffset(self): tests = [(CBMonthBegin(), datetime(2008, 1, 1), True), @@ -2175,20 +2154,21 @@ def test_apply_large_n(self): dt = datetime(2012, 10, 23) result = dt + CBMonthBegin(10) - self.assertEqual(result, datetime(2013, 8, 1)) + assert result == datetime(2013, 8, 1) result = dt + CDay(100) - CDay(100) - self.assertEqual(result, dt) + assert result == dt off = CBMonthBegin() * 6 rs = datetime(2012, 1, 1) - off xp = datetime(2011, 7, 1) - self.assertEqual(rs, xp) + assert rs == xp st = datetime(2011, 12, 18) rs = st + off + xp = datetime(2012, 6, 1) - self.assertEqual(rs, xp) + assert rs == xp def test_holidays(self): # Define a TradingDay offset @@ -2196,15 +2176,15 @@ def test_holidays(self): np.datetime64('2012-03-01')] bm_offset = CBMonthBegin(holidays=holidays) dt = datetime(2012, 1, 1) - self.assertEqual(dt + bm_offset, datetime(2012, 1, 2)) - self.assertEqual(dt + 2 * bm_offset, datetime(2012, 2, 3)) + + assert dt + bm_offset == datetime(2012, 1, 2) + assert dt + 2 * bm_offset == datetime(2012, 2, 3) def test_datetimeindex(self): hcal = USFederalHolidayCalendar() cbmb = CBMonthBegin(calendar=hcal) - self.assertEqual(DatetimeIndex(start='20120101', end='20130101', - freq=cbmb).tolist()[0], - datetime(2012, 1, 3)) + assert (DatetimeIndex(start='20120101', end='20130101', + freq=cbmb).tolist()[0] == datetime(2012, 1, 3)) def assertOnOffset(offset, date, expected): @@ -2218,10 +2198,9 @@ class TestWeek(Base): _offset = Week def test_repr(self): - self.assertEqual(repr(Week(weekday=0)), "<Week: weekday=0>") - self.assertEqual(repr(Week(n=-1, weekday=0)), "<-1 * Week: weekday=0>") - self.assertEqual(repr(Week(n=-2, weekday=0)), - "<-2 * Weeks: weekday=0>") + assert repr(Week(weekday=0)) == "<Week: weekday=0>" + assert repr(Week(n=-1, weekday=0)) == "<-1 * Week: weekday=0>" + assert repr(Week(n=-2, weekday=0)) == "<-2 * Weeks: weekday=0>" def test_corner(self): pytest.raises(ValueError, Week, weekday=7) @@ -2303,8 +2282,8 @@ def test_constructor(self): n=1, week=0, weekday=7) def test_repr(self): - self.assertEqual(repr(WeekOfMonth(weekday=1, week=2)), - "<WeekOfMonth: week=2, weekday=1>") + assert (repr(WeekOfMonth(weekday=1, week=2)) == + "<WeekOfMonth: week=2, weekday=1>") def test_offset(self): date1 = datetime(2011, 1, 4) # 1st Tuesday of Month @@ -2354,9 +2333,10 @@ def test_offset(self): # try subtracting result = datetime(2011, 2, 1) - WeekOfMonth(week=1, weekday=2) - self.assertEqual(result, datetime(2011, 1, 12)) + assert result == datetime(2011, 1, 12) + result = datetime(2011, 2, 3) - WeekOfMonth(week=0, weekday=2) - self.assertEqual(result, datetime(2011, 2, 2)) + assert result == datetime(2011, 2, 2) def test_onOffset(self): test_cases = [ @@ -2370,7 +2350,7 @@ def test_onOffset(self): for week, weekday, dt, expected in test_cases: offset = WeekOfMonth(week=week, weekday=weekday) - self.assertEqual(offset.onOffset(dt), expected) + assert offset.onOffset(dt) == expected class TestLastWeekOfMonth(Base): @@ -2392,13 +2372,13 @@ def test_offset(self): offset_sat = LastWeekOfMonth(n=1, weekday=5) one_day_before = (last_sat + timedelta(days=-1)) - self.assertEqual(one_day_before + offset_sat, last_sat) + assert one_day_before + offset_sat == last_sat one_day_after = (last_sat + timedelta(days=+1)) - self.assertEqual(one_day_after + offset_sat, next_sat) + assert one_day_after +
offset_sat == next_sat # Test On that day - self.assertEqual(last_sat + offset_sat, next_sat) + assert last_sat + offset_sat == next_sat # Thursday @@ -2407,23 +2387,22 @@ def test_offset(self): next_thurs = datetime(2013, 2, 28) one_day_before = last_thurs + timedelta(days=-1) - self.assertEqual(one_day_before + offset_thur, last_thurs) + assert one_day_before + offset_thur == last_thurs one_day_after = last_thurs + timedelta(days=+1) - self.assertEqual(one_day_after + offset_thur, next_thurs) + assert one_day_after + offset_thur == next_thurs # Test on that day - self.assertEqual(last_thurs + offset_thur, next_thurs) + assert last_thurs + offset_thur == next_thurs three_before = last_thurs + timedelta(days=-3) - self.assertEqual(three_before + offset_thur, last_thurs) + assert three_before + offset_thur == last_thurs two_after = last_thurs + timedelta(days=+2) - self.assertEqual(two_after + offset_thur, next_thurs) + assert two_after + offset_thur == next_thurs offset_sunday = LastWeekOfMonth(n=1, weekday=WeekDay.SUN) - self.assertEqual(datetime(2013, 7, 31) + - offset_sunday, datetime(2013, 8, 25)) + assert datetime(2013, 7, 31) + offset_sunday == datetime(2013, 8, 25) def test_onOffset(self): test_cases = [ @@ -2445,7 +2424,7 @@ def test_onOffset(self): for weekday, dt, expected in test_cases: offset = LastWeekOfMonth(weekday=weekday) - self.assertEqual(offset.onOffset(dt), expected, msg=date) + assert offset.onOffset(dt) == expected class TestBMonthBegin(Base): @@ -2556,7 +2535,7 @@ def test_normalize(self): result = dt + BMonthEnd(normalize=True) expected = dt.replace(hour=0) + BMonthEnd() - self.assertEqual(result, expected) + assert result == expected def test_onOffset(self): @@ -2655,23 +2634,22 @@ def test_offset(self): for base, expected in compat.iteritems(cases): assertEq(offset, base, expected) - # def test_day_of_month(self): - # dt = datetime(2007, 1, 1) - - # offset = MonthEnd(day=20) + def test_day_of_month(self): + dt = datetime(2007, 1, 1) + offset = MonthEnd(day=20) - # result = dt + offset - # self.assertEqual(result, datetime(2007, 1, 20)) + result = dt + offset + assert result == Timestamp(2007, 1, 31) - # result = result + offset - # self.assertEqual(result, datetime(2007, 2, 20)) + result = result + offset + assert result == Timestamp(2007, 2, 28) def test_normalize(self): dt = datetime(2007, 1, 1, 3) result = dt + MonthEnd(normalize=True) expected = dt.replace(hour=0) + MonthEnd() - self.assertEqual(result, expected) + assert result == expected def test_onOffset(self): @@ -3033,12 +3011,12 @@ class TestBQuarterBegin(Base): _offset = BQuarterBegin def test_repr(self): - self.assertEqual(repr(BQuarterBegin()), - "") - self.assertEqual(repr(BQuarterBegin(startingMonth=3)), - "") - self.assertEqual(repr(BQuarterBegin(startingMonth=1)), - "") + assert (repr(BQuarterBegin()) == + "") + assert (repr(BQuarterBegin(startingMonth=3)) == + "") + assert (repr(BQuarterBegin(startingMonth=1)) == + "") def test_isAnchored(self): assert BQuarterBegin(startingMonth=1).isAnchored() @@ -3120,19 +3098,19 @@ def test_offset(self): # corner offset = BQuarterBegin(n=-1, startingMonth=1) - self.assertEqual(datetime(2007, 4, 3) + offset, datetime(2007, 4, 2)) + assert datetime(2007, 4, 3) + offset == datetime(2007, 4, 2) class TestBQuarterEnd(Base): _offset = BQuarterEnd def test_repr(self): - self.assertEqual(repr(BQuarterEnd()), - "") - self.assertEqual(repr(BQuarterEnd(startingMonth=3)), - "") - self.assertEqual(repr(BQuarterEnd(startingMonth=1)), - "") + assert (repr(BQuarterEnd()) 
== + "") + assert (repr(BQuarterEnd(startingMonth=3)) == + "") + assert (repr(BQuarterEnd(startingMonth=1)) == + "") def test_isAnchored(self): assert BQuarterEnd(startingMonth=1).isAnchored() @@ -3197,7 +3175,7 @@ def test_offset(self): # corner offset = BQuarterEnd(n=-1, startingMonth=1) - self.assertEqual(datetime(2010, 1, 31) + offset, datetime(2010, 1, 29)) + assert datetime(2010, 1, 31) + offset == datetime(2010, 1, 29) def test_onOffset(self): @@ -3334,58 +3312,52 @@ def test_apply(self): current = data[0] for datum in data[1:]: current = current + offset - self.assertEqual(current, datum) + assert current == datum class TestFY5253NearestEndMonth(Base): def test_get_target_month_end(self): - self.assertEqual(makeFY5253NearestEndMonth(startingMonth=8, - weekday=WeekDay.SAT) - .get_target_month_end( - datetime(2013, 1, 1)), datetime(2013, 8, 31)) - self.assertEqual(makeFY5253NearestEndMonth(startingMonth=12, - weekday=WeekDay.SAT) - .get_target_month_end(datetime(2013, 1, 1)), - datetime(2013, 12, 31)) - self.assertEqual(makeFY5253NearestEndMonth(startingMonth=2, - weekday=WeekDay.SAT) - .get_target_month_end(datetime(2013, 1, 1)), - datetime(2013, 2, 28)) + assert (makeFY5253NearestEndMonth( + startingMonth=8, weekday=WeekDay.SAT).get_target_month_end( + datetime(2013, 1, 1)) == datetime(2013, 8, 31)) + assert (makeFY5253NearestEndMonth( + startingMonth=12, weekday=WeekDay.SAT).get_target_month_end( + datetime(2013, 1, 1)) == datetime(2013, 12, 31)) + assert (makeFY5253NearestEndMonth( + startingMonth=2, weekday=WeekDay.SAT).get_target_month_end( + datetime(2013, 1, 1)) == datetime(2013, 2, 28)) def test_get_year_end(self): - self.assertEqual(makeFY5253NearestEndMonth(startingMonth=8, - weekday=WeekDay.SAT) - .get_year_end(datetime(2013, 1, 1)), - datetime(2013, 8, 31)) - self.assertEqual(makeFY5253NearestEndMonth(startingMonth=8, - weekday=WeekDay.SUN) - .get_year_end(datetime(2013, 1, 1)), - datetime(2013, 9, 1)) - self.assertEqual(makeFY5253NearestEndMonth(startingMonth=8, - weekday=WeekDay.FRI) - .get_year_end(datetime(2013, 1, 1)), - datetime(2013, 8, 30)) + assert (makeFY5253NearestEndMonth( + startingMonth=8, weekday=WeekDay.SAT).get_year_end( + datetime(2013, 1, 1)) == datetime(2013, 8, 31)) + assert (makeFY5253NearestEndMonth( + startingMonth=8, weekday=WeekDay.SUN).get_year_end( + datetime(2013, 1, 1)) == datetime(2013, 9, 1)) + assert (makeFY5253NearestEndMonth( + startingMonth=8, weekday=WeekDay.FRI).get_year_end( + datetime(2013, 1, 1)) == datetime(2013, 8, 30)) offset_n = FY5253(weekday=WeekDay.TUE, startingMonth=12, variation="nearest") - self.assertEqual(offset_n.get_year_end( - datetime(2012, 1, 1)), datetime(2013, 1, 1)) - self.assertEqual(offset_n.get_year_end( - datetime(2012, 1, 10)), datetime(2013, 1, 1)) - - self.assertEqual(offset_n.get_year_end( - datetime(2013, 1, 1)), datetime(2013, 12, 31)) - self.assertEqual(offset_n.get_year_end( - datetime(2013, 1, 2)), datetime(2013, 12, 31)) - self.assertEqual(offset_n.get_year_end( - datetime(2013, 1, 3)), datetime(2013, 12, 31)) - self.assertEqual(offset_n.get_year_end( - datetime(2013, 1, 10)), datetime(2013, 12, 31)) + assert (offset_n.get_year_end(datetime(2012, 1, 1)) == + datetime(2013, 1, 1)) + assert (offset_n.get_year_end(datetime(2012, 1, 10)) == + datetime(2013, 1, 1)) + + assert (offset_n.get_year_end(datetime(2013, 1, 1)) == + datetime(2013, 12, 31)) + assert (offset_n.get_year_end(datetime(2013, 1, 2)) == + datetime(2013, 12, 31)) + assert (offset_n.get_year_end(datetime(2013, 1, 3)) == + datetime(2013, 
12, 31)) + assert (offset_n.get_year_end(datetime(2013, 1, 10)) == + datetime(2013, 12, 31)) JNJ = FY5253(n=1, startingMonth=12, weekday=6, variation="nearest") - self.assertEqual(JNJ.get_year_end( - datetime(2006, 1, 1)), datetime(2006, 12, 31)) + assert (JNJ.get_year_end(datetime(2006, 1, 1)) == + datetime(2006, 12, 31)) def test_onOffset(self): offset_lom_aug_sat = makeFY5253NearestEndMonth(1, startingMonth=8, @@ -3500,7 +3472,7 @@ def test_apply(self): current = data[0] for datum in data[1:]: current = current + offset - self.assertEqual(current, datum) + assert current == datum class TestFY5253LastOfMonthQuarter(Base): @@ -3517,26 +3489,18 @@ def test_isAnchored(self): qtr_with_extra_week=4).isAnchored() def test_equality(self): - self.assertEqual(makeFY5253LastOfMonthQuarter(startingMonth=1, - weekday=WeekDay.SAT, - qtr_with_extra_week=4), - makeFY5253LastOfMonthQuarter(startingMonth=1, - weekday=WeekDay.SAT, - qtr_with_extra_week=4)) - self.assertNotEqual( - makeFY5253LastOfMonthQuarter( - startingMonth=1, weekday=WeekDay.SAT, - qtr_with_extra_week=4), - makeFY5253LastOfMonthQuarter( - startingMonth=1, weekday=WeekDay.SUN, - qtr_with_extra_week=4)) - self.assertNotEqual( - makeFY5253LastOfMonthQuarter( - startingMonth=1, weekday=WeekDay.SAT, - qtr_with_extra_week=4), - makeFY5253LastOfMonthQuarter( - startingMonth=2, weekday=WeekDay.SAT, - qtr_with_extra_week=4)) + assert (makeFY5253LastOfMonthQuarter( + startingMonth=1, weekday=WeekDay.SAT, + qtr_with_extra_week=4) == makeFY5253LastOfMonthQuarter( + startingMonth=1, weekday=WeekDay.SAT, qtr_with_extra_week=4)) + assert (makeFY5253LastOfMonthQuarter( + startingMonth=1, weekday=WeekDay.SAT, + qtr_with_extra_week=4) != makeFY5253LastOfMonthQuarter( + startingMonth=1, weekday=WeekDay.SUN, qtr_with_extra_week=4)) + assert (makeFY5253LastOfMonthQuarter( + startingMonth=1, weekday=WeekDay.SAT, + qtr_with_extra_week=4) != makeFY5253LastOfMonthQuarter( + startingMonth=2, weekday=WeekDay.SAT, qtr_with_extra_week=4)) def test_offset(self): offset = makeFY5253LastOfMonthQuarter(1, startingMonth=9, @@ -3705,12 +3669,9 @@ def test_get_weeks(self): weekday=WeekDay.SAT, qtr_with_extra_week=4) - self.assertEqual(sat_dec_1.get_weeks( - datetime(2011, 4, 2)), [14, 13, 13, 13]) - self.assertEqual(sat_dec_4.get_weeks( - datetime(2011, 4, 2)), [13, 13, 13, 14]) - self.assertEqual(sat_dec_1.get_weeks( - datetime(2010, 12, 25)), [13, 13, 13, 13]) + assert sat_dec_1.get_weeks(datetime(2011, 4, 2)) == [14, 13, 13, 13] + assert sat_dec_4.get_weeks(datetime(2011, 4, 2)) == [13, 13, 13, 14] + assert sat_dec_1.get_weeks(datetime(2010, 12, 25)) == [13, 13, 13, 13] class TestFY5253NearestEndMonthQuarter(Base): @@ -3802,12 +3763,12 @@ def test_offset(self): class TestQuarterBegin(Base): def test_repr(self): - self.assertEqual(repr(QuarterBegin()), - "") - self.assertEqual(repr(QuarterBegin(startingMonth=3)), - "") - self.assertEqual(repr(QuarterBegin(startingMonth=1)), - "") + assert (repr(QuarterBegin()) == + "") + assert (repr(QuarterBegin(startingMonth=3)) == + "") + assert (repr(QuarterBegin(startingMonth=1)) == + "") def test_isAnchored(self): assert QuarterBegin(startingMonth=1).isAnchored() @@ -3874,18 +3835,19 @@ def test_offset(self): # corner offset = QuarterBegin(n=-1, startingMonth=1) - self.assertEqual(datetime(2010, 2, 1) + offset, datetime(2010, 1, 1)) + assert datetime(2010, 2, 1) + offset == datetime(2010, 1, 1) class TestQuarterEnd(Base): _offset = QuarterEnd def test_repr(self): - self.assertEqual(repr(QuarterEnd()), "") - 
self.assertEqual(repr(QuarterEnd(startingMonth=3)), - "") - self.assertEqual(repr(QuarterEnd(startingMonth=1)), - "") + assert (repr(QuarterEnd()) == + "") + assert (repr(QuarterEnd(startingMonth=3)) == + "") + assert (repr(QuarterEnd(startingMonth=1)) == + "") def test_isAnchored(self): assert QuarterEnd(startingMonth=1).isAnchored() @@ -3951,7 +3913,7 @@ def test_offset(self): # corner offset = QuarterEnd(n=-1, startingMonth=1) - self.assertEqual(datetime(2010, 2, 1) + offset, datetime(2010, 1, 31)) + assert datetime(2010, 2, 1) + offset == datetime(2010, 1, 31) def test_onOffset(self): @@ -4173,14 +4135,14 @@ def test_offset(self): for offset, cases in tests: for base, expected in compat.iteritems(cases): - self.assertEqual(base + offset, expected) + assert base + offset == expected def test_roll(self): offset = BYearEnd(month=6) date = datetime(2009, 11, 30) - self.assertEqual(offset.rollforward(date), datetime(2010, 6, 30)) - self.assertEqual(offset.rollback(date), datetime(2009, 6, 30)) + assert offset.rollforward(date) == datetime(2010, 6, 30) + assert offset.rollback(date) == datetime(2009, 6, 30) def test_onOffset(self): @@ -4389,7 +4351,7 @@ def test_ticks(self): offset = kls(3) result = offset + Timedelta(hours=2) assert isinstance(result, Timedelta) - self.assertEqual(result, expected) + assert result == expected def test_Hour(self): assertEq(Hour(), datetime(2010, 1, 1), datetime(2010, 1, 1, 1)) @@ -4397,8 +4359,8 @@ def test_Hour(self): assertEq(2 * Hour(), datetime(2010, 1, 1), datetime(2010, 1, 1, 2)) assertEq(-1 * Hour(), datetime(2010, 1, 1, 1), datetime(2010, 1, 1)) - self.assertEqual(Hour(3) + Hour(2), Hour(5)) - self.assertEqual(Hour(3) - Hour(2), Hour()) + assert Hour(3) + Hour(2) == Hour(5) + assert Hour(3) - Hour(2) == Hour() self.assertNotEqual(Hour(4), Hour(1)) @@ -4410,8 +4372,8 @@ def test_Minute(self): assertEq(-1 * Minute(), datetime(2010, 1, 1, 0, 1), datetime(2010, 1, 1)) - self.assertEqual(Minute(3) + Minute(2), Minute(5)) - self.assertEqual(Minute(3) - Minute(2), Minute()) + assert Minute(3) + Minute(2) == Minute(5) + assert Minute(3) - Minute(2) == Minute() self.assertNotEqual(Minute(5), Minute()) def test_Second(self): @@ -4423,8 +4385,8 @@ def test_Second(self): assertEq(-1 * Second(), datetime(2010, 1, 1, 0, 0, 1), datetime(2010, 1, 1)) - self.assertEqual(Second(3) + Second(2), Second(5)) - self.assertEqual(Second(3) - Second(2), Second()) + assert Second(3) + Second(2) == Second(5) + assert Second(3) - Second(2) == Second() def test_Millisecond(self): assertEq(Milli(), datetime(2010, 1, 1), @@ -4438,8 +4400,8 @@ def test_Millisecond(self): assertEq(-1 * Milli(), datetime(2010, 1, 1, 0, 0, 0, 1000), datetime(2010, 1, 1)) - self.assertEqual(Milli(3) + Milli(2), Milli(5)) - self.assertEqual(Milli(3) - Milli(2), Milli()) + assert Milli(3) + Milli(2) == Milli(5) + assert Milli(3) - Milli(2) == Milli() def test_MillisecondTimestampArithmetic(self): assertEq(Milli(), Timestamp('2010-01-01'), @@ -4457,18 +4419,18 @@ def test_Microsecond(self): assertEq(-1 * Micro(), datetime(2010, 1, 1, 0, 0, 0, 1), datetime(2010, 1, 1)) - self.assertEqual(Micro(3) + Micro(2), Micro(5)) - self.assertEqual(Micro(3) - Micro(2), Micro()) + assert Micro(3) + Micro(2) == Micro(5) + assert Micro(3) - Micro(2) == Micro() def test_NanosecondGeneric(self): timestamp = Timestamp(datetime(2010, 1, 1)) - self.assertEqual(timestamp.nanosecond, 0) + assert timestamp.nanosecond == 0 result = timestamp + Nano(10) - self.assertEqual(result.nanosecond, 10) + assert result.nanosecond == 10 
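The tick hunks above reduce to simple arithmetic identities. As a standalone sketch in the converted pytest style (not part of the patch; only public pandas offsets are assumed, and every value is taken from the hunks above), the same checks run directly:

    # Minimal sketch, not part of the patch: tick arithmetic from the hunks
    # above, expressed with the plain asserts this change migrates to.
    from pandas import Timestamp
    from pandas.tseries.offsets import Hour, Minute, Second, Micro, Nano

    assert Hour(3) + Hour(2) == Hour(5)
    assert Minute(3) - Minute(2) == Minute()
    assert Second(3) + Second(2) == Second(5)
    assert Micro(3) - Micro(2) == Micro()

    # Nano shifts a Timestamp by whole nanoseconds in either operand order.
    assert (Timestamp('2010-01-01') + Nano(10)).nanosecond == 10
    assert (Nano(10) + Timestamp('2010-01-01')).nanosecond == 10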
reverse_result = Nano(10) + timestamp - self.assertEqual(reverse_result.nanosecond, 10) + assert reverse_result.nanosecond == 10 def test_Nanosecond(self): timestamp = Timestamp(datetime(2010, 1, 1)) @@ -4477,29 +4439,29 @@ def test_Nanosecond(self): assertEq(2 * Nano(), timestamp, timestamp + np.timedelta64(2, 'ns')) assertEq(-1 * Nano(), timestamp + np.timedelta64(1, 'ns'), timestamp) - self.assertEqual(Nano(3) + Nano(2), Nano(5)) - self.assertEqual(Nano(3) - Nano(2), Nano()) + assert Nano(3) + Nano(2) == Nano(5) + assert Nano(3) - Nano(2) == Nano() # GH9284 - self.assertEqual(Nano(1) + Nano(10), Nano(11)) - self.assertEqual(Nano(5) + Micro(1), Nano(1005)) - self.assertEqual(Micro(5) + Nano(1), Nano(5001)) + assert Nano(1) + Nano(10) == Nano(11) + assert Nano(5) + Micro(1) == Nano(1005) + assert Micro(5) + Nano(1) == Nano(5001) def test_tick_zero(self): for t1 in self.ticks: for t2 in self.ticks: - self.assertEqual(t1(0), t2(0)) - self.assertEqual(t1(0) + t2(0), t1(0)) + assert t1(0) == t2(0) + assert t1(0) + t2(0) == t1(0) if t1 is not Nano: - self.assertEqual(t1(2) + t2(0), t1(2)) + assert t1(2) + t2(0) == t1(2) if t1 is Nano: - self.assertEqual(t1(2) + Nano(0), t1(2)) + assert t1(2) + Nano(0) == t1(2) def test_tick_equalities(self): for t in self.ticks: - self.assertEqual(t(3), t(3)) - self.assertEqual(t(), t(1)) + assert t(3) == t(3) + assert t() == t(1) # not equals self.assertNotEqual(t(3), t(2)) @@ -4507,10 +4469,10 @@ def test_tick_equalities(self): def test_tick_operators(self): for t in self.ticks: - self.assertEqual(t(3) + t(2), t(5)) - self.assertEqual(t(3) - t(2), t(1)) - self.assertEqual(t(800) + t(300), t(1100)) - self.assertEqual(t(1000) - t(5), t(995)) + assert t(3) + t(2) == t(5) + assert t(3) - t(2) == t(1) + assert t(800) + t(300) == t(1100) + assert t(1000) - t(5) == t(995) def test_tick_offset(self): for t in self.ticks: @@ -4533,25 +4495,22 @@ def test_compare_ticks(self): class TestOffsetNames(tm.TestCase): def test_get_offset_name(self): - self.assertEqual(BDay().freqstr, 'B') - self.assertEqual(BDay(2).freqstr, '2B') - self.assertEqual(BMonthEnd().freqstr, 'BM') - self.assertEqual(Week(weekday=0).freqstr, 'W-MON') - self.assertEqual(Week(weekday=1).freqstr, 'W-TUE') - self.assertEqual(Week(weekday=2).freqstr, 'W-WED') - self.assertEqual(Week(weekday=3).freqstr, 'W-THU') - self.assertEqual(Week(weekday=4).freqstr, 'W-FRI') - - self.assertEqual(LastWeekOfMonth( - weekday=WeekDay.SUN).freqstr, "LWOM-SUN") - self.assertEqual( - makeFY5253LastOfMonthQuarter(weekday=1, startingMonth=3, - qtr_with_extra_week=4).freqstr, - "REQ-L-MAR-TUE-4") - self.assertEqual( - makeFY5253NearestEndMonthQuarter(weekday=1, startingMonth=3, - qtr_with_extra_week=3).freqstr, - "REQ-N-MAR-TUE-3") + assert BDay().freqstr == 'B' + assert BDay(2).freqstr == '2B' + assert BMonthEnd().freqstr == 'BM' + assert Week(weekday=0).freqstr == 'W-MON' + assert Week(weekday=1).freqstr == 'W-TUE' + assert Week(weekday=2).freqstr == 'W-WED' + assert Week(weekday=3).freqstr == 'W-THU' + assert Week(weekday=4).freqstr == 'W-FRI' + + assert LastWeekOfMonth(weekday=WeekDay.SUN).freqstr == "LWOM-SUN" + assert (makeFY5253LastOfMonthQuarter( + weekday=1, startingMonth=3, + qtr_with_extra_week=4).freqstr == "REQ-L-MAR-TUE-4") + assert (makeFY5253NearestEndMonthQuarter( + weekday=1, startingMonth=3, + qtr_with_extra_week=3).freqstr == "REQ-N-MAR-TUE-3") def test_get_offset(): @@ -4594,9 +4553,9 @@ class TestParseTimeString(tm.TestCase): def test_parse_time_string(self): (date, parsed, reso) = 
parse_time_string('4Q1984') (date_lower, parsed_lower, reso_lower) = parse_time_string('4q1984') - self.assertEqual(date, date_lower) - self.assertEqual(parsed, parsed_lower) - self.assertEqual(reso, reso_lower) + assert date == date_lower + assert parsed == parsed_lower + assert reso == reso_lower def test_parse_time_quarter_w_dash(self): # https://github.com/pandas-dev/pandas/issue/9688 @@ -4606,9 +4565,9 @@ def test_parse_time_quarter_w_dash(self): (date_dash, parsed_dash, reso_dash) = parse_time_string(dashed) (date, parsed, reso) = parse_time_string(normal) - self.assertEqual(date_dash, date) - self.assertEqual(parsed_dash, parsed) - self.assertEqual(reso_dash, reso) + assert date_dash == date + assert parsed_dash == parsed + assert reso_dash == reso pytest.raises(DateParseError, parse_time_string, "-2Q1992") pytest.raises(DateParseError, parse_time_string, "2-Q1992") @@ -4661,22 +4620,22 @@ def test_alias_equality(self): for k, v in compat.iteritems(_offset_map): if v is None: continue - self.assertEqual(k, v.copy()) + assert k == v.copy() def test_rule_code(self): lst = ['M', 'MS', 'BM', 'BMS', 'D', 'B', 'H', 'T', 'S', 'L', 'U'] for k in lst: - self.assertEqual(k, get_offset(k).rule_code) + assert k == get_offset(k).rule_code # should be cached - this is kind of an internals test... assert k in _offset_map - self.assertEqual(k, (get_offset(k) * 3).rule_code) + assert k == (get_offset(k) * 3).rule_code suffix_lst = ['MON', 'TUE', 'WED', 'THU', 'FRI', 'SAT', 'SUN'] base = 'W' for v in suffix_lst: alias = '-'.join([base, v]) - self.assertEqual(alias, get_offset(alias).rule_code) - self.assertEqual(alias, (get_offset(alias) * 5).rule_code) + assert alias == get_offset(alias).rule_code + assert alias == (get_offset(alias) * 5).rule_code suffix_lst = ['JAN', 'FEB', 'MAR', 'APR', 'MAY', 'JUN', 'JUL', 'AUG', 'SEP', 'OCT', 'NOV', 'DEC'] @@ -4684,15 +4643,15 @@ def test_rule_code(self): for base in base_lst: for v in suffix_lst: alias = '-'.join([base, v]) - self.assertEqual(alias, get_offset(alias).rule_code) - self.assertEqual(alias, (get_offset(alias) * 5).rule_code) + assert alias == get_offset(alias).rule_code + assert alias == (get_offset(alias) * 5).rule_code lst = ['M', 'D', 'B', 'H', 'T', 'S', 'L', 'U'] for k in lst: code, stride = get_freq_code('3' + k) assert isinstance(code, int) - self.assertEqual(stride, 3) - self.assertEqual(k, _get_freq_str(code)) + assert stride == 3 + assert k == _get_freq_str(code) def test_apply_ticks(): @@ -4804,7 +4763,7 @@ def test_str_for_named_is_name(self): _offset_map.clear() for name in names: offset = get_offset(name) - self.assertEqual(offset.freqstr, name) + assert offset.freqstr == name def get_utc_offset_hours(ts): @@ -4949,4 +4908,4 @@ def test_all_offset_classes(self): for offset, test_values in iteritems(tests): first = Timestamp(test_values[0], tz='US/Eastern') + offset() second = Timestamp(test_values[1], tz='US/Eastern') - self.assertEqual(first, second, msg=str(offset)) + assert first == second diff --git a/pandas/tests/tseries/test_timezones.py b/pandas/tests/tseries/test_timezones.py index 2c3aa03e85904..8b6774885c8b7 100644 --- a/pandas/tests/tseries/test_timezones.py +++ b/pandas/tests/tseries/test_timezones.py @@ -89,7 +89,7 @@ def test_utc_to_local_no_modify_explicit(self): # Values are unmodified tm.assert_numpy_array_equal(rng.asi8, rng_eastern.asi8) - self.assertEqual(rng_eastern.tz, self.tz('US/Eastern')) + assert rng_eastern.tz == self.tz('US/Eastern') def test_localize_utc_conversion(self): # Localizing to time 
zone should: @@ -129,16 +129,16 @@ def test_timestamp_tz_localize(self): result = stamp.tz_localize(self.tzstr('US/Eastern')) expected = Timestamp('3/11/2012 04:00', tz=self.tzstr('US/Eastern')) - self.assertEqual(result.hour, expected.hour) - self.assertEqual(result, expected) + assert result.hour == expected.hour + assert result == expected def test_timestamp_tz_localize_explicit(self): stamp = Timestamp('3/11/2012 04:00') result = stamp.tz_localize(self.tz('US/Eastern')) expected = Timestamp('3/11/2012 04:00', tz=self.tz('US/Eastern')) - self.assertEqual(result.hour, expected.hour) - self.assertEqual(result, expected) + assert result.hour == expected.hour + assert result == expected def test_timestamp_constructed_by_date_and_tz(self): # Fix Issue 2993, Timestamp cannot be constructed by datetime.date @@ -147,8 +147,8 @@ def test_timestamp_constructed_by_date_and_tz(self): result = Timestamp(date(2012, 3, 11), tz=self.tzstr('US/Eastern')) expected = Timestamp('3/11/2012', tz=self.tzstr('US/Eastern')) - self.assertEqual(result.hour, expected.hour) - self.assertEqual(result, expected) + assert result.hour == expected.hour + assert result == expected def test_timestamp_constructed_by_date_and_tz_explicit(self): # Fix Issue 2993, Timestamp cannot be constructed by datetime.date @@ -157,8 +157,8 @@ def test_timestamp_constructed_by_date_and_tz_explicit(self): result = Timestamp(date(2012, 3, 11), tz=self.tz('US/Eastern')) expected = Timestamp('3/11/2012', tz=self.tz('US/Eastern')) - self.assertEqual(result.hour, expected.hour) - self.assertEqual(result, expected) + assert result.hour == expected.hour + assert result == expected def test_timestamp_constructor_near_dst_boundary(self): # GH 11481 & 15777 @@ -212,7 +212,7 @@ def test_timestamp_to_datetime_tzoffset(self): tzinfo = tzoffset(None, 7200) expected = Timestamp('3/11/2012 04:00', tz=tzinfo) result = Timestamp(expected.to_pydatetime()) - self.assertEqual(expected, result) + assert expected == result def test_timedelta_push_over_dst_boundary(self): # #1389 @@ -225,7 +225,7 @@ def test_timedelta_push_over_dst_boundary(self): # spring forward, + "7" hours expected = Timestamp('3/11/2012 05:00', tz=self.tzstr('US/Eastern')) - self.assertEqual(result, expected) + assert result == expected def test_timedelta_push_over_dst_boundary_explicit(self): # #1389 @@ -238,7 +238,7 @@ def test_timedelta_push_over_dst_boundary_explicit(self): # spring forward, + "7" hours expected = Timestamp('3/11/2012 05:00', tz=self.tz('US/Eastern')) - self.assertEqual(result, expected) + assert result == expected def test_tz_localize_dti(self): dti = DatetimeIndex(start='1/1/2005', end='1/1/2005 0:00:30.256', @@ -278,31 +278,31 @@ def test_astimezone(self): utc = Timestamp('3/11/2012 22:00', tz='UTC') expected = utc.tz_convert(self.tzstr('US/Eastern')) result = utc.astimezone(self.tzstr('US/Eastern')) - self.assertEqual(expected, result) + assert expected == result assert isinstance(result, Timestamp) def test_create_with_tz(self): stamp = Timestamp('3/11/2012 05:00', tz=self.tzstr('US/Eastern')) - self.assertEqual(stamp.hour, 5) + assert stamp.hour == 5 rng = date_range('3/11/2012 04:00', periods=10, freq='H', tz=self.tzstr('US/Eastern')) - self.assertEqual(stamp, rng[1]) + assert stamp == rng[1] utc_stamp = Timestamp('3/11/2012 05:00', tz='utc') assert utc_stamp.tzinfo is pytz.utc - self.assertEqual(utc_stamp.hour, 5) + assert utc_stamp.hour == 5 stamp = Timestamp('3/11/2012 05:00').tz_localize('utc') - self.assertEqual(utc_stamp.hour, 5) + assert utc_stamp.hour == 
5 def test_create_with_fixed_tz(self): off = FixedOffset(420, '+07:00') start = datetime(2012, 3, 11, 5, 0, 0, tzinfo=off) end = datetime(2012, 6, 11, 5, 0, 0, tzinfo=off) rng = date_range(start=start, end=end) - self.assertEqual(off, rng.tz) + assert off == rng.tz rng2 = date_range(start, periods=len(rng), tz=off) tm.assert_index_equal(rng, rng2) @@ -316,10 +316,10 @@ def test_create_with_fixedoffset_noname(self): start = datetime(2012, 3, 11, 5, 0, 0, tzinfo=off) end = datetime(2012, 6, 11, 5, 0, 0, tzinfo=off) rng = date_range(start=start, end=end) - self.assertEqual(off, rng.tz) + assert off == rng.tz idx = Index([start, end]) - self.assertEqual(off, idx.tz) + assert off == idx.tz def test_date_range_localize(self): rng = date_range('3/11/2012 03:00', periods=15, freq='H', @@ -335,9 +335,9 @@ def test_date_range_localize(self): val = rng[0] exp = Timestamp('3/11/2012 03:00', tz='US/Eastern') - self.assertEqual(val.hour, 3) - self.assertEqual(exp.hour, 3) - self.assertEqual(val, exp) # same UTC value + assert val.hour == 3 + assert exp.hour == 3 + assert val == exp # same UTC value tm.assert_index_equal(rng[:2], rng2) # Right before the DST transition @@ -347,15 +347,15 @@ def test_date_range_localize(self): tz='US/Eastern') tm.assert_index_equal(rng, rng2) exp = Timestamp('3/11/2012 00:00', tz='US/Eastern') - self.assertEqual(exp.hour, 0) - self.assertEqual(rng[0], exp) + assert exp.hour == 0 + assert rng[0] == exp exp = Timestamp('3/11/2012 01:00', tz='US/Eastern') - self.assertEqual(exp.hour, 1) - self.assertEqual(rng[1], exp) + assert exp.hour == 1 + assert rng[1] == exp rng = date_range('3/11/2012 00:00', periods=10, freq='H', tz='US/Eastern') - self.assertEqual(rng[2].hour, 3) + assert rng[2].hour == 3 def test_utc_box_timestamp_and_localize(self): rng = date_range('3/11/2012', '3/12/2012', freq='H', tz='utc') @@ -365,8 +365,8 @@ def test_utc_box_timestamp_and_localize(self): expected = rng[-1].astimezone(tz) stamp = rng_eastern[-1] - self.assertEqual(stamp, expected) - self.assertEqual(stamp.tzinfo, expected.tzinfo) + assert stamp == expected + assert stamp.tzinfo == expected.tzinfo # right tzinfo rng = date_range('3/13/2012', '3/14/2012', freq='H', tz='utc') @@ -383,7 +383,7 @@ def test_timestamp_tz_convert(self): conv = idx[0].tz_convert(self.tzstr('US/Pacific')) expected = idx.tz_convert(self.tzstr('US/Pacific'))[0] - self.assertEqual(conv, expected) + assert conv == expected def test_pass_dates_localize_to_utc(self): strdates = ['1/1/2012', '3/1/2012', '4/1/2012'] @@ -393,7 +393,7 @@ def test_pass_dates_localize_to_utc(self): fromdates = DatetimeIndex(strdates, tz=self.tzstr('US/Eastern')) - self.assertEqual(conv.tz, fromdates.tz) + assert conv.tz == fromdates.tz tm.assert_numpy_array_equal(conv.values, fromdates.values) def test_field_access_localize(self): @@ -560,12 +560,12 @@ def f(): times = date_range("2013-10-26 23:00", "2013-10-27 01:00", freq="H", tz=tz, ambiguous='infer') - self.assertEqual(times[0], Timestamp('2013-10-26 23:00', tz=tz, - freq="H")) + assert times[0] == Timestamp('2013-10-26 23:00', tz=tz, freq="H") + if dateutil.__version__ != LooseVersion('2.6.0'): - # GH 14621 - self.assertEqual(times[-1], Timestamp('2013-10-27 01:00:00+0000', - tz=tz, freq="H")) + # see gh-14621 + assert times[-1] == Timestamp('2013-10-27 01:00:00+0000', + tz=tz, freq="H") def test_ambiguous_nat(self): tz = self.tz('US/Eastern') @@ -595,10 +595,10 @@ def f(): pytest.raises(pytz.AmbiguousTimeError, f) result = t.tz_localize('US/Central', ambiguous=True) - self.assertEqual(result, 
                         expected0)
+        assert result == expected0

         result = t.tz_localize('US/Central', ambiguous=False)
-        self.assertEqual(result, expected1)
+        assert result == expected1

         s = Series([t])
         expected0 = Series([expected0])
@@ -674,8 +674,8 @@ def test_take_dont_lose_meta(self):
         rng = date_range('1/1/2000', periods=20, tz=self.tzstr('US/Eastern'))

         result = rng.take(lrange(5))
-        self.assertEqual(result.tz, rng.tz)
-        self.assertEqual(result.freq, rng.freq)
+        assert result.tz == rng.tz
+        assert result.freq == rng.freq

     def test_index_with_timezone_repr(self):
         rng = date_range('4/13/2010', '5/6/2010')
@@ -694,14 +694,14 @@ def test_index_astype_asobject_tzinfos(self):
         objs = rng.asobject
         for i, x in enumerate(objs):
             exval = rng[i]
-            self.assertEqual(x, exval)
-            self.assertEqual(x.tzinfo, exval.tzinfo)
+            assert x == exval
+            assert x.tzinfo == exval.tzinfo

         objs = rng.astype(object)
         for i, x in enumerate(objs):
             exval = rng[i]
-            self.assertEqual(x, exval)
-            self.assertEqual(x.tzinfo, exval.tzinfo)
+            assert x == exval
+            assert x.tzinfo == exval.tzinfo

     def test_localized_at_time_between_time(self):
         from datetime import time
@@ -736,7 +736,7 @@ def test_fixed_offset(self):
                  datetime(2000, 1, 2, tzinfo=fixed_off),
                  datetime(2000, 1, 3, tzinfo=fixed_off)]
         result = to_datetime(dates)
-        self.assertEqual(result.tz, fixed_off)
+        assert result.tz == fixed_off

     def test_fixedtz_topydatetime(self):
         dates = np.array([datetime(2000, 1, 1, tzinfo=fixed_off),
@@ -796,7 +796,7 @@ def test_frame_no_datetime64_dtype(self):
         dr_tz = dr.tz_localize(self.tzstr('US/Eastern'))
         e = DataFrame({'A': 'foo', 'B': dr_tz}, index=dr)
         tz_expected = DatetimeTZDtype('ns', dr_tz.tzinfo)
-        self.assertEqual(e['B'].dtype, tz_expected)
+        assert e['B'].dtype == tz_expected

         # GH 2810 (with timezones)
         datetimes_naive = [ts.to_pydatetime() for ts in dr]
@@ -830,7 +830,7 @@ def test_shift_localized(self):
         dr_tz = dr.tz_localize(self.tzstr('US/Eastern'))

         result = dr_tz.shift(1, '10T')
-        self.assertEqual(result.tz, dr_tz.tz)
+        assert result.tz == dr_tz.tz

     def test_tz_aware_asfreq(self):
         dr = date_range('2011-12-01', '2012-07-20', freq='D',
@@ -870,8 +870,8 @@ def test_convert_datetime_list(self):
                         tz=self.tzstr('US/Eastern'), name='foo')
         dr2 = DatetimeIndex(list(dr), name='foo')
         tm.assert_index_equal(dr, dr2)
-        self.assertEqual(dr.tz, dr2.tz)
-        self.assertEqual(dr2.name, 'foo')
+        assert dr.tz == dr2.tz
+        assert dr2.name == 'foo'

     def test_frame_from_records_utc(self):
         rec = {'datum': 1.5,
@@ -886,7 +886,7 @@ def test_frame_reset_index(self):
         roundtripped = df.reset_index().set_index('index')
         xp = df.index.tz
         rs = roundtripped.index.tz
-        self.assertEqual(xp, rs)
+        assert xp == rs

     def test_dateutil_tzoffset_support(self):
         from dateutil.tz import tzoffset
@@ -896,7 +896,7 @@ def test_dateutil_tzoffset_support(self):
                  datetime(2012, 5, 11, 12, tzinfo=tzinfo)]
         series = Series(data=values, index=index)

-        self.assertEqual(series.index.tz, tzinfo)
+        assert series.index.tz == tzinfo

         # it works! #2443
         repr(series.index[0])
@@ -909,7 +909,7 @@ def test_getitem_pydatetime_tz(self):
                                 tz=self.tzstr('Europe/Berlin'))
         time_datetime = self.localize(
             self.tz('Europe/Berlin'), datetime(2012, 12, 24, 17, 0))
-        self.assertEqual(ts[time_pandas], ts[time_datetime])
+        assert ts[time_pandas] == ts[time_datetime]

     def test_index_drop_dont_lose_tz(self):
         # #2621
@@ -977,12 +977,12 @@ def test_utc_with_system_utc(self):
         # from system utc to real utc
         ts = Timestamp('2001-01-05 11:56', tz=maybe_get_tz('dateutil/UTC'))
         # check that the time hasn't changed.
-        self.assertEqual(ts, ts.tz_convert(dateutil.tz.tzutc()))
+        assert ts == ts.tz_convert(dateutil.tz.tzutc())

         # from system utc to real utc
         ts = Timestamp('2001-01-05 11:56', tz=maybe_get_tz('dateutil/UTC'))
         # check that the time hasn't changed.
-        self.assertEqual(ts, ts.tz_convert(dateutil.tz.tzutc()))
+        assert ts == ts.tz_convert(dateutil.tz.tzutc())

     def test_tz_convert_hour_overflow_dst(self):
         # Regression test for:
@@ -1140,16 +1140,16 @@ def test_tslib_tz_convert_dst(self):
     def test_tzlocal(self):
         # GH 13583
         ts = Timestamp('2011-01-01', tz=dateutil.tz.tzlocal())
-        self.assertEqual(ts.tz, dateutil.tz.tzlocal())
+        assert ts.tz == dateutil.tz.tzlocal()
         assert "tz='tzlocal()')" in repr(ts)

         tz = tslib.maybe_get_tz('tzlocal()')
-        self.assertEqual(tz, dateutil.tz.tzlocal())
+        assert tz == dateutil.tz.tzlocal()

         # get offset using normal datetime for test
         offset = dateutil.tz.tzlocal().utcoffset(datetime(2011, 1, 1))
         offset = offset.total_seconds() * 1000000000
-        self.assertEqual(ts.value + offset, Timestamp('2011-01-01').value)
+        assert ts.value + offset == Timestamp('2011-01-01').value

     def test_tz_localize_tzlocal(self):
         # GH 13583
@@ -1208,26 +1208,26 @@ def test_replace(self):
         dt = Timestamp('2016-01-01 09:00:00')
         result = dt.replace(hour=0)
         expected = Timestamp('2016-01-01 00:00:00')
-        self.assertEqual(result, expected)
+        assert result == expected

         for tz in self.timezones:
             dt = Timestamp('2016-01-01 09:00:00', tz=tz)
             result = dt.replace(hour=0)
             expected = Timestamp('2016-01-01 00:00:00', tz=tz)
-            self.assertEqual(result, expected)
+            assert result == expected

             # we preserve nanoseconds
             dt = Timestamp('2016-01-01 09:00:00.000000123', tz=tz)
             result = dt.replace(hour=0)
             expected = Timestamp('2016-01-01 00:00:00.000000123', tz=tz)
-            self.assertEqual(result, expected)
+            assert result == expected

             # test all
             dt = Timestamp('2016-01-01 09:00:00.000000123', tz=tz)
             result = dt.replace(year=2015, month=2, day=2, hour=0, minute=5,
                                 second=5, microsecond=5, nanosecond=5)
             expected = Timestamp('2015-02-02 00:05:05.000005005', tz=tz)
-            self.assertEqual(result, expected)
+            assert result == expected

             # error
             def f():
@@ -1240,7 +1240,7 @@ def f():

         # assert conversion to naive is the same as replacing tzinfo with None
         dt = Timestamp('2013-11-03 01:59:59.999999-0400', tz='US/Eastern')
-        self.assertEqual(dt.tz_localize(None), dt.replace(tzinfo=None))
+        assert dt.tz_localize(None) == dt.replace(tzinfo=None)

     def test_ambiguous_compat(self):
         # validate that pytz and dateutil are compat for dst
@@ -1254,31 +1254,31 @@ def test_ambiguous_compat(self):
                        .tz_localize(pytz_zone, ambiguous=0))
         result_dateutil = (Timestamp('2013-10-27 01:00:00')
                            .tz_localize(dateutil_zone, ambiguous=0))
-        self.assertEqual(result_pytz.value, result_dateutil.value)
-        self.assertEqual(result_pytz.value, 1382835600000000000)
+        assert result_pytz.value == result_dateutil.value
+        assert result_pytz.value == 1382835600000000000

         # dateutil 2.6 buggy w.r.t. ambiguous=0
         if dateutil.__version__ != LooseVersion('2.6.0'):
-            # GH 14621
-            # https://github.com/dateutil/dateutil/issues/321
-            self.assertEqual(result_pytz.to_pydatetime().tzname(),
-                             result_dateutil.to_pydatetime().tzname())
-            self.assertEqual(str(result_pytz), str(result_dateutil))
+            # see gh-14621
+            # see https://github.com/dateutil/dateutil/issues/321
+            assert (result_pytz.to_pydatetime().tzname() ==
+                    result_dateutil.to_pydatetime().tzname())
+            assert str(result_pytz) == str(result_dateutil)

         # 1 hour difference
         result_pytz = (Timestamp('2013-10-27 01:00:00')
                        .tz_localize(pytz_zone, ambiguous=1))
         result_dateutil = (Timestamp('2013-10-27 01:00:00')
                            .tz_localize(dateutil_zone, ambiguous=1))
-        self.assertEqual(result_pytz.value, result_dateutil.value)
-        self.assertEqual(result_pytz.value, 1382832000000000000)
+        assert result_pytz.value == result_dateutil.value
+        assert result_pytz.value == 1382832000000000000

         # dateutil < 2.6 is buggy w.r.t. ambiguous timezones
         if dateutil.__version__ > LooseVersion('2.5.3'):
-            # GH 14621
-            self.assertEqual(str(result_pytz), str(result_dateutil))
-            self.assertEqual(result_pytz.to_pydatetime().tzname(),
-                             result_dateutil.to_pydatetime().tzname())
+            # see gh-14621
+            assert str(result_pytz) == str(result_dateutil)
+            assert (result_pytz.to_pydatetime().tzname() ==
+                    result_dateutil.to_pydatetime().tzname())

     def test_index_equals_with_tz(self):
         left = date_range('1/1/2011', periods=100, freq='H', tz='utc')
@@ -1319,17 +1319,17 @@ def test_series_frame_tz_localize(self):

         ts = Series(1, index=rng)
         result = ts.tz_localize('utc')
-        self.assertEqual(result.index.tz.zone, 'UTC')
+        assert result.index.tz.zone == 'UTC'

         df = DataFrame({'a': 1}, index=rng)
         result = df.tz_localize('utc')
         expected = DataFrame({'a': 1}, rng.tz_localize('UTC'))
-        self.assertEqual(result.index.tz.zone, 'UTC')
+        assert result.index.tz.zone == 'UTC'
         assert_frame_equal(result, expected)

         df = df.T
         result = df.tz_localize('utc', axis=1)
-        self.assertEqual(result.columns.tz.zone, 'UTC')
+        assert result.columns.tz.zone == 'UTC'
         assert_frame_equal(result, expected.T)

         # Can't localize if already tz-aware
@@ -1343,17 +1343,17 @@ def test_series_frame_tz_convert(self):

         ts = Series(1, index=rng)
         result = ts.tz_convert('Europe/Berlin')
-        self.assertEqual(result.index.tz.zone, 'Europe/Berlin')
+        assert result.index.tz.zone == 'Europe/Berlin'

         df = DataFrame({'a': 1}, index=rng)
         result = df.tz_convert('Europe/Berlin')
         expected = DataFrame({'a': 1}, rng.tz_convert('Europe/Berlin'))
-        self.assertEqual(result.index.tz.zone, 'Europe/Berlin')
+        assert result.index.tz.zone == 'Europe/Berlin'
         assert_frame_equal(result, expected)

         df = df.T
         result = df.tz_convert('Europe/Berlin', axis=1)
-        self.assertEqual(result.columns.tz.zone, 'Europe/Berlin')
+        assert result.columns.tz.zone == 'Europe/Berlin'
         assert_frame_equal(result, expected.T)

         # can't convert tz-naive
@@ -1398,11 +1398,11 @@ def test_join_utc_convert(self):
         for how in ['inner', 'outer', 'left', 'right']:
             result = left.join(left[:-5], how=how)
             assert isinstance(result, DatetimeIndex)
-            self.assertEqual(result.tz, left.tz)
+            assert result.tz == left.tz

             result = left.join(right[:-5], how=how)
             assert isinstance(result, DatetimeIndex)
-            self.assertEqual(result.tz.zone, 'UTC')
+            assert result.tz.zone == 'UTC'

     def test_join_aware(self):
         rng = date_range('1/1/2011', periods=10, freq='H')
@@ -1443,30 +1443,30 @@ def test_align_aware(self):
         df1 = DataFrame(np.random.randn(len(idx1), 3), idx1)
         df2 = DataFrame(np.random.randn(len(idx2), 3), idx2)
         new1, new2 = df1.align(df2)
-        self.assertEqual(df1.index.tz, new1.index.tz)
-        self.assertEqual(df2.index.tz, new2.index.tz)
+        assert df1.index.tz == new1.index.tz
+        assert df2.index.tz == new2.index.tz

         # # different timezones convert to UTC

         # frame
         df1_central = df1.tz_convert('US/Central')
         new1, new2 = df1.align(df1_central)
-        self.assertEqual(new1.index.tz, pytz.UTC)
-        self.assertEqual(new2.index.tz, pytz.UTC)
+        assert new1.index.tz == pytz.UTC
+        assert new2.index.tz == pytz.UTC

         # series
         new1, new2 = df1[0].align(df1_central[0])
-        self.assertEqual(new1.index.tz, pytz.UTC)
-        self.assertEqual(new2.index.tz, pytz.UTC)
+        assert new1.index.tz == pytz.UTC
+        assert new2.index.tz == pytz.UTC

         # combination
         new1, new2 = df1.align(df1_central[0], axis=0)
-        self.assertEqual(new1.index.tz, pytz.UTC)
-        self.assertEqual(new2.index.tz, pytz.UTC)
+        assert new1.index.tz == pytz.UTC
+        assert new2.index.tz == pytz.UTC

         df1[0].align(df1_central, axis=0)
-        self.assertEqual(new1.index.tz, pytz.UTC)
-        self.assertEqual(new2.index.tz, pytz.UTC)
+        assert new1.index.tz == pytz.UTC
+        assert new2.index.tz == pytz.UTC

     def test_append_aware(self):
         rng1 = date_range('1/1/2011 01:00', periods=1, freq='H',
@@ -1481,7 +1481,7 @@ def test_append_aware(self):
                                  tz='US/Eastern')
         exp = Series([1, 2], index=exp_index)
         assert_series_equal(ts_result, exp)
-        self.assertEqual(ts_result.index.tz, rng1.tz)
+        assert ts_result.index.tz == rng1.tz

         rng1 = date_range('1/1/2011 01:00', periods=1, freq='H', tz='UTC')
         rng2 = date_range('1/1/2011 02:00', periods=1, freq='H', tz='UTC')
@@ -1494,7 +1494,7 @@ def test_append_aware(self):
         exp = Series([1, 2], index=exp_index)
         assert_series_equal(ts_result, exp)
         utc = rng1.tz
-        self.assertEqual(utc, ts_result.index.tz)
+        assert utc == ts_result.index.tz

         # GH 7795
         # different tz coerces to object dtype, not UTC
@@ -1525,7 +1525,7 @@ def test_append_dst(self):
                                  tz='US/Eastern')
         exp = Series([1, 2, 3, 10, 11, 12], index=exp_index)
         assert_series_equal(ts_result, exp)
-        self.assertEqual(ts_result.index.tz, rng1.tz)
+        assert ts_result.index.tz == rng1.tz

     def test_append_aware_naive(self):
         rng1 = date_range('1/1/2011 01:00', periods=1, freq='H')
@@ -1584,7 +1584,7 @@ def test_arith_utc_convert(self):
         uts2 = ts2.tz_convert('utc')

         expected = uts1 + uts2
-        self.assertEqual(result.index.tz, pytz.UTC)
+        assert result.index.tz == pytz.UTC
         assert_series_equal(result, expected)

     def test_intersection(self):
@@ -1593,9 +1593,9 @@
         left = rng[10:90][::-1]
         right = rng[20:80][::-1]

-        self.assertEqual(left.tz, rng.tz)
+        assert left.tz == rng.tz
         result = left.intersection(right)
-        self.assertEqual(result.tz, left.tz)
+        assert result.tz == left.tz

     def test_timestamp_equality_different_timezones(self):
         utc_range = date_range('1/1/2000', periods=20, tz='UTC')
         eastern_range = utc_range.tz_convert('US/Eastern')
         berlin_range = utc_range.tz_convert('Europe/Berlin')

         for a, b, c in zip(utc_range, eastern_range, berlin_range):
-            self.assertEqual(a, b)
-            self.assertEqual(b, c)
-            self.assertEqual(a, c)
+            assert a == b
+            assert b == c
+            assert a == c

         assert (utc_range == eastern_range).all()
         assert (utc_range == berlin_range).all()
@@ -1670,7 +1670,7 @@ def test_normalize_tz_local(self):
     def test_tzaware_offset(self):
         dates = date_range('2012-11-01', periods=3, tz='US/Pacific')
         offset = dates + offsets.Hour(5)
-        self.assertEqual(dates[0] + offsets.Hour(5), offset[0])
+        assert dates[0] + offsets.Hour(5) == offset[0]

         # GH 6818
         for tz in ['UTC', 'US/Pacific', 'Asia/Tokyo']: