From 9480916147507449c96727c68ef28f0ee4ddade8 Mon Sep 17 00:00:00 2001 From: Tom Augspurger Date: Thu, 13 Sep 2018 08:41:25 -0500 Subject: [PATCH 01/38] TST: Fail on warning --- doc/source/contributing.rst | 26 +++ pandas/core/algorithms.py | 5 +- pandas/core/arrays/integer.py | 7 +- pandas/core/window.py | 2 + pandas/tests/api/test_api.py | 17 +- pandas/tests/api/test_types.py | 4 +- pandas/tests/arithmetic/test_datetime64.py | 4 + pandas/tests/computation/test_eval.py | 20 +- pandas/tests/dtypes/test_generic.py | 3 +- pandas/tests/dtypes/test_inference.py | 5 +- pandas/tests/dtypes/test_missing.py | 3 +- pandas/tests/extension/base/dtype.py | 8 +- pandas/tests/frame/test_analytics.py | 4 +- pandas/tests/frame/test_apply.py | 1 + pandas/tests/frame/test_indexing.py | 80 +++++++- pandas/tests/frame/test_operators.py | 6 +- pandas/tests/frame/test_query_eval.py | 1 + pandas/tests/frame/test_reshape.py | 3 +- pandas/tests/frame/test_subclass.py | 40 ++-- pandas/tests/generic/test_generic.py | 13 +- pandas/tests/generic/test_panel.py | 4 +- pandas/tests/groupby/test_groupby.py | 83 ++++---- pandas/tests/groupby/test_grouping.py | 37 ++-- .../tests/indexes/datetimes/test_datetime.py | 3 +- pandas/tests/indexes/datetimes/test_ops.py | 2 +- pandas/tests/indexes/datetimes/test_tools.py | 17 +- pandas/tests/indexes/multi/test_duplicates.py | 5 +- pandas/tests/indexes/test_base.py | 2 + .../indexes/timedeltas/test_timedelta.py | 4 +- .../indexing/test_chaining_and_caching.py | 18 +- pandas/tests/indexing/test_floats.py | 14 +- pandas/tests/indexing/test_indexing_slow.py | 1 + pandas/tests/indexing/test_ix.py | 16 +- pandas/tests/indexing/test_multiindex.py | 179 +++++++++--------- pandas/tests/indexing/test_panel.py | 2 + pandas/tests/indexing/test_partial.py | 3 + pandas/tests/io/formats/test_to_excel.py | 4 +- pandas/tests/io/sas/test_sas7bdat.py | 2 + pandas/tests/io/test_common.py | 2 + pandas/tests/io/test_compression.py | 6 +- pandas/tests/io/test_excel.py | 1 + pandas/tests/io/test_pytables.py | 47 ++++- pandas/tests/io/test_sql.py | 9 +- pandas/tests/io/test_stata.py | 24 ++- pandas/tests/plotting/test_frame.py | 9 +- pandas/tests/plotting/test_hist_method.py | 17 +- pandas/tests/plotting/test_misc.py | 2 + pandas/tests/reshape/merge/test_join.py | 1 + pandas/tests/reshape/test_concat.py | 111 +++++------ pandas/tests/reshape/test_reshape.py | 9 +- pandas/tests/series/indexing/test_datetime.py | 2 + pandas/tests/series/indexing/test_indexing.py | 2 + pandas/tests/series/test_api.py | 9 +- pandas/tests/series/test_constructors.py | 2 + pandas/tests/series/test_dtypes.py | 6 +- pandas/tests/sparse/frame/test_frame.py | 28 ++- .../tests/sparse/frame/test_to_from_scipy.py | 11 +- pandas/tests/sparse/series/test_series.py | 3 + pandas/tests/test_downstream.py | 10 + pandas/tests/test_errors.py | 3 +- pandas/tests/test_multilevel.py | 10 +- pandas/tests/test_nanops.py | 7 +- pandas/tests/test_panel.py | 4 +- pandas/tests/test_resample.py | 28 +-- pandas/tests/test_window.py | 9 + pandas/tests/tseries/offsets/test_offsets.py | 3 + pandas/tests/tslibs/test_parsing.py | 3 + pandas/tests/util/test_hashing.py | 5 +- pandas/tseries/holiday.py | 4 +- setup.cfg | 4 +- 70 files changed, 668 insertions(+), 371 deletions(-) diff --git a/doc/source/contributing.rst b/doc/source/contributing.rst index 60bfd07961b38..532621dc016c5 100644 --- a/doc/source/contributing.rst +++ b/doc/source/contributing.rst @@ -859,6 +859,32 @@ preferred if the inputs or logic are simple, with Hypothesis tests reserved for cases 
with complex logic or where there are too many combinations of options or subtle interactions to test (or think of!) all of them. +.. _warnings: + +Warnings +~~~~~~~~ + +By default, the pandas test suite will fail if any unhandled warnings are emitted. + +If your change involves checking that a warning is actually emitted, use +``tm.assert_produces_warning(ExpectedWarning)``. We prefer this to pytest's +``pytest.warns`` context manager because ours checks that the warning's stacklevel +is set correctly. + +If you have a test that would emit a warning, but you aren't actually testing the +warning itself (say because it's going to be removed in the future, or because we're +matching a third-party library's behavior), then use ``pytest.mark.filterwarnings`` to +ignore the warning. + +``` +@pytest.mark.filterwarnings("ignore:msg:category") +def test_thing(self): + ... +``` + +If the test generates a warning of class ``category`` whose message starts +with ``msg``, the warning will be ignored and the test will pass. + Running the test suite ---------------------- diff --git a/pandas/core/algorithms.py b/pandas/core/algorithms.py index e5b6c84d37541..d39e9e08e2947 100644 --- a/pandas/core/algorithms.py +++ b/pandas/core/algorithms.py @@ -3,7 +3,7 @@ intended for public consumption """ from __future__ import division -from warnings import warn, catch_warnings +from warnings import warn, catch_warnings, simplefilter from textwrap import dedent import numpy as np @@ -91,7 +91,8 @@ def _ensure_data(values, dtype=None): # ignore the fact that we are casting to float # which discards complex parts - with catch_warnings(record=True): + with catch_warnings(): + simplefilter("ignore", np.ComplexWarning) values = ensure_float64(values) return values, 'float64', 'float64' diff --git a/pandas/core/arrays/integer.py b/pandas/core/arrays/integer.py index aebc7a6a04ffc..a04366332b419 100644 --- a/pandas/core/arrays/integer.py +++ b/pandas/core/arrays/integer.py @@ -5,7 +5,7 @@ from pandas._libs.lib import infer_dtype from pandas.util._decorators import cache_readonly -from pandas.compat import u, range +from pandas.compat import u, range, string_types from pandas.compat import set_function_name from pandas.core.dtypes.cast import astype_nansafe @@ -147,6 +147,11 @@ def coerce_to_array(values, dtype, mask=None, copy=False): dtype = values.dtype if dtype is not None: + if (isinstance(dtype, string_types) and + (dtype.startswith("Int") or dtype.startswith("UInt"))): + # Avoid DeprecationWarning from NumPy about np.dtype("Int64") + # https://github.com/numpy/numpy/pull/7476 + dtype = dtype.lower() if not issubclass(type(dtype), _IntegerDtype): try: dtype = _dtypes[str(np.dtype(dtype))] diff --git a/pandas/core/window.py b/pandas/core/window.py index eed0e97f30dc9..66f48f403c941 100644 --- a/pandas/core/window.py +++ b/pandas/core/window.py @@ -2387,11 +2387,13 @@ def dataframe_from_int_dict(data, frame_template): if not arg2.columns.is_unique: raise ValueError("'arg2' columns are not unique") with warnings.catch_warnings(record=True): + warnings.simplefilter("ignore", RuntimeWarning) X, Y = arg1.align(arg2, join='outer') X = X + 0 * Y Y = Y + 0 * X with warnings.catch_warnings(record=True): + warnings.simplefilter("ignore", RuntimeWarning) res_columns = arg1.columns.union(arg2.columns) for col in res_columns: if col in X and col in Y: diff --git a/pandas/tests/api/test_api.py b/pandas/tests/api/test_api.py index bf9e14b427015..9f9cea237d161 100644 --- a/pandas/tests/api/test_api.py +++
b/pandas/tests/api/test_api.py @@ -1,6 +1,5 @@ # -*- coding: utf-8 -*- import sys -from warnings import catch_warnings import pytest import pandas as pd @@ -175,30 +174,30 @@ def test_get_store(self): class TestJson(object): + @pytest.mark.filterwarnings("ignore") def test_deprecation_access_func(self): - with catch_warnings(record=True): - pd.json.dumps([]) + pd.json.dumps([]) class TestParser(object): + @pytest.mark.filterwarnings("ignore") def test_deprecation_access_func(self): - with catch_warnings(record=True): - pd.parser.na_values + pd.parser.na_values class TestLib(object): + @pytest.mark.filterwarnings("ignore") def test_deprecation_access_func(self): - with catch_warnings(record=True): - pd.lib.infer_dtype('foo') + pd.lib.infer_dtype('foo') class TestTSLib(object): + @pytest.mark.filterwarnings("ignore") def test_deprecation_access_func(self): - with catch_warnings(record=True): - pd.tslib.Timestamp('20160101') + pd.tslib.Timestamp('20160101') class TestTypes(object): diff --git a/pandas/tests/api/test_types.py b/pandas/tests/api/test_types.py index bd4891326c751..3bb4bf932a11f 100644 --- a/pandas/tests/api/test_types.py +++ b/pandas/tests/api/test_types.py @@ -2,8 +2,6 @@ import pytest -from warnings import catch_warnings - import pandas from pandas.api import types from pandas.util import testing as tm @@ -60,6 +58,6 @@ def test_deprecated_from_api_types(self): def test_moved_infer_dtype(): - with catch_warnings(record=True): + with tm.assert_produces_warning(FutureWarning): e = pandas.lib.infer_dtype('foo') assert e is not None diff --git a/pandas/tests/arithmetic/test_datetime64.py b/pandas/tests/arithmetic/test_datetime64.py index a3fa4e6b88256..8c9b8b437fc7d 100644 --- a/pandas/tests/arithmetic/test_datetime64.py +++ b/pandas/tests/arithmetic/test_datetime64.py @@ -1802,6 +1802,10 @@ def test_dt64_with_DateOffsets(klass, normalize, cls_and_kwargs): offset_cls = getattr(pd.offsets, cls_name) with warnings.catch_warnings(record=True): + # pandas.errors.PerformanceWarning: Non-vectorized DateOffset being + # applied to Series or DatetimeIndex + # we aren't testing that here, so ignore. 
+ warnings.simplefilter("ignore", PerformanceWarning) for n in [0, 5]: if (cls_name in ['WeekOfMonth', 'LastWeekOfMonth', 'FY5253Quarter', 'FY5253'] and n == 0): diff --git a/pandas/tests/computation/test_eval.py b/pandas/tests/computation/test_eval.py index 118b05d16ab09..eef8646e4d6d2 100644 --- a/pandas/tests/computation/test_eval.py +++ b/pandas/tests/computation/test_eval.py @@ -1,5 +1,4 @@ import warnings -from warnings import catch_warnings import operator from itertools import product @@ -924,12 +923,18 @@ def testit(r_idx_type, c_idx_type, index_name): # only test dt with dt, otherwise weird joins result args = product(['i', 'u', 's'], ['i', 'u', 's'], ('index', 'columns')) with warnings.catch_warnings(record=True): + # avoid warning about comparing strings and ints + warnings.simplefilter("ignore", RuntimeWarning) + for r_idx_type, c_idx_type, index_name in args: testit(r_idx_type, c_idx_type, index_name) # dt with dt args = product(['dt'], ['dt'], ('index', 'columns')) with warnings.catch_warnings(record=True): + # avoid warning about comparing strings and ints + warnings.simplefilter("ignore", RuntimeWarning) + for r_idx_type, c_idx_type, index_name in args: testit(r_idx_type, c_idx_type, index_name) @@ -1112,13 +1117,13 @@ def test_bool_ops_with_constants(self): exp = eval(ex) assert res == exp + @pytest.mark.filterwarnings("ignore::FutureWarning") def test_panel_fails(self): - with catch_warnings(record=True): - x = Panel(randn(3, 4, 5)) - y = Series(randn(10)) - with pytest.raises(NotImplementedError): - self.eval('x + y', - local_dict={'x': x, 'y': y}) + x = Panel(randn(3, 4, 5)) + y = Series(randn(10)) + with pytest.raises(NotImplementedError): + self.eval('x + y', + local_dict={'x': x, 'y': y}) def test_4d_ndarray_fails(self): x = randn(3, 4, 5, 6) @@ -1382,6 +1387,7 @@ def test_query_inplace(self): @pytest.mark.parametrize("invalid_target", [1, "cat", [1, 2], np.array([]), (1, 3)]) + @pytest.mark.filterwarnings("ignore::FutureWarning") def test_cannot_item_assign(self, invalid_target): msg = "Cannot assign expression output to target" expression = "a = 1 + 2" diff --git a/pandas/tests/dtypes/test_generic.py b/pandas/tests/dtypes/test_generic.py index 53f92b98f022e..38d1143f3838b 100644 --- a/pandas/tests/dtypes/test_generic.py +++ b/pandas/tests/dtypes/test_generic.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -from warnings import catch_warnings +from warnings import catch_warnings, simplefilter import numpy as np import pandas as pd from pandas.core.dtypes import generic as gt @@ -35,6 +35,7 @@ def test_abc_types(self): assert isinstance(pd.Series([1, 2, 3]), gt.ABCSeries) assert isinstance(self.df, gt.ABCDataFrame) with catch_warnings(record=True): + simplefilter('ignore', FutureWarning) assert isinstance(self.df.to_panel(), gt.ABCPanel) assert isinstance(self.sparse_series, gt.ABCSparseSeries) assert isinstance(self.sparse_array, gt.ABCSparseArray) diff --git a/pandas/tests/dtypes/test_inference.py b/pandas/tests/dtypes/test_inference.py index dc330666b4b6c..a22a31b66fd4e 100644 --- a/pandas/tests/dtypes/test_inference.py +++ b/pandas/tests/dtypes/test_inference.py @@ -5,7 +5,7 @@ related to inference and not otherwise tested in types/test_common.py """ -from warnings import catch_warnings +from warnings import catch_warnings, simplefilter import collections import re from datetime import datetime, date, timedelta, time @@ -1158,6 +1158,7 @@ def test_is_scalar_numpy_zerodim_arrays(self): assert not is_scalar(zerodim) assert is_scalar(lib.item_from_zerodim(zerodim)) + 
@pytest.mark.filterwarnings("ignore::PendingDeprecationWarning") def test_is_scalar_numpy_arrays(self): assert not is_scalar(np.array([])) assert not is_scalar(np.array([[]])) @@ -1176,6 +1177,7 @@ def test_is_scalar_pandas_containers(self): assert not is_scalar(DataFrame()) assert not is_scalar(DataFrame([[1]])) with catch_warnings(record=True): + simplefilter("ignore", FutureWarning) assert not is_scalar(Panel()) assert not is_scalar(Panel([[[1]]])) assert not is_scalar(Index([])) @@ -1210,6 +1212,7 @@ def test_nan_to_nat_conversions(): @td.skip_if_no_scipy +@pytest.mark.filterwarnings("ignore::PendingDeprecationWarning") def test_is_scipy_sparse(spmatrix): # noqa: F811 assert is_scipy_sparse(spmatrix([[0, 1]])) assert not is_scipy_sparse(np.array([1])) diff --git a/pandas/tests/dtypes/test_missing.py b/pandas/tests/dtypes/test_missing.py index ca9a2dc81fcc6..8f82db69a9213 100644 --- a/pandas/tests/dtypes/test_missing.py +++ b/pandas/tests/dtypes/test_missing.py @@ -1,7 +1,7 @@ # -*- coding: utf-8 -*- import pytest -from warnings import catch_warnings +from warnings import catch_warnings, simplefilter import numpy as np from datetime import datetime from pandas.util import testing as tm @@ -94,6 +94,7 @@ def test_isna_isnull(self, isna_f): # panel with catch_warnings(record=True): + simplefilter("ignore", FutureWarning) for p in [tm.makePanel(), tm.makePeriodPanel(), tm.add_nans(tm.makePanel())]: result = isna_f(p) diff --git a/pandas/tests/extension/base/dtype.py b/pandas/tests/extension/base/dtype.py index 02b7c9527769f..ab17fa7f02b16 100644 --- a/pandas/tests/extension/base/dtype.py +++ b/pandas/tests/extension/base/dtype.py @@ -1,3 +1,5 @@ +import warnings + import numpy as np import pandas as pd @@ -67,7 +69,11 @@ def test_check_dtype(self, data): expected = pd.Series([True, True, False, False], index=list('ABCD')) - result = df.dtypes == str(dtype) + with warnings.catch_warnings(): + # XXX: This should probably be *fixed* not ignored. 
+ # See libops.scalar_compare + warnings.simplefilter("ignore", DeprecationWarning) + result = df.dtypes == str(dtype) self.assert_series_equal(result, expected) expected = pd.Series([True, True, False, False], diff --git a/pandas/tests/frame/test_analytics.py b/pandas/tests/frame/test_analytics.py index f06c8336373ca..80ec26dca4a6c 100644 --- a/pandas/tests/frame/test_analytics.py +++ b/pandas/tests/frame/test_analytics.py @@ -116,8 +116,8 @@ def test_corr_int_and_boolean(self): 'a', 'b'], columns=['a', 'b']) for meth in ['pearson', 'kendall', 'spearman']: - # RuntimeWarning with warnings.catch_warnings(record=True): + warnings.simplefilter("ignore", RuntimeWarning) result = df.corr(meth) tm.assert_frame_equal(result, expected) @@ -559,6 +559,7 @@ def wrapper(x): def test_min(self): with warnings.catch_warnings(record=True): + warnings.simplefilter("ignore", RuntimeWarning) self._check_stat_op('min', np.min, check_dates=True) self._check_stat_op('min', np.min, frame=self.intframe) @@ -610,6 +611,7 @@ def test_cummax(self): def test_max(self): with warnings.catch_warnings(record=True): + warnings.simplefilter("ignore", RuntimeWarning) self._check_stat_op('max', np.max, check_dates=True) self._check_stat_op('max', np.max, frame=self.intframe) diff --git a/pandas/tests/frame/test_apply.py b/pandas/tests/frame/test_apply.py index 8beab3fb816df..2ad89a9dee833 100644 --- a/pandas/tests/frame/test_apply.py +++ b/pandas/tests/frame/test_apply.py @@ -259,6 +259,7 @@ def test_apply_empty_infer_type(self): def _check(df, f): with warnings.catch_warnings(record=True): + warnings.simplefilter("ignore", RuntimeWarning) test_res = f(np.array([], dtype='f8')) is_reduction = not isinstance(test_res, np.ndarray) diff --git a/pandas/tests/frame/test_indexing.py b/pandas/tests/frame/test_indexing.py index f0c4d7be2f293..5a971269335d3 100644 --- a/pandas/tests/frame/test_indexing.py +++ b/pandas/tests/frame/test_indexing.py @@ -1,7 +1,7 @@ # -*- coding: utf-8 -*- from __future__ import print_function -from warnings import catch_warnings +from warnings import catch_warnings, simplefilter from datetime import datetime, date, timedelta, time @@ -368,6 +368,7 @@ def test_getitem_ix_mixed_integer(self): assert_frame_equal(result, expected) with catch_warnings(record=True): + simplefilter("ignore", DeprecationWarning) result = df.ix[[1, 10]] expected = df.ix[Index([1, 10], dtype=object)] assert_frame_equal(result, expected) @@ -387,37 +388,45 @@ def test_getitem_ix_mixed_integer(self): def test_getitem_setitem_ix_negative_integers(self): with catch_warnings(record=True): + simplefilter("ignore", DeprecationWarning) result = self.frame.ix[:, -1] assert_series_equal(result, self.frame['D']) with catch_warnings(record=True): + simplefilter("ignore", DeprecationWarning) result = self.frame.ix[:, [-1]] assert_frame_equal(result, self.frame[['D']]) with catch_warnings(record=True): + simplefilter("ignore", DeprecationWarning) result = self.frame.ix[:, [-1, -2]] assert_frame_equal(result, self.frame[['D', 'C']]) with catch_warnings(record=True): + simplefilter("ignore", DeprecationWarning) self.frame.ix[:, [-1]] = 0 assert (self.frame['D'] == 0).all() df = DataFrame(np.random.randn(8, 4)) # ix does label-based indexing when having an integer index with catch_warnings(record=True): + simplefilter("ignore", DeprecationWarning) with pytest.raises(KeyError): df.ix[[-1]] with catch_warnings(record=True): + simplefilter("ignore", DeprecationWarning) with pytest.raises(KeyError): df.ix[:, [-1]] # #1942 a = DataFrame(randn(20, 
2), index=[chr(x + 65) for x in range(20)]) with catch_warnings(record=True): + simplefilter("ignore", DeprecationWarning) a.ix[-1] = a.ix[-2] with catch_warnings(record=True): + simplefilter("ignore", DeprecationWarning) assert_series_equal(a.ix[-1], a.ix[-2], check_names=False) assert a.ix[-1].name == 'T' assert a.ix[-2].name == 'S' @@ -794,16 +803,19 @@ def test_getitem_fancy_2d(self): f = self.frame with catch_warnings(record=True): + simplefilter("ignore", DeprecationWarning) assert_frame_equal(f.ix[:, ['B', 'A']], f.reindex(columns=['B', 'A'])) subidx = self.frame.index[[5, 4, 1]] with catch_warnings(record=True): + simplefilter("ignore", DeprecationWarning) assert_frame_equal(f.ix[subidx, ['B', 'A']], f.reindex(index=subidx, columns=['B', 'A'])) # slicing rows, etc. with catch_warnings(record=True): + simplefilter("ignore", DeprecationWarning) assert_frame_equal(f.ix[5:10], f[5:10]) assert_frame_equal(f.ix[5:10, :], f[5:10]) assert_frame_equal(f.ix[:5, ['A', 'B']], @@ -812,22 +824,26 @@ def test_getitem_fancy_2d(self): # slice rows with labels, inclusive! with catch_warnings(record=True): + simplefilter("ignore", DeprecationWarning) expected = f.ix[5:11] result = f.ix[f.index[5]:f.index[10]] assert_frame_equal(expected, result) # slice columns with catch_warnings(record=True): + simplefilter("ignore", DeprecationWarning) assert_frame_equal(f.ix[:, :2], f.reindex(columns=['A', 'B'])) # get view with catch_warnings(record=True): + simplefilter("ignore", DeprecationWarning) exp = f.copy() f.ix[5:10].values[:] = 5 exp.values[5:10] = 5 assert_frame_equal(f, exp) with catch_warnings(record=True): + simplefilter("ignore", DeprecationWarning) pytest.raises(ValueError, f.ix.__getitem__, f > 0.5) def test_slice_floats(self): @@ -883,6 +899,7 @@ def test_setitem_fancy_2d(self): expected = frame.copy() with catch_warnings(record=True): + simplefilter("ignore", DeprecationWarning) frame.ix[:, ['B', 'A']] = 1 expected['B'] = 1. expected['A'] = 1. @@ -898,6 +915,7 @@ def test_setitem_fancy_2d(self): values = randn(3, 2) with catch_warnings(record=True): + simplefilter("ignore", DeprecationWarning) frame.ix[subidx, ['B', 'A']] = values frame2.ix[[5, 4, 1], ['B', 'A']] = values @@ -911,12 +929,14 @@ def test_setitem_fancy_2d(self): frame = self.frame.copy() with catch_warnings(record=True): + simplefilter("ignore", DeprecationWarning) expected1 = self.frame.copy() frame.ix[5:10] = 1. expected1.values[5:10] = 1. assert_frame_equal(frame, expected1) with catch_warnings(record=True): + simplefilter("ignore", DeprecationWarning) expected2 = self.frame.copy() arr = randn(5, len(frame.columns)) frame.ix[5:10] = arr @@ -925,6 +945,7 @@ def test_setitem_fancy_2d(self): # case 4 with catch_warnings(record=True): + simplefilter("ignore", DeprecationWarning) frame = self.frame.copy() frame.ix[5:10, :] = 1. assert_frame_equal(frame, expected1) @@ -933,6 +954,7 @@ def test_setitem_fancy_2d(self): # case 5 with catch_warnings(record=True): + simplefilter("ignore", DeprecationWarning) frame = self.frame.copy() frame2 = self.frame.copy() @@ -945,11 +967,13 @@ def test_setitem_fancy_2d(self): assert_frame_equal(frame, expected) with catch_warnings(record=True): + simplefilter("ignore", DeprecationWarning) frame2.ix[:5, [0, 1]] = values assert_frame_equal(frame2, expected) # case 6: slice rows with labels, inclusive! 
with catch_warnings(record=True): + simplefilter("ignore", DeprecationWarning) frame = self.frame.copy() expected = self.frame.copy() @@ -959,6 +983,7 @@ def test_setitem_fancy_2d(self): # case 7: slice columns with catch_warnings(record=True): + simplefilter("ignore", DeprecationWarning) frame = self.frame.copy() frame2 = self.frame.copy() expected = self.frame.copy() @@ -1001,6 +1026,7 @@ def test_fancy_setitem_int_labels(self): df = DataFrame(np.random.randn(10, 5), index=np.arange(0, 20, 2)) with catch_warnings(record=True): + simplefilter("ignore", DeprecationWarning) tmp = df.copy() exp = df.copy() tmp.ix[[0, 2, 4]] = 5 @@ -1008,6 +1034,7 @@ def test_fancy_setitem_int_labels(self): assert_frame_equal(tmp, exp) with catch_warnings(record=True): + simplefilter("ignore", DeprecationWarning) tmp = df.copy() exp = df.copy() tmp.ix[6] = 5 @@ -1015,6 +1042,7 @@ def test_fancy_setitem_int_labels(self): assert_frame_equal(tmp, exp) with catch_warnings(record=True): + simplefilter("ignore", DeprecationWarning) tmp = df.copy() exp = df.copy() tmp.ix[:, 2] = 5 @@ -1028,21 +1056,25 @@ def test_fancy_getitem_int_labels(self): df = DataFrame(np.random.randn(10, 5), index=np.arange(0, 20, 2)) with catch_warnings(record=True): + simplefilter("ignore", DeprecationWarning) result = df.ix[[4, 2, 0], [2, 0]] expected = df.reindex(index=[4, 2, 0], columns=[2, 0]) assert_frame_equal(result, expected) with catch_warnings(record=True): + simplefilter("ignore", DeprecationWarning) result = df.ix[[4, 2, 0]] expected = df.reindex(index=[4, 2, 0]) assert_frame_equal(result, expected) with catch_warnings(record=True): + simplefilter("ignore", DeprecationWarning) result = df.ix[4] expected = df.xs(4) assert_series_equal(result, expected) with catch_warnings(record=True): + simplefilter("ignore", DeprecationWarning) result = df.ix[:, 3] expected = df[3] assert_series_equal(result, expected) @@ -1051,6 +1083,7 @@ def test_fancy_index_int_labels_exceptions(self): df = DataFrame(np.random.randn(10, 5), index=np.arange(0, 20, 2)) with catch_warnings(record=True): + simplefilter("ignore", DeprecationWarning) # labels that aren't contained pytest.raises(KeyError, df.ix.__setitem__, @@ -1069,6 +1102,7 @@ def test_fancy_index_int_labels_exceptions(self): def test_setitem_fancy_mixed_2d(self): with catch_warnings(record=True): + simplefilter("ignore", DeprecationWarning) self.mixed_frame.ix[:5, ['C', 'B', 'A']] = 5 result = self.mixed_frame.ix[:5, ['C', 'B', 'A']] assert (result.values == 5).all() @@ -1082,6 +1116,7 @@ def test_setitem_fancy_mixed_2d(self): # #1432 with catch_warnings(record=True): + simplefilter("ignore", DeprecationWarning) df = DataFrame({1: [1., 2., 3.], 2: [3, 4, 5]}) assert df._is_mixed_type @@ -1099,27 +1134,32 @@ def test_ix_align(self): df = df_orig.copy() with catch_warnings(record=True): + simplefilter("ignore", DeprecationWarning) df.ix[:, 0] = b assert_series_equal(df.ix[:, 0].reindex(b.index), b) with catch_warnings(record=True): + simplefilter("ignore", DeprecationWarning) dft = df_orig.T dft.ix[0, :] = b assert_series_equal(dft.ix[0, :].reindex(b.index), b) with catch_warnings(record=True): + simplefilter("ignore", DeprecationWarning) df = df_orig.copy() df.ix[:5, 0] = b s = df.ix[:5, 0] assert_series_equal(s, b.reindex(s.index)) with catch_warnings(record=True): + simplefilter("ignore", DeprecationWarning) dft = df_orig.T dft.ix[0, :5] = b s = dft.ix[0, :5] assert_series_equal(s, b.reindex(s.index)) with catch_warnings(record=True): + simplefilter("ignore", DeprecationWarning) df = 
df_orig.copy() idx = [0, 1, 3, 5] df.ix[idx, 0] = b @@ -1127,6 +1167,7 @@ def test_ix_align(self): assert_series_equal(s, b.reindex(s.index)) with catch_warnings(record=True): + simplefilter("ignore", DeprecationWarning) dft = df_orig.T dft.ix[0, idx] = b s = dft.ix[0, idx] @@ -1138,6 +1179,7 @@ def test_ix_frame_align(self): df = df_orig.copy() with catch_warnings(record=True): + simplefilter("ignore", DeprecationWarning) df.ix[:3] = b out = b.ix[:3] assert_frame_equal(out, b) @@ -1145,12 +1187,14 @@ def test_ix_frame_align(self): b.sort_index(inplace=True) with catch_warnings(record=True): + simplefilter("ignore", DeprecationWarning) df = df_orig.copy() df.ix[[0, 1, 2]] = b out = df.ix[[0, 1, 2]].reindex(b.index) assert_frame_equal(out, b) with catch_warnings(record=True): + simplefilter("ignore", DeprecationWarning) df = df_orig.copy() df.ix[:3] = b out = df.ix[:3] @@ -1193,6 +1237,7 @@ def test_ix_multi_take_nonint_index(self): df = DataFrame(np.random.randn(3, 2), index=['x', 'y', 'z'], columns=['a', 'b']) with catch_warnings(record=True): + simplefilter("ignore", DeprecationWarning) rs = df.ix[[0], [0]] xp = df.reindex(['x'], columns=['a']) assert_frame_equal(rs, xp) @@ -1201,6 +1246,7 @@ def test_ix_multi_take_multiindex(self): df = DataFrame(np.random.randn(3, 2), index=['x', 'y', 'z'], columns=[['a', 'b'], ['1', '2']]) with catch_warnings(record=True): + simplefilter("ignore", DeprecationWarning) rs = df.ix[[0], [0]] xp = df.reindex(['x'], columns=[('a', '1')]) assert_frame_equal(rs, xp) @@ -1210,14 +1256,17 @@ def test_ix_dup(self): df = DataFrame(np.random.randn(len(idx), 3), idx) with catch_warnings(record=True): + simplefilter("ignore", DeprecationWarning) sub = df.ix[:'d'] assert_frame_equal(sub, df) with catch_warnings(record=True): + simplefilter("ignore", DeprecationWarning) sub = df.ix['a':'c'] assert_frame_equal(sub, df.ix[0:4]) with catch_warnings(record=True): + simplefilter("ignore", DeprecationWarning) sub = df.ix['b':'d'] assert_frame_equal(sub, df.ix[2:]) @@ -1226,48 +1275,57 @@ def test_getitem_fancy_1d(self): # return self if no slicing...for now with catch_warnings(record=True): + simplefilter("ignore", DeprecationWarning) assert f.ix[:, :] is f # low dimensional slice with catch_warnings(record=True): + simplefilter("ignore", DeprecationWarning) xs1 = f.ix[2, ['C', 'B', 'A']] xs2 = f.xs(f.index[2]).reindex(['C', 'B', 'A']) tm.assert_series_equal(xs1, xs2) with catch_warnings(record=True): + simplefilter("ignore", DeprecationWarning) ts1 = f.ix[5:10, 2] ts2 = f[f.columns[2]][5:10] tm.assert_series_equal(ts1, ts2) # positional xs with catch_warnings(record=True): + simplefilter("ignore", DeprecationWarning) xs1 = f.ix[0] xs2 = f.xs(f.index[0]) tm.assert_series_equal(xs1, xs2) with catch_warnings(record=True): + simplefilter("ignore", DeprecationWarning) xs1 = f.ix[f.index[5]] xs2 = f.xs(f.index[5]) tm.assert_series_equal(xs1, xs2) # single column with catch_warnings(record=True): + simplefilter("ignore", DeprecationWarning) assert_series_equal(f.ix[:, 'A'], f['A']) # return view with catch_warnings(record=True): + simplefilter("ignore", DeprecationWarning) exp = f.copy() exp.values[5] = 4 f.ix[5][:] = 4 tm.assert_frame_equal(exp, f) with catch_warnings(record=True): + simplefilter("ignore", DeprecationWarning) exp.values[:, 1] = 6 f.ix[:, 1][:] = 6 tm.assert_frame_equal(exp, f) # slice of mixed-frame with catch_warnings(record=True): + simplefilter("ignore", DeprecationWarning) xs = self.mixed_frame.ix[5] exp = self.mixed_frame.xs(self.mixed_frame.index[5]) 
tm.assert_series_equal(xs, exp) @@ -1279,6 +1337,7 @@ def test_setitem_fancy_1d(self): expected = self.frame.copy() with catch_warnings(record=True): + simplefilter("ignore", DeprecationWarning) frame.ix[2, ['C', 'B', 'A']] = [1., 2., 3.] expected['C'][2] = 1. expected['B'][2] = 2. @@ -1286,6 +1345,7 @@ def test_setitem_fancy_1d(self): assert_frame_equal(frame, expected) with catch_warnings(record=True): + simplefilter("ignore", DeprecationWarning) frame2 = self.frame.copy() frame2.ix[2, [3, 2, 1]] = [1., 2., 3.] assert_frame_equal(frame, expected) @@ -1295,12 +1355,14 @@ def test_setitem_fancy_1d(self): expected = self.frame.copy() with catch_warnings(record=True): + simplefilter("ignore", DeprecationWarning) vals = randn(5) expected.values[5:10, 2] = vals frame.ix[5:10, 2] = vals assert_frame_equal(frame, expected) with catch_warnings(record=True): + simplefilter("ignore", DeprecationWarning) frame2 = self.frame.copy() frame2.ix[5:10, 'B'] = vals assert_frame_equal(frame, expected) @@ -1310,11 +1372,13 @@ def test_setitem_fancy_1d(self): expected = self.frame.copy() with catch_warnings(record=True): + simplefilter("ignore", DeprecationWarning) frame.ix[4] = 5. expected.values[4] = 5. assert_frame_equal(frame, expected) with catch_warnings(record=True): + simplefilter("ignore", DeprecationWarning) frame.ix[frame.index[4]] = 6. expected.values[4] = 6. assert_frame_equal(frame, expected) @@ -1324,6 +1388,7 @@ def test_setitem_fancy_1d(self): expected = self.frame.copy() with catch_warnings(record=True): + simplefilter("ignore", DeprecationWarning) frame.ix[:, 'A'] = 7. expected['A'] = 7. assert_frame_equal(frame, expected) @@ -1834,6 +1899,7 @@ def test_single_element_ix_dont_upcast(self): assert issubclass(self.frame['E'].dtype.type, (int, np.integer)) with catch_warnings(record=True): + simplefilter("ignore", DeprecationWarning) result = self.frame.ix[self.frame.index[5], 'E'] assert is_integer(result) @@ -1845,6 +1911,7 @@ def test_single_element_ix_dont_upcast(self): df["b"] = 666 with catch_warnings(record=True): + simplefilter("ignore", DeprecationWarning) result = df.ix[0, "b"] assert is_integer(result) result = df.loc[0, "b"] @@ -1852,6 +1919,7 @@ def test_single_element_ix_dont_upcast(self): expected = Series([666], [0], name='b') with catch_warnings(record=True): + simplefilter("ignore", DeprecationWarning) result = df.ix[[0], "b"] assert_series_equal(result, expected) result = df.loc[[0], "b"] @@ -1923,12 +1991,14 @@ def test_iloc_duplicates(self): result = df.iloc[0] with catch_warnings(record=True): + simplefilter("ignore", DeprecationWarning) result2 = df.ix[0] assert isinstance(result, Series) assert_almost_equal(result.values, df.values[0]) assert_series_equal(result, result2) with catch_warnings(record=True): + simplefilter("ignore", DeprecationWarning) result = df.T.iloc[:, 0] result2 = df.T.ix[:, 0] assert isinstance(result, Series) @@ -1941,16 +2011,19 @@ def test_iloc_duplicates(self): index=[['i', 'i', 'j'], ['X', 'X', 'Y']]) with catch_warnings(record=True): + simplefilter("ignore", DeprecationWarning) rs = df.iloc[0] xp = df.ix[0] assert_series_equal(rs, xp) with catch_warnings(record=True): + simplefilter("ignore", DeprecationWarning) rs = df.iloc[:, 0] xp = df.T.ix[0] assert_series_equal(rs, xp) with catch_warnings(record=True): + simplefilter("ignore", DeprecationWarning) rs = df.iloc[:, [0]] xp = df.ix[:, [0]] assert_frame_equal(rs, xp) @@ -2172,6 +2245,7 @@ def test_getitem_ix_float_duplicates(self): expect = df.iloc[1:] assert_frame_equal(df.loc[0.2], expect) 
with catch_warnings(record=True): + simplefilter("ignore", DeprecationWarning) assert_frame_equal(df.ix[0.2], expect) expect = df.iloc[1:, 0] @@ -2181,6 +2255,7 @@ def test_getitem_ix_float_duplicates(self): expect = df.iloc[1:] assert_frame_equal(df.loc[0.2], expect) with catch_warnings(record=True): + simplefilter("ignore", DeprecationWarning) assert_frame_equal(df.ix[0.2], expect) expect = df.iloc[1:, 0] @@ -2191,6 +2266,7 @@ def test_getitem_ix_float_duplicates(self): expect = df.iloc[1:-1] assert_frame_equal(df.loc[0.2], expect) with catch_warnings(record=True): + simplefilter("ignore", DeprecationWarning) assert_frame_equal(df.ix[0.2], expect) expect = df.iloc[1:-1, 0] @@ -2200,6 +2276,7 @@ def test_getitem_ix_float_duplicates(self): expect = df.iloc[[1, -1]] assert_frame_equal(df.loc[0.2], expect) with catch_warnings(record=True): + simplefilter("ignore", DeprecationWarning) assert_frame_equal(df.ix[0.2], expect) expect = df.iloc[[1, -1], 0] @@ -2415,6 +2492,7 @@ def test_index_namedtuple(self): df = DataFrame([(1, 2), (3, 4)], index=index, columns=["A", "B"]) with catch_warnings(record=True): + simplefilter("ignore", DeprecationWarning) result = df.ix[IndexType("foo", "bar")]["A"] assert result == 1 diff --git a/pandas/tests/frame/test_operators.py b/pandas/tests/frame/test_operators.py index da4424b1ae626..97c94e1134cc8 100644 --- a/pandas/tests/frame/test_operators.py +++ b/pandas/tests/frame/test_operators.py @@ -209,6 +209,8 @@ def _check_unary_op(op): @pytest.mark.parametrize('op,res', [('__eq__', False), ('__ne__', True)]) + # not sure what's correct here. + @pytest.mark.filterwarnings("ignore:elementwise:FutureWarning") def test_logical_typeerror_with_non_valid(self, op, res): # we are comparing floats vs a string result = getattr(self.frame, op)('foo') @@ -278,7 +280,9 @@ def test_pos_numeric(self, df): assert_series_equal(+df['a'], df['a']) @pytest.mark.parametrize('df', [ - pd.DataFrame({'a': ['a', 'b']}), + # numpy changing behavior in the future + pytest.param(pd.DataFrame({'a': ['a', 'b']}), + marks=[pytest.mark.filterwarnings("ignore")]), pd.DataFrame({'a': np.array([-1, 2], dtype=object)}), pd.DataFrame({'a': [Decimal('-1.0'), Decimal('2.0')]}), ]) diff --git a/pandas/tests/frame/test_query_eval.py b/pandas/tests/frame/test_query_eval.py index 3be7ad12db883..3c6f0f0b2ab94 100644 --- a/pandas/tests/frame/test_query_eval.py +++ b/pandas/tests/frame/test_query_eval.py @@ -360,6 +360,7 @@ def to_series(mi, level): else: raise AssertionError("object must be a Series or Index") + @pytest.mark.filterwarnings("ignore::FutureWarning") def test_raise_on_panel_with_multiindex(self, parser, engine): p = tm.makePanel(7) p.items = tm.makeCustomIndex(len(p.items), nlevels=2) diff --git a/pandas/tests/frame/test_reshape.py b/pandas/tests/frame/test_reshape.py index 2f90d24f652ca..9f6735c7ba2bf 100644 --- a/pandas/tests/frame/test_reshape.py +++ b/pandas/tests/frame/test_reshape.py @@ -2,7 +2,7 @@ from __future__ import print_function -from warnings import catch_warnings +from warnings import catch_warnings, simplefilter from datetime import datetime import itertools @@ -56,6 +56,7 @@ def test_pivot(self): with catch_warnings(record=True): # pivot multiple columns + simplefilter("ignore", FutureWarning) wp = tm.makePanel() lp = wp.to_frame() df = lp.reset_index() diff --git a/pandas/tests/frame/test_subclass.py b/pandas/tests/frame/test_subclass.py index caaa311e9ee96..07289d897be62 100644 --- a/pandas/tests/frame/test_subclass.py +++ b/pandas/tests/frame/test_subclass.py @@ -2,7 
+2,7 @@ from __future__ import print_function -from warnings import catch_warnings +import pytest import numpy as np from pandas import DataFrame, Series, MultiIndex, Panel, Index @@ -126,28 +126,28 @@ def test_indexing_sliced(self): tm.assert_series_equal(res, exp) assert isinstance(res, tm.SubclassedSeries) + @pytest.mark.filterwarnings("ignore:\\nPanel:FutureWarning") def test_to_panel_expanddim(self): # GH 9762 - with catch_warnings(record=True): - class SubclassedFrame(DataFrame): - - @property - def _constructor_expanddim(self): - return SubclassedPanel - - class SubclassedPanel(Panel): - pass - - index = MultiIndex.from_tuples([(0, 0), (0, 1), (0, 2)]) - df = SubclassedFrame({'X': [1, 2, 3], 'Y': [4, 5, 6]}, index=index) - result = df.to_panel() - assert isinstance(result, SubclassedPanel) - expected = SubclassedPanel([[[1, 2, 3]], [[4, 5, 6]]], - items=['X', 'Y'], major_axis=[0], - minor_axis=[0, 1, 2], - dtype='int64') - tm.assert_panel_equal(result, expected) + class SubclassedFrame(DataFrame): + + @property + def _constructor_expanddim(self): + return SubclassedPanel + + class SubclassedPanel(Panel): + pass + + index = MultiIndex.from_tuples([(0, 0), (0, 1), (0, 2)]) + df = SubclassedFrame({'X': [1, 2, 3], 'Y': [4, 5, 6]}, index=index) + result = df.to_panel() + assert isinstance(result, SubclassedPanel) + expected = SubclassedPanel([[[1, 2, 3]], [[4, 5, 6]]], + items=['X', 'Y'], major_axis=[0], + minor_axis=[0, 1, 2], + dtype='int64') + tm.assert_panel_equal(result, expected) def test_subclass_attr_err_propagation(self): # GH 11808 diff --git a/pandas/tests/generic/test_generic.py b/pandas/tests/generic/test_generic.py index 533bff0384ad9..1652835de8228 100644 --- a/pandas/tests/generic/test_generic.py +++ b/pandas/tests/generic/test_generic.py @@ -2,7 +2,7 @@ # pylint: disable-msg=E1101,W0612 from copy import copy, deepcopy -from warnings import catch_warnings +from warnings import catch_warnings, simplefilter import pytest import numpy as np @@ -638,6 +638,7 @@ def test_sample(sel): s.sample(n=3, weights='weight_column') with catch_warnings(record=True): + simplefilter("ignore", FutureWarning) panel = Panel(items=[0, 1, 2], major_axis=[2, 3, 4], minor_axis=[3, 4, 5]) with pytest.raises(ValueError): @@ -705,6 +706,7 @@ def test_sample(sel): # Test default axes with catch_warnings(record=True): + simplefilter("ignore", FutureWarning) p = Panel(items=['a', 'b', 'c'], major_axis=[2, 4, 6], minor_axis=[1, 3, 5]) assert_panel_equal( @@ -743,6 +745,7 @@ def test_squeeze(self): for df in [tm.makeTimeDataFrame()]: tm.assert_frame_equal(df.squeeze(), df) with catch_warnings(record=True): + simplefilter("ignore", FutureWarning) for p in [tm.makePanel()]: tm.assert_panel_equal(p.squeeze(), p) @@ -751,6 +754,7 @@ def test_squeeze(self): tm.assert_series_equal(df.squeeze(), df['A']) with catch_warnings(record=True): + simplefilter("ignore", FutureWarning) p = tm.makePanel().reindex(items=['ItemA']) tm.assert_frame_equal(p.squeeze(), p['ItemA']) @@ -761,6 +765,7 @@ def test_squeeze(self): empty_series = Series([], name='five') empty_frame = DataFrame([empty_series]) with catch_warnings(record=True): + simplefilter("ignore", FutureWarning) empty_panel = Panel({'six': empty_frame}) [tm.assert_series_equal(empty_series, higher_dim.squeeze()) @@ -798,6 +803,7 @@ def test_transpose(self): tm.assert_frame_equal(df.transpose().transpose(), df) with catch_warnings(record=True): + simplefilter("ignore", FutureWarning) for p in [tm.makePanel()]: tm.assert_panel_equal(p.transpose(2, 0, 1) 
.transpose(1, 2, 0), p) @@ -820,6 +826,7 @@ def test_numpy_transpose(self): np.transpose, df, axes=1) with catch_warnings(record=True): + simplefilter("ignore", FutureWarning) p = tm.makePanel() tm.assert_panel_equal(np.transpose( np.transpose(p, axes=(2, 0, 1)), @@ -842,6 +849,7 @@ def test_take(self): indices = [-3, 2, 0, 1] with catch_warnings(record=True): + simplefilter("ignore", FutureWarning) for p in [tm.makePanel()]: out = p.take(indices) expected = Panel(data=p.values.take(indices, axis=0), @@ -856,6 +864,7 @@ def test_take_invalid_kwargs(self): df = tm.makeTimeDataFrame() with catch_warnings(record=True): + simplefilter("ignore", FutureWarning) p = tm.makePanel() for obj in (s, df, p): @@ -963,6 +972,7 @@ def test_equals(self): def test_describe_raises(self): with catch_warnings(record=True): + simplefilter("ignore", FutureWarning) with pytest.raises(NotImplementedError): tm.makePanel().describe() @@ -996,6 +1006,7 @@ def test_pipe_tuple_error(self): def test_pipe_panel(self): with catch_warnings(record=True): + simplefilter("ignore", FutureWarning) wp = Panel({'r1': DataFrame({"A": [1, 2, 3]})}) f = lambda x, y: x + y result = wp.pipe(f, 2) diff --git a/pandas/tests/generic/test_panel.py b/pandas/tests/generic/test_panel.py index 49cb773a1bd10..fe80b2af5ea63 100644 --- a/pandas/tests/generic/test_panel.py +++ b/pandas/tests/generic/test_panel.py @@ -1,7 +1,7 @@ # -*- coding: utf-8 -*- # pylint: disable-msg=E1101,W0612 -from warnings import catch_warnings +from warnings import catch_warnings, simplefilter from pandas import Panel from pandas.util.testing import (assert_panel_equal, @@ -21,6 +21,7 @@ def test_to_xarray(self): from xarray import DataArray with catch_warnings(record=True): + simplefilter("ignore", FutureWarning) p = tm.makePanel() result = p.to_xarray() @@ -51,6 +52,7 @@ def f(): def tester(self): f = getattr(super(TestPanel, self), t) with catch_warnings(record=True): + simplefilter("ignore", FutureWarning) f() return tester diff --git a/pandas/tests/groupby/test_groupby.py b/pandas/tests/groupby/test_groupby.py index 9affd0241d028..fa20620fc6db1 100644 --- a/pandas/tests/groupby/test_groupby.py +++ b/pandas/tests/groupby/test_groupby.py @@ -3,7 +3,6 @@ import pytest -from warnings import catch_warnings from datetime import datetime from decimal import Decimal @@ -508,30 +507,30 @@ def test_frame_multi_key_function_list(): @pytest.mark.parametrize('op', [lambda x: x.sum(), lambda x: x.mean()]) +@pytest.mark.filterwarnings("ignore:\\nPanel:FutureWarning") def test_groupby_multiple_columns(df, op): data = df grouped = data.groupby(['A', 'B']) - with catch_warnings(record=True): - result1 = op(grouped) - - expected = defaultdict(dict) - for n1, gp1 in data.groupby('A'): - for n2, gp2 in gp1.groupby('B'): - expected[n1][n2] = op(gp2.loc[:, ['C', 'D']]) - expected = {k: DataFrame(v) - for k, v in compat.iteritems(expected)} - expected = Panel.fromDict(expected).swapaxes(0, 1) - expected.major_axis.name, expected.minor_axis.name = 'A', 'B' - - # a little bit crude - for col in ['C', 'D']: - result_col = op(grouped[col]) - exp = expected[col] - pivoted = result1[col].unstack() - pivoted2 = result_col.unstack() - assert_frame_equal(pivoted.reindex_like(exp), exp) - assert_frame_equal(pivoted2.reindex_like(exp), exp) + result1 = op(grouped) + + expected = defaultdict(dict) + for n1, gp1 in data.groupby('A'): + for n2, gp2 in gp1.groupby('B'): + expected[n1][n2] = op(gp2.loc[:, ['C', 'D']]) + expected = {k: DataFrame(v) + for k, v in compat.iteritems(expected)} + expected = 
Panel.fromDict(expected).swapaxes(0, 1) + expected.major_axis.name, expected.minor_axis.name = 'A', 'B' + + # a little bit crude + for col in ['C', 'D']: + result_col = op(grouped[col]) + exp = expected[col] + pivoted = result1[col].unstack() + pivoted2 = result_col.unstack() + assert_frame_equal(pivoted.reindex_like(exp), exp) + assert_frame_equal(pivoted2.reindex_like(exp), exp) # test single series works the same result = data['C'].groupby([data['A'], data['B']]).mean() @@ -1181,11 +1180,12 @@ def test_groupby_nat_exclude(): pytest.raises(KeyError, grouped.get_group, pd.NaT) + +@pytest.mark.filterwarnings("ignore:\\nPanel:FutureWarning") def test_sparse_friendly(df): sdf = df[['C', 'D']].to_sparse() - with catch_warnings(record=True): - panel = tm.makePanel() - tm.add_nans(panel) + panel = tm.makePanel() + tm.add_nans(panel) def _check_work(gp): gp.mean() @@ -1201,29 +1201,30 @@ def _check_work(gp): # _check_work(panel.groupby(lambda x: x.month, axis=1)) + +@pytest.mark.filterwarnings("ignore:\\nPanel:FutureWarning") def test_panel_groupby(): - with catch_warnings(record=True): - panel = tm.makePanel() - tm.add_nans(panel) - grouped = panel.groupby({'ItemA': 0, 'ItemB': 0, 'ItemC': 1}, - axis='items') - agged = grouped.mean() - agged2 = grouped.agg(lambda x: x.mean('items')) + panel = tm.makePanel() + tm.add_nans(panel) + grouped = panel.groupby({'ItemA': 0, 'ItemB': 0, 'ItemC': 1}, + axis='items') + agged = grouped.mean() + agged2 = grouped.agg(lambda x: x.mean('items')) - tm.assert_panel_equal(agged, agged2) + tm.assert_panel_equal(agged, agged2) - tm.assert_index_equal(agged.items, Index([0, 1])) + tm.assert_index_equal(agged.items, Index([0, 1])) - grouped = panel.groupby(lambda x: x.month, axis='major') - agged = grouped.mean() + grouped = panel.groupby(lambda x: x.month, axis='major') + agged = grouped.mean() - exp = Index(sorted(list(set(panel.major_axis.month)))) - tm.assert_index_equal(agged.major_axis, exp) + exp = Index(sorted(list(set(panel.major_axis.month)))) + tm.assert_index_equal(agged.major_axis, exp) - grouped = panel.groupby({'A': 0, 'B': 0, 'C': 1, 'D': 1}, - axis='minor') - agged = grouped.mean() - tm.assert_index_equal(agged.minor_axis, Index([0, 1])) + grouped = panel.groupby({'A': 0, 'B': 0, 'C': 1, 'D': 1}, + axis='minor') + agged = grouped.mean() + tm.assert_index_equal(agged.minor_axis, Index([0, 1])) def test_groupby_2d_malformed(): diff --git a/pandas/tests/groupby/test_grouping.py b/pandas/tests/groupby/test_grouping.py index 737e8a805f3ce..e7c0881b11871 100644 --- a/pandas/tests/groupby/test_grouping.py +++ b/pandas/tests/groupby/test_grouping.py @@ -4,7 +4,6 @@ import pytest -from warnings import catch_warnings from pandas import (date_range, Timestamp, Index, MultiIndex, DataFrame, Series, CategoricalIndex) from pandas.util.testing import (assert_panel_equal, assert_frame_equal, @@ -557,15 +556,15 @@ def test_list_grouper_with_nat(self): class TestGetGroup(): + @pytest.mark.filterwarnings("ignore:\\nPanel:FutureWarning") def test_get_group(self): - with catch_warnings(record=True): - wp = tm.makePanel() - grouped = wp.groupby(lambda x: x.month, axis='major') + wp = tm.makePanel() + grouped = wp.groupby(lambda x: x.month, axis='major') - gp = grouped.get_group(1) - expected = wp.reindex( - major=[x for x in wp.major_axis if x.month == 1]) - assert_panel_equal(gp, expected) + gp = grouped.get_group(1) + expected = wp.reindex( + major=[x for x in wp.major_axis if x.month == 1]) + assert_panel_equal(gp, expected) # GH 5267 # be datelike friendly @@ -743,18 
+742,18 @@ def test_multi_iter_frame(self, three_group): for key, group in grouped: pass + @pytest.mark.filterwarnings("ignore:\\nPanel:FutureWarning") def test_multi_iter_panel(self): - with catch_warnings(record=True): - wp = tm.makePanel() - grouped = wp.groupby([lambda x: x.month, lambda x: x.weekday()], - axis=1) - - for (month, wd), group in grouped: - exp_axis = [x - for x in wp.major_axis - if x.month == month and x.weekday() == wd] - expected = wp.reindex(major=exp_axis) - assert_panel_equal(group, expected) + wp = tm.makePanel() + grouped = wp.groupby([lambda x: x.month, lambda x: x.weekday()], + axis=1) + + for (month, wd), group in grouped: + exp_axis = [x + for x in wp.major_axis + if x.month == month and x.weekday() == wd] + expected = wp.reindex(major=exp_axis) + assert_panel_equal(group, expected) def test_dictify(self, df): dict(iter(df.groupby('A'))) diff --git a/pandas/tests/indexes/datetimes/test_datetime.py b/pandas/tests/indexes/datetimes/test_datetime.py index db3de0ceced0c..5ab32ee3863ae 100644 --- a/pandas/tests/indexes/datetimes/test_datetime.py +++ b/pandas/tests/indexes/datetimes/test_datetime.py @@ -1,4 +1,3 @@ -import warnings import sys import pytest @@ -201,7 +200,7 @@ def test_get_duplicates(self): idx = DatetimeIndex(['2000-01-01', '2000-01-02', '2000-01-02', '2000-01-03', '2000-01-03', '2000-01-04']) - with warnings.catch_warnings(record=True): + with tm.assert_produces_warning(FutureWarning): # Deprecated - see GH20239 result = idx.get_duplicates() diff --git a/pandas/tests/indexes/datetimes/test_ops.py b/pandas/tests/indexes/datetimes/test_ops.py index 6ccd310f33bbd..24d99abaf44a8 100644 --- a/pandas/tests/indexes/datetimes/test_ops.py +++ b/pandas/tests/indexes/datetimes/test_ops.py @@ -534,8 +534,8 @@ def test_shift(self): assert shifted[0] == self.rng[0] assert shifted.freq == self.rng.freq - # PerformanceWarning with warnings.catch_warnings(record=True): + warnings.simplefilter("ignore", pd.errors.PerformanceWarning) rng = date_range(START, END, freq=BMonthEnd()) shifted = rng.shift(1, freq=CDay()) assert shifted[0] == rng[0] + CDay() diff --git a/pandas/tests/indexes/datetimes/test_tools.py b/pandas/tests/indexes/datetimes/test_tools.py index bef9b73773f46..13d1d05833361 100644 --- a/pandas/tests/indexes/datetimes/test_tools.py +++ b/pandas/tests/indexes/datetimes/test_tools.py @@ -1175,6 +1175,8 @@ def test_dayfirst(self, cache): class TestGuessDatetimeFormat(object): @td.skip_if_not_us_locale + @pytest.mark.filterwarnings("ignore:_timelex:DeprecationWarning") + # https://github.com/pandas-dev/pandas/issues/21322 def test_guess_datetime_format_for_array(self): expected_format = '%Y-%m-%d %H:%M:%S.%f' dt_string = datetime(2011, 12, 30, 0, 0, 0).strftime(expected_format) @@ -1587,12 +1589,15 @@ def units_from_epochs(): return list(range(5)) -@pytest.fixture(params=[epoch_1960(), - epoch_1960().to_pydatetime(), - epoch_1960().to_datetime64(), - str(epoch_1960())]) -def epochs(request): - return request.param +@pytest.fixture(params=['timestamp', 'pydatetime', 'datetime64']) +def epochs(epoch_1960, request): + assert request.param in {'timestamp', 'pydatetime', 'datetime64'} + if request.param == 'timestamp': + return epoch_1960 + elif request.param == 'pydatetime': + return epoch_1960.to_pydatetime() + else: + return epoch_1960.to_datetime64() @pytest.fixture diff --git a/pandas/tests/indexes/multi/test_duplicates.py b/pandas/tests/indexes/multi/test_duplicates.py index 1cdf0ca6e013e..54a12137c9457 100644 --- 
a/pandas/tests/indexes/multi/test_duplicates.py +++ b/pandas/tests/indexes/multi/test_duplicates.py @@ -1,6 +1,5 @@ # -*- coding: utf-8 -*- -import warnings from itertools import product import pytest @@ -241,7 +240,7 @@ def test_get_duplicates(): mi = MultiIndex.from_arrays([[101, a], [3.5, np.nan]]) assert not mi.has_duplicates - with warnings.catch_warnings(record=True): + with tm.assert_produces_warning(FutureWarning): # Deprecated - see GH20239 assert mi.get_duplicates().equals(MultiIndex.from_arrays([[], []])) @@ -257,7 +256,7 @@ def test_get_duplicates(): assert len(mi) == (n + 1) * (m + 1) assert not mi.has_duplicates - with warnings.catch_warnings(record=True): + with tm.assert_produces_warning(FutureWarning): # Deprecated - see GH20239 assert mi.get_duplicates().equals(MultiIndex.from_arrays( [[], []])) diff --git a/pandas/tests/indexes/test_base.py b/pandas/tests/indexes/test_base.py index 755b3cc7f1dca..8c3eb27954c69 100644 --- a/pandas/tests/indexes/test_base.py +++ b/pandas/tests/indexes/test_base.py @@ -714,6 +714,8 @@ def test_empty_fancy_raises(self, attr): pytest.raises(IndexError, index.__getitem__, empty_farr) @pytest.mark.parametrize("itm", [101, 'no_int']) + # FutureWarning from non-tuple sequence of nd indexing + @pytest.mark.filterwarnings("ignore::FutureWarning") def test_getitem_error(self, indices, itm): with pytest.raises(IndexError): indices[itm] diff --git a/pandas/tests/indexes/timedeltas/test_timedelta.py b/pandas/tests/indexes/timedeltas/test_timedelta.py index d7745ffd94cd9..c329d8d15d729 100644 --- a/pandas/tests/indexes/timedeltas/test_timedelta.py +++ b/pandas/tests/indexes/timedeltas/test_timedelta.py @@ -1,5 +1,3 @@ -import warnings - import pytest import numpy as np @@ -147,7 +145,7 @@ def test_get_duplicates(self): idx = TimedeltaIndex(['1 day', '2 day', '2 day', '3 day', '3day', '4day']) - with warnings.catch_warnings(record=True): + with tm.assert_produces_warning(FutureWarning): # Deprecated - see GH20239 result = idx.get_duplicates() diff --git a/pandas/tests/indexing/test_chaining_and_caching.py b/pandas/tests/indexing/test_chaining_and_caching.py index 0e396a3248e3f..a7e55cdf9936e 100644 --- a/pandas/tests/indexing/test_chaining_and_caching.py +++ b/pandas/tests/indexing/test_chaining_and_caching.py @@ -1,5 +1,3 @@ -from warnings import catch_warnings - import pytest import numpy as np @@ -366,22 +364,22 @@ def check(result, expected): result4 = df['A'].iloc[2] check(result4, expected) + @pytest.mark.filterwarnings("ignore::DeprecationWarning") + @pytest.mark.filterwarnings("ignore:\\nPanel:FutureWarning") def test_cache_updating(self): # GH 4939, make sure to update the cache on setitem df = tm.makeDataFrame() df['A'] # cache series - with catch_warnings(record=True): - df.ix["Hello Friend"] = df.ix[0] + df.ix["Hello Friend"] = df.ix[0] assert "Hello Friend" in df['A'].index assert "Hello Friend" in df['B'].index - with catch_warnings(record=True): - panel = tm.makePanel() - panel.ix[0] # get first item into cache - panel.ix[:, :, 'A+1'] = panel.ix[:, :, 'A'] + 1 - assert "A+1" in panel.ix[0].columns - assert "A+1" in panel.ix[1].columns + panel = tm.makePanel() + panel.ix[0] # get first item into cache + panel.ix[:, :, 'A+1'] = panel.ix[:, :, 'A'] + 1 + assert "A+1" in panel.ix[0].columns + assert "A+1" in panel.ix[1].columns # 5216 # make sure that we don't try to set a dead cache diff --git a/pandas/tests/indexing/test_floats.py b/pandas/tests/indexing/test_floats.py index ba1f1de21871f..3773b432135b9 100644 --- 
a/pandas/tests/indexing/test_floats.py +++ b/pandas/tests/indexing/test_floats.py @@ -10,6 +10,9 @@ import pandas.util.testing as tm +ignore_ix = pytest.mark.filterwarnings("ignore:\\n.ix:DeprecationWarning") + + class TestFloatIndexers(object): def check(self, result, original, indexer, getitem): @@ -57,6 +60,7 @@ def f(): s.iloc[3.0] = 0 pytest.raises(TypeError, f) + @ignore_ix def test_scalar_non_numeric(self): # GH 4892 @@ -145,6 +149,7 @@ def f(): s[3] pytest.raises(TypeError, lambda: s[3.0]) + @ignore_ix def test_scalar_with_mixed(self): s2 = Series([1, 2, 3], index=['a', 'b', 'c']) @@ -202,6 +207,7 @@ def f(): expected = 3 assert result == expected + @ignore_ix def test_scalar_integer(self): # test how scalar float indexers work on int indexes @@ -254,6 +260,7 @@ def compare(x, y): # coerce to equal int assert 3.0 in s + @ignore_ix def test_scalar_float(self): # scalar float indexers work on a float index @@ -269,8 +276,7 @@ def test_scalar_float(self): (lambda x: x, True)]: # getting - with catch_warnings(record=True): - result = idxr(s)[indexer] + result = idxr(s)[indexer] self.check(result, s, 3, getitem) # setting @@ -305,6 +311,7 @@ def g(): s2.iloc[3.0] = 0 pytest.raises(TypeError, g) + @ignore_ix def test_slice_non_numeric(self): # GH 4892 @@ -356,6 +363,7 @@ def f(): idxr(s)[l] = 0 pytest.raises(TypeError, f) + @ignore_ix def test_slice_integer(self): # same as above, but for Integer based indexes @@ -483,6 +491,7 @@ def f(): pytest.raises(TypeError, f) + @ignore_ix def test_slice_integer_frame_getitem(self): # similar to above, but on the getitem dim (of a DataFrame) @@ -554,6 +563,7 @@ def f(): with catch_warnings(record=True): f(lambda x: x.ix) + @ignore_ix def test_slice_float(self): # same as above, but for floats diff --git a/pandas/tests/indexing/test_indexing_slow.py b/pandas/tests/indexing/test_indexing_slow.py index f4d581f450363..61e5fdd7b9562 100644 --- a/pandas/tests/indexing/test_indexing_slow.py +++ b/pandas/tests/indexing/test_indexing_slow.py @@ -12,6 +12,7 @@ class TestIndexingSlow(object): @pytest.mark.slow + @pytest.mark.filterwarnings("ignore::pandas.errors.PerformanceWarning") def test_multiindex_get_loc(self): # GH7724, GH2646 with warnings.catch_warnings(record=True): diff --git a/pandas/tests/indexing/test_ix.py b/pandas/tests/indexing/test_ix.py index c84576c984525..04d0e04b5651e 100644 --- a/pandas/tests/indexing/test_ix.py +++ b/pandas/tests/indexing/test_ix.py @@ -14,15 +14,17 @@ from pandas.errors import PerformanceWarning -class TestIX(object): +def test_ix_deprecation(): + # GH 15114 + + df = DataFrame({'A': [1, 2, 3]}) + with tm.assert_produces_warning(DeprecationWarning, + check_stacklevel=False): + df.ix[1, 'A'] - def test_ix_deprecation(self): - # GH 15114 - df = DataFrame({'A': [1, 2, 3]}) - with tm.assert_produces_warning(DeprecationWarning, - check_stacklevel=False): - df.ix[1, 'A'] +@pytest.mark.filterwarnings("ignore:\\n.ix:DeprecationWarning") +class TestIX(object): def test_ix_loc_setitem_consistency(self): diff --git a/pandas/tests/indexing/test_multiindex.py b/pandas/tests/indexing/test_multiindex.py index d2c4c8f5e149b..9e66dfad3ddc7 100644 --- a/pandas/tests/indexing/test_multiindex.py +++ b/pandas/tests/indexing/test_multiindex.py @@ -9,6 +9,7 @@ from pandas.tests.indexing.common import _mklbl +@pytest.mark.filterwarnings("ignore:\\n.ix:DeprecationWarning") class TestMultiIndexBasic(object): def test_iloc_getitem_multiindex2(self): @@ -1232,101 +1233,99 @@ def f(): tm.assert_frame_equal(df, expected) 
+@pytest.mark.filterwarnings('ignore:\\nPanel:FutureWarning') class TestMultiIndexPanel(object): def test_iloc_getitem_panel_multiindex(self): - with catch_warnings(record=True): + # GH 7199 + # Panel with multi-index + multi_index = MultiIndex.from_tuples([('ONE', 'one'), + ('TWO', 'two'), + ('THREE', 'three')], + names=['UPPER', 'lower']) + + simple_index = [x[0] for x in multi_index] + wd1 = Panel(items=['First', 'Second'], + major_axis=['a', 'b', 'c', 'd'], + minor_axis=multi_index) + + wd2 = Panel(items=['First', 'Second'], + major_axis=['a', 'b', 'c', 'd'], + minor_axis=simple_index) + + expected1 = wd1['First'].iloc[[True, True, True, False], [0, 2]] + result1 = wd1.iloc[0, [True, True, True, False], [0, 2]] # WRONG + tm.assert_frame_equal(result1, expected1) + + expected2 = wd2['First'].iloc[[True, True, True, False], [0, 2]] + result2 = wd2.iloc[0, [True, True, True, False], [0, 2]] + tm.assert_frame_equal(result2, expected2) + + expected1 = DataFrame(index=['a'], columns=multi_index, + dtype='float64') + result1 = wd1.iloc[0, [0], [0, 1, 2]] + tm.assert_frame_equal(result1, expected1) + + expected2 = DataFrame(index=['a'], columns=simple_index, + dtype='float64') + result2 = wd2.iloc[0, [0], [0, 1, 2]] + tm.assert_frame_equal(result2, expected2) + + # GH 7516 + mi = MultiIndex.from_tuples([(0, 'x'), (1, 'y'), (2, 'z')]) + p = Panel(np.arange(3 * 3 * 3, dtype='int64').reshape(3, 3, 3), + items=['a', 'b', 'c'], major_axis=mi, + minor_axis=['u', 'v', 'w']) + result = p.iloc[:, 1, 0] + expected = Series([3, 12, 21], index=['a', 'b', 'c'], name='u') + tm.assert_series_equal(result, expected) - # GH 7199 - # Panel with multi-index - multi_index = MultiIndex.from_tuples([('ONE', 'one'), - ('TWO', 'two'), - ('THREE', 'three')], - names=['UPPER', 'lower']) - - simple_index = [x[0] for x in multi_index] - wd1 = Panel(items=['First', 'Second'], - major_axis=['a', 'b', 'c', 'd'], - minor_axis=multi_index) - - wd2 = Panel(items=['First', 'Second'], - major_axis=['a', 'b', 'c', 'd'], - minor_axis=simple_index) - - expected1 = wd1['First'].iloc[[True, True, True, False], [0, 2]] - result1 = wd1.iloc[0, [True, True, True, False], [0, 2]] # WRONG - tm.assert_frame_equal(result1, expected1) - - expected2 = wd2['First'].iloc[[True, True, True, False], [0, 2]] - result2 = wd2.iloc[0, [True, True, True, False], [0, 2]] - tm.assert_frame_equal(result2, expected2) - - expected1 = DataFrame(index=['a'], columns=multi_index, - dtype='float64') - result1 = wd1.iloc[0, [0], [0, 1, 2]] - tm.assert_frame_equal(result1, expected1) - - expected2 = DataFrame(index=['a'], columns=simple_index, - dtype='float64') - result2 = wd2.iloc[0, [0], [0, 1, 2]] - tm.assert_frame_equal(result2, expected2) - - # GH 7516 - mi = MultiIndex.from_tuples([(0, 'x'), (1, 'y'), (2, 'z')]) - p = Panel(np.arange(3 * 3 * 3, dtype='int64').reshape(3, 3, 3), - items=['a', 'b', 'c'], major_axis=mi, - minor_axis=['u', 'v', 'w']) - result = p.iloc[:, 1, 0] - expected = Series([3, 12, 21], index=['a', 'b', 'c'], name='u') - tm.assert_series_equal(result, expected) - - result = p.loc[:, (1, 'y'), 'u'] - tm.assert_series_equal(result, expected) + result = p.loc[:, (1, 'y'), 'u'] + tm.assert_series_equal(result, expected) def test_panel_setitem_with_multiindex(self): - with catch_warnings(record=True): - # 10360 - # failing with a multi-index - arr = np.array([[[1, 2, 3], [0, 0, 0]], - [[0, 0, 0], [0, 0, 0]]], - dtype=np.float64) - - # reg index - axes = dict(items=['A', 'B'], major_axis=[0, 1], - minor_axis=['X', 'Y', 'Z']) - p1 = Panel(0., 
**axes) - p1.iloc[0, 0, :] = [1, 2, 3] - expected = Panel(arr, **axes) - tm.assert_panel_equal(p1, expected) - - # multi-indexes - axes['items'] = MultiIndex.from_tuples( - [('A', 'a'), ('B', 'b')]) - p2 = Panel(0., **axes) - p2.iloc[0, 0, :] = [1, 2, 3] - expected = Panel(arr, **axes) - tm.assert_panel_equal(p2, expected) - - axes['major_axis'] = MultiIndex.from_tuples( - [('A', 1), ('A', 2)]) - p3 = Panel(0., **axes) - p3.iloc[0, 0, :] = [1, 2, 3] - expected = Panel(arr, **axes) - tm.assert_panel_equal(p3, expected) - - axes['minor_axis'] = MultiIndex.from_product( - [['X'], range(3)]) - p4 = Panel(0., **axes) - p4.iloc[0, 0, :] = [1, 2, 3] - expected = Panel(arr, **axes) - tm.assert_panel_equal(p4, expected) - - arr = np.array( - [[[1, 0, 0], [2, 0, 0]], [[0, 0, 0], [0, 0, 0]]], - dtype=np.float64) - p5 = Panel(0., **axes) - p5.iloc[0, :, 0] = [1, 2] - expected = Panel(arr, **axes) - tm.assert_panel_equal(p5, expected) + # 10360 + # failing with a multi-index + arr = np.array([[[1, 2, 3], [0, 0, 0]], + [[0, 0, 0], [0, 0, 0]]], + dtype=np.float64) + + # reg index + axes = dict(items=['A', 'B'], major_axis=[0, 1], + minor_axis=['X', 'Y', 'Z']) + p1 = Panel(0., **axes) + p1.iloc[0, 0, :] = [1, 2, 3] + expected = Panel(arr, **axes) + tm.assert_panel_equal(p1, expected) + + # multi-indexes + axes['items'] = MultiIndex.from_tuples( + [('A', 'a'), ('B', 'b')]) + p2 = Panel(0., **axes) + p2.iloc[0, 0, :] = [1, 2, 3] + expected = Panel(arr, **axes) + tm.assert_panel_equal(p2, expected) + + axes['major_axis'] = MultiIndex.from_tuples( + [('A', 1), ('A', 2)]) + p3 = Panel(0., **axes) + p3.iloc[0, 0, :] = [1, 2, 3] + expected = Panel(arr, **axes) + tm.assert_panel_equal(p3, expected) + + axes['minor_axis'] = MultiIndex.from_product( + [['X'], range(3)]) + p4 = Panel(0., **axes) + p4.iloc[0, 0, :] = [1, 2, 3] + expected = Panel(arr, **axes) + tm.assert_panel_equal(p4, expected) + + arr = np.array( + [[[1, 0, 0], [2, 0, 0]], [[0, 0, 0], [0, 0, 0]]], + dtype=np.float64) + p5 = Panel(0., **axes) + p5.iloc[0, :, 0] = [1, 2] + expected = Panel(arr, **axes) + tm.assert_panel_equal(p5, expected) diff --git a/pandas/tests/indexing/test_panel.py b/pandas/tests/indexing/test_panel.py index 1085e2a61be48..2cd05b5779f30 100644 --- a/pandas/tests/indexing/test_panel.py +++ b/pandas/tests/indexing/test_panel.py @@ -6,6 +6,7 @@ from pandas import Panel, date_range, DataFrame +@pytest.mark.filterwarnings("ignore:\\nPanel:FutureWarning") class TestPanel(object): def test_iloc_getitem_panel(self): @@ -110,6 +111,7 @@ def test_iloc_panel_issue(self): assert p.iloc[1, :3, 1].shape == (3, ) assert p.iloc[:3, 1, 1].shape == (3, ) + @pytest.mark.filterwarnings("ignore:\\n.ix:DeprecationWarning") def test_panel_getitem(self): with catch_warnings(record=True): diff --git a/pandas/tests/indexing/test_partial.py b/pandas/tests/indexing/test_partial.py index 3c7a7f070805d..5910f462cb3df 100644 --- a/pandas/tests/indexing/test_partial.py +++ b/pandas/tests/indexing/test_partial.py @@ -16,6 +16,8 @@ class TestPartialSetting(object): + @pytest.mark.filterwarnings("ignore:\\nPanel:FutureWarning") + @pytest.mark.filterwarnings("ignore:\\n.ix:DeprecationWarning") def test_partial_setting(self): # GH2578, allow ix and friends to partially set @@ -404,6 +406,7 @@ def test_series_partial_set_with_name(self): result = ser.iloc[[1, 1, 0, 0]] tm.assert_series_equal(result, expected, check_index_type=True) + @pytest.mark.filterwarnings("ignore:\\n.ix") def test_partial_set_invalid(self): # GH 4940 diff --git 
a/pandas/tests/io/formats/test_to_excel.py b/pandas/tests/io/formats/test_to_excel.py index 9fc16c43f5c1d..7d54f93c9831e 100644 --- a/pandas/tests/io/formats/test_to_excel.py +++ b/pandas/tests/io/formats/test_to_excel.py @@ -6,8 +6,8 @@ import pytest import pandas.util.testing as tm -from warnings import catch_warnings from pandas.io.formats.excel import CSSToExcelConverter +from pandas.io.formats.css import CSSWarning @pytest.mark.parametrize('css,expected', [ @@ -272,6 +272,6 @@ def test_css_to_excel_bad_colors(input_color): "patternType": "solid" } - with catch_warnings(record=True): + with tm.assert_produces_warning(CSSWarning): convert = CSSToExcelConverter() assert expected == convert(css) diff --git a/pandas/tests/io/sas/test_sas7bdat.py b/pandas/tests/io/sas/test_sas7bdat.py index 101ee3e619f5b..2f3503c6c8a4a 100644 --- a/pandas/tests/io/sas/test_sas7bdat.py +++ b/pandas/tests/io/sas/test_sas7bdat.py @@ -9,6 +9,8 @@ import pytest +# https://github.com/cython/cython/issues/1720 +@pytest.mark.filterwarnings("ignore:can't resolve package:ImportWarning") class TestSAS7BDAT(object): @pytest.fixture(autouse=True) diff --git a/pandas/tests/io/test_common.py b/pandas/tests/io/test_common.py index 991b8ee508760..73e29e6eb9a6a 100644 --- a/pandas/tests/io/test_common.py +++ b/pandas/tests/io/test_common.py @@ -44,6 +44,8 @@ def __fspath__(self): HERE = os.path.abspath(os.path.dirname(__file__)) +# https://github.com/cython/cython/issues/1720 +@pytest.mark.filterwarnings("ignore:can't resolve package:ImportWarning") class TestCommonIOCapabilities(object): data1 = """index,A,B,C,D foo,2,3,4,5 diff --git a/pandas/tests/io/test_compression.py b/pandas/tests/io/test_compression.py index 1806ddd2bbcc6..b62a1e6c4933e 100644 --- a/pandas/tests/io/test_compression.py +++ b/pandas/tests/io/test_compression.py @@ -1,5 +1,6 @@ import os import warnings +import contextlib import pytest @@ -8,12 +9,15 @@ import pandas.util.testing as tm +@contextlib.contextmanager def catch_to_csv_depr(): # Catching warnings because Series.to_csv has # been deprecated. Remove this context when # Series.to_csv has been aligned. 
- return warnings.catch_warnings(record=True) + with warnings.catch_warnings(record=True): + warnings.simplefilter("ignore", FutureWarning) + yield @pytest.mark.parametrize('obj', [ diff --git a/pandas/tests/io/test_excel.py b/pandas/tests/io/test_excel.py index 6741645e466f3..544760a022b6d 100644 --- a/pandas/tests/io/test_excel.py +++ b/pandas/tests/io/test_excel.py @@ -2189,6 +2189,7 @@ def test_ExcelWriter_dispatch_raises(self): with tm.assert_raises_regex(ValueError, 'No engine'): ExcelWriter('nothing') + @pytest.mark.filterwarnings("ignore:\\nPanel:FutureWarning") def test_register_writer(self): # some awkward mocking to test out dispatch and such actually works called_save = [] diff --git a/pandas/tests/io/test_pytables.py b/pandas/tests/io/test_pytables.py index ddcfcc0842d1a..ea5f1684c0695 100644 --- a/pandas/tests/io/test_pytables.py +++ b/pandas/tests/io/test_pytables.py @@ -2,7 +2,7 @@ import os import tempfile from contextlib import contextmanager -from warnings import catch_warnings +from warnings import catch_warnings, simplefilter from distutils.version import LooseVersion import datetime @@ -40,6 +40,10 @@ LooseVersion('2.2') else 'zlib') +ignore_natural_naming_warning = pytest.mark.filterwarnings( + "ignore:object name:tables.exceptions.NaturalNameWarning" +) + # contextmanager to ensure the file cleanup @@ -139,12 +143,14 @@ def teardown_method(self, method): @pytest.mark.single +@pytest.mark.filterwarnings("ignore:\\nPanel:FutureWarning") class TestHDFStore(Base): def test_factory_fun(self): path = create_tempfile(self.path) try: - with catch_warnings(record=True): + with tm.assert_produces_warning(FutureWarning, + check_stacklevel=False): with get_store(path) as tbl: raise ValueError('blah') except ValueError: @@ -153,11 +159,13 @@ def test_factory_fun(self): safe_remove(path) try: - with catch_warnings(record=True): + with tm.assert_produces_warning(FutureWarning, + check_stacklevel=False): with get_store(path) as tbl: tbl['a'] = tm.makeDataFrame() - with catch_warnings(record=True): + with tm.assert_produces_warning(FutureWarning, + check_stacklevel=False): with get_store(path) as tbl: assert len(tbl) == 1 assert type(tbl['a']) == DataFrame @@ -425,8 +433,8 @@ def test_repr(self): df.loc[3:6, ['obj1']] = np.nan df = df._consolidate()._convert(datetime=True) - # PerformanceWarning with catch_warnings(record=True): + simplefilter("ignore", pd.errors.PerformanceWarning) store['df'] = df # make a random group in hdf space @@ -446,6 +454,7 @@ def test_repr(self): repr(s) str(s) + @ignore_natural_naming_warning def test_contains(self): with ensure_clean_store(self.path) as store: @@ -912,11 +921,15 @@ def test_put_mixed_type(self): # PerformanceWarning with catch_warnings(record=True): + simplefilter("ignore", pd.errors.PerformanceWarning) store.put('df', df) expected = store.get('df') tm.assert_frame_equal(expected, df) + @pytest.mark.filterwarnings( + "ignore:object name:tables.exceptions.NaturalNameWarning" + ) def test_append(self): with ensure_clean_store(self.path) as store: @@ -1075,6 +1088,7 @@ def check(format, index): # PerformanceWarning with catch_warnings(record=True): + simplefilter("ignore", pd.errors.PerformanceWarning) check('fixed', index) @pytest.mark.skipif(not is_platform_little_endian(), @@ -1355,6 +1369,7 @@ def test_append_with_strings(self): with ensure_clean_store(self.path) as store: with catch_warnings(record=True): + simplefilter("ignore", FutureWarning) wp = tm.makePanel() wp2 = wp.rename_axis( {x: "%s_extra" % x for x in wp.minor_axis}, 
axis=2) @@ -2553,6 +2568,7 @@ def test_terms(self): with ensure_clean_store(self.path) as store: with catch_warnings(record=True): + simplefilter("ignore", FutureWarning) wp = tm.makePanel() wpneg = Panel.fromDict({-1: tm.makeDataFrame(), @@ -2758,8 +2774,10 @@ def test_tuple_index(self): DF = DataFrame(data, index=idx, columns=col) with catch_warnings(record=True): + simplefilter("ignore", pd.errors.PerformanceWarning) self._check_roundtrip(DF, tm.assert_frame_equal) + @pytest.mark.filterwarnings("ignore::pandas.errors.PerformanceWarning") def test_index_types(self): with catch_warnings(record=True): @@ -2988,6 +3006,9 @@ def test_wide(self): wp = tm.makePanel() self._check_roundtrip(wp, assert_panel_equal) + @pytest.mark.filterwarnings( + "ignore:\\nduplicate:pandas.io.pytables.DuplicateWarning" + ) def test_select_with_dups(self): # single dtypes @@ -3047,6 +3068,9 @@ def test_select_with_dups(self): result = store.select('df', columns=['B', 'A']) assert_frame_equal(result, expected, by_blocks=True) + @pytest.mark.filterwarnings( + "ignore:\\nduplicate:pandas.io.pytables.DuplicateWarning" + ) def test_wide_table_dups(self): with ensure_clean_store(self.path) as store: with catch_warnings(record=True): @@ -3589,6 +3613,9 @@ def test_select_iterator_many_empty_frames(self): # should be [] assert len(results) == 0 + @pytest.mark.filterwarnings( + "ignore:\\nthe :pandas.io.pytables.AttributeConflictWarning" + ) def test_retain_index_attributes(self): # GH 3499, losing frequency info on index recreation @@ -3631,6 +3658,9 @@ def test_retain_index_attributes(self): freq='D')))) store.append('df2', df3) + @pytest.mark.filterwarnings( + "ignore:\\nthe :pandas.io.pytables.AttributeConflictWarning" + ) def test_retain_index_attributes2(self): with ensure_clean_path(self.path) as path: @@ -4533,7 +4563,8 @@ def test_legacy_table_read(self, datapath): datapath('io', 'data', 'legacy_hdf', 'legacy_table.h5'), mode='r') as store: - with catch_warnings(record=True): + with catch_warnings(): + simplefilter("ignore", pd.io.pytables.IncompatibilityWarning) store.select('df1') store.select('df2') store.select('wp1') @@ -4665,6 +4696,7 @@ def test_unicode_index(self): # PerformanceWarning with catch_warnings(record=True): + simplefilter("ignore", pd.errors.PerformanceWarning) s = Series(np.random.randn(len(unicode_values)), unicode_values) self._check_roundtrip(s, tm.assert_series_equal) @@ -4933,6 +4965,7 @@ def test_columns_multiindex_modified(self): df_loaded = read_hdf(path, 'df', columns=cols2load) # noqa assert cols2load_original == cols2load + @ignore_natural_naming_warning def test_to_hdf_with_object_column_names(self): # GH9057 # Writing HDF5 table format should only work for string-like @@ -5277,6 +5310,7 @@ def test_complex_mixed_table(self): reread = read_hdf(path, 'df') assert_frame_equal(df, reread) + @pytest.mark.filterwarnings("ignore:\\nPanel:FutureWarning") def test_complex_across_dimensions_fixed(self): with catch_warnings(record=True): complex128 = np.array( @@ -5294,6 +5328,7 @@ def test_complex_across_dimensions_fixed(self): reread = read_hdf(path, 'obj') comp(obj, reread) + @pytest.mark.filterwarnings("ignore:\\nPanel:FutureWarning") def test_complex_across_dimensions(self): complex128 = np.array([1.0 + 1.0j, 1.0 + 1.0j, 1.0 + 1.0j, 1.0 + 1.0j]) s = Series(complex128, index=list('abcd')) diff --git a/pandas/tests/io/test_sql.py b/pandas/tests/io/test_sql.py index 824e5a2b23df3..d5b4ce4755698 100644 --- a/pandas/tests/io/test_sql.py +++ b/pandas/tests/io/test_sql.py @@ -18,7 +18,6 @@ 
""" from __future__ import print_function -from warnings import catch_warnings import pytest import sqlite3 import csv @@ -574,11 +573,11 @@ def test_to_sql_series(self): s2 = sql.read_sql_query("SELECT * FROM test_series", self.conn) tm.assert_frame_equal(s.to_frame(), s2) + @pytest.mark.filterwarnings("ignore:\\nPanel:FutureWarning") def test_to_sql_panel(self): - with catch_warnings(record=True): - panel = tm.makePanel() - pytest.raises(NotImplementedError, sql.to_sql, panel, - 'test_panel', self.conn) + panel = tm.makePanel() + pytest.raises(NotImplementedError, sql.to_sql, panel, + 'test_panel', self.conn) def test_roundtrip(self): sql.to_sql(self.test_frame1, 'test_frame_roundtrip', diff --git a/pandas/tests/io/test_stata.py b/pandas/tests/io/test_stata.py index cfe47cae7e5e1..303d3a3d8dbe9 100644 --- a/pandas/tests/io/test_stata.py +++ b/pandas/tests/io/test_stata.py @@ -120,7 +120,7 @@ def test_read_empty_dta(self, version): def test_data_method(self): # Minimal testing of legacy data method with StataReader(self.dta1_114) as rdr: - with warnings.catch_warnings(record=True) as w: # noqa + with tm.assert_produces_warning(UserWarning): parsed_114_data = rdr.data() with StataReader(self.dta1_114) as rdr: @@ -388,10 +388,8 @@ def test_read_write_dta11(self): formatted = formatted.astype(np.int32) with tm.ensure_clean() as path: - with warnings.catch_warnings(record=True) as w: + with tm.assert_produces_warning(pd.io.stata.InvalidColumnName): original.to_stata(path, None) - # should get a warning for that format. - assert len(w) == 1 written_and_read_again = self.read_dta(path) tm.assert_frame_equal( @@ -871,6 +869,9 @@ def test_drop_column(self): read_stata(self.dta15_117, convert_dates=True, columns=columns) @pytest.mark.parametrize('version', [114, 117]) + @pytest.mark.filterwarnings( + "ignore:\\nStata value:pandas.io.stata.ValueLabelTypeMismatch" + ) def test_categorical_writing(self, version): original = DataFrame.from_records( [ @@ -901,12 +902,10 @@ def test_categorical_writing(self, version): expected.index.name = 'index' with tm.ensure_clean() as path: - with warnings.catch_warnings(record=True) as w: # noqa - # Silence warnings - original.to_stata(path, version=version) - written_and_read_again = self.read_dta(path) - res = written_and_read_again.set_index('index') - tm.assert_frame_equal(res, expected, check_categorical=False) + original.to_stata(path, version=version) + written_and_read_again = self.read_dta(path) + res = written_and_read_again.set_index('index') + tm.assert_frame_equal(res, expected, check_categorical=False) def test_categorical_warnings_and_errors(self): # Warning for non-string labels @@ -933,10 +932,9 @@ def test_categorical_warnings_and_errors(self): original = pd.concat([original[col].astype('category') for col in original], axis=1) - with warnings.catch_warnings(record=True) as w: + with tm.assert_produces_warning(pd.io.stata.ValueLabelTypeMismatch): original.to_stata(path) # should get a warning for mixed content - assert len(w) == 1 @pytest.mark.parametrize('version', [114, 117]) def test_categorical_with_stata_missing_values(self, version): @@ -1445,7 +1443,7 @@ def test_convert_strl_name_swap(self): columns=['long1' * 10, 'long', 1]) original.index.name = 'index' - with warnings.catch_warnings(record=True) as w: # noqa + with tm.assert_produces_warning(pd.io.stata.InvalidColumnName): with tm.ensure_clean() as path: original.to_stata(path, convert_strl=['long', 1], version=117) reread = self.read_dta(path) diff --git 
a/pandas/tests/plotting/test_frame.py b/pandas/tests/plotting/test_frame.py index 47a93ba82d77b..cfd1e0b801b51 100644 --- a/pandas/tests/plotting/test_frame.py +++ b/pandas/tests/plotting/test_frame.py @@ -632,6 +632,7 @@ def test_subplots_multiple_axes(self): # TestDataFrameGroupByPlots.test_grouped_box_multiple_axes fig, axes = self.plt.subplots(2, 2) with warnings.catch_warnings(): + warnings.simplefilter("ignore", UserWarning) df = DataFrame(np.random.rand(10, 4), index=list(string.ascii_letters[:10])) @@ -1578,7 +1579,11 @@ def test_hist_df(self): self._check_ticks_props(axes, xrot=40, yrot=0) tm.close() - ax = series.plot.hist(normed=True, cumulative=True, bins=4) + if plotting._compat._mpl_ge_2_2_0(): + kwargs = {"density": True} + else: + kwargs = {"normed": True} + ax = series.plot.hist(cumulative=True, bins=4, **kwargs) # height of last bin (index 5) must be 1.0 rects = [x for x in ax.get_children() if isinstance(x, Rectangle)] tm.assert_almost_equal(rects[-1].get_height(), 1.0) @@ -1854,7 +1859,7 @@ def test_line_colors(self): tm.close() - ax2 = df.plot(colors=custom_colors) + ax2 = df.plot(color=custom_colors) lines2 = ax2.get_lines() for l1, l2 in zip(ax.get_lines(), lines2): diff --git a/pandas/tests/plotting/test_hist_method.py b/pandas/tests/plotting/test_hist_method.py index 864d39eba29c5..6df1391ec2a9c 100644 --- a/pandas/tests/plotting/test_hist_method.py +++ b/pandas/tests/plotting/test_hist_method.py @@ -12,6 +12,7 @@ from numpy.random import randn from pandas.plotting._core import grouped_hist +from pandas.plotting._compat import _mpl_ge_2_2_0 from pandas.tests.plotting.common import (TestPlotBase, _check_plot_works) @@ -193,7 +194,11 @@ def test_hist_df_legacy(self): tm.close() # make sure kwargs to hist are handled - ax = ser.hist(normed=True, cumulative=True, bins=4) + if _mpl_ge_2_2_0(): + kwargs = {"density": True} + else: + kwargs = {"normed": True} + ax = ser.hist(cumulative=True, bins=4, **kwargs) # height of last bin (index 5) must be 1.0 rects = [x for x in ax.get_children() if isinstance(x, Rectangle)] tm.assert_almost_equal(rects[-1].get_height(), 1.0) @@ -279,9 +284,15 @@ def test_grouped_hist_legacy(self): # make sure kwargs to hist are handled xf, yf = 20, 18 xrot, yrot = 30, 40 - axes = grouped_hist(df.A, by=df.C, normed=True, cumulative=True, + + if _mpl_ge_2_2_0(): + kwargs = {"density": True} + else: + kwargs = {"normed": True} + + axes = grouped_hist(df.A, by=df.C, cumulative=True, bins=4, xlabelsize=xf, xrot=xrot, - ylabelsize=yf, yrot=yrot) + ylabelsize=yf, yrot=yrot, **kwargs) # height of last bin (index 5) must be 1.0 for ax in axes.ravel(): rects = [x for x in ax.get_children() if isinstance(x, Rectangle)] diff --git a/pandas/tests/plotting/test_misc.py b/pandas/tests/plotting/test_misc.py index e80443954a434..8c84b785c88e4 100644 --- a/pandas/tests/plotting/test_misc.py +++ b/pandas/tests/plotting/test_misc.py @@ -212,6 +212,8 @@ def test_parallel_coordinates(self, iris): with tm.assert_produces_warning(FutureWarning): parallel_coordinates(df, 'Name', colors=colors) + # not sure if this is indicative of a problem + @pytest.mark.filterwarnings("ignore:Attempting to set:UserWarning") def test_parallel_coordinates_with_sorted_labels(self): """ For #15908 """ from pandas.plotting import parallel_coordinates diff --git a/pandas/tests/reshape/merge/test_join.py b/pandas/tests/reshape/merge/test_join.py index 09f511886583c..e965ff7a78a39 100644 --- a/pandas/tests/reshape/merge/test_join.py +++ b/pandas/tests/reshape/merge/test_join.py @@ -19,6 +19,7 @@ a_
= np.array +@pytest.mark.filterwarnings("ignore:\\nPanel:FutureWarning") class TestJoin(object): def setup_method(self, method): diff --git a/pandas/tests/reshape/test_concat.py b/pandas/tests/reshape/test_concat.py index 762b04cc3bd4f..da43194b95ed3 100644 --- a/pandas/tests/reshape/test_concat.py +++ b/pandas/tests/reshape/test_concat.py @@ -1,4 +1,4 @@ -from warnings import catch_warnings +from warnings import catch_warnings, simplefilter from itertools import combinations import datetime as dt @@ -1465,6 +1465,7 @@ def test_concat_mixed_objs(self): # invalid concatente of mixed dims with catch_warnings(record=True): + simplefilter("ignore", FutureWarning) panel = tm.makePanel() pytest.raises(ValueError, lambda: concat([panel, s1], axis=1)) @@ -1503,59 +1504,61 @@ def test_dtype_coerceion(self): result = concat([df.iloc[[0]], df.iloc[[1]]]) tm.assert_series_equal(result.dtypes, df.dtypes) + @pytest.mark.filterwarnings("ignore:\\nPanel:FutureWarning") def test_panel_concat_other_axes(self): - with catch_warnings(record=True): - panel = tm.makePanel() + panel = tm.makePanel() - p1 = panel.iloc[:, :5, :] - p2 = panel.iloc[:, 5:, :] + p1 = panel.iloc[:, :5, :] + p2 = panel.iloc[:, 5:, :] - result = concat([p1, p2], axis=1) - tm.assert_panel_equal(result, panel) + result = concat([p1, p2], axis=1) + tm.assert_panel_equal(result, panel) - p1 = panel.iloc[:, :, :2] - p2 = panel.iloc[:, :, 2:] + p1 = panel.iloc[:, :, :2] + p2 = panel.iloc[:, :, 2:] - result = concat([p1, p2], axis=2) - tm.assert_panel_equal(result, panel) + result = concat([p1, p2], axis=2) + tm.assert_panel_equal(result, panel) - # if things are a bit misbehaved - p1 = panel.iloc[:2, :, :2] - p2 = panel.iloc[:, :, 2:] - p1['ItemC'] = 'baz' + # if things are a bit misbehaved + p1 = panel.iloc[:2, :, :2] + p2 = panel.iloc[:, :, 2:] + p1['ItemC'] = 'baz' - result = concat([p1, p2], axis=2) + result = concat([p1, p2], axis=2) - expected = panel.copy() - expected['ItemC'] = expected['ItemC'].astype('O') - expected.loc['ItemC', :, :2] = 'baz' - tm.assert_panel_equal(result, expected) + expected = panel.copy() + expected['ItemC'] = expected['ItemC'].astype('O') + expected.loc['ItemC', :, :2] = 'baz' + tm.assert_panel_equal(result, expected) + @pytest.mark.filterwarnings("ignore:\\nPanel:FutureWarning") + # Panel.rename warning we don't care about + @pytest.mark.filterwarnings("ignore:Using:FutureWarning") def test_panel_concat_buglet(self, sort): - with catch_warnings(record=True): - # #2257 - def make_panel(): - index = 5 - cols = 3 + # #2257 + def make_panel(): + index = 5 + cols = 3 - def df(): - return DataFrame(np.random.randn(index, cols), - index=["I%s" % i for i in range(index)], - columns=["C%s" % i for i in range(cols)]) - return Panel({"Item%s" % x: df() for x in ['A', 'B', 'C']}) + def df(): + return DataFrame(np.random.randn(index, cols), + index=["I%s" % i for i in range(index)], + columns=["C%s" % i for i in range(cols)]) + return Panel({"Item%s" % x: df() for x in ['A', 'B', 'C']}) - panel1 = make_panel() - panel2 = make_panel() + panel1 = make_panel() + panel2 = make_panel() - panel2 = panel2.rename_axis({x: "%s_1" % x - for x in panel2.major_axis}, - axis=1) + panel2 = panel2.rename_axis({x: "%s_1" % x + for x in panel2.major_axis}, + axis=1) - panel3 = panel2.rename_axis(lambda x: '%s_1' % x, axis=1) - panel3 = panel3.rename_axis(lambda x: '%s_1' % x, axis=2) + panel3 = panel2.rename_axis(lambda x: '%s_1' % x, axis=1) + panel3 = panel3.rename_axis(lambda x: '%s_1' % x, axis=2) - # it works! 
- concat([panel1, panel3], axis=1, verify_integrity=True, sort=sort) + # it works! + concat([panel1, panel3], axis=1, verify_integrity=True, sort=sort) def test_concat_series(self): @@ -2351,30 +2354,30 @@ def test_concat_datetime_timezone(self): tm.assert_frame_equal(result, expected) # GH 13783: Concat after resample - with catch_warnings(record=True): - result = pd.concat([df1.resample('H').mean(), - df2.resample('H').mean()]) - expected = pd.DataFrame({'a': [1, 2, 3] + [np.nan] * 3, - 'b': [np.nan] * 3 + [1, 2, 3]}, - index=idx1.append(idx1)) - tm.assert_frame_equal(result, expected) + result = pd.concat([df1.resample('H').mean(), + df2.resample('H').mean()], sort=True) + expected = pd.DataFrame({'a': [1, 2, 3] + [np.nan] * 3, + 'b': [np.nan] * 3 + [1, 2, 3]}, + index=idx1.append(idx1)) + tm.assert_frame_equal(result, expected) @pytest.mark.parametrize('pdt', [pd.Series, pd.DataFrame, pd.Panel]) @pytest.mark.parametrize('dt', np.sctypes['float']) +@pytest.mark.filterwarnings("ignore:\\nPanel:FutureWarning") def test_concat_no_unnecessary_upcast(dt, pdt): - with catch_warnings(record=True): - # GH 13247 - dims = pdt().ndim - dfs = [pdt(np.array([1], dtype=dt, ndmin=dims)), - pdt(np.array([np.nan], dtype=dt, ndmin=dims)), - pdt(np.array([5], dtype=dt, ndmin=dims))] - x = pd.concat(dfs) - assert x.values.dtype == dt + # GH 13247 + dims = pdt().ndim + dfs = [pdt(np.array([1], dtype=dt, ndmin=dims)), + pdt(np.array([np.nan], dtype=dt, ndmin=dims)), + pdt(np.array([5], dtype=dt, ndmin=dims))] + x = pd.concat(dfs) + assert x.values.dtype == dt @pytest.mark.parametrize('pdt', [pd.Series, pd.DataFrame, pd.Panel]) @pytest.mark.parametrize('dt', np.sctypes['int']) +@pytest.mark.filterwarnings("ignore:\\nPanel:FutureWarning") def test_concat_will_upcast(dt, pdt): with catch_warnings(record=True): dims = pdt().ndim diff --git a/pandas/tests/reshape/test_reshape.py b/pandas/tests/reshape/test_reshape.py index 3f4ccd7693a8f..ed9ad06a9b371 100644 --- a/pandas/tests/reshape/test_reshape.py +++ b/pandas/tests/reshape/test_reshape.py @@ -1,7 +1,6 @@ # -*- coding: utf-8 -*- # pylint: disable-msg=W0612,E1101 -from warnings import catch_warnings import pytest from collections import OrderedDict @@ -501,12 +500,12 @@ def test_get_dummies_duplicate_columns(self, df): class TestCategoricalReshape(object): + @pytest.mark.filterwarnings("ignore:\\nPanel:FutureWarning") def test_reshaping_panel_categorical(self): - with catch_warnings(record=True): - p = tm.makePanel() - p['str'] = 'foo' - df = p.to_frame() + p = tm.makePanel() + p['str'] = 'foo' + df = p.to_frame() df['category'] = df['str'].astype('category') result = df['category'].unstack() diff --git a/pandas/tests/series/indexing/test_datetime.py b/pandas/tests/series/indexing/test_datetime.py index bcea47f42056b..d1f022ef982c0 100644 --- a/pandas/tests/series/indexing/test_datetime.py +++ b/pandas/tests/series/indexing/test_datetime.py @@ -383,6 +383,8 @@ def test_getitem_setitem_periodindex(): assert_series_equal(result, ts) +# FutureWarning from NumPy. 
+@pytest.mark.filterwarnings("ignore:Using a non-tuple:FutureWarning") def test_getitem_median_slice_bug(): index = date_range('20090415', '20090519', freq='2B') s = Series(np.random.randn(13), index=index) diff --git a/pandas/tests/series/indexing/test_indexing.py b/pandas/tests/series/indexing/test_indexing.py index 25bc394e312a0..aa4f58089a933 100644 --- a/pandas/tests/series/indexing/test_indexing.py +++ b/pandas/tests/series/indexing/test_indexing.py @@ -390,6 +390,8 @@ def test_setslice(test_data): assert sl.index.is_unique +# FutureWarning from NumPy about [slice(None, 5). +@pytest.mark.filterwarnings("ignore:Using a non-tuple:FutureWarning") def test_basic_getitem_setitem_corner(test_data): # invalid tuples, e.g. td.ts[:, None] vs. td.ts[:, 2] with tm.assert_raises_regex(ValueError, 'tuple-index'): diff --git a/pandas/tests/series/test_api.py b/pandas/tests/series/test_api.py index da9b03e81994d..3b82242626c20 100644 --- a/pandas/tests/series/test_api.py +++ b/pandas/tests/series/test_api.py @@ -1,6 +1,7 @@ # coding=utf-8 # pylint: disable-msg=E1101,W0612 from collections import OrderedDict +import warnings import pydoc import pytest @@ -728,8 +729,12 @@ def test_dt_accessor_api_for_categorical(self): func_defs.append(f_def) for func, args, kwargs in func_defs: - res = getattr(c.dt, func)(*args, **kwargs) - exp = getattr(s.dt, func)(*args, **kwargs) + with warnings.catch_warnings(): + if func == 'to_period': + # dropping TZ + warnings.simplefilter("ignore", UserWarning) + res = getattr(c.dt, func)(*args, **kwargs) + exp = getattr(s.dt, func)(*args, **kwargs) if isinstance(res, DataFrame): tm.assert_frame_equal(res, exp) diff --git a/pandas/tests/series/test_constructors.py b/pandas/tests/series/test_constructors.py index d2fbd69a2a08f..e8257939f89ba 100644 --- a/pandas/tests/series/test_constructors.py +++ b/pandas/tests/series/test_constructors.py @@ -957,6 +957,8 @@ def test_constructor_set(self): values = frozenset(values) pytest.raises(TypeError, Series, values) + # https://github.com/pandas-dev/pandas/issues/22698 + @pytest.mark.filterwarnings("ignore:elementwise comparison:FutureWarning") def test_fromDict(self): data = {'a': 0, 'b': 1, 'c': 2, 'd': 3} diff --git a/pandas/tests/series/test_dtypes.py b/pandas/tests/series/test_dtypes.py index dd1b623f0f7ff..7aecaf340a3e0 100644 --- a/pandas/tests/series/test_dtypes.py +++ b/pandas/tests/series/test_dtypes.py @@ -428,8 +428,10 @@ def test_astype_empty_constructor_equality(self, dtype): if dtype not in ('S', 'V'): # poor support (if any) currently with warnings.catch_warnings(record=True): - # Generic timestamp dtypes ('M' and 'm') are deprecated, - # but we test that already in series/test_constructors.py + if dtype in ('M', 'm'): + # Generic timestamp dtypes ('M' and 'm') are deprecated, + # but we test that already in series/test_constructors.py + warnings.simplefilter("ignore", FutureWarning) init_empty = Series([], dtype=dtype) as_type_empty = Series([]).astype(dtype) diff --git a/pandas/tests/sparse/frame/test_frame.py b/pandas/tests/sparse/frame/test_frame.py index be5a1710119ee..03533d9396015 100644 --- a/pandas/tests/sparse/frame/test_frame.py +++ b/pandas/tests/sparse/frame/test_frame.py @@ -3,7 +3,6 @@ import operator import pytest -from warnings import catch_warnings from numpy import nan import numpy as np import pandas as pd @@ -981,26 +980,25 @@ def _check(frame, orig): self._check_all(_check) + @pytest.mark.filterwarnings("ignore:\\nPanel:FutureWarning") def test_stack_sparse_frame(self): - with 
catch_warnings(record=True): + def _check(frame): + dense_frame = frame.to_dense() # noqa - def _check(frame): - dense_frame = frame.to_dense() # noqa + wp = Panel.from_dict({'foo': frame}) + from_dense_lp = wp.to_frame() - wp = Panel.from_dict({'foo': frame}) - from_dense_lp = wp.to_frame() + from_sparse_lp = spf.stack_sparse_frame(frame) - from_sparse_lp = spf.stack_sparse_frame(frame) + tm.assert_numpy_array_equal(from_dense_lp.values, + from_sparse_lp.values) - tm.assert_numpy_array_equal(from_dense_lp.values, - from_sparse_lp.values) + _check(self.frame) + _check(self.iframe) - _check(self.frame) - _check(self.iframe) - - # for now - pytest.raises(Exception, _check, self.zframe) - pytest.raises(Exception, _check, self.fill_frame) + # for now + pytest.raises(Exception, _check, self.zframe) + pytest.raises(Exception, _check, self.fill_frame) def test_transpose(self): diff --git a/pandas/tests/sparse/frame/test_to_from_scipy.py b/pandas/tests/sparse/frame/test_to_from_scipy.py index aef49c84fc2ad..d60c2d423fa9f 100644 --- a/pandas/tests/sparse/frame/test_to_from_scipy.py +++ b/pandas/tests/sparse/frame/test_to_from_scipy.py @@ -12,12 +12,16 @@ scipy = pytest.importorskip('scipy') +ignore_matrix_warning = pytest.mark.filterwarnings( + "ignore:the matrix subclass:PendingDeprecationWarning" +) @pytest.mark.parametrize('index', [None, list('abc')]) # noqa: F811 @pytest.mark.parametrize('columns', [None, list('def')]) @pytest.mark.parametrize('fill_value', [None, 0, np.nan]) @pytest.mark.parametrize('dtype', [bool, int, float, np.uint16]) +@ignore_matrix_warning def test_from_to_scipy(spmatrix, index, columns, fill_value, dtype): # GH 4343 # Make one ndarray and from it one sparse matrix, both to be used for @@ -69,6 +73,8 @@ def test_from_to_scipy(spmatrix, index, columns, fill_value, dtype): @pytest.mark.parametrize('fill_value', [None, 0, np.nan]) # noqa: F811 +@ignore_matrix_warning +@pytest.mark.filterwarnings("ignore:object dtype is not supp:UserWarning") def test_from_to_scipy_object(spmatrix, fill_value): # GH 4343 dtype = object @@ -108,8 +114,7 @@ def test_from_to_scipy_object(spmatrix, fill_value): tm.assert_frame_equal(sdf_obj.to_dense(), expected.to_dense()) # Assert spmatrices equal - with catch_warnings(record=True): - assert dict(sdf.to_coo().todok()) == dict(spm.todok()) + assert dict(sdf.to_coo().todok()) == dict(spm.todok()) # Ensure dtype is preserved if possible res_dtype = object @@ -117,6 +122,7 @@ def test_from_to_scipy_object(spmatrix, fill_value): assert sdf.to_coo().dtype == res_dtype +@ignore_matrix_warning def test_from_scipy_correct_ordering(spmatrix): # GH 16179 arr = np.arange(1, 5).reshape(2, 2) @@ -135,6 +141,7 @@ def test_from_scipy_correct_ordering(spmatrix): tm.assert_frame_equal(sdf.to_dense(), expected.to_dense()) +@ignore_matrix_warning def test_from_scipy_fillna(spmatrix): # GH 16112 arr = np.eye(3) diff --git a/pandas/tests/sparse/series/test_series.py b/pandas/tests/sparse/series/test_series.py index 921c30234660f..5b50606bf37bd 100644 --- a/pandas/tests/sparse/series/test_series.py +++ b/pandas/tests/sparse/series/test_series.py @@ -1022,6 +1022,9 @@ def test_round_trip_preserve_multiindex_names(self): @td.skip_if_no_scipy +@pytest.mark.filterwarnings( + "ignore:the matrix subclass:PendingDeprecationWarning" +) class TestSparseSeriesScipyInteraction(object): # Issue 8048: add SparseSeries coo methods diff --git a/pandas/tests/test_downstream.py b/pandas/tests/test_downstream.py index 70973801d7cda..fe1f41e7eb606 100644 --- 
a/pandas/tests/test_downstream.py +++ b/pandas/tests/test_downstream.py @@ -62,6 +62,8 @@ def test_oo_optimizable(): @tm.network +# Cython import warning +@pytest.mark.filterwarnings("ignore::ImportWarning") def test_statsmodels(): statsmodels = import_module('statsmodels') # noqa @@ -71,6 +73,8 @@ def test_statsmodels(): smf.ols('Lottery ~ Literacy + np.log(Pop1831)', data=df).fit() +# Cython import warning +@pytest.mark.filterwarnings("ignore::ImportWarning") def test_scikit_learn(df): sklearn = import_module('sklearn') # noqa @@ -82,7 +86,9 @@ def test_scikit_learn(df): clf.predict(digits.data[-1:]) +# Cython import warning and traitlets @tm.network +@pytest.mark.filterwarnings("ignore") def test_seaborn(): seaborn = import_module('seaborn') @@ -104,6 +110,8 @@ def test_pandas_datareader(): 'F', 'quandl', '2017-01-01', '2017-02-01') +# importing from pandas, Cython import warning +@pytest.mark.filterwarnings("ignore") def test_geopandas(): geopandas = import_module('geopandas') # noqa @@ -111,6 +119,8 @@ def test_geopandas(): assert geopandas.read_file(fp) is not None +# Cython import warning +@pytest.mark.filterwarnings("ignore") def test_pyarrow(df): pyarrow = import_module('pyarrow') # noqa diff --git a/pandas/tests/test_errors.py b/pandas/tests/test_errors.py index 7f9cddf9859a5..76e003c463e7d 100644 --- a/pandas/tests/test_errors.py +++ b/pandas/tests/test_errors.py @@ -1,7 +1,7 @@ # -*- coding: utf-8 -*- import pytest -from warnings import catch_warnings +from warnings import catch_warnings, simplefilter import pandas # noqa import pandas as pd from pandas.errors import AbstractMethodError @@ -48,6 +48,7 @@ def test_error_rename(): pass with catch_warnings(record=True): + simplefilter("ignore") try: raise ParserError() except pd.parser.CParserError: diff --git a/pandas/tests/test_multilevel.py b/pandas/tests/test_multilevel.py index dcfeab55f94fc..6b255a2e8d0ea 100644 --- a/pandas/tests/test_multilevel.py +++ b/pandas/tests/test_multilevel.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # pylint: disable-msg=W0612,E1101,W0141 -from warnings import catch_warnings +from warnings import catch_warnings, simplefilter import datetime import itertools import pytest @@ -193,6 +193,7 @@ def test_reindex(self): tm.assert_frame_equal(reindexed, expected) with catch_warnings(record=True): + simplefilter("ignore", DeprecationWarning) reindexed = self.frame.ix[[('foo', 'one'), ('bar', 'one')]] tm.assert_frame_equal(reindexed, expected) @@ -205,6 +206,7 @@ def test_reindex_preserve_levels(self): assert chunk.index is new_index with catch_warnings(record=True): + simplefilter("ignore", DeprecationWarning) chunk = self.ymd.ix[new_index] assert chunk.index is new_index @@ -268,6 +270,7 @@ def test_series_getitem(self): tm.assert_series_equal(result, expected) with catch_warnings(record=True): + simplefilter("ignore", DeprecationWarning) result = s.ix[[(2000, 3, 10), (2000, 3, 13)]] tm.assert_series_equal(result, expected) @@ -347,6 +350,7 @@ def test_frame_getitem_setitem_multislice(self): tm.assert_series_equal(df['value'], result) with catch_warnings(record=True): + simplefilter("ignore", DeprecationWarning) result = df.ix[:, 'value'] tm.assert_series_equal(df['value'], result) @@ -422,6 +426,7 @@ def test_getitem_tuple_plus_slice(self): expected = idf.loc[0, 0] expected2 = idf.xs((0, 0)) with catch_warnings(record=True): + simplefilter("ignore", DeprecationWarning) expected3 = idf.ix[0, 0] tm.assert_series_equal(result, expected) @@ -675,6 +680,7 @@ def test_frame_setitem_ix(self): assert
df.loc[('bar', 'two'), 1] == 7 with catch_warnings(record=True): + simplefilter("ignore", DeprecationWarning) df = self.frame.copy() df.columns = lrange(3) df.ix[('bar', 'two'), 1] = 7 @@ -704,6 +710,7 @@ def test_getitem_partial_column_select(self): tm.assert_frame_equal(result, expected) with catch_warnings(record=True): + simplefilter("ignore", DeprecationWarning) result = df.ix[('a', 'y'), [1, 0]] tm.assert_frame_equal(result, expected) @@ -1285,6 +1292,7 @@ def test_swaplevel(self): def test_swaplevel_panel(self): with catch_warnings(record=True): + simplefilter("ignore", FutureWarning) panel = Panel({'ItemA': self.frame, 'ItemB': self.frame * 2}) expected = panel.copy() expected.major_axis = expected.major_axis.swaplevel(0, 1) diff --git a/pandas/tests/test_nanops.py b/pandas/tests/test_nanops.py index a70ee80aee180..b6c2c65fb6dce 100644 --- a/pandas/tests/test_nanops.py +++ b/pandas/tests/test_nanops.py @@ -359,6 +359,7 @@ def test_returned_dtype(self): def test_nanmedian(self): with warnings.catch_warnings(record=True): + warnings.simplefilter("ignore", RuntimeWarning) self.check_funs(nanops.nanmedian, np.median, allow_complex=False, allow_str=False, allow_date=False, allow_tdelta=True, allow_obj='convert') @@ -394,12 +395,14 @@ def _minmax_wrap(self, value, axis=None, func=None): def test_nanmin(self): with warnings.catch_warnings(record=True): + warnings.simplefilter("ignore", RuntimeWarning) func = partial(self._minmax_wrap, func=np.min) self.check_funs(nanops.nanmin, func, allow_str=False, allow_obj=False) def test_nanmax(self): - with warnings.catch_warnings(record=True): + with warnings.catch_warnings(): + warnings.simplefilter("ignore", RuntimeWarning) func = partial(self._minmax_wrap, func=np.max) self.check_funs(nanops.nanmax, func, allow_str=False, allow_obj=False) @@ -417,6 +420,7 @@ def _argminmax_wrap(self, value, axis=None, func=None): def test_nanargmax(self): with warnings.catch_warnings(record=True): + warnings.simplefilter("ignore", RuntimeWarning) func = partial(self._argminmax_wrap, func=np.argmax) self.check_funs(nanops.nanargmax, func, allow_str=False, allow_obj=False, @@ -424,6 +428,7 @@ def test_nanargmax(self): def test_nanargmin(self): with warnings.catch_warnings(record=True): + warnings.simplefilter("ignore", RuntimeWarning) func = partial(self._argminmax_wrap, func=np.argmin) self.check_funs(nanops.nanargmin, func, allow_str=False, allow_obj=False) diff --git a/pandas/tests/test_panel.py b/pandas/tests/test_panel.py index b968c52ce3dfd..17c88df6beb18 100644 --- a/pandas/tests/test_panel.py +++ b/pandas/tests/test_panel.py @@ -2685,8 +2685,8 @@ def test_panel_index(): tm.assert_index_equal(index, expected) +@pytest.mark.filterwarnings("ignore:\\nPanel:FutureWarning") def test_panel_np_all(): - with catch_warnings(record=True): - wp = Panel({"A": DataFrame({'b': [1, 2]})}) + wp = Panel({"A": DataFrame({'b': [1, 2]})}) result = np.all(wp) assert result == np.bool_(True) diff --git a/pandas/tests/test_resample.py b/pandas/tests/test_resample.py index 669fa9742a705..377253574d2c1 100644 --- a/pandas/tests/test_resample.py +++ b/pandas/tests/test_resample.py @@ -1,6 +1,6 @@ # pylint: disable=E1101 -from warnings import catch_warnings +from warnings import catch_warnings, simplefilter from datetime import datetime, timedelta from functools import partial from textwrap import dedent @@ -1463,6 +1463,7 @@ def test_resample_panel(self): n = len(rng) with catch_warnings(record=True): + simplefilter("ignore", FutureWarning) panel = Panel(np.random.randn(3, n, 5), 
items=['one', 'two', 'three'], major_axis=rng, @@ -1485,6 +1486,7 @@ def p_apply(panel, f): lambda x: x.resample('M', axis=1).mean()) tm.assert_panel_equal(result, expected) + @pytest.mark.filterwarnings("ignore:\\nPanel:FutureWarning") def test_resample_panel_numpy(self): rng = date_range('1/1/2000', '6/30/2000') n = len(rng) @@ -3237,25 +3239,25 @@ def test_apply_iteration(self): result = grouped.apply(f) tm.assert_index_equal(result.index, df.index) + @pytest.mark.filterwarnings("ignore:\\nPanel:FutureWarning") def test_panel_aggregation(self): ind = pd.date_range('1/1/2000', periods=100) data = np.random.randn(2, len(ind), 4) - with catch_warnings(record=True): - wp = Panel(data, items=['Item1', 'Item2'], major_axis=ind, - minor_axis=['A', 'B', 'C', 'D']) + wp = Panel(data, items=['Item1', 'Item2'], major_axis=ind, + minor_axis=['A', 'B', 'C', 'D']) - tg = TimeGrouper('M', axis=1) - _, grouper, _ = tg._get_grouper(wp) - bingrouped = wp.groupby(grouper) - binagg = bingrouped.mean() + tg = TimeGrouper('M', axis=1) + _, grouper, _ = tg._get_grouper(wp) + bingrouped = wp.groupby(grouper) + binagg = bingrouped.mean() - def f(x): - assert (isinstance(x, Panel)) - return x.mean(1) + def f(x): + assert (isinstance(x, Panel)) + return x.mean(1) - result = bingrouped.agg(f) - tm.assert_panel_equal(result, binagg) + result = bingrouped.agg(f) + tm.assert_panel_equal(result, binagg) def test_fails_on_no_datetime_index(self): index_names = ('Int64Index', 'Index', 'Float64Index', 'MultiIndex') diff --git a/pandas/tests/test_window.py b/pandas/tests/test_window.py index ec6d83062c8b0..052bfd2b858fb 100644 --- a/pandas/tests/test_window.py +++ b/pandas/tests/test_window.py @@ -153,6 +153,8 @@ def test_agg(self): tm.assert_frame_equal(result, expected) with catch_warnings(record=True): + # using a dict with renaming + warnings.simplefilter("ignore", FutureWarning) result = r.aggregate({'A': {'mean': 'mean', 'sum': 'sum'}}) expected = concat([a_mean, a_sum], axis=1) expected.columns = pd.MultiIndex.from_tuples([('A', 'mean'), @@ -160,6 +162,7 @@ def test_agg(self): tm.assert_frame_equal(result, expected, check_like=True) with catch_warnings(record=True): + warnings.simplefilter("ignore", FutureWarning) result = r.aggregate({'A': {'mean': 'mean', 'sum': 'sum'}, 'B': {'mean2': 'mean', @@ -223,11 +226,13 @@ def f(): expected.columns = pd.MultiIndex.from_tuples([('ra', 'mean'), ( 'ra', 'std'), ('rb', 'mean'), ('rb', 'std')]) with catch_warnings(record=True): + warnings.simplefilter("ignore", FutureWarning) result = r[['A', 'B']].agg({'A': {'ra': ['mean', 'std']}, 'B': {'rb': ['mean', 'std']}}) tm.assert_frame_equal(result, expected, check_like=True) with catch_warnings(record=True): + warnings.simplefilter("ignore", FutureWarning) result = r.agg({'A': {'ra': ['mean', 'std']}, 'B': {'rb': ['mean', 'std']}}) expected.columns = pd.MultiIndex.from_tuples([('A', 'ra', 'mean'), ( @@ -278,6 +283,7 @@ def test_count_nonnumeric_types(self): tm.assert_frame_equal(result, expected) @td.skip_if_no_scipy + @pytest.mark.filterwarnings("ignore:can't resolve:ImportWarning") def test_window_with_args(self): # make sure that we are aggregating window functions correctly with arg r = Series(np.random.randn(100)).rolling(window=10, min_periods=1, @@ -309,6 +315,7 @@ def test_preserve_metadata(self): assert s3.name == 'foo' +@pytest.mark.filterwarnings("ignore:can't resolve package:ImportWarning") class TestWindow(Base): def setup_method(self, method): @@ -940,6 +947,7 @@ def _create_data(self): "datetime64[ns, UTC] is not 
supported ATM") +@pytest.mark.filterwarnings("ignore:can't resolve package:ImportWarning") class TestMoments(Base): def setup_method(self, method): @@ -1901,6 +1909,7 @@ def test_no_pairwise_with_other(self, f): for (df, result) in zip(self.df1s, results): if result is not None: with catch_warnings(record=True): + warnings.simplefilter("ignore", RuntimeWarning) # we can have int and str columns expected_index = df.index.union(self.df2.index) expected_columns = df.columns.union(self.df2.columns) diff --git a/pandas/tests/tseries/offsets/test_offsets.py b/pandas/tests/tseries/offsets/test_offsets.py index f9f5fc2484bda..b8fabbf52159d 100644 --- a/pandas/tests/tseries/offsets/test_offsets.py +++ b/pandas/tests/tseries/offsets/test_offsets.py @@ -1825,6 +1825,7 @@ def test_weekmask_and_holidays(self): xp_egypt = datetime(2013, 5, 5) assert xp_egypt == dt + 2 * bday_egypt + @pytest.mark.filterwarnings("ignore:Non:pandas.errors.PerformanceWarning") def test_calendar(self): calendar = USFederalHolidayCalendar() dt = datetime(2014, 1, 17) @@ -1987,6 +1988,7 @@ def test_holidays(self): assert dt + bm_offset == datetime(2012, 1, 30) assert dt + 2 * bm_offset == datetime(2012, 2, 27) + @pytest.mark.filterwarnings("ignore:Non:pandas.errors.PerformanceWarning") def test_datetimeindex(self): from pandas.tseries.holiday import USFederalHolidayCalendar hcal = USFederalHolidayCalendar() @@ -2105,6 +2107,7 @@ def test_holidays(self): assert dt + bm_offset == datetime(2012, 1, 2) assert dt + 2 * bm_offset == datetime(2012, 2, 3) + @pytest.mark.filterwarnings("ignore:Non:pandas.errors.PerformanceWarning") def test_datetimeindex(self): hcal = USFederalHolidayCalendar() cbmb = CBMonthBegin(calendar=hcal) diff --git a/pandas/tests/tslibs/test_parsing.py b/pandas/tests/tslibs/test_parsing.py index 14c9ca1f6cc54..466a22e5916e9 100644 --- a/pandas/tests/tslibs/test_parsing.py +++ b/pandas/tests/tslibs/test_parsing.py @@ -92,6 +92,7 @@ def test_parsers_monthfreq(self): assert result1 == expected +@pytest.mark.filterwarnings("ignore:_timelex:DeprecationWarning") class TestGuessDatetimeFormat(object): @td.skip_if_not_us_locale @@ -160,6 +161,8 @@ def test_guess_datetime_format_invalid_inputs(self): ('2011-1-1 00:00:00', '%Y-%m-%d %H:%M:%S'), ('2011-1-1 0:0:0', '%Y-%m-%d %H:%M:%S'), ('2011-1-3T00:00:0', '%Y-%m-%dT%H:%M:%S')]) + # https://github.com/pandas-dev/pandas/issues/21322 for _timelex + @pytest.mark.filterwarnings("ignore:_timelex:DeprecationWarning") def test_guess_datetime_format_nopadding(self, string, format): # GH 11142 result = parsing._guess_datetime_format(string) diff --git a/pandas/tests/util/test_hashing.py b/pandas/tests/util/test_hashing.py index 0c14dcb49c56f..b62260071d996 100644 --- a/pandas/tests/util/test_hashing.py +++ b/pandas/tests/util/test_hashing.py @@ -1,7 +1,6 @@ import pytest import datetime -from warnings import catch_warnings import numpy as np import pandas as pd @@ -216,12 +215,12 @@ def test_categorical_with_nan_consistency(self): assert result[0] in expected assert result[1] in expected + @pytest.mark.filterwarnings("ignore:\\nPanel:FutureWarning") def test_pandas_errors(self): with pytest.raises(TypeError): hash_pandas_object(pd.Timestamp('20130101')) - with catch_warnings(record=True): - obj = tm.makePanel() + obj = tm.makePanel() with pytest.raises(TypeError): hash_pandas_object(obj) diff --git a/pandas/tseries/holiday.py b/pandas/tseries/holiday.py index 33dcf6d64b302..b9c89c4e314f9 100644 --- a/pandas/tseries/holiday.py +++ b/pandas/tseries/holiday.py @@ -1,6 +1,7 
@@ import warnings from pandas import DateOffset, DatetimeIndex, Series, Timestamp +from pandas.errors import PerformanceWarning from pandas.compat import add_metaclass from datetime import datetime, timedelta from dateutil.relativedelta import MO, TU, WE, TH, FR, SA, SU # noqa @@ -281,7 +282,8 @@ def _apply_rule(self, dates): # if we are adding a non-vectorized value # ignore the PerformanceWarnings: - with warnings.catch_warnings(record=True): + with warnings.catch_warnings(): + warnings.simplefilter("ignore", PerformanceWarning) dates += offset return dates diff --git a/setup.cfg b/setup.cfg index 5fc0236066b93..8cccca2d038e7 100644 --- a/setup.cfg +++ b/setup.cfg @@ -39,7 +39,9 @@ markers = network: mark a test as network high_memory: mark a test as a high-memory only doctest_optionflags = NORMALIZE_WHITESPACE IGNORE_EXCEPTION_DETAIL -addopts = --strict-data-files +addopts = --strict-data-files --durations=10 +filterwarnings = + error [coverage:run] From bd5a4195799fc4b5444238489b0e497e43a60ed1 Mon Sep 17 00:00:00 2001 From: Tom Augspurger Date: Thu, 13 Sep 2018 17:00:44 -0500 Subject: [PATCH 02/38] more --- pandas/tests/groupby/test_whitelist.py | 12 ++++-- pandas/tests/io/test_pickle.py | 3 +- pandas/tests/series/test_analytics.py | 39 +++++++++++++++++-- pandas/tests/test_expressions.py | 6 ++- pandas/tests/test_panel.py | 4 +- .../offsets/test_offsets_properties.py | 10 ++++- 6 files changed, 61 insertions(+), 13 deletions(-) diff --git a/pandas/tests/groupby/test_whitelist.py b/pandas/tests/groupby/test_whitelist.py index 3afc278f9bc93..ae033f7b3f251 100644 --- a/pandas/tests/groupby/test_whitelist.py +++ b/pandas/tests/groupby/test_whitelist.py @@ -133,11 +133,15 @@ def df_letters(): return df -@pytest.mark.parametrize( - "obj, whitelist", zip((df_letters(), df_letters().floats), - (df_whitelist, s_whitelist))) -def test_groupby_whitelist(df_letters, obj, whitelist): +@pytest.mark.parametrize("whitelist", [df_whitelist, s_whitelist]) +def test_groupby_whitelist(df_letters, whitelist): df = df_letters + if whitelist == df_whitelist: + # dataframe + obj = df_letters + else: + obj = df_letters['floats'] + gb = obj.groupby(df.letters) assert set(whitelist) == set(gb._apply_whitelist) diff --git a/pandas/tests/io/test_pickle.py b/pandas/tests/io/test_pickle.py index 77b4a3c7cac5f..97aed7671449c 100644 --- a/pandas/tests/io/test_pickle.py +++ b/pandas/tests/io/test_pickle.py @@ -14,7 +14,7 @@ """ import glob import pytest -from warnings import catch_warnings +from warnings import catch_warnings, simplefilter import os from distutils.version import LooseVersion @@ -202,6 +202,7 @@ def test_pickles(current_pickle_data, legacy_pickle): version = os.path.basename(os.path.dirname(legacy_pickle)) with catch_warnings(record=True): + simplefilter("ignore") compare(current_pickle_data, legacy_pickle, version) diff --git a/pandas/tests/series/test_analytics.py b/pandas/tests/series/test_analytics.py index 3a8b84cd53087..777f2a29d3b34 100644 --- a/pandas/tests/series/test_analytics.py +++ b/pandas/tests/series/test_analytics.py @@ -1644,8 +1644,35 @@ def test_value_counts_categorical_not_ordered(self): tm.assert_series_equal(idx.value_counts(normalize=True), exp) +main_dtypes = [ + 'datetime', + 'datetimetz', + 'timedelta', + 'int8', + 'int16', + 'int32', + 'int64', + 'float32', + 'float64', + 'uint8', + 'uint16', + 'uint32', + 'uint64' +] + + @pytest.fixture def s_main_dtypes(): + """A DataFrame with many dtypes + + * datetime + * datetimetz + * timedelta + * [u]int{8,16,32,64} + * float{32,64} 
+ + The columns are the name of the dtype. + """ df = pd.DataFrame( {'datetime': pd.to_datetime(['2003', '2002', '2001', '2002', @@ -1665,6 +1692,12 @@ def s_main_dtypes(): return df +@pytest.fixture(params=main_dtypes) +def s_main_dtypes_split(request, s_main_dtypes): + """Each series in s_main_dtypes.""" + return s_main_dtypes[request.param] + + class TestMode(object): @pytest.mark.parametrize('dropna, expected', [ @@ -1868,12 +1901,10 @@ def test_error(self, r): with tm.assert_raises_regex(TypeError, msg): method(arg) - @pytest.mark.parametrize( - "s", - [v for k, v in s_main_dtypes().iteritems()]) - def test_nsmallest_nlargest(self, s): + def test_nsmallest_nlargest(self, s_main_dtypes_split): # float, int, datetime64 (use i8), timedelts64 (same), # object that are numbers, object that are strings + s = s_main_dtypes_split assert_series_equal(s.nsmallest(2), s.iloc[[2, 1]]) assert_series_equal(s.nsmallest(2, keep='last'), s.iloc[[2, 3]]) diff --git a/pandas/tests/test_expressions.py b/pandas/tests/test_expressions.py index 468463d3eba5f..c101fd25ce5e5 100644 --- a/pandas/tests/test_expressions.py +++ b/pandas/tests/test_expressions.py @@ -2,7 +2,7 @@ from __future__ import print_function # pylint: disable-msg=W0612,E1101 -from warnings import catch_warnings +from warnings import catch_warnings, simplefilter import re import operator import pytest @@ -38,6 +38,7 @@ columns=list('ABCD'), dtype='int64') with catch_warnings(record=True): + simplefilter("ignore", FutureWarning) _frame_panel = Panel(dict(ItemA=_frame.copy(), ItemB=(_frame.copy() + 3), ItemC=_frame.copy(), @@ -191,6 +192,7 @@ def test_integer_arithmetic_series(self): self.run_series(self.integer.iloc[:, 0], self.integer.iloc[:, 0]) @pytest.mark.slow + @pytest.mark.filterwarnings("ignore:\\nPanel:FutureWarning") def test_integer_panel(self): self.run_panel(_integer2_panel, np.random.randint(1, 100)) @@ -201,6 +203,7 @@ def test_float_arithmetic_series(self): self.run_series(self.frame2.iloc[:, 0], self.frame2.iloc[:, 0]) @pytest.mark.slow + @pytest.mark.filterwarnings("ignore:\\nPanel:FutureWarning") def test_float_panel(self): self.run_panel(_frame2_panel, np.random.randn() + 0.1, binary_comp=0.8) @@ -215,6 +218,7 @@ def test_mixed_arithmetic_series(self): self.run_series(self.mixed2[col], self.mixed2[col], binary_comp=4) @pytest.mark.slow + @pytest.mark.filterwarnings("ignore:\\nPanel:FutureWarning") def test_mixed_panel(self): self.run_panel(_mixed2_panel, np.random.randint(1, 100), binary_comp=-2) diff --git a/pandas/tests/test_panel.py b/pandas/tests/test_panel.py index 17c88df6beb18..4cd1039d69c0d 100644 --- a/pandas/tests/test_panel.py +++ b/pandas/tests/test_panel.py @@ -1,7 +1,7 @@ # -*- coding: utf-8 -*- # pylint: disable=W0612,E1101 -from warnings import catch_warnings +from warnings import catch_warnings, simplefilter from datetime import datetime import operator import pytest @@ -30,6 +30,7 @@ def make_test_panel(): with catch_warnings(record=True): + simplefilter("ignore", FutureWarning) _panel = tm.makePanel() tm.add_nans(_panel) _panel = _panel.copy() @@ -896,6 +897,7 @@ def test_set_value(self): self.panel.set_value('a') +@pytest.mark.filterwarnings("ignore:\\nPanel:FutureWarning") class TestPanel(PanelTests, CheckIndexing, SafeForLongAndSparse, SafeForSparse): diff --git a/pandas/tests/tseries/offsets/test_offsets_properties.py b/pandas/tests/tseries/offsets/test_offsets_properties.py index f19066ba76b20..07a6895d1e231 100644 --- a/pandas/tests/tseries/offsets/test_offsets_properties.py +++ 
b/pandas/tests/tseries/offsets/test_offsets_properties.py @@ -8,6 +8,7 @@ You may wish to consult the previous version for inspiration on further tests, or when trying to pin down the bugs exposed by the tests below. """ +import warnings import pytest from hypothesis import given, assume, strategies as st @@ -25,6 +26,11 @@ # ---------------------------------------------------------------- # Helpers for generating random data +with warnings.catch_warnings(): + warnings.simplefilter('ignore') + min_dt = pd.Timestamp(1900, 1, 1).to_pydatetime(), + max_dt = pd.Timestamp(1900, 1, 1).to_pydatetime(), + gen_date_range = st.builds( pd.date_range, start=st.datetimes( @@ -38,8 +44,8 @@ ) gen_random_datetime = st.datetimes( - min_value=pd.Timestamp.min.to_pydatetime(), - max_value=pd.Timestamp.max.to_pydatetime(), + min_value=min_dt, + max_value=max_dt, timezones=st.one_of(st.none(), dateutil_timezones(), pytz_timezones()) ) From 36279c9add5e11f0111437fb13cd021c1ad13aa3 Mon Sep 17 00:00:00 2001 From: Tom Augspurger Date: Thu, 13 Sep 2018 20:50:23 -0500 Subject: [PATCH 03/38] Fixed base --- pandas/tests/indexing/common.py | 56 +++++++++++++++------------------ 1 file changed, 25 insertions(+), 31 deletions(-) diff --git a/pandas/tests/indexing/common.py b/pandas/tests/indexing/common.py index cbf1bdbce9574..653c5cda4b464 100644 --- a/pandas/tests/indexing/common.py +++ b/pandas/tests/indexing/common.py @@ -2,6 +2,7 @@ import itertools from warnings import catch_warnings, filterwarnings +import pytest import numpy as np from pandas.compat import lrange @@ -25,6 +26,7 @@ def _axify(obj, key, axis): return tuple(axes) +@pytest.mark.filterwarnings("ignore:\\nPanel:FutureWarning") class Base(object): """ indexing comprehensive base class """ @@ -49,22 +51,20 @@ def setup_method(self, method): self.frame_uints = DataFrame(np.random.randn(4, 4), index=UInt64Index(lrange(0, 8, 2)), columns=UInt64Index(lrange(0, 12, 3))) - with catch_warnings(record=True): - self.panel_uints = Panel(np.random.rand(4, 4, 4), - items=UInt64Index(lrange(0, 8, 2)), - major_axis=UInt64Index(lrange(0, 12, 3)), - minor_axis=UInt64Index(lrange(0, 16, 4))) + self.panel_uints = Panel(np.random.rand(4, 4, 4), + items=UInt64Index(lrange(0, 8, 2)), + major_axis=UInt64Index(lrange(0, 12, 3)), + minor_axis=UInt64Index(lrange(0, 16, 4))) self.series_floats = Series(np.random.rand(4), index=Float64Index(range(0, 8, 2))) self.frame_floats = DataFrame(np.random.randn(4, 4), index=Float64Index(range(0, 8, 2)), columns=Float64Index(range(0, 12, 3))) - with catch_warnings(record=True): - self.panel_floats = Panel(np.random.rand(4, 4, 4), - items=Float64Index(range(0, 8, 2)), - major_axis=Float64Index(range(0, 12, 3)), - minor_axis=Float64Index(range(0, 16, 4))) + self.panel_floats = Panel(np.random.rand(4, 4, 4), + items=Float64Index(range(0, 8, 2)), + major_axis=Float64Index(range(0, 12, 3)), + minor_axis=Float64Index(range(0, 16, 4))) m_idces = [MultiIndex.from_product([[1, 2], [3, 4]]), MultiIndex.from_product([[5, 6], [7, 8]]), @@ -75,35 +75,31 @@ def setup_method(self, method): self.frame_multi = DataFrame(np.random.randn(4, 4), index=m_idces[0], columns=m_idces[1]) - with catch_warnings(record=True): - self.panel_multi = Panel(np.random.rand(4, 4, 4), - items=m_idces[0], - major_axis=m_idces[1], - minor_axis=m_idces[2]) + self.panel_multi = Panel(np.random.rand(4, 4, 4), + items=m_idces[0], + major_axis=m_idces[1], + minor_axis=m_idces[2]) self.series_labels = Series(np.random.randn(4), index=list('abcd')) self.frame_labels = 
DataFrame(np.random.randn(4, 4), index=list('abcd'), columns=list('ABCD')) - with catch_warnings(record=True): - self.panel_labels = Panel(np.random.randn(4, 4, 4), - items=list('abcd'), - major_axis=list('ABCD'), - minor_axis=list('ZYXW')) + self.panel_labels = Panel(np.random.randn(4, 4, 4), + items=list('abcd'), + major_axis=list('ABCD'), + minor_axis=list('ZYXW')) self.series_mixed = Series(np.random.randn(4), index=[2, 4, 'null', 8]) self.frame_mixed = DataFrame(np.random.randn(4, 4), index=[2, 4, 'null', 8]) - with catch_warnings(record=True): - self.panel_mixed = Panel(np.random.randn(4, 4, 4), - items=[2, 4, 'null', 8]) + self.panel_mixed = Panel(np.random.randn(4, 4, 4), + items=[2, 4, 'null', 8]) self.series_ts = Series(np.random.randn(4), index=date_range('20130101', periods=4)) self.frame_ts = DataFrame(np.random.randn(4, 4), index=date_range('20130101', periods=4)) - with catch_warnings(record=True): - self.panel_ts = Panel(np.random.randn(4, 4, 4), - items=date_range('20130101', periods=4)) + self.panel_ts = Panel(np.random.randn(4, 4, 4), + items=date_range('20130101', periods=4)) dates_rev = (date_range('20130101', periods=4) .sort_values(ascending=False)) @@ -111,14 +107,12 @@ def setup_method(self, method): index=dates_rev) self.frame_ts_rev = DataFrame(np.random.randn(4, 4), index=dates_rev) - with catch_warnings(record=True): - self.panel_ts_rev = Panel(np.random.randn(4, 4, 4), - items=dates_rev) + self.panel_ts_rev = Panel(np.random.randn(4, 4, 4), + items=dates_rev) self.frame_empty = DataFrame({}) self.series_empty = Series({}) - with catch_warnings(record=True): - self.panel_empty = Panel({}) + self.panel_empty = Panel({}) # form agglomerates for o in self._objs: From 43d7a780f239a288332a68feea8d008d74eee1dc Mon Sep 17 00:00:00 2001 From: Tom Augspurger Date: Thu, 13 Sep 2018 21:23:22 -0500 Subject: [PATCH 04/38] ABC compat --- pandas/compat/__init__.py | 12 ++++++++++++ pandas/core/common.py | 2 +- pandas/core/dtypes/inference.py | 3 ++- pandas/core/frame.py | 6 +++--- pandas/core/groupby/generic.py | 2 +- pandas/core/series.py | 4 ++-- pandas/io/html.py | 4 ++-- pandas/tests/dtypes/test_inference.py | 2 +- pandas/tests/extension/json/array.py | 7 ++++--- pandas/tests/frame/test_constructors.py | 2 +- pandas/tests/frame/test_convert_to.py | 2 +- 11 files changed, 30 insertions(+), 16 deletions(-) diff --git a/pandas/compat/__init__.py b/pandas/compat/__init__.py index 28a55133e68aa..1453725225e7d 100644 --- a/pandas/compat/__init__.py +++ b/pandas/compat/__init__.py @@ -38,6 +38,7 @@ import struct import inspect from collections import namedtuple +import collections PY2 = sys.version_info[0] == 2 PY3 = sys.version_info[0] >= 3 @@ -135,6 +136,11 @@ def lfilter(*args, **kwargs): from importlib import reload reload = reload + Hashable = collections.abc.Hashable + Iterable = collections.abc.Iterable + Mapping = collections.abc.Mapping + Sequence = collections.abc.Sequence + Sized = collections.abc.Sized else: # Python 2 @@ -190,6 +196,12 @@ def get_range_parameters(data): reload = builtins.reload + Hashable = collections.Hashable + Iterable = collections.Iterable + Mapping = collections.Mapping + Sequence = collections.Sequence + Sized = collections.Sized + if PY2: def iteritems(obj, **kw): return obj.iteritems(**kw) diff --git a/pandas/core/common.py b/pandas/core/common.py index 92e4e23ce958e..a6b05daf1d85d 100644 --- a/pandas/core/common.py +++ b/pandas/core/common.py @@ -356,7 +356,7 @@ def standardize_mapping(into): return partial( collections.defaultdict, 
into.default_factory) into = type(into) - if not issubclass(into, collections.Mapping): + if not issubclass(into, compat.Mapping): raise TypeError('unsupported type: {into}'.format(into=into)) elif into == collections.defaultdict: raise TypeError( diff --git a/pandas/core/dtypes/inference.py b/pandas/core/dtypes/inference.py index ed416c3ef857d..676c7f433666d 100644 --- a/pandas/core/dtypes/inference.py +++ b/pandas/core/dtypes/inference.py @@ -5,6 +5,7 @@ import numpy as np from collections import Iterable from numbers import Number +from pandas import compat from pandas.compat import (PY2, string_types, text_type, string_and_binary_types, re_type) from pandas._libs import lib @@ -112,7 +113,7 @@ def _iterable_not_string(obj): False """ - return (isinstance(obj, collections.Iterable) and + return (isinstance(obj, compat.Iterable) and not isinstance(obj, string_types)) diff --git a/pandas/core/frame.py b/pandas/core/frame.py index 251bc6587872d..14c2b5ca337da 100644 --- a/pandas/core/frame.py +++ b/pandas/core/frame.py @@ -418,9 +418,9 @@ def __init__(self, data=None, index=None, columns=None, dtype=None, copy=copy) # For data is list-like, or Iterable (will consume into list) - elif (isinstance(data, collections.Iterable) + elif (isinstance(data, compat.Iterable) and not isinstance(data, string_and_binary_types)): - if not isinstance(data, collections.Sequence): + if not isinstance(data, compat.Sequence): data = list(data) if len(data) > 0: if is_list_like(data[0]) and getattr(data[0], 'ndim', 1) == 1: @@ -7655,7 +7655,7 @@ def _to_arrays(data, columns, coerce_float=False, dtype=None): if isinstance(data[0], (list, tuple)): return _list_to_arrays(data, columns, coerce_float=coerce_float, dtype=dtype) - elif isinstance(data[0], collections.Mapping): + elif isinstance(data[0], compat.Mapping): return _list_of_dict_to_arrays(data, columns, coerce_float=coerce_float, dtype=dtype) elif isinstance(data[0], Series): diff --git a/pandas/core/groupby/generic.py b/pandas/core/groupby/generic.py index 685635fb6854d..f15b1203a334e 100644 --- a/pandas/core/groupby/generic.py +++ b/pandas/core/groupby/generic.py @@ -758,7 +758,7 @@ def aggregate(self, func_or_funcs, *args, **kwargs): if isinstance(func_or_funcs, compat.string_types): return getattr(self, func_or_funcs)(*args, **kwargs) - if isinstance(func_or_funcs, collections.Iterable): + if isinstance(func_or_funcs, compat.Iterable): # Catch instances of lists / tuples # but not the class list / tuple itself. ret = self._aggregate_multiple_funcs(func_or_funcs, diff --git a/pandas/core/series.py b/pandas/core/series.py index a4d403e4bcd94..83e3fdc5507b5 100644 --- a/pandas/core/series.py +++ b/pandas/core/series.py @@ -242,8 +242,8 @@ def __init__(self, data=None, index=None, dtype=None, name=None, raise TypeError("{0!r} type is unordered" "".format(data.__class__.__name__)) # If data is Iterable but not list-like, consume into list. 
- elif (isinstance(data, collections.Iterable) - and not isinstance(data, collections.Sized)): + elif (isinstance(data, compat.Iterable) + and not isinstance(data, compat.Sized)): data = list(data) else: diff --git a/pandas/io/html.py b/pandas/io/html.py index cca27db00f48d..04534ff591a2c 100644 --- a/pandas/io/html.py +++ b/pandas/io/html.py @@ -6,7 +6,6 @@ import os import re import numbers -import collections from distutils.version import LooseVersion @@ -14,6 +13,7 @@ from pandas.errors import EmptyDataError from pandas.io.common import _is_url, urlopen, _validate_header_arg from pandas.io.parsers import TextParser +from pandas import compat from pandas.compat import (lrange, lmap, u, string_types, iteritems, raise_with_traceback, binary_type) from pandas import Series @@ -859,7 +859,7 @@ def _validate_flavor(flavor): flavor = 'lxml', 'bs4' elif isinstance(flavor, string_types): flavor = flavor, - elif isinstance(flavor, collections.Iterable): + elif isinstance(flavor, compat.Iterable): if not all(isinstance(flav, string_types) for flav in flavor): raise TypeError('Object of type {typ!r} is not an iterable of ' 'strings' diff --git a/pandas/tests/dtypes/test_inference.py b/pandas/tests/dtypes/test_inference.py index a22a31b66fd4e..8877d0b18461c 100644 --- a/pandas/tests/dtypes/test_inference.py +++ b/pandas/tests/dtypes/test_inference.py @@ -226,7 +226,7 @@ class OldStyleClass(): pass c = OldStyleClass() - assert not isinstance(c, collections.Hashable) + assert not isinstance(c, compat.Hashable) assert inference.is_hashable(c) hash(c) # this will not raise diff --git a/pandas/tests/extension/json/array.py b/pandas/tests/extension/json/array.py index 980c245d55711..6ce0d63eb63ec 100644 --- a/pandas/tests/extension/json/array.py +++ b/pandas/tests/extension/json/array.py @@ -17,12 +17,13 @@ import numpy as np +from pandas import compat from pandas.core.dtypes.base import ExtensionDtype from pandas.core.arrays import ExtensionArray class JSONDtype(ExtensionDtype): - type = collections.Mapping + type = compat.Mapping name = 'json' try: na_value = collections.UserDict() @@ -79,7 +80,7 @@ def __getitem__(self, item): return self.data[item] elif isinstance(item, np.ndarray) and item.dtype == 'bool': return self._from_sequence([x for x, m in zip(self, item) if m]) - elif isinstance(item, collections.Iterable): + elif isinstance(item, compat.Iterable): # fancy indexing return type(self)([self.data[i] for i in item]) else: @@ -91,7 +92,7 @@ def __setitem__(self, key, value): self.data[key] = value else: if not isinstance(value, (type(self), - collections.Sequence)): + compat.Sequence)): # broadcast value value = itertools.cycle([value]) diff --git a/pandas/tests/frame/test_constructors.py b/pandas/tests/frame/test_constructors.py index 6c84beb64e196..9a65f63be6eaf 100644 --- a/pandas/tests/frame/test_constructors.py +++ b/pandas/tests/frame/test_constructors.py @@ -918,7 +918,7 @@ def test_constructor_sequence_like(self): # collections.Squence like import collections - class DummyContainer(collections.Sequence): + class DummyContainer(compat.Sequence): def __init__(self, lst): self._lst = lst diff --git a/pandas/tests/frame/test_convert_to.py b/pandas/tests/frame/test_convert_to.py index 2472022b862bc..148888c3aea55 100644 --- a/pandas/tests/frame/test_convert_to.py +++ b/pandas/tests/frame/test_convert_to.py @@ -112,7 +112,7 @@ def test_to_records_with_Mapping_type(self): from email.parser import Parser import collections - collections.Mapping.register(email.message.Message) + 
compat.Mapping.register(email.message.Message) headers = Parser().parsestr('From: \n' 'To: \n' From 4919b0a28c3d88f9ef1352afd7fd0e5827b9ab9d Mon Sep 17 00:00:00 2001 From: Tom Augspurger Date: Thu, 13 Sep 2018 21:40:38 -0500 Subject: [PATCH 05/38] limit to dev --- .travis.yml | 4 ++-- ci/{travis-36-numpydev.yaml => travis-37-numpydev.yaml} | 2 +- pandas/tests/io/test_excel.py | 2 ++ setup.cfg | 3 --- 4 files changed, 5 insertions(+), 6 deletions(-) rename ci/{travis-36-numpydev.yaml => travis-37-numpydev.yaml} (95%) diff --git a/.travis.yml b/.travis.yml index 32e6d2eae90a7..f8498bbc1359a 100644 --- a/.travis.yml +++ b/.travis.yml @@ -64,7 +64,7 @@ matrix: # In allow_failures - dist: trusty env: - - JOB="3.6, NumPy dev" ENV_FILE="ci/travis-36-numpydev.yaml" TEST_ARGS="--skip-slow --skip-network" PANDAS_TESTING_MODE="deprecate" + - JOB="3.7, NumPy dev" ENV_FILE="ci/travis-37-numpydev.yaml" TEST_ARGS="--skip-slow --skip-network" PANDAS_TESTING_MODE="deprecate" addons: apt: packages: @@ -79,7 +79,7 @@ matrix: - JOB="3.6, slow" ENV_FILE="ci/travis-36-slow.yaml" SLOW=true - dist: trusty env: - - JOB="3.6, NumPy dev" ENV_FILE="ci/travis-36-numpydev.yaml" TEST_ARGS="--skip-slow --skip-network" PANDAS_TESTING_MODE="deprecate" + - JOB="3.7, NumPy dev" ENV_FILE="ci/travis-37-numpydev.yaml" TEST_ARGS="--skip-slow --skip-network -W error" PANDAS_TESTING_MODE="deprecate" addons: apt: packages: diff --git a/ci/travis-36-numpydev.yaml b/ci/travis-37-numpydev.yaml similarity index 95% rename from ci/travis-36-numpydev.yaml rename to ci/travis-37-numpydev.yaml index aba28634edd0d..82c75b7c91b1f 100644 --- a/ci/travis-36-numpydev.yaml +++ b/ci/travis-37-numpydev.yaml @@ -2,7 +2,7 @@ name: pandas channels: - defaults dependencies: - - python=3.6* + - python=3.7* - pytz - Cython>=0.28.2 # universal diff --git a/pandas/tests/io/test_excel.py b/pandas/tests/io/test_excel.py index 544760a022b6d..a639556eb07d6 100644 --- a/pandas/tests/io/test_excel.py +++ b/pandas/tests/io/test_excel.py @@ -611,6 +611,8 @@ def test_read_from_s3_url(self, ext): tm.assert_frame_equal(url_table, local_table) @pytest.mark.slow + # ignore warning from old xlrd + @pytest.mark.filterwarnings("ignore:This metho:PendingDeprecationWarning") def test_read_from_file_url(self, ext): # FILE diff --git a/setup.cfg b/setup.cfg index 8cccca2d038e7..c2b9c5dca9687 100644 --- a/setup.cfg +++ b/setup.cfg @@ -40,9 +40,6 @@ markers = high_memory: mark a test as a high-memory only doctest_optionflags = NORMALIZE_WHITESPACE IGNORE_EXCEPTION_DETAIL addopts = --strict-data-files --durations=10 -filterwarnings = - error - [coverage:run] branch = False From 2804192e78a9ef45a9fa35240e61835010da5e3a Mon Sep 17 00:00:00 2001 From: Tom Augspurger Date: Thu, 13 Sep 2018 22:03:54 -0500 Subject: [PATCH 06/38] Explicit warnings --- pandas/core/arrays/datetimelike.py | 1 + pandas/core/arrays/integer.py | 1 + pandas/core/computation/eval.py | 1 + pandas/core/indexes/base.py | 1 + pandas/core/internals/blocks.py | 1 + pandas/io/pickle.py | 3 ++- pandas/tests/frame/test_apply.py | 6 +++--- pandas/util/testing.py | 2 ++ 8 files changed, 12 insertions(+), 4 deletions(-) diff --git a/pandas/core/arrays/datetimelike.py b/pandas/core/arrays/datetimelike.py index eb8821382037d..637a19ddba119 100644 --- a/pandas/core/arrays/datetimelike.py +++ b/pandas/core/arrays/datetimelike.py @@ -59,6 +59,7 @@ def cmp_method(self, other): # numpy will show a DeprecationWarning on invalid elementwise # comparisons, this will raise in the future with warnings.catch_warnings(record=True): + 
warnings.filterwarnings("ignore", "elementwise", FutureWarning) with np.errstate(all='ignore'): result = op(self.values, np.asarray(other)) diff --git a/pandas/core/arrays/integer.py b/pandas/core/arrays/integer.py index a04366332b419..5ca0bed87c6d3 100644 --- a/pandas/core/arrays/integer.py +++ b/pandas/core/arrays/integer.py @@ -513,6 +513,7 @@ def cmp_method(self, other): # numpy will show a DeprecationWarning on invalid elementwise # comparisons, this will raise in the future with warnings.catch_warnings(record=True): + warnings.filterwarnings("ignore", "elementwise", FutureWarning) with np.errstate(all='ignore'): result = op(self._data, other) diff --git a/pandas/core/computation/eval.py b/pandas/core/computation/eval.py index 434d7f6ccfe13..7025f3000eb5f 100644 --- a/pandas/core/computation/eval.py +++ b/pandas/core/computation/eval.py @@ -323,6 +323,7 @@ def eval(expr, parser='pandas', engine=None, truediv=True, # to use a non-numeric indexer try: with warnings.catch_warnings(record=True): + # TODO: Filter the warnings we actually care about here. target[assigner] = ret except (TypeError, IndexError): raise ValueError("Cannot assign expression output to target") diff --git a/pandas/core/indexes/base.py b/pandas/core/indexes/base.py index b2b6e02e908c5..0483c54318e28 100644 --- a/pandas/core/indexes/base.py +++ b/pandas/core/indexes/base.py @@ -98,6 +98,7 @@ def cmp_method(self, other): # numpy will show a DeprecationWarning on invalid elementwise # comparisons, this will raise in the future with warnings.catch_warnings(record=True): + warnings.filterwarnings("ignore", "elementwise", FutureWarning) with np.errstate(all='ignore'): result = op(self.values, np.asarray(other)) diff --git a/pandas/core/internals/blocks.py b/pandas/core/internals/blocks.py index e735b35653cd4..6576db9f642a6 100644 --- a/pandas/core/internals/blocks.py +++ b/pandas/core/internals/blocks.py @@ -3490,6 +3490,7 @@ def _putmask_smart(v, m, n): # we ignore ComplexWarning here with warnings.catch_warnings(record=True): + warnings.simplefilter("ignore", np.ComplexWarning) nn_at = nn.astype(v.dtype) # avoid invalid dtype comparisons diff --git a/pandas/io/pickle.py b/pandas/io/pickle.py index 6738daec9397c..9c219d7fd6997 100644 --- a/pandas/io/pickle.py +++ b/pandas/io/pickle.py @@ -160,7 +160,8 @@ def try_read(path, encoding=None): # GH 6899 try: with warnings.catch_warnings(record=True): - # We want to silencce any warnings about, e.g. moved modules. + # We want to silence any warnings about, e.g. moved modules. 
+ warnings.simplefilter("ignore", Warning) return read_wrapper(lambda f: pkl.load(f)) except Exception: # reg/patched pickle diff --git a/pandas/tests/frame/test_apply.py b/pandas/tests/frame/test_apply.py index 2ad89a9dee833..26ac406968869 100644 --- a/pandas/tests/frame/test_apply.py +++ b/pandas/tests/frame/test_apply.py @@ -106,9 +106,9 @@ def test_apply_with_reduce_empty(self): assert x == [] def test_apply_deprecate_reduce(self): - with warnings.catch_warnings(record=True): - x = [] - self.empty.apply(x.append, axis=1, result_type='reduce') + x = [] + with tm.assert_produces_warning(FutureWarning): + self.empty.apply(x.append, axis=1, reduce=True) def test_apply_standard_nonunique(self): df = DataFrame( diff --git a/pandas/util/testing.py b/pandas/util/testing.py index 1e8c123fa6f13..b3bf5b3e7a208 100644 --- a/pandas/util/testing.py +++ b/pandas/util/testing.py @@ -1897,6 +1897,7 @@ def makePeriodFrame(nper=None): def makePanel(nper=None): with warnings.catch_warnings(record=True): + warnings.filterwarnings("ignore", "\\nPanel", FutureWarning) cols = ['Item' + c for c in string.ascii_uppercase[:K - 1]] data = {c: makeTimeDataFrame(nper) for c in cols} return Panel.fromDict(data) @@ -1904,6 +1905,7 @@ def makePanel(nper=None): def makePeriodPanel(nper=None): with warnings.catch_warnings(record=True): + warnings.filterwarnings("ignore", "\\nPanel", FutureWarning) cols = ['Item' + c for c in string.ascii_uppercase[:K - 1]] data = {c: makePeriodFrame(nper) for c in cols} return Panel.fromDict(data) From 7ad249abdcf14ce75c36d9bd07cd3c6f15362225 Mon Sep 17 00:00:00 2001 From: Tom Augspurger Date: Fri, 14 Sep 2018 06:06:21 -0500 Subject: [PATCH 07/38] Fixed plotting check --- pandas/tests/plotting/test_frame.py | 2 +- pandas/tests/plotting/test_hist_method.py | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/pandas/tests/plotting/test_frame.py b/pandas/tests/plotting/test_frame.py index cc207d8d428a7..cd297c356d60e 100644 --- a/pandas/tests/plotting/test_frame.py +++ b/pandas/tests/plotting/test_frame.py @@ -1575,7 +1575,7 @@ def test_hist_df(self): self._check_ticks_props(axes, xrot=40, yrot=0) tm.close() - if plotting._compat._mpl_ge_2_2_0: + if plotting._compat._mpl_ge_2_2_0(): kwargs = {"density": True} else: kwargs = {"normed": True} diff --git a/pandas/tests/plotting/test_hist_method.py b/pandas/tests/plotting/test_hist_method.py index 6df1391ec2a9c..2864877550bac 100644 --- a/pandas/tests/plotting/test_hist_method.py +++ b/pandas/tests/plotting/test_hist_method.py @@ -194,7 +194,7 @@ def test_hist_df_legacy(self): tm.close() # make sure kwargs to hist are handled - if _mpl_ge_2_2_0: + if _mpl_ge_2_2_0(): kwargs = {"density": True} else: kwargs = {"normed": True} @@ -285,7 +285,7 @@ def test_grouped_hist_legacy(self): xf, yf = 20, 18 xrot, yrot = 30, 40 - if _mpl_ge_2_2_0: + if _mpl_ge_2_2_0(): kwargs = {"density": True} else: kwargs = {"normed": True} From 953fde18288dfe021ff60ca04e82b68a0ff859a7 Mon Sep 17 00:00:00 2001 From: Tom Augspurger Date: Fri, 14 Sep 2018 07:04:03 -0500 Subject: [PATCH 08/38] Set for NumPy dev --- .travis.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index f8498bbc1359a..76f4715a4abb2 100644 --- a/.travis.yml +++ b/.travis.yml @@ -64,7 +64,7 @@ matrix: # In allow_failures - dist: trusty env: - - JOB="3.7, NumPy dev" ENV_FILE="ci/travis-37-numpydev.yaml" TEST_ARGS="--skip-slow --skip-network" PANDAS_TESTING_MODE="deprecate" + - JOB="3.7, NumPy dev" ENV_FILE="ci/travis-37-numpydev.yaml" 
TEST_ARGS="--skip-slow --skip-network -W error" PANDAS_TESTING_MODE="deprecate" addons: apt: packages: From d2dd0af5c237ab8bf2da6c309b9bcd3215684337 Mon Sep 17 00:00:00 2001 From: Tom Augspurger Date: Fri, 14 Sep 2018 07:08:29 -0500 Subject: [PATCH 09/38] collections --- pandas/core/dtypes/inference.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/pandas/core/dtypes/inference.py b/pandas/core/dtypes/inference.py index 676c7f433666d..21ced504c5953 100644 --- a/pandas/core/dtypes/inference.py +++ b/pandas/core/dtypes/inference.py @@ -3,7 +3,6 @@ import collections import re import numpy as np -from collections import Iterable from numbers import Number from pandas import compat from pandas.compat import (PY2, string_types, text_type, @@ -285,7 +284,7 @@ def is_list_like(obj): False """ - return (isinstance(obj, Iterable) and + return (isinstance(obj, compat.Iterable) and # we do not count strings/unicode/bytes as list-like not isinstance(obj, string_and_binary_types) and # exclude zero-dimensional numpy arrays, effectively scalars From b813e4168e4657f2491737455dca8e0da4f866cc Mon Sep 17 00:00:00 2001 From: Tom Augspurger Date: Fri, 14 Sep 2018 07:11:57 -0500 Subject: [PATCH 10/38] more collections, actually set --- pandas/compat/chainmap_impl.py | 9 ++++++++- pandas/tests/arithmetic/test_numeric.py | 3 +-- pandas/tests/dtypes/test_inference.py | 1 + pandas/tests/reshape/test_concat.py | 4 ++-- 4 files changed, 12 insertions(+), 5 deletions(-) diff --git a/pandas/compat/chainmap_impl.py b/pandas/compat/chainmap_impl.py index c4aa8c8d6ab30..3ea5414cc41eb 100644 --- a/pandas/compat/chainmap_impl.py +++ b/pandas/compat/chainmap_impl.py @@ -1,4 +1,11 @@ -from collections import MutableMapping +import sys + +PY3 = sys.version_info[0] >= 3 + +if PY3: + from collections.abc import MutableMapping +else: + from collections import MutableMapping try: from thread import get_ident diff --git a/pandas/tests/arithmetic/test_numeric.py b/pandas/tests/arithmetic/test_numeric.py index fcfc3994a88c8..0449212713048 100644 --- a/pandas/tests/arithmetic/test_numeric.py +++ b/pandas/tests/arithmetic/test_numeric.py @@ -4,7 +4,6 @@ # Specifically for numeric dtypes from decimal import Decimal import operator -from collections import Iterable import pytest import numpy as np @@ -12,7 +11,7 @@ import pandas as pd import pandas.util.testing as tm -from pandas.compat import PY3 +from pandas.compat import PY3, Iterable from pandas.core import ops from pandas import Timedelta, Series, Index, TimedeltaIndex diff --git a/pandas/tests/dtypes/test_inference.py b/pandas/tests/dtypes/test_inference.py index 8877d0b18461c..76cd6aabb93ae 100644 --- a/pandas/tests/dtypes/test_inference.py +++ b/pandas/tests/dtypes/test_inference.py @@ -20,6 +20,7 @@ DatetimeIndex, TimedeltaIndex, Timestamp, Panel, Period, Categorical, isna, Interval, DateOffset) +from pandas import compat from pandas.compat import u, PY2, StringIO, lrange from pandas.core.dtypes import inference from pandas.core.dtypes.common import ( diff --git a/pandas/tests/reshape/test_concat.py b/pandas/tests/reshape/test_concat.py index da43194b95ed3..2aaa04d571e69 100644 --- a/pandas/tests/reshape/test_concat.py +++ b/pandas/tests/reshape/test_concat.py @@ -1,5 +1,6 @@ from warnings import catch_warnings, simplefilter from itertools import combinations +from collections import deque import datetime as dt import dateutil @@ -13,6 +14,7 @@ read_csv, isna, Series, date_range, Index, Panel, MultiIndex, Timestamp, DatetimeIndex, Categorical) +from 
pandas.compat import Iterable from pandas.core.dtypes.dtypes import CategoricalDtype from pandas.util import testing as tm from pandas.util.testing import (assert_frame_equal, @@ -1725,8 +1727,6 @@ def test_concat_series_axis1_same_names_ignore_index(self): tm.assert_index_equal(result.columns, expected) def test_concat_iterables(self): - from collections import deque, Iterable - # GH8645 check concat works with tuples, list, generators, and weird # stuff like deque and custom iterables df1 = DataFrame([1, 2, 3]) From 3baba66051cabddf50228888d8063cbb842cdf28 Mon Sep 17 00:00:00 2001 From: Tom Augspurger Date: Fri, 14 Sep 2018 07:15:35 -0500 Subject: [PATCH 11/38] Always error ResourceWarning --- setup.cfg | 2 ++ 1 file changed, 2 insertions(+) diff --git a/setup.cfg b/setup.cfg index c2b9c5dca9687..2b68655255eb4 100644 --- a/setup.cfg +++ b/setup.cfg @@ -40,6 +40,8 @@ markers = high_memory: mark a test as a high-memory only doctest_optionflags = NORMALIZE_WHITESPACE IGNORE_EXCEPTION_DETAIL addopts = --strict-data-files --durations=10 +filterwarnings = + error::ResourceWarning [coverage:run] branch = False From 3067ed102a00cc94a22917e1b3810c5de3a21db0 Mon Sep 17 00:00:00 2001 From: Tom Augspurger Date: Fri, 14 Sep 2018 07:22:38 -0500 Subject: [PATCH 12/38] DeprecationWarnings --- pandas/tests/indexes/timedeltas/test_ops.py | 2 +- pandas/tests/internals/test_internals.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/pandas/tests/indexes/timedeltas/test_ops.py b/pandas/tests/indexes/timedeltas/test_ops.py index 2e257bb8a500a..d7bdd18f48523 100644 --- a/pandas/tests/indexes/timedeltas/test_ops.py +++ b/pandas/tests/indexes/timedeltas/test_ops.py @@ -334,7 +334,7 @@ def test_freq_setter_errors(self): idx.freq = '5D' # setting with a non-fixed frequency - msg = '<2 \* BusinessDays> is a non-fixed frequency' + msg = r'<2 \* BusinessDays> is a non-fixed frequency' with tm.assert_raises_regex(ValueError, msg): idx.freq = '2B' diff --git a/pandas/tests/internals/test_internals.py b/pandas/tests/internals/test_internals.py index 34f22513106ba..86251ad7529d5 100644 --- a/pandas/tests/internals/test_internals.py +++ b/pandas/tests/internals/test_internals.py @@ -1285,7 +1285,7 @@ def test_deprecated_fastpath(): def test_validate_ndim(): values = np.array([1.0, 2.0]) placement = slice(2) - msg = "Wrong number of dimensions. values.ndim != ndim \[1 != 2\]" + msg = r"Wrong number of dimensions. values.ndim != ndim \[1 != 2\]" with tm.assert_raises_regex(ValueError, msg): make_block(values, placement, ndim=2) From 614f514d471c3e723d3a1c94b7d6f8957d79b622 Mon Sep 17 00:00:00 2001 From: Tom Augspurger Date: Fri, 14 Sep 2018 07:33:43 -0500 Subject: [PATCH 13/38] Pop from sys.modules --- pandas/tests/api/test_types.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/pandas/tests/api/test_types.py b/pandas/tests/api/test_types.py index 3bb4bf932a11f..41c54e6ac8328 100644 --- a/pandas/tests/api/test_types.py +++ b/pandas/tests/api/test_types.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- - +import sys import pytest import pandas @@ -57,6 +57,11 @@ def test_deprecated_from_api_types(self): def test_moved_infer_dtype(): + # del from sys.modules to ensure we try to freshly load. + # if this was imported from another test previously, we would + # not see the warning, since the import is otherwise cached. 
+ + sys.modules.pop("pandas.lib", None) with tm.assert_produces_warning(FutureWarning): e = pandas.lib.infer_dtype('foo') From 37a3d39868c05a829fa7727b149a3e0fbe1eaef5 Mon Sep 17 00:00:00 2001 From: Tom Augspurger Date: Fri, 14 Sep 2018 08:05:24 -0500 Subject: [PATCH 14/38] redo resourcewarning --- pandas/conftest.py | 11 +++++++++++ setup.cfg | 2 -- 2 files changed, 11 insertions(+), 2 deletions(-) diff --git a/pandas/conftest.py b/pandas/conftest.py index 28c24fc8c0640..71b9a6dc2d9e5 100644 --- a/pandas/conftest.py +++ b/pandas/conftest.py @@ -1,4 +1,5 @@ import os +import sys import importlib import pytest @@ -31,6 +32,16 @@ def pytest_addoption(parser): help="Fail if a test is skipped for missing data file.") +def pytest_collection_modifyitems(items): + # Make unhandled ResourceWarnings fail early to track down + # https://github.com/pandas-dev/pandas/issues/22675 + if PY3: + for item in items: + item.add_marker( + pytest.mark.filterwarnings("error::ResourceWarning") + ) + + def pytest_runtest_setup(item): if 'slow' in item.keywords and item.config.getoption("--skip-slow"): pytest.skip("skipping due to --skip-slow") diff --git a/setup.cfg b/setup.cfg index 2b68655255eb4..c2b9c5dca9687 100644 --- a/setup.cfg +++ b/setup.cfg @@ -40,8 +40,6 @@ markers = high_memory: mark a test as a high-memory only doctest_optionflags = NORMALIZE_WHITESPACE IGNORE_EXCEPTION_DETAIL addopts = --strict-data-files --durations=10 -filterwarnings = - error::ResourceWarning [coverage:run] branch = False From 80ce447436bd955ad3f8f13ffe1c8d6626c2830b Mon Sep 17 00:00:00 2001 From: Tom Augspurger Date: Fri, 14 Sep 2018 08:42:03 -0500 Subject: [PATCH 15/38] wip --- pandas/tests/io/generate_legacy_storage_files.py | 3 ++- pandas/tests/io/test_packers.py | 11 +++++++---- pandas/tests/test_panel.py | 2 ++ 3 files changed, 11 insertions(+), 5 deletions(-) diff --git a/pandas/tests/io/generate_legacy_storage_files.py b/pandas/tests/io/generate_legacy_storage_files.py index aa020ba4c0623..4ebf435f7d75f 100755 --- a/pandas/tests/io/generate_legacy_storage_files.py +++ b/pandas/tests/io/generate_legacy_storage_files.py @@ -35,7 +35,7 @@ """ from __future__ import print_function -from warnings import catch_warnings +from warnings import catch_warnings, filterwarnings from distutils.version import LooseVersion from pandas import (Series, DataFrame, Panel, SparseSeries, SparseDataFrame, @@ -187,6 +187,7 @@ def create_data(): ) with catch_warnings(record=True): + filterwarnings("ignore", "\\nPanel", FutureWarning) mixed_dup_panel = Panel({u'ItemA': frame[u'float'], u'ItemB': frame[u'int']}) mixed_dup_panel.items = [u'ItemA', u'ItemA'] diff --git a/pandas/tests/io/test_packers.py b/pandas/tests/io/test_packers.py index 412e218f95c6f..ee45f8828d85e 100644 --- a/pandas/tests/io/test_packers.py +++ b/pandas/tests/io/test_packers.py @@ -91,6 +91,7 @@ def check_arbitrary(a, b): assert(a == b) +@pytest.mark.filterwarnings("ignore:\\nPanel:FutureWarning") class TestPackers(object): def setup_method(self, method): @@ -105,6 +106,7 @@ def encode_decode(self, x, compress=None, **kwargs): return read_msgpack(p, **kwargs) +@pytest.mark.filterwarnings("ignore:\\nPanel:FutureWarning") class TestAPI(TestPackers): def test_string_io(self): @@ -464,6 +466,7 @@ def test_basic(self): assert_categorical_equal(i, i_rec) +@pytest.mark.filterwarnings("ignore:\\nPanel:FutureWarning") class TestNDFrame(TestPackers): def setup_method(self, method): @@ -486,10 +489,9 @@ def setup_method(self, method): 'int': DataFrame(dict(A=data['B'], 
B=Series(data['B']) + 1)), 'mixed': DataFrame(data)} - with catch_warnings(record=True): - self.panel = { - 'float': Panel(dict(ItemA=self.frame['float'], - ItemB=self.frame['float'] + 1))} + self.panel = { + 'float': Panel(dict(ItemA=self.frame['float'], + ItemB=self.frame['float'] + 1))} def test_basic_frame(self): @@ -846,6 +848,7 @@ def legacy_packer(request, datapath): return datapath(request.param) +@pytest.mark.filterwarnings("ignore:\\nPanel:FutureWarning") class TestMsgpack(object): """ How to add msgpack tests: diff --git a/pandas/tests/test_panel.py b/pandas/tests/test_panel.py index 4cd1039d69c0d..44ba6ffb81513 100644 --- a/pandas/tests/test_panel.py +++ b/pandas/tests/test_panel.py @@ -92,6 +92,7 @@ def test_mean(self): def test_prod(self): self._check_stat_op('prod', np.prod, skipna_alternative=np.nanprod) + @pytest.mark.filterwarnings("ignore:Invalid value:RuntimeWarning") def test_median(self): def wrapper(x): if isna(x).any(): @@ -104,6 +105,7 @@ def test_min(self): with catch_warnings(record=True): self._check_stat_op('min', np.min) + @pytest.mark.filterwarnings("ignore:Invalid value:RuntimeWarning") def test_max(self): with catch_warnings(record=True): self._check_stat_op('max', np.max) From 61beba7b08df682167cbcea0e97a2f7f38e035b0 Mon Sep 17 00:00:00 2001 From: Tom Augspurger Date: Fri, 14 Sep 2018 09:03:13 -0500 Subject: [PATCH 16/38] Panel --- pandas/tests/test_panel.py | 3542 ++++++++++++++++++------------------ 1 file changed, 1730 insertions(+), 1812 deletions(-) diff --git a/pandas/tests/test_panel.py b/pandas/tests/test_panel.py index 44ba6ffb81513..cbb75fba08015 100644 --- a/pandas/tests/test_panel.py +++ b/pandas/tests/test_panel.py @@ -37,43 +37,40 @@ def make_test_panel(): return _panel +@pytest.mark.filterwarnings("ignore:\\nPanel:FutureWarning") class PanelTests(object): panel = None def test_pickle(self): - with catch_warnings(record=True): - unpickled = tm.round_trip_pickle(self.panel) - assert_frame_equal(unpickled['ItemA'], self.panel['ItemA']) + unpickled = tm.round_trip_pickle(self.panel) + assert_frame_equal(unpickled['ItemA'], self.panel['ItemA']) def test_rank(self): - with catch_warnings(record=True): - pytest.raises(NotImplementedError, lambda: self.panel.rank()) + pytest.raises(NotImplementedError, lambda: self.panel.rank()) def test_cumsum(self): - with catch_warnings(record=True): - cumsum = self.panel.cumsum() - assert_frame_equal(cumsum['ItemA'], self.panel['ItemA'].cumsum()) + cumsum = self.panel.cumsum() + assert_frame_equal(cumsum['ItemA'], self.panel['ItemA'].cumsum()) def not_hashable(self): - with catch_warnings(record=True): - c_empty = Panel() - c = Panel(Panel([[[1]]])) - pytest.raises(TypeError, hash, c_empty) - pytest.raises(TypeError, hash, c) + c_empty = Panel() + c = Panel(Panel([[[1]]])) + pytest.raises(TypeError, hash, c_empty) + pytest.raises(TypeError, hash, c) +@pytest.mark.filterwarnings("ignore:\\nPanel:FutureWarning") class SafeForLongAndSparse(object): def test_repr(self): repr(self.panel) def test_copy_names(self): - with catch_warnings(record=True): - for attr in ('major_axis', 'minor_axis'): - getattr(self.panel, attr).name = None - cp = self.panel.copy() - getattr(cp, attr).name = 'foo' - assert getattr(self.panel, attr).name is None + for attr in ('major_axis', 'minor_axis'): + getattr(self.panel, attr).name = None + cp = self.panel.copy() + getattr(cp, attr).name = 'foo' + assert getattr(self.panel, attr).name is None def test_iter(self): tm.equalContents(list(self.panel), self.panel.items) @@ -101,14 +98,13 @@ 
def wrapper(x): self._check_stat_op('median', wrapper) + @pytest.mark.filterwarnings("ignore:Invalid value:RuntimeWarning") def test_min(self): - with catch_warnings(record=True): - self._check_stat_op('min', np.min) + self._check_stat_op('min', np.min) @pytest.mark.filterwarnings("ignore:Invalid value:RuntimeWarning") def test_max(self): - with catch_warnings(record=True): - self._check_stat_op('max', np.max) + self._check_stat_op('max', np.max) @td.skip_if_no_scipy def test_skew(self): @@ -184,6 +180,7 @@ def wrapper(x): numeric_only=True) +@pytest.mark.filterwarnings("ignore:\\nPanel:FutureWarning") class SafeForSparse(object): def test_get_axis(self): @@ -243,48 +240,46 @@ def test_get_plane_axes(self): index, columns = self.panel._get_plane_axes(0) def test_truncate(self): - with catch_warnings(record=True): - dates = self.panel.major_axis - start, end = dates[1], dates[5] + dates = self.panel.major_axis + start, end = dates[1], dates[5] - trunced = self.panel.truncate(start, end, axis='major') - expected = self.panel['ItemA'].truncate(start, end) + trunced = self.panel.truncate(start, end, axis='major') + expected = self.panel['ItemA'].truncate(start, end) - assert_frame_equal(trunced['ItemA'], expected) + assert_frame_equal(trunced['ItemA'], expected) - trunced = self.panel.truncate(before=start, axis='major') - expected = self.panel['ItemA'].truncate(before=start) + trunced = self.panel.truncate(before=start, axis='major') + expected = self.panel['ItemA'].truncate(before=start) - assert_frame_equal(trunced['ItemA'], expected) + assert_frame_equal(trunced['ItemA'], expected) - trunced = self.panel.truncate(after=end, axis='major') - expected = self.panel['ItemA'].truncate(after=end) + trunced = self.panel.truncate(after=end, axis='major') + expected = self.panel['ItemA'].truncate(after=end) - assert_frame_equal(trunced['ItemA'], expected) + assert_frame_equal(trunced['ItemA'], expected) def test_arith(self): - with catch_warnings(record=True): - self._test_op(self.panel, operator.add) - self._test_op(self.panel, operator.sub) - self._test_op(self.panel, operator.mul) - self._test_op(self.panel, operator.truediv) - self._test_op(self.panel, operator.floordiv) - self._test_op(self.panel, operator.pow) - - self._test_op(self.panel, lambda x, y: y + x) - self._test_op(self.panel, lambda x, y: y - x) - self._test_op(self.panel, lambda x, y: y * x) - self._test_op(self.panel, lambda x, y: y / x) - self._test_op(self.panel, lambda x, y: y ** x) - - self._test_op(self.panel, lambda x, y: x + y) # panel + 1 - self._test_op(self.panel, lambda x, y: x - y) # panel - 1 - self._test_op(self.panel, lambda x, y: x * y) # panel * 1 - self._test_op(self.panel, lambda x, y: x / y) # panel / 1 - self._test_op(self.panel, lambda x, y: x ** y) # panel ** 1 - - pytest.raises(Exception, self.panel.__add__, - self.panel['ItemA']) + self._test_op(self.panel, operator.add) + self._test_op(self.panel, operator.sub) + self._test_op(self.panel, operator.mul) + self._test_op(self.panel, operator.truediv) + self._test_op(self.panel, operator.floordiv) + self._test_op(self.panel, operator.pow) + + self._test_op(self.panel, lambda x, y: y + x) + self._test_op(self.panel, lambda x, y: y - x) + self._test_op(self.panel, lambda x, y: y * x) + self._test_op(self.panel, lambda x, y: y / x) + self._test_op(self.panel, lambda x, y: y ** x) + + self._test_op(self.panel, lambda x, y: x + y) # panel + 1 + self._test_op(self.panel, lambda x, y: x - y) # panel - 1 + self._test_op(self.panel, lambda x, y: x * y) # panel * 1 + 
self._test_op(self.panel, lambda x, y: x / y) # panel / 1 + self._test_op(self.panel, lambda x, y: x ** y) # panel ** 1 + + pytest.raises(Exception, self.panel.__add__, + self.panel['ItemA']) @staticmethod def _test_op(panel, op): @@ -303,100 +298,99 @@ def test_iteritems(self): assert len(list(self.panel.iteritems())) == len(self.panel.items) def test_combineFrame(self): - with catch_warnings(record=True): - def check_op(op, name): - # items - df = self.panel['ItemA'] + def check_op(op, name): + # items + df = self.panel['ItemA'] - func = getattr(self.panel, name) + func = getattr(self.panel, name) - result = func(df, axis='items') + result = func(df, axis='items') - assert_frame_equal( - result['ItemB'], op(self.panel['ItemB'], df)) + assert_frame_equal( + result['ItemB'], op(self.panel['ItemB'], df)) - # major - xs = self.panel.major_xs(self.panel.major_axis[0]) - result = func(xs, axis='major') + # major + xs = self.panel.major_xs(self.panel.major_axis[0]) + result = func(xs, axis='major') - idx = self.panel.major_axis[1] + idx = self.panel.major_axis[1] - assert_frame_equal(result.major_xs(idx), - op(self.panel.major_xs(idx), xs)) + assert_frame_equal(result.major_xs(idx), + op(self.panel.major_xs(idx), xs)) - # minor - xs = self.panel.minor_xs(self.panel.minor_axis[0]) - result = func(xs, axis='minor') + # minor + xs = self.panel.minor_xs(self.panel.minor_axis[0]) + result = func(xs, axis='minor') - idx = self.panel.minor_axis[1] + idx = self.panel.minor_axis[1] - assert_frame_equal(result.minor_xs(idx), - op(self.panel.minor_xs(idx), xs)) + assert_frame_equal(result.minor_xs(idx), + op(self.panel.minor_xs(idx), xs)) - ops = ['add', 'sub', 'mul', 'truediv', 'floordiv', 'pow', 'mod'] - if not compat.PY3: - ops.append('div') + ops = ['add', 'sub', 'mul', 'truediv', 'floordiv', 'pow', 'mod'] + if not compat.PY3: + ops.append('div') - for op in ops: - try: - check_op(getattr(operator, op), op) - except: - pprint_thing("Failing operation: %r" % op) - raise - if compat.PY3: - try: - check_op(operator.truediv, 'div') - except: - pprint_thing("Failing operation: %r" % 'div') - raise + for op in ops: + try: + check_op(getattr(operator, op), op) + except: + pprint_thing("Failing operation: %r" % op) + raise + if compat.PY3: + try: + check_op(operator.truediv, 'div') + except: + pprint_thing("Failing operation: %r" % 'div') + raise def test_combinePanel(self): - with catch_warnings(record=True): - result = self.panel.add(self.panel) - assert_panel_equal(result, self.panel * 2) + result = self.panel.add(self.panel) + assert_panel_equal(result, self.panel * 2) def test_neg(self): - with catch_warnings(record=True): - assert_panel_equal(-self.panel, self.panel * -1) + assert_panel_equal(-self.panel, self.panel * -1) # issue 7692 def test_raise_when_not_implemented(self): - with catch_warnings(record=True): - p = Panel(np.arange(3 * 4 * 5).reshape(3, 4, 5), - items=['ItemA', 'ItemB', 'ItemC'], - major_axis=date_range('20130101', periods=4), - minor_axis=list('ABCDE')) - d = p.sum(axis=1).iloc[0] - ops = ['add', 'sub', 'mul', 'truediv', - 'floordiv', 'div', 'mod', 'pow'] - for op in ops: - with pytest.raises(NotImplementedError): - getattr(p, op)(d, axis=0) + p = Panel(np.arange(3 * 4 * 5).reshape(3, 4, 5), + items=['ItemA', 'ItemB', 'ItemC'], + major_axis=date_range('20130101', periods=4), + minor_axis=list('ABCDE')) + d = p.sum(axis=1).iloc[0] + ops = ['add', 'sub', 'mul', 'truediv', + 'floordiv', 'div', 'mod', 'pow'] + for op in ops: + with pytest.raises(NotImplementedError): + getattr(p, op)(d, 
axis=0) def test_select(self): - with catch_warnings(record=True): - p = self.panel + p = self.panel - # select items + # select items + with tm.assert_produces_warning(FutureWarning, check_stacklevel=False): result = p.select(lambda x: x in ('ItemA', 'ItemC'), axis='items') - expected = p.reindex(items=['ItemA', 'ItemC']) - assert_panel_equal(result, expected) + expected = p.reindex(items=['ItemA', 'ItemC']) + assert_panel_equal(result, expected) - # select major_axis + # select major_axis + with tm.assert_produces_warning(FutureWarning, check_stacklevel=False): result = p.select(lambda x: x >= datetime( 2000, 1, 15), axis='major') - new_major = p.major_axis[p.major_axis >= datetime(2000, 1, 15)] - expected = p.reindex(major=new_major) - assert_panel_equal(result, expected) + new_major = p.major_axis[p.major_axis >= datetime(2000, 1, 15)] + expected = p.reindex(major=new_major) + assert_panel_equal(result, expected) - # select minor_axis + # select minor_axis + with tm.assert_produces_warning(FutureWarning, check_stacklevel=False): result = p.select(lambda x: x in ('D', 'A'), axis=2) - expected = p.reindex(minor=['A', 'D']) - assert_panel_equal(result, expected) + expected = p.reindex(minor=['A', 'D']) + assert_panel_equal(result, expected) - # corner case, empty thing + # corner case, empty thing + with tm.assert_produces_warning(FutureWarning, check_stacklevel=False): result = p.select(lambda x: x in ('foo', ), axis='items') - assert_panel_equal(result, p.reindex(items=[])) + assert_panel_equal(result, p.reindex(items=[])) def test_get_value(self): for item in self.panel.items: @@ -410,211 +404,204 @@ def test_get_value(self): def test_abs(self): - with catch_warnings(record=True): - result = self.panel.abs() - result2 = abs(self.panel) - expected = np.abs(self.panel) - assert_panel_equal(result, expected) - assert_panel_equal(result2, expected) + result = self.panel.abs() + result2 = abs(self.panel) + expected = np.abs(self.panel) + assert_panel_equal(result, expected) + assert_panel_equal(result2, expected) - df = self.panel['ItemA'] - result = df.abs() - result2 = abs(df) - expected = np.abs(df) - assert_frame_equal(result, expected) - assert_frame_equal(result2, expected) - - s = df['A'] - result = s.abs() - result2 = abs(s) - expected = np.abs(s) - assert_series_equal(result, expected) - assert_series_equal(result2, expected) - assert result.name == 'A' - assert result2.name == 'A' + df = self.panel['ItemA'] + result = df.abs() + result2 = abs(df) + expected = np.abs(df) + assert_frame_equal(result, expected) + assert_frame_equal(result2, expected) + + s = df['A'] + result = s.abs() + result2 = abs(s) + expected = np.abs(s) + assert_series_equal(result, expected) + assert_series_equal(result2, expected) + assert result.name == 'A' + assert result2.name == 'A' +@pytest.mark.filterwarnings("ignore:\\nPanel:FutureWarning") class CheckIndexing(object): def test_getitem(self): pytest.raises(Exception, self.panel.__getitem__, 'ItemQ') def test_delitem_and_pop(self): - with catch_warnings(record=True): - expected = self.panel['ItemA'] - result = self.panel.pop('ItemA') - assert_frame_equal(expected, result) - assert 'ItemA' not in self.panel.items + expected = self.panel['ItemA'] + result = self.panel.pop('ItemA') + assert_frame_equal(expected, result) + assert 'ItemA' not in self.panel.items - del self.panel['ItemB'] - assert 'ItemB' not in self.panel.items - pytest.raises(Exception, self.panel.__delitem__, 'ItemB') + del self.panel['ItemB'] + assert 'ItemB' not in self.panel.items + 
pytest.raises(Exception, self.panel.__delitem__, 'ItemB') - values = np.empty((3, 3, 3)) - values[0] = 0 - values[1] = 1 - values[2] = 2 + values = np.empty((3, 3, 3)) + values[0] = 0 + values[1] = 1 + values[2] = 2 - panel = Panel(values, lrange(3), lrange(3), lrange(3)) + panel = Panel(values, lrange(3), lrange(3), lrange(3)) - # did we delete the right row? + # did we delete the right row? - panelc = panel.copy() - del panelc[0] - tm.assert_frame_equal(panelc[1], panel[1]) - tm.assert_frame_equal(panelc[2], panel[2]) + panelc = panel.copy() + del panelc[0] + tm.assert_frame_equal(panelc[1], panel[1]) + tm.assert_frame_equal(panelc[2], panel[2]) - panelc = panel.copy() - del panelc[1] - tm.assert_frame_equal(panelc[0], panel[0]) - tm.assert_frame_equal(panelc[2], panel[2]) + panelc = panel.copy() + del panelc[1] + tm.assert_frame_equal(panelc[0], panel[0]) + tm.assert_frame_equal(panelc[2], panel[2]) - panelc = panel.copy() - del panelc[2] - tm.assert_frame_equal(panelc[1], panel[1]) - tm.assert_frame_equal(panelc[0], panel[0]) + panelc = panel.copy() + del panelc[2] + tm.assert_frame_equal(panelc[1], panel[1]) + tm.assert_frame_equal(panelc[0], panel[0]) def test_setitem(self): - with catch_warnings(record=True): - lp = self.panel.filter(['ItemA', 'ItemB']).to_frame() - with pytest.raises(ValueError): - self.panel['ItemE'] = lp - - # DataFrame - df = self.panel['ItemA'][2:].filter(items=['A', 'B']) - self.panel['ItemF'] = df - self.panel['ItemE'] = df - - df2 = self.panel['ItemF'] - - assert_frame_equal(df, df2.reindex( - index=df.index, columns=df.columns)) - - # scalar - self.panel['ItemG'] = 1 - self.panel['ItemE'] = True - assert self.panel['ItemG'].values.dtype == np.int64 - assert self.panel['ItemE'].values.dtype == np.bool_ - - # object dtype - self.panel['ItemQ'] = 'foo' - assert self.panel['ItemQ'].values.dtype == np.object_ - - # boolean dtype - self.panel['ItemP'] = self.panel['ItemA'] > 0 - assert self.panel['ItemP'].values.dtype == np.bool_ - - pytest.raises(TypeError, self.panel.__setitem__, 'foo', - self.panel.loc[['ItemP']]) - - # bad shape - p = Panel(np.random.randn(4, 3, 2)) - with tm.assert_raises_regex(ValueError, - r"shape of value must be " - r"\(3, 2\), shape of given " - r"object was \(4, 2\)"): - p[0] = np.random.randn(4, 2) + lp = self.panel.filter(['ItemA', 'ItemB']).to_frame() + with pytest.raises(ValueError): + self.panel['ItemE'] = lp + + # DataFrame + df = self.panel['ItemA'][2:].filter(items=['A', 'B']) + self.panel['ItemF'] = df + self.panel['ItemE'] = df + + df2 = self.panel['ItemF'] + + assert_frame_equal(df, df2.reindex( + index=df.index, columns=df.columns)) + + # scalar + self.panel['ItemG'] = 1 + self.panel['ItemE'] = True + assert self.panel['ItemG'].values.dtype == np.int64 + assert self.panel['ItemE'].values.dtype == np.bool_ + + # object dtype + self.panel['ItemQ'] = 'foo' + assert self.panel['ItemQ'].values.dtype == np.object_ + + # boolean dtype + self.panel['ItemP'] = self.panel['ItemA'] > 0 + assert self.panel['ItemP'].values.dtype == np.bool_ + + pytest.raises(TypeError, self.panel.__setitem__, 'foo', + self.panel.loc[['ItemP']]) + + # bad shape + p = Panel(np.random.randn(4, 3, 2)) + with tm.assert_raises_regex(ValueError, + r"shape of value must be " + r"\(3, 2\), shape of given " + r"object was \(4, 2\)"): + p[0] = np.random.randn(4, 2) def test_setitem_ndarray(self): - with catch_warnings(record=True): - timeidx = date_range(start=datetime(2009, 1, 1), - end=datetime(2009, 12, 31), - freq=MonthEnd()) - lons_coarse = 
np.linspace(-177.5, 177.5, 72) - lats_coarse = np.linspace(-87.5, 87.5, 36) - P = Panel(items=timeidx, major_axis=lons_coarse, - minor_axis=lats_coarse) - data = np.random.randn(72 * 36).reshape((72, 36)) - key = datetime(2009, 2, 28) - P[key] = data - - assert_almost_equal(P[key].values, data) + timeidx = date_range(start=datetime(2009, 1, 1), + end=datetime(2009, 12, 31), + freq=MonthEnd()) + lons_coarse = np.linspace(-177.5, 177.5, 72) + lats_coarse = np.linspace(-87.5, 87.5, 36) + P = Panel(items=timeidx, major_axis=lons_coarse, + minor_axis=lats_coarse) + data = np.random.randn(72 * 36).reshape((72, 36)) + key = datetime(2009, 2, 28) + P[key] = data + + assert_almost_equal(P[key].values, data) def test_set_minor_major(self): - with catch_warnings(record=True): - # GH 11014 - df1 = DataFrame(['a', 'a', 'a', np.nan, 'a', np.nan]) - df2 = DataFrame([1.0, np.nan, 1.0, np.nan, 1.0, 1.0]) - panel = Panel({'Item1': df1, 'Item2': df2}) - - newminor = notna(panel.iloc[:, :, 0]) - panel.loc[:, :, 'NewMinor'] = newminor - assert_frame_equal(panel.loc[:, :, 'NewMinor'], - newminor.astype(object)) - - newmajor = notna(panel.iloc[:, 0, :]) - panel.loc[:, 'NewMajor', :] = newmajor - assert_frame_equal(panel.loc[:, 'NewMajor', :], - newmajor.astype(object)) + # GH 11014 + df1 = DataFrame(['a', 'a', 'a', np.nan, 'a', np.nan]) + df2 = DataFrame([1.0, np.nan, 1.0, np.nan, 1.0, 1.0]) + panel = Panel({'Item1': df1, 'Item2': df2}) + + newminor = notna(panel.iloc[:, :, 0]) + panel.loc[:, :, 'NewMinor'] = newminor + assert_frame_equal(panel.loc[:, :, 'NewMinor'], + newminor.astype(object)) + + newmajor = notna(panel.iloc[:, 0, :]) + panel.loc[:, 'NewMajor', :] = newmajor + assert_frame_equal(panel.loc[:, 'NewMajor', :], + newmajor.astype(object)) def test_major_xs(self): - with catch_warnings(record=True): - ref = self.panel['ItemA'] + ref = self.panel['ItemA'] - idx = self.panel.major_axis[5] - xs = self.panel.major_xs(idx) + idx = self.panel.major_axis[5] + xs = self.panel.major_xs(idx) - result = xs['ItemA'] - assert_series_equal(result, ref.xs(idx), check_names=False) - assert result.name == 'ItemA' + result = xs['ItemA'] + assert_series_equal(result, ref.xs(idx), check_names=False) + assert result.name == 'ItemA' - # not contained - idx = self.panel.major_axis[0] - BDay() - pytest.raises(Exception, self.panel.major_xs, idx) + # not contained + idx = self.panel.major_axis[0] - BDay() + pytest.raises(Exception, self.panel.major_xs, idx) def test_major_xs_mixed(self): - with catch_warnings(record=True): - self.panel['ItemD'] = 'foo' - xs = self.panel.major_xs(self.panel.major_axis[0]) - assert xs['ItemA'].dtype == np.float64 - assert xs['ItemD'].dtype == np.object_ + self.panel['ItemD'] = 'foo' + xs = self.panel.major_xs(self.panel.major_axis[0]) + assert xs['ItemA'].dtype == np.float64 + assert xs['ItemD'].dtype == np.object_ def test_minor_xs(self): - with catch_warnings(record=True): - ref = self.panel['ItemA'] + ref = self.panel['ItemA'] - idx = self.panel.minor_axis[1] - xs = self.panel.minor_xs(idx) + idx = self.panel.minor_axis[1] + xs = self.panel.minor_xs(idx) - assert_series_equal(xs['ItemA'], ref[idx], check_names=False) + assert_series_equal(xs['ItemA'], ref[idx], check_names=False) - # not contained - pytest.raises(Exception, self.panel.minor_xs, 'E') + # not contained + pytest.raises(Exception, self.panel.minor_xs, 'E') def test_minor_xs_mixed(self): - with catch_warnings(record=True): - self.panel['ItemD'] = 'foo' + self.panel['ItemD'] = 'foo' - xs = self.panel.minor_xs('D') - assert 
xs['ItemA'].dtype == np.float64 - assert xs['ItemD'].dtype == np.object_ + xs = self.panel.minor_xs('D') + assert xs['ItemA'].dtype == np.float64 + assert xs['ItemD'].dtype == np.object_ def test_xs(self): - with catch_warnings(record=True): - itemA = self.panel.xs('ItemA', axis=0) - expected = self.panel['ItemA'] - tm.assert_frame_equal(itemA, expected) + itemA = self.panel.xs('ItemA', axis=0) + expected = self.panel['ItemA'] + tm.assert_frame_equal(itemA, expected) - # Get a view by default. - itemA_view = self.panel.xs('ItemA', axis=0) - itemA_view.values[:] = np.nan + # Get a view by default. + itemA_view = self.panel.xs('ItemA', axis=0) + itemA_view.values[:] = np.nan - assert np.isnan(self.panel['ItemA'].values).all() + assert np.isnan(self.panel['ItemA'].values).all() - # Mixed-type yields a copy. - self.panel['strings'] = 'foo' - result = self.panel.xs('D', axis=2) - assert result._is_copy is not None + # Mixed-type yields a copy. + self.panel['strings'] = 'foo' + result = self.panel.xs('D', axis=2) + assert result._is_copy is not None def test_getitem_fancy_labels(self): - with catch_warnings(record=True): - p = self.panel + p = self.panel - items = p.items[[1, 0]] - dates = p.major_axis[::2] - cols = ['D', 'C', 'F'] + items = p.items[[1, 0]] + dates = p.major_axis[::2] + cols = ['D', 'C', 'F'] - # all 3 specified + # all 3 specified + with catch_warnings(): + simplefilter("ignore", FutureWarning) + # XXX: warning in _validate_read_indexer assert_panel_equal(p.loc[items, dates, cols], p.reindex(items=items, major=dates, minor=cols)) @@ -673,132 +660,127 @@ def test_getitem_fancy_xs(self): assert_series_equal(p.loc[:, date, col], p.major_xs(date).loc[col]) def test_getitem_fancy_xs_check_view(self): - with catch_warnings(record=True): - item = 'ItemB' - date = self.panel.major_axis[5] - - # make sure it's always a view - NS = slice(None, None) - - # DataFrames - comp = assert_frame_equal - self._check_view(item, comp) - self._check_view((item, NS), comp) - self._check_view((item, NS, NS), comp) - self._check_view((NS, date), comp) - self._check_view((NS, date, NS), comp) - self._check_view((NS, NS, 'C'), comp) - - # Series - comp = assert_series_equal - self._check_view((item, date), comp) - self._check_view((item, date, NS), comp) - self._check_view((item, NS, 'C'), comp) - self._check_view((NS, date, 'C'), comp) + item = 'ItemB' + date = self.panel.major_axis[5] + + # make sure it's always a view + NS = slice(None, None) + + # DataFrames + comp = assert_frame_equal + self._check_view(item, comp) + self._check_view((item, NS), comp) + self._check_view((item, NS, NS), comp) + self._check_view((NS, date), comp) + self._check_view((NS, date, NS), comp) + self._check_view((NS, NS, 'C'), comp) + + # Series + comp = assert_series_equal + self._check_view((item, date), comp) + self._check_view((item, date, NS), comp) + self._check_view((item, NS, 'C'), comp) + self._check_view((NS, date, 'C'), comp) def test_getitem_callable(self): - with catch_warnings(record=True): - p = self.panel - # GH 12533 + p = self.panel + # GH 12533 - assert_frame_equal(p[lambda x: 'ItemB'], p.loc['ItemB']) - assert_panel_equal(p[lambda x: ['ItemB', 'ItemC']], - p.loc[['ItemB', 'ItemC']]) + assert_frame_equal(p[lambda x: 'ItemB'], p.loc['ItemB']) + assert_panel_equal(p[lambda x: ['ItemB', 'ItemC']], + p.loc[['ItemB', 'ItemC']]) def test_ix_setitem_slice_dataframe(self): - with catch_warnings(record=True): - a = Panel(items=[1, 2, 3], major_axis=[11, 22, 33], - minor_axis=[111, 222, 333]) - b = 
DataFrame(np.random.randn(2, 3), index=[111, 333], - columns=[1, 2, 3]) + a = Panel(items=[1, 2, 3], major_axis=[11, 22, 33], + minor_axis=[111, 222, 333]) + b = DataFrame(np.random.randn(2, 3), index=[111, 333], + columns=[1, 2, 3]) - a.loc[:, 22, [111, 333]] = b + a.loc[:, 22, [111, 333]] = b - assert_frame_equal(a.loc[:, 22, [111, 333]], b) + assert_frame_equal(a.loc[:, 22, [111, 333]], b) def test_ix_align(self): - with catch_warnings(record=True): - from pandas import Series - b = Series(np.random.randn(10), name=0) - b.sort_values() - df_orig = Panel(np.random.randn(3, 10, 2)) - df = df_orig.copy() + from pandas import Series + b = Series(np.random.randn(10), name=0) + b.sort_values() + df_orig = Panel(np.random.randn(3, 10, 2)) + df = df_orig.copy() - df.loc[0, :, 0] = b - assert_series_equal(df.loc[0, :, 0].reindex(b.index), b) + df.loc[0, :, 0] = b + assert_series_equal(df.loc[0, :, 0].reindex(b.index), b) - df = df_orig.swapaxes(0, 1) - df.loc[:, 0, 0] = b - assert_series_equal(df.loc[:, 0, 0].reindex(b.index), b) + df = df_orig.swapaxes(0, 1) + df.loc[:, 0, 0] = b + assert_series_equal(df.loc[:, 0, 0].reindex(b.index), b) - df = df_orig.swapaxes(1, 2) - df.loc[0, 0, :] = b - assert_series_equal(df.loc[0, 0, :].reindex(b.index), b) + df = df_orig.swapaxes(1, 2) + df.loc[0, 0, :] = b + assert_series_equal(df.loc[0, 0, :].reindex(b.index), b) def test_ix_frame_align(self): - with catch_warnings(record=True): - p_orig = tm.makePanel() - df = p_orig.iloc[0].copy() - assert_frame_equal(p_orig['ItemA'], df) - - p = p_orig.copy() - p.iloc[0, :, :] = df - assert_panel_equal(p, p_orig) - - p = p_orig.copy() - p.iloc[0] = df - assert_panel_equal(p, p_orig) - - p = p_orig.copy() - p.iloc[0, :, :] = df - assert_panel_equal(p, p_orig) - - p = p_orig.copy() - p.iloc[0] = df - assert_panel_equal(p, p_orig) - - p = p_orig.copy() - p.loc['ItemA'] = df - assert_panel_equal(p, p_orig) - - p = p_orig.copy() - p.loc['ItemA', :, :] = df - assert_panel_equal(p, p_orig) - - p = p_orig.copy() - p['ItemA'] = df - assert_panel_equal(p, p_orig) - - p = p_orig.copy() - p.iloc[0, [0, 1, 3, 5], -2:] = df - out = p.iloc[0, [0, 1, 3, 5], -2:] - assert_frame_equal(out, df.iloc[[0, 1, 3, 5], [2, 3]]) - - # GH3830, panel assignent by values/frame - for dtype in ['float64', 'int64']: - - panel = Panel(np.arange(40).reshape((2, 4, 5)), - items=['a1', 'a2'], dtype=dtype) - df1 = panel.iloc[0] - df2 = panel.iloc[1] - - tm.assert_frame_equal(panel.loc['a1'], df1) - tm.assert_frame_equal(panel.loc['a2'], df2) - - # Assignment by Value Passes for 'a2' - panel.loc['a2'] = df1.values - tm.assert_frame_equal(panel.loc['a1'], df1) - tm.assert_frame_equal(panel.loc['a2'], df1) - - # Assignment by DataFrame Ok w/o loc 'a2' - panel['a2'] = df2 - tm.assert_frame_equal(panel.loc['a1'], df1) - tm.assert_frame_equal(panel.loc['a2'], df2) - - # Assignment by DataFrame Fails for 'a2' - panel.loc['a2'] = df2 - tm.assert_frame_equal(panel.loc['a1'], df1) - tm.assert_frame_equal(panel.loc['a2'], df2) + p_orig = tm.makePanel() + df = p_orig.iloc[0].copy() + assert_frame_equal(p_orig['ItemA'], df) + + p = p_orig.copy() + p.iloc[0, :, :] = df + assert_panel_equal(p, p_orig) + + p = p_orig.copy() + p.iloc[0] = df + assert_panel_equal(p, p_orig) + + p = p_orig.copy() + p.iloc[0, :, :] = df + assert_panel_equal(p, p_orig) + + p = p_orig.copy() + p.iloc[0] = df + assert_panel_equal(p, p_orig) + + p = p_orig.copy() + p.loc['ItemA'] = df + assert_panel_equal(p, p_orig) + + p = p_orig.copy() + p.loc['ItemA', :, :] = df + assert_panel_equal(p, 
p_orig) + + p = p_orig.copy() + p['ItemA'] = df + assert_panel_equal(p, p_orig) + + p = p_orig.copy() + p.iloc[0, [0, 1, 3, 5], -2:] = df + out = p.iloc[0, [0, 1, 3, 5], -2:] + assert_frame_equal(out, df.iloc[[0, 1, 3, 5], [2, 3]]) + + # GH3830, panel assignent by values/frame + for dtype in ['float64', 'int64']: + + panel = Panel(np.arange(40).reshape((2, 4, 5)), + items=['a1', 'a2'], dtype=dtype) + df1 = panel.iloc[0] + df2 = panel.iloc[1] + + tm.assert_frame_equal(panel.loc['a1'], df1) + tm.assert_frame_equal(panel.loc['a2'], df2) + + # Assignment by Value Passes for 'a2' + panel.loc['a2'] = df1.values + tm.assert_frame_equal(panel.loc['a1'], df1) + tm.assert_frame_equal(panel.loc['a2'], df1) + + # Assignment by DataFrame Ok w/o loc 'a2' + panel['a2'] = df2 + tm.assert_frame_equal(panel.loc['a1'], df1) + tm.assert_frame_equal(panel.loc['a2'], df2) + + # Assignment by DataFrame Fails for 'a2' + panel.loc['a2'] = df2 + tm.assert_frame_equal(panel.loc['a1'], df1) + tm.assert_frame_equal(panel.loc['a2'], df2) def _check_view(self, indexer, comp): cp = self.panel.copy() @@ -808,83 +790,85 @@ def _check_view(self, indexer, comp): comp(cp.loc[indexer].reindex_like(obj), obj) def test_logical_with_nas(self): - with catch_warnings(record=True): - d = Panel({'ItemA': {'a': [np.nan, False]}, - 'ItemB': {'a': [True, True]}}) + d = Panel({'ItemA': {'a': [np.nan, False]}, + 'ItemB': {'a': [True, True]}}) - result = d['ItemA'] | d['ItemB'] - expected = DataFrame({'a': [np.nan, True]}) - assert_frame_equal(result, expected) + result = d['ItemA'] | d['ItemB'] + expected = DataFrame({'a': [np.nan, True]}) + assert_frame_equal(result, expected) - # this is autodowncasted here - result = d['ItemA'].fillna(False) | d['ItemB'] - expected = DataFrame({'a': [True, True]}) - assert_frame_equal(result, expected) + # this is autodowncasted here + result = d['ItemA'].fillna(False) | d['ItemB'] + expected = DataFrame({'a': [True, True]}) + assert_frame_equal(result, expected) def test_neg(self): - with catch_warnings(record=True): - assert_panel_equal(-self.panel, -1 * self.panel) + assert_panel_equal(-self.panel, -1 * self.panel) def test_invert(self): - with catch_warnings(record=True): - assert_panel_equal(-(self.panel < 0), ~(self.panel < 0)) + assert_panel_equal(-(self.panel < 0), ~(self.panel < 0)) def test_comparisons(self): - with catch_warnings(record=True): - p1 = tm.makePanel() - p2 = tm.makePanel() + p1 = tm.makePanel() + p2 = tm.makePanel() - tp = p1.reindex(items=p1.items + ['foo']) - df = p1[p1.items[0]] + tp = p1.reindex(items=p1.items + ['foo']) + df = p1[p1.items[0]] - def test_comp(func): + def test_comp(func): - # versus same index - result = func(p1, p2) - tm.assert_numpy_array_equal(result.values, - func(p1.values, p2.values)) + # versus same index + result = func(p1, p2) + tm.assert_numpy_array_equal(result.values, + func(p1.values, p2.values)) - # versus non-indexed same objs - pytest.raises(Exception, func, p1, tp) + # versus non-indexed same objs + pytest.raises(Exception, func, p1, tp) - # versus different objs - pytest.raises(Exception, func, p1, df) + # versus different objs + pytest.raises(Exception, func, p1, df) - # versus scalar - result3 = func(self.panel, 0) - tm.assert_numpy_array_equal(result3.values, - func(self.panel.values, 0)) + # versus scalar + result3 = func(self.panel, 0) + tm.assert_numpy_array_equal(result3.values, + func(self.panel.values, 0)) - with np.errstate(invalid='ignore'): - test_comp(operator.eq) - test_comp(operator.ne) - test_comp(operator.lt) - 
test_comp(operator.gt) - test_comp(operator.ge) - test_comp(operator.le) + with np.errstate(invalid='ignore'): + test_comp(operator.eq) + test_comp(operator.ne) + test_comp(operator.lt) + test_comp(operator.gt) + test_comp(operator.ge) + test_comp(operator.le) def test_get_value(self): - with catch_warnings(record=True): - for item in self.panel.items: - for mjr in self.panel.major_axis[::2]: - for mnr in self.panel.minor_axis: + for item in self.panel.items: + for mjr in self.panel.major_axis[::2]: + for mnr in self.panel.minor_axis: + with tm.assert_produces_warning(FutureWarning, + check_stacklevel=False): result = self.panel.get_value(item, mjr, mnr) - expected = self.panel[item][mnr][mjr] - assert_almost_equal(result, expected) + expected = self.panel[item][mnr][mjr] + assert_almost_equal(result, expected) + with catch_warnings(): + simplefilter("ignore", FutureWarning) with tm.assert_raises_regex(TypeError, "There must be an argument " "for each axis"): self.panel.get_value('a') def test_set_value(self): - with catch_warnings(record=True): - for item in self.panel.items: - for mjr in self.panel.major_axis[::2]: - for mnr in self.panel.minor_axis: + for item in self.panel.items: + for mjr in self.panel.major_axis[::2]: + for mnr in self.panel.minor_axis: + with tm.assert_produces_warning(FutureWarning, + check_stacklevel=False): self.panel.set_value(item, mjr, mnr, 1.) - tm.assert_almost_equal(self.panel[item][mnr][mjr], 1.) + tm.assert_almost_equal(self.panel[item][mnr][mjr], 1.) - # resize + # resize + with catch_warnings(): + simplefilter("ignore", FutureWarning) res = self.panel.set_value('ItemE', 'foo', 'bar', 1.5) assert isinstance(res, Panel) assert res is not self.panel @@ -910,314 +894,298 @@ def setup_method(self, method): self.panel.items.name = None def test_constructor(self): - with catch_warnings(record=True): - # with BlockManager - wp = Panel(self.panel._data) - assert wp._data is self.panel._data - - wp = Panel(self.panel._data, copy=True) - assert wp._data is not self.panel._data - tm.assert_panel_equal(wp, self.panel) - - # strings handled prop - wp = Panel([[['foo', 'foo', 'foo', ], ['foo', 'foo', 'foo']]]) - assert wp.values.dtype == np.object_ - - vals = self.panel.values - - # no copy - wp = Panel(vals) - assert wp.values is vals - - # copy - wp = Panel(vals, copy=True) - assert wp.values is not vals - - # GH #8285, test when scalar data is used to construct a Panel - # if dtype is not passed, it should be inferred - value_and_dtype = [(1, 'int64'), (3.14, 'float64'), - ('foo', np.object_)] - for (val, dtype) in value_and_dtype: - wp = Panel(val, items=range(2), major_axis=range(3), - minor_axis=range(4)) - vals = np.empty((2, 3, 4), dtype=dtype) - vals.fill(val) - - tm.assert_panel_equal(wp, Panel(vals, dtype=dtype)) - - # test the case when dtype is passed - wp = Panel(1, items=range(2), major_axis=range(3), - minor_axis=range(4), - dtype='float32') - vals = np.empty((2, 3, 4), dtype='float32') - vals.fill(1) - - tm.assert_panel_equal(wp, Panel(vals, dtype='float32')) + # with BlockManager + wp = Panel(self.panel._data) + assert wp._data is self.panel._data + + wp = Panel(self.panel._data, copy=True) + assert wp._data is not self.panel._data + tm.assert_panel_equal(wp, self.panel) + + # strings handled prop + wp = Panel([[['foo', 'foo', 'foo', ], ['foo', 'foo', 'foo']]]) + assert wp.values.dtype == np.object_ + + vals = self.panel.values + + # no copy + wp = Panel(vals) + assert wp.values is vals + + # copy + wp = Panel(vals, copy=True) + assert wp.values is 
not vals + + # GH #8285, test when scalar data is used to construct a Panel + # if dtype is not passed, it should be inferred + value_and_dtype = [(1, 'int64'), (3.14, 'float64'), + ('foo', np.object_)] + for (val, dtype) in value_and_dtype: + wp = Panel(val, items=range(2), major_axis=range(3), + minor_axis=range(4)) + vals = np.empty((2, 3, 4), dtype=dtype) + vals.fill(val) + + tm.assert_panel_equal(wp, Panel(vals, dtype=dtype)) + + # test the case when dtype is passed + wp = Panel(1, items=range(2), major_axis=range(3), + minor_axis=range(4), + dtype='float32') + vals = np.empty((2, 3, 4), dtype='float32') + vals.fill(1) + + tm.assert_panel_equal(wp, Panel(vals, dtype='float32')) def test_constructor_cast(self): - with catch_warnings(record=True): - zero_filled = self.panel.fillna(0) + zero_filled = self.panel.fillna(0) - casted = Panel(zero_filled._data, dtype=int) - casted2 = Panel(zero_filled.values, dtype=int) + casted = Panel(zero_filled._data, dtype=int) + casted2 = Panel(zero_filled.values, dtype=int) - exp_values = zero_filled.values.astype(int) - assert_almost_equal(casted.values, exp_values) - assert_almost_equal(casted2.values, exp_values) + exp_values = zero_filled.values.astype(int) + assert_almost_equal(casted.values, exp_values) + assert_almost_equal(casted2.values, exp_values) - casted = Panel(zero_filled._data, dtype=np.int32) - casted2 = Panel(zero_filled.values, dtype=np.int32) + casted = Panel(zero_filled._data, dtype=np.int32) + casted2 = Panel(zero_filled.values, dtype=np.int32) - exp_values = zero_filled.values.astype(np.int32) - assert_almost_equal(casted.values, exp_values) - assert_almost_equal(casted2.values, exp_values) + exp_values = zero_filled.values.astype(np.int32) + assert_almost_equal(casted.values, exp_values) + assert_almost_equal(casted2.values, exp_values) - # can't cast - data = [[['foo', 'bar', 'baz']]] - pytest.raises(ValueError, Panel, data, dtype=float) + # can't cast + data = [[['foo', 'bar', 'baz']]] + pytest.raises(ValueError, Panel, data, dtype=float) def test_constructor_empty_panel(self): - with catch_warnings(record=True): - empty = Panel() - assert len(empty.items) == 0 - assert len(empty.major_axis) == 0 - assert len(empty.minor_axis) == 0 + empty = Panel() + assert len(empty.items) == 0 + assert len(empty.major_axis) == 0 + assert len(empty.minor_axis) == 0 def test_constructor_observe_dtype(self): - with catch_warnings(record=True): - # GH #411 - panel = Panel(items=lrange(3), major_axis=lrange(3), - minor_axis=lrange(3), dtype='O') - assert panel.values.dtype == np.object_ + # GH #411 + panel = Panel(items=lrange(3), major_axis=lrange(3), + minor_axis=lrange(3), dtype='O') + assert panel.values.dtype == np.object_ def test_constructor_dtypes(self): - with catch_warnings(record=True): - # GH #797 - - def _check_dtype(panel, dtype): - for i in panel.items: - assert panel[i].values.dtype.name == dtype - - # only nan holding types allowed here - for dtype in ['float64', 'float32', 'object']: - panel = Panel(items=lrange(2), major_axis=lrange(10), - minor_axis=lrange(5), dtype=dtype) - _check_dtype(panel, dtype) - - for dtype in ['float64', 'float32', 'int64', 'int32', 'object']: - panel = Panel(np.array(np.random.randn(2, 10, 5), dtype=dtype), - items=lrange(2), - major_axis=lrange(10), - minor_axis=lrange(5), dtype=dtype) - _check_dtype(panel, dtype) - - for dtype in ['float64', 'float32', 'int64', 'int32', 'object']: - panel = Panel(np.array(np.random.randn(2, 10, 5), dtype='O'), - items=lrange(2), - major_axis=lrange(10), - 
minor_axis=lrange(5), dtype=dtype) - _check_dtype(panel, dtype) - - for dtype in ['float64', 'float32', 'int64', 'int32', 'object']: - panel = Panel( - np.random.randn(2, 10, 5), - items=lrange(2), major_axis=lrange(10), - minor_axis=lrange(5), - dtype=dtype) - _check_dtype(panel, dtype) - - for dtype in ['float64', 'float32', 'int64', 'int32', 'object']: - df1 = DataFrame(np.random.randn(2, 5), - index=lrange(2), columns=lrange(5)) - df2 = DataFrame(np.random.randn(2, 5), - index=lrange(2), columns=lrange(5)) - panel = Panel.from_dict({'a': df1, 'b': df2}, dtype=dtype) - _check_dtype(panel, dtype) + # GH #797 + + def _check_dtype(panel, dtype): + for i in panel.items: + assert panel[i].values.dtype.name == dtype + + # only nan holding types allowed here + for dtype in ['float64', 'float32', 'object']: + panel = Panel(items=lrange(2), major_axis=lrange(10), + minor_axis=lrange(5), dtype=dtype) + _check_dtype(panel, dtype) + + for dtype in ['float64', 'float32', 'int64', 'int32', 'object']: + panel = Panel(np.array(np.random.randn(2, 10, 5), dtype=dtype), + items=lrange(2), + major_axis=lrange(10), + minor_axis=lrange(5), dtype=dtype) + _check_dtype(panel, dtype) + + for dtype in ['float64', 'float32', 'int64', 'int32', 'object']: + panel = Panel(np.array(np.random.randn(2, 10, 5), dtype='O'), + items=lrange(2), + major_axis=lrange(10), + minor_axis=lrange(5), dtype=dtype) + _check_dtype(panel, dtype) + + for dtype in ['float64', 'float32', 'int64', 'int32', 'object']: + panel = Panel( + np.random.randn(2, 10, 5), + items=lrange(2), major_axis=lrange(10), + minor_axis=lrange(5), + dtype=dtype) + _check_dtype(panel, dtype) + + for dtype in ['float64', 'float32', 'int64', 'int32', 'object']: + df1 = DataFrame(np.random.randn(2, 5), + index=lrange(2), columns=lrange(5)) + df2 = DataFrame(np.random.randn(2, 5), + index=lrange(2), columns=lrange(5)) + panel = Panel.from_dict({'a': df1, 'b': df2}, dtype=dtype) + _check_dtype(panel, dtype) def test_constructor_fails_with_not_3d_input(self): - with catch_warnings(record=True): - with tm.assert_raises_regex(ValueError, "The number of dimensions required is 3"): # noqa - Panel(np.random.randn(10, 2)) + with tm.assert_raises_regex(ValueError, "The number of dimensions required is 3"): # noqa + Panel(np.random.randn(10, 2)) def test_consolidate(self): - with catch_warnings(record=True): - assert self.panel._data.is_consolidated() + assert self.panel._data.is_consolidated() - self.panel['foo'] = 1. - assert not self.panel._data.is_consolidated() + self.panel['foo'] = 1. + assert not self.panel._data.is_consolidated() - panel = self.panel._consolidate() - assert panel._data.is_consolidated() + panel = self.panel._consolidate() + assert panel._data.is_consolidated() def test_ctor_dict(self): - with catch_warnings(record=True): - itema = self.panel['ItemA'] - itemb = self.panel['ItemB'] + itema = self.panel['ItemA'] + itemb = self.panel['ItemB'] - d = {'A': itema, 'B': itemb[5:]} - d2 = {'A': itema._series, 'B': itemb[5:]._series} - d3 = {'A': None, - 'B': DataFrame(itemb[5:]._series), - 'C': DataFrame(itema._series)} + d = {'A': itema, 'B': itemb[5:]} + d2 = {'A': itema._series, 'B': itemb[5:]._series} + d3 = {'A': None, + 'B': DataFrame(itemb[5:]._series), + 'C': DataFrame(itema._series)} - wp = Panel.from_dict(d) - wp2 = Panel.from_dict(d2) # nested Dict + wp = Panel.from_dict(d) + wp2 = Panel.from_dict(d2) # nested Dict - # TODO: unused? - wp3 = Panel.from_dict(d3) # noqa + # TODO: unused? 
+ wp3 = Panel.from_dict(d3) # noqa - tm.assert_index_equal(wp.major_axis, self.panel.major_axis) - assert_panel_equal(wp, wp2) + tm.assert_index_equal(wp.major_axis, self.panel.major_axis) + assert_panel_equal(wp, wp2) - # intersect - wp = Panel.from_dict(d, intersect=True) - tm.assert_index_equal(wp.major_axis, itemb.index[5:]) + # intersect + wp = Panel.from_dict(d, intersect=True) + tm.assert_index_equal(wp.major_axis, itemb.index[5:]) - # use constructor - assert_panel_equal(Panel(d), Panel.from_dict(d)) - assert_panel_equal(Panel(d2), Panel.from_dict(d2)) - assert_panel_equal(Panel(d3), Panel.from_dict(d3)) + # use constructor + assert_panel_equal(Panel(d), Panel.from_dict(d)) + assert_panel_equal(Panel(d2), Panel.from_dict(d2)) + assert_panel_equal(Panel(d3), Panel.from_dict(d3)) - # a pathological case - d4 = {'A': None, 'B': None} + # a pathological case + d4 = {'A': None, 'B': None} - # TODO: unused? - wp4 = Panel.from_dict(d4) # noqa + # TODO: unused? + wp4 = Panel.from_dict(d4) # noqa - assert_panel_equal(Panel(d4), Panel(items=['A', 'B'])) + assert_panel_equal(Panel(d4), Panel(items=['A', 'B'])) - # cast - dcasted = {k: v.reindex(wp.major_axis).fillna(0) - for k, v in compat.iteritems(d)} - result = Panel(dcasted, dtype=int) - expected = Panel({k: v.astype(int) - for k, v in compat.iteritems(dcasted)}) - assert_panel_equal(result, expected) + # cast + dcasted = {k: v.reindex(wp.major_axis).fillna(0) + for k, v in compat.iteritems(d)} + result = Panel(dcasted, dtype=int) + expected = Panel({k: v.astype(int) + for k, v in compat.iteritems(dcasted)}) + assert_panel_equal(result, expected) - result = Panel(dcasted, dtype=np.int32) - expected = Panel({k: v.astype(np.int32) - for k, v in compat.iteritems(dcasted)}) - assert_panel_equal(result, expected) + result = Panel(dcasted, dtype=np.int32) + expected = Panel({k: v.astype(np.int32) + for k, v in compat.iteritems(dcasted)}) + assert_panel_equal(result, expected) def test_constructor_dict_mixed(self): - with catch_warnings(record=True): - data = {k: v.values for k, v in self.panel.iteritems()} - result = Panel(data) - exp_major = Index(np.arange(len(self.panel.major_axis))) - tm.assert_index_equal(result.major_axis, exp_major) + data = {k: v.values for k, v in self.panel.iteritems()} + result = Panel(data) + exp_major = Index(np.arange(len(self.panel.major_axis))) + tm.assert_index_equal(result.major_axis, exp_major) - result = Panel(data, items=self.panel.items, - major_axis=self.panel.major_axis, - minor_axis=self.panel.minor_axis) - assert_panel_equal(result, self.panel) + result = Panel(data, items=self.panel.items, + major_axis=self.panel.major_axis, + minor_axis=self.panel.minor_axis) + assert_panel_equal(result, self.panel) - data['ItemC'] = self.panel['ItemC'] - result = Panel(data) - assert_panel_equal(result, self.panel) + data['ItemC'] = self.panel['ItemC'] + result = Panel(data) + assert_panel_equal(result, self.panel) - # corner, blow up - data['ItemB'] = data['ItemB'][:-1] - pytest.raises(Exception, Panel, data) + # corner, blow up + data['ItemB'] = data['ItemB'][:-1] + pytest.raises(Exception, Panel, data) - data['ItemB'] = self.panel['ItemB'].values[:, :-1] - pytest.raises(Exception, Panel, data) + data['ItemB'] = self.panel['ItemB'].values[:, :-1] + pytest.raises(Exception, Panel, data) def test_ctor_orderedDict(self): - with catch_warnings(record=True): - keys = list(set(np.random.randint(0, 5000, 100)))[ - :50] # unique random int keys - d = OrderedDict([(k, mkdf(10, 5)) for k in keys]) - p = Panel(d) - assert 
list(p.items) == keys + keys = list(set(np.random.randint(0, 5000, 100)))[ + :50] # unique random int keys + d = OrderedDict([(k, mkdf(10, 5)) for k in keys]) + p = Panel(d) + assert list(p.items) == keys - p = Panel.from_dict(d) - assert list(p.items) == keys + p = Panel.from_dict(d) + assert list(p.items) == keys def test_constructor_resize(self): - with catch_warnings(record=True): - data = self.panel._data - items = self.panel.items[:-1] - major = self.panel.major_axis[:-1] - minor = self.panel.minor_axis[:-1] - - result = Panel(data, items=items, - major_axis=major, minor_axis=minor) - expected = self.panel.reindex( - items=items, major=major, minor=minor) - assert_panel_equal(result, expected) - - result = Panel(data, items=items, major_axis=major) - expected = self.panel.reindex(items=items, major=major) - assert_panel_equal(result, expected) - - result = Panel(data, items=items) - expected = self.panel.reindex(items=items) - assert_panel_equal(result, expected) - - result = Panel(data, minor_axis=minor) - expected = self.panel.reindex(minor=minor) - assert_panel_equal(result, expected) + data = self.panel._data + items = self.panel.items[:-1] + major = self.panel.major_axis[:-1] + minor = self.panel.minor_axis[:-1] + + result = Panel(data, items=items, + major_axis=major, minor_axis=minor) + expected = self.panel.reindex( + items=items, major=major, minor=minor) + assert_panel_equal(result, expected) + + result = Panel(data, items=items, major_axis=major) + expected = self.panel.reindex(items=items, major=major) + assert_panel_equal(result, expected) + + result = Panel(data, items=items) + expected = self.panel.reindex(items=items) + assert_panel_equal(result, expected) + + result = Panel(data, minor_axis=minor) + expected = self.panel.reindex(minor=minor) + assert_panel_equal(result, expected) def test_from_dict_mixed_orient(self): - with catch_warnings(record=True): - df = tm.makeDataFrame() - df['foo'] = 'bar' + df = tm.makeDataFrame() + df['foo'] = 'bar' - data = {'k1': df, 'k2': df} + data = {'k1': df, 'k2': df} - panel = Panel.from_dict(data, orient='minor') + panel = Panel.from_dict(data, orient='minor') - assert panel['foo'].values.dtype == np.object_ - assert panel['A'].values.dtype == np.float64 + assert panel['foo'].values.dtype == np.object_ + assert panel['A'].values.dtype == np.float64 def test_constructor_error_msgs(self): - with catch_warnings(record=True): - def testit(): - Panel(np.random.randn(3, 4, 5), - lrange(4), lrange(5), lrange(5)) - - tm.assert_raises_regex(ValueError, - r"Shape of passed values is " - r"\(3, 4, 5\), indices imply " - r"\(4, 5, 5\)", - testit) - - def testit(): - Panel(np.random.randn(3, 4, 5), - lrange(5), lrange(4), lrange(5)) - - tm.assert_raises_regex(ValueError, - r"Shape of passed values is " - r"\(3, 4, 5\), indices imply " - r"\(5, 4, 5\)", - testit) - - def testit(): - Panel(np.random.randn(3, 4, 5), - lrange(5), lrange(5), lrange(4)) - - tm.assert_raises_regex(ValueError, - r"Shape of passed values is " - r"\(3, 4, 5\), indices imply " - r"\(5, 5, 4\)", - testit) + def testit(): + Panel(np.random.randn(3, 4, 5), + lrange(4), lrange(5), lrange(5)) + + tm.assert_raises_regex(ValueError, + r"Shape of passed values is " + r"\(3, 4, 5\), indices imply " + r"\(4, 5, 5\)", + testit) + + def testit(): + Panel(np.random.randn(3, 4, 5), + lrange(5), lrange(4), lrange(5)) + + tm.assert_raises_regex(ValueError, + r"Shape of passed values is " + r"\(3, 4, 5\), indices imply " + r"\(5, 4, 5\)", + testit) + + def testit(): + 
Panel(np.random.randn(3, 4, 5), + lrange(5), lrange(5), lrange(4)) + + tm.assert_raises_regex(ValueError, + r"Shape of passed values is " + r"\(3, 4, 5\), indices imply " + r"\(5, 5, 4\)", + testit) def test_conform(self): - with catch_warnings(record=True): - df = self.panel['ItemA'][:-5].filter(items=['A', 'B']) - conformed = self.panel.conform(df) + df = self.panel['ItemA'][:-5].filter(items=['A', 'B']) + conformed = self.panel.conform(df) - tm.assert_index_equal(conformed.index, self.panel.major_axis) - tm.assert_index_equal(conformed.columns, self.panel.minor_axis) + tm.assert_index_equal(conformed.index, self.panel.major_axis) + tm.assert_index_equal(conformed.columns, self.panel.minor_axis) def test_convert_objects(self): - with catch_warnings(record=True): - - # GH 4937 - p = Panel(dict(A=dict(a=['1', '1.0']))) - expected = Panel(dict(A=dict(a=[1, 1.0]))) - result = p._convert(numeric=True, coerce=True) - assert_panel_equal(result, expected) + # GH 4937 + p = Panel(dict(A=dict(a=['1', '1.0']))) + expected = Panel(dict(A=dict(a=[1, 1.0]))) + result = p._convert(numeric=True, coerce=True) + assert_panel_equal(result, expected) def test_dtypes(self): @@ -1226,964 +1194,933 @@ def test_dtypes(self): assert_series_equal(result, expected) def test_astype(self): - with catch_warnings(record=True): - # GH7271 - data = np.array([[[1, 2], [3, 4]], [[5, 6], [7, 8]]]) - panel = Panel(data, ['a', 'b'], ['c', 'd'], ['e', 'f']) + # GH7271 + data = np.array([[[1, 2], [3, 4]], [[5, 6], [7, 8]]]) + panel = Panel(data, ['a', 'b'], ['c', 'd'], ['e', 'f']) - str_data = np.array([[['1', '2'], ['3', '4']], - [['5', '6'], ['7', '8']]]) - expected = Panel(str_data, ['a', 'b'], ['c', 'd'], ['e', 'f']) - assert_panel_equal(panel.astype(str), expected) + str_data = np.array([[['1', '2'], ['3', '4']], + [['5', '6'], ['7', '8']]]) + expected = Panel(str_data, ['a', 'b'], ['c', 'd'], ['e', 'f']) + assert_panel_equal(panel.astype(str), expected) - pytest.raises(NotImplementedError, panel.astype, {0: str}) + pytest.raises(NotImplementedError, panel.astype, {0: str}) def test_apply(self): - with catch_warnings(record=True): - # GH1148 - - # ufunc - applied = self.panel.apply(np.sqrt) - with np.errstate(invalid='ignore'): - expected = np.sqrt(self.panel.values) - assert_almost_equal(applied.values, expected) - - # ufunc same shape - result = self.panel.apply(lambda x: x * 2, axis='items') - expected = self.panel * 2 - assert_panel_equal(result, expected) - result = self.panel.apply(lambda x: x * 2, axis='major_axis') - expected = self.panel * 2 - assert_panel_equal(result, expected) - result = self.panel.apply(lambda x: x * 2, axis='minor_axis') - expected = self.panel * 2 - assert_panel_equal(result, expected) - - # reduction to DataFrame - result = self.panel.apply(lambda x: x.dtype, axis='items') - expected = DataFrame(np.dtype('float64'), - index=self.panel.major_axis, - columns=self.panel.minor_axis) - assert_frame_equal(result, expected) - result = self.panel.apply(lambda x: x.dtype, axis='major_axis') - expected = DataFrame(np.dtype('float64'), - index=self.panel.minor_axis, - columns=self.panel.items) - assert_frame_equal(result, expected) - result = self.panel.apply(lambda x: x.dtype, axis='minor_axis') - expected = DataFrame(np.dtype('float64'), - index=self.panel.major_axis, - columns=self.panel.items) - assert_frame_equal(result, expected) - - # reductions via other dims - expected = self.panel.sum(0) - result = self.panel.apply(lambda x: x.sum(), axis='items') - assert_frame_equal(result, expected) - 
expected = self.panel.sum(1) - result = self.panel.apply(lambda x: x.sum(), axis='major_axis') - assert_frame_equal(result, expected) - expected = self.panel.sum(2) - result = self.panel.apply(lambda x: x.sum(), axis='minor_axis') - assert_frame_equal(result, expected) - - # pass kwargs - result = self.panel.apply( - lambda x, y: x.sum() + y, axis='items', y=5) - expected = self.panel.sum(0) + 5 - assert_frame_equal(result, expected) + # GH1148 + + # ufunc + applied = self.panel.apply(np.sqrt) + with np.errstate(invalid='ignore'): + expected = np.sqrt(self.panel.values) + assert_almost_equal(applied.values, expected) + + # ufunc same shape + result = self.panel.apply(lambda x: x * 2, axis='items') + expected = self.panel * 2 + assert_panel_equal(result, expected) + result = self.panel.apply(lambda x: x * 2, axis='major_axis') + expected = self.panel * 2 + assert_panel_equal(result, expected) + result = self.panel.apply(lambda x: x * 2, axis='minor_axis') + expected = self.panel * 2 + assert_panel_equal(result, expected) + + # reduction to DataFrame + result = self.panel.apply(lambda x: x.dtype, axis='items') + expected = DataFrame(np.dtype('float64'), + index=self.panel.major_axis, + columns=self.panel.minor_axis) + assert_frame_equal(result, expected) + result = self.panel.apply(lambda x: x.dtype, axis='major_axis') + expected = DataFrame(np.dtype('float64'), + index=self.panel.minor_axis, + columns=self.panel.items) + assert_frame_equal(result, expected) + result = self.panel.apply(lambda x: x.dtype, axis='minor_axis') + expected = DataFrame(np.dtype('float64'), + index=self.panel.major_axis, + columns=self.panel.items) + assert_frame_equal(result, expected) + + # reductions via other dims + expected = self.panel.sum(0) + result = self.panel.apply(lambda x: x.sum(), axis='items') + assert_frame_equal(result, expected) + expected = self.panel.sum(1) + result = self.panel.apply(lambda x: x.sum(), axis='major_axis') + assert_frame_equal(result, expected) + expected = self.panel.sum(2) + result = self.panel.apply(lambda x: x.sum(), axis='minor_axis') + assert_frame_equal(result, expected) + + # pass kwargs + result = self.panel.apply( + lambda x, y: x.sum() + y, axis='items', y=5) + expected = self.panel.sum(0) + 5 + assert_frame_equal(result, expected) def test_apply_slabs(self): - with catch_warnings(record=True): - - # same shape as original - result = self.panel.apply(lambda x: x * 2, - axis=['items', 'major_axis']) - expected = (self.panel * 2).transpose('minor_axis', 'major_axis', - 'items') - assert_panel_equal(result, expected) - result = self.panel.apply(lambda x: x * 2, - axis=['major_axis', 'items']) - assert_panel_equal(result, expected) - - result = self.panel.apply(lambda x: x * 2, - axis=['items', 'minor_axis']) - expected = (self.panel * 2).transpose('major_axis', 'minor_axis', - 'items') - assert_panel_equal(result, expected) - result = self.panel.apply(lambda x: x * 2, - axis=['minor_axis', 'items']) - assert_panel_equal(result, expected) - - result = self.panel.apply(lambda x: x * 2, - axis=['major_axis', 'minor_axis']) - expected = self.panel * 2 - assert_panel_equal(result, expected) - result = self.panel.apply(lambda x: x * 2, - axis=['minor_axis', 'major_axis']) - assert_panel_equal(result, expected) - - # reductions - result = self.panel.apply(lambda x: x.sum(0), axis=[ - 'items', 'major_axis' - ]) - expected = self.panel.sum(1).T - assert_frame_equal(result, expected) + + # same shape as original + result = self.panel.apply(lambda x: x * 2, + axis=['items', 
'major_axis']) + expected = (self.panel * 2).transpose('minor_axis', 'major_axis', + 'items') + assert_panel_equal(result, expected) + result = self.panel.apply(lambda x: x * 2, + axis=['major_axis', 'items']) + assert_panel_equal(result, expected) + + result = self.panel.apply(lambda x: x * 2, + axis=['items', 'minor_axis']) + expected = (self.panel * 2).transpose('major_axis', 'minor_axis', + 'items') + assert_panel_equal(result, expected) + result = self.panel.apply(lambda x: x * 2, + axis=['minor_axis', 'items']) + assert_panel_equal(result, expected) + + result = self.panel.apply(lambda x: x * 2, + axis=['major_axis', 'minor_axis']) + expected = self.panel * 2 + assert_panel_equal(result, expected) + result = self.panel.apply(lambda x: x * 2, + axis=['minor_axis', 'major_axis']) + assert_panel_equal(result, expected) + + # reductions + result = self.panel.apply(lambda x: x.sum(0), axis=[ + 'items', 'major_axis' + ]) + expected = self.panel.sum(1).T + assert_frame_equal(result, expected) + + result = self.panel.apply(lambda x: x.sum(1), axis=[ + 'items', 'major_axis' + ]) + expected = self.panel.sum(0) + assert_frame_equal(result, expected) + + # transforms + f = lambda x: ((x.T - x.mean(1)) / x.std(1)).T # make sure that we don't trigger any warnings - with catch_warnings(record=True): - result = self.panel.apply(lambda x: x.sum(1), axis=[ - 'items', 'major_axis' - ]) - expected = self.panel.sum(0) - assert_frame_equal(result, expected) - - # transforms - f = lambda x: ((x.T - x.mean(1)) / x.std(1)).T - - # make sure that we don't trigger any warnings - result = self.panel.apply(f, axis=['items', 'major_axis']) - expected = Panel({ax: f(self.panel.loc[:, :, ax]) - for ax in self.panel.minor_axis}) - assert_panel_equal(result, expected) - - result = self.panel.apply(f, axis=['major_axis', 'minor_axis']) - expected = Panel({ax: f(self.panel.loc[ax]) - for ax in self.panel.items}) - assert_panel_equal(result, expected) - - result = self.panel.apply(f, axis=['minor_axis', 'items']) - expected = Panel({ax: f(self.panel.loc[:, ax]) - for ax in self.panel.major_axis}) - assert_panel_equal(result, expected) - - # with multi-indexes - # GH7469 - index = MultiIndex.from_tuples([('one', 'a'), ('one', 'b'), ( - 'two', 'a'), ('two', 'b')]) - dfa = DataFrame(np.array(np.arange(12, dtype='int64')).reshape( - 4, 3), columns=list("ABC"), index=index) - dfb = DataFrame(np.array(np.arange(10, 22, dtype='int64')).reshape( - 4, 3), columns=list("ABC"), index=index) - p = Panel({'f': dfa, 'g': dfb}) - result = p.apply(lambda x: x.sum(), axis=0) - - # on windows this will be in32 - result = result.astype('int64') - expected = p.sum(0) - assert_frame_equal(result, expected) + result = self.panel.apply(f, axis=['items', 'major_axis']) + expected = Panel({ax: f(self.panel.loc[:, :, ax]) + for ax in self.panel.minor_axis}) + assert_panel_equal(result, expected) + + result = self.panel.apply(f, axis=['major_axis', 'minor_axis']) + expected = Panel({ax: f(self.panel.loc[ax]) + for ax in self.panel.items}) + assert_panel_equal(result, expected) + + result = self.panel.apply(f, axis=['minor_axis', 'items']) + expected = Panel({ax: f(self.panel.loc[:, ax]) + for ax in self.panel.major_axis}) + assert_panel_equal(result, expected) + + # with multi-indexes + # GH7469 + index = MultiIndex.from_tuples([('one', 'a'), ('one', 'b'), ( + 'two', 'a'), ('two', 'b')]) + dfa = DataFrame(np.array(np.arange(12, dtype='int64')).reshape( + 4, 3), columns=list("ABC"), index=index) + dfb = DataFrame(np.array(np.arange(10, 22, 
dtype='int64')).reshape( + 4, 3), columns=list("ABC"), index=index) + p = Panel({'f': dfa, 'g': dfb}) + result = p.apply(lambda x: x.sum(), axis=0) + + # on windows this will be in32 + result = result.astype('int64') + expected = p.sum(0) + assert_frame_equal(result, expected) def test_apply_no_or_zero_ndim(self): - with catch_warnings(record=True): - # GH10332 - self.panel = Panel(np.random.rand(5, 5, 5)) + # GH10332 + self.panel = Panel(np.random.rand(5, 5, 5)) - result_int = self.panel.apply(lambda df: 0, axis=[1, 2]) - result_float = self.panel.apply(lambda df: 0.0, axis=[1, 2]) - result_int64 = self.panel.apply( - lambda df: np.int64(0), axis=[1, 2]) - result_float64 = self.panel.apply(lambda df: np.float64(0.0), - axis=[1, 2]) + result_int = self.panel.apply(lambda df: 0, axis=[1, 2]) + result_float = self.panel.apply(lambda df: 0.0, axis=[1, 2]) + result_int64 = self.panel.apply( + lambda df: np.int64(0), axis=[1, 2]) + result_float64 = self.panel.apply(lambda df: np.float64(0.0), + axis=[1, 2]) - expected_int = expected_int64 = Series([0] * 5) - expected_float = expected_float64 = Series([0.0] * 5) + expected_int = expected_int64 = Series([0] * 5) + expected_float = expected_float64 = Series([0.0] * 5) - assert_series_equal(result_int, expected_int) - assert_series_equal(result_int64, expected_int64) - assert_series_equal(result_float, expected_float) - assert_series_equal(result_float64, expected_float64) + assert_series_equal(result_int, expected_int) + assert_series_equal(result_int64, expected_int64) + assert_series_equal(result_float, expected_float) + assert_series_equal(result_float64, expected_float64) def test_reindex(self): - with catch_warnings(record=True): - ref = self.panel['ItemB'] + ref = self.panel['ItemB'] - # items - result = self.panel.reindex(items=['ItemA', 'ItemB']) - assert_frame_equal(result['ItemB'], ref) + # items + result = self.panel.reindex(items=['ItemA', 'ItemB']) + assert_frame_equal(result['ItemB'], ref) - # major - new_major = list(self.panel.major_axis[:10]) - result = self.panel.reindex(major=new_major) - assert_frame_equal(result['ItemB'], ref.reindex(index=new_major)) + # major + new_major = list(self.panel.major_axis[:10]) + result = self.panel.reindex(major=new_major) + assert_frame_equal(result['ItemB'], ref.reindex(index=new_major)) - # raise exception put both major and major_axis - pytest.raises(Exception, self.panel.reindex, - major_axis=new_major, - major=new_major) + # raise exception put both major and major_axis + pytest.raises(Exception, self.panel.reindex, + major_axis=new_major, + major=new_major) - # minor - new_minor = list(self.panel.minor_axis[:2]) - result = self.panel.reindex(minor=new_minor) - assert_frame_equal(result['ItemB'], ref.reindex(columns=new_minor)) + # minor + new_minor = list(self.panel.minor_axis[:2]) + result = self.panel.reindex(minor=new_minor) + assert_frame_equal(result['ItemB'], ref.reindex(columns=new_minor)) - # raise exception put both major and major_axis - pytest.raises(Exception, self.panel.reindex, - minor_axis=new_minor, - minor=new_minor) + # raise exception put both major and major_axis + pytest.raises(Exception, self.panel.reindex, + minor_axis=new_minor, + minor=new_minor) - # this ok - result = self.panel.reindex() - assert_panel_equal(result, self.panel) - assert result is not self.panel + # this ok + result = self.panel.reindex() + assert_panel_equal(result, self.panel) + assert result is not self.panel - # with filling - smaller_major = self.panel.major_axis[::5] - smaller = 
self.panel.reindex(major=smaller_major) + # with filling + smaller_major = self.panel.major_axis[::5] + smaller = self.panel.reindex(major=smaller_major) - larger = smaller.reindex(major=self.panel.major_axis, method='pad') + larger = smaller.reindex(major=self.panel.major_axis, method='pad') - assert_frame_equal(larger.major_xs(self.panel.major_axis[1]), - smaller.major_xs(smaller_major[0])) + assert_frame_equal(larger.major_xs(self.panel.major_axis[1]), + smaller.major_xs(smaller_major[0])) - # don't necessarily copy - result = self.panel.reindex( - major=self.panel.major_axis, copy=False) - assert_panel_equal(result, self.panel) - assert result is self.panel + # don't necessarily copy + result = self.panel.reindex( + major=self.panel.major_axis, copy=False) + assert_panel_equal(result, self.panel) + assert result is self.panel def test_reindex_axis_style(self): - with catch_warnings(record=True): - panel = Panel(np.random.rand(5, 5, 5)) - expected0 = Panel(panel.values).iloc[[0, 1]] - expected1 = Panel(panel.values).iloc[:, [0, 1]] - expected2 = Panel(panel.values).iloc[:, :, [0, 1]] + panel = Panel(np.random.rand(5, 5, 5)) + expected0 = Panel(panel.values).iloc[[0, 1]] + expected1 = Panel(panel.values).iloc[:, [0, 1]] + expected2 = Panel(panel.values).iloc[:, :, [0, 1]] - result = panel.reindex([0, 1], axis=0) - assert_panel_equal(result, expected0) + result = panel.reindex([0, 1], axis=0) + assert_panel_equal(result, expected0) - result = panel.reindex([0, 1], axis=1) - assert_panel_equal(result, expected1) + result = panel.reindex([0, 1], axis=1) + assert_panel_equal(result, expected1) - result = panel.reindex([0, 1], axis=2) - assert_panel_equal(result, expected2) + result = panel.reindex([0, 1], axis=2) + assert_panel_equal(result, expected2) - result = panel.reindex([0, 1], axis=2) - assert_panel_equal(result, expected2) + result = panel.reindex([0, 1], axis=2) + assert_panel_equal(result, expected2) def test_reindex_multi(self): - with catch_warnings(record=True): - - # with and without copy full reindexing - result = self.panel.reindex( - items=self.panel.items, - major=self.panel.major_axis, - minor=self.panel.minor_axis, copy=False) - - assert result.items is self.panel.items - assert result.major_axis is self.panel.major_axis - assert result.minor_axis is self.panel.minor_axis - - result = self.panel.reindex( - items=self.panel.items, - major=self.panel.major_axis, - minor=self.panel.minor_axis, copy=False) - assert_panel_equal(result, self.panel) - - # multi-axis indexing consistency - # GH 5900 - df = DataFrame(np.random.randn(4, 3)) - p = Panel({'Item1': df}) - expected = Panel({'Item1': df}) - expected['Item2'] = np.nan - - items = ['Item1', 'Item2'] - major_axis = np.arange(4) - minor_axis = np.arange(3) - - results = [] - results.append(p.reindex(items=items, major_axis=major_axis, - copy=True)) - results.append(p.reindex(items=items, major_axis=major_axis, - copy=False)) - results.append(p.reindex(items=items, minor_axis=minor_axis, - copy=True)) - results.append(p.reindex(items=items, minor_axis=minor_axis, - copy=False)) - results.append(p.reindex(items=items, major_axis=major_axis, - minor_axis=minor_axis, copy=True)) - results.append(p.reindex(items=items, major_axis=major_axis, - minor_axis=minor_axis, copy=False)) - - for i, r in enumerate(results): - assert_panel_equal(expected, r) + + # with and without copy full reindexing + result = self.panel.reindex( + items=self.panel.items, + major=self.panel.major_axis, + minor=self.panel.minor_axis, copy=False) + + 
assert result.items is self.panel.items + assert result.major_axis is self.panel.major_axis + assert result.minor_axis is self.panel.minor_axis + + result = self.panel.reindex( + items=self.panel.items, + major=self.panel.major_axis, + minor=self.panel.minor_axis, copy=False) + assert_panel_equal(result, self.panel) + + # multi-axis indexing consistency + # GH 5900 + df = DataFrame(np.random.randn(4, 3)) + p = Panel({'Item1': df}) + expected = Panel({'Item1': df}) + expected['Item2'] = np.nan + + items = ['Item1', 'Item2'] + major_axis = np.arange(4) + minor_axis = np.arange(3) + + results = [] + results.append(p.reindex(items=items, major_axis=major_axis, + copy=True)) + results.append(p.reindex(items=items, major_axis=major_axis, + copy=False)) + results.append(p.reindex(items=items, minor_axis=minor_axis, + copy=True)) + results.append(p.reindex(items=items, minor_axis=minor_axis, + copy=False)) + results.append(p.reindex(items=items, major_axis=major_axis, + minor_axis=minor_axis, copy=True)) + results.append(p.reindex(items=items, major_axis=major_axis, + minor_axis=minor_axis, copy=False)) + + for i, r in enumerate(results): + assert_panel_equal(expected, r) def test_reindex_like(self): - with catch_warnings(record=True): - # reindex_like - smaller = self.panel.reindex(items=self.panel.items[:-1], - major=self.panel.major_axis[:-1], - minor=self.panel.minor_axis[:-1]) - smaller_like = self.panel.reindex_like(smaller) - assert_panel_equal(smaller, smaller_like) + # reindex_like + smaller = self.panel.reindex(items=self.panel.items[:-1], + major=self.panel.major_axis[:-1], + minor=self.panel.minor_axis[:-1]) + smaller_like = self.panel.reindex_like(smaller) + assert_panel_equal(smaller, smaller_like) def test_take(self): - with catch_warnings(record=True): - # axis == 0 - result = self.panel.take([2, 0, 1], axis=0) - expected = self.panel.reindex(items=['ItemC', 'ItemA', 'ItemB']) - assert_panel_equal(result, expected) + # axis == 0 + result = self.panel.take([2, 0, 1], axis=0) + expected = self.panel.reindex(items=['ItemC', 'ItemA', 'ItemB']) + assert_panel_equal(result, expected) - # axis >= 1 - result = self.panel.take([3, 0, 1, 2], axis=2) - expected = self.panel.reindex(minor=['D', 'A', 'B', 'C']) - assert_panel_equal(result, expected) + # axis >= 1 + result = self.panel.take([3, 0, 1, 2], axis=2) + expected = self.panel.reindex(minor=['D', 'A', 'B', 'C']) + assert_panel_equal(result, expected) - # neg indices ok - expected = self.panel.reindex(minor=['D', 'D', 'B', 'C']) - result = self.panel.take([3, -1, 1, 2], axis=2) - assert_panel_equal(result, expected) + # neg indices ok + expected = self.panel.reindex(minor=['D', 'D', 'B', 'C']) + result = self.panel.take([3, -1, 1, 2], axis=2) + assert_panel_equal(result, expected) - pytest.raises(Exception, self.panel.take, [4, 0, 1, 2], axis=2) + pytest.raises(Exception, self.panel.take, [4, 0, 1, 2], axis=2) def test_sort_index(self): - with catch_warnings(record=True): - import random - - ritems = list(self.panel.items) - rmajor = list(self.panel.major_axis) - rminor = list(self.panel.minor_axis) - random.shuffle(ritems) - random.shuffle(rmajor) - random.shuffle(rminor) - - random_order = self.panel.reindex(items=ritems) - sorted_panel = random_order.sort_index(axis=0) - assert_panel_equal(sorted_panel, self.panel) - - # descending - random_order = self.panel.reindex(items=ritems) - sorted_panel = random_order.sort_index(axis=0, ascending=False) - assert_panel_equal( - sorted_panel, - self.panel.reindex(items=self.panel.items[::-1])) 
- - random_order = self.panel.reindex(major=rmajor) - sorted_panel = random_order.sort_index(axis=1) - assert_panel_equal(sorted_panel, self.panel) - - random_order = self.panel.reindex(minor=rminor) - sorted_panel = random_order.sort_index(axis=2) - assert_panel_equal(sorted_panel, self.panel) + import random + + ritems = list(self.panel.items) + rmajor = list(self.panel.major_axis) + rminor = list(self.panel.minor_axis) + random.shuffle(ritems) + random.shuffle(rmajor) + random.shuffle(rminor) + + random_order = self.panel.reindex(items=ritems) + sorted_panel = random_order.sort_index(axis=0) + assert_panel_equal(sorted_panel, self.panel) + + # descending + random_order = self.panel.reindex(items=ritems) + sorted_panel = random_order.sort_index(axis=0, ascending=False) + assert_panel_equal( + sorted_panel, + self.panel.reindex(items=self.panel.items[::-1])) + + random_order = self.panel.reindex(major=rmajor) + sorted_panel = random_order.sort_index(axis=1) + assert_panel_equal(sorted_panel, self.panel) + + random_order = self.panel.reindex(minor=rminor) + sorted_panel = random_order.sort_index(axis=2) + assert_panel_equal(sorted_panel, self.panel) def test_fillna(self): - with catch_warnings(record=True): - filled = self.panel.fillna(0) - assert np.isfinite(filled.values).all() - - filled = self.panel.fillna(method='backfill') - assert_frame_equal(filled['ItemA'], - self.panel['ItemA'].fillna(method='backfill')) - - panel = self.panel.copy() - panel['str'] = 'foo' - - filled = panel.fillna(method='backfill') - assert_frame_equal(filled['ItemA'], - panel['ItemA'].fillna(method='backfill')) - - empty = self.panel.reindex(items=[]) - filled = empty.fillna(0) - assert_panel_equal(filled, empty) - - pytest.raises(ValueError, self.panel.fillna) - pytest.raises(ValueError, self.panel.fillna, 5, method='ffill') - - pytest.raises(TypeError, self.panel.fillna, [1, 2]) - pytest.raises(TypeError, self.panel.fillna, (1, 2)) - - # limit not implemented when only value is specified - p = Panel(np.random.randn(3, 4, 5)) - p.iloc[0:2, 0:2, 0:2] = np.nan - pytest.raises(NotImplementedError, - lambda: p.fillna(999, limit=1)) - - # Test in place fillNA - # Expected result - expected = Panel([[[0, 1], [2, 1]], [[10, 11], [12, 11]]], - items=['a', 'b'], minor_axis=['x', 'y'], - dtype=np.float64) - # method='ffill' - p1 = Panel([[[0, 1], [2, np.nan]], [[10, 11], [12, np.nan]]], - items=['a', 'b'], minor_axis=['x', 'y'], - dtype=np.float64) - p1.fillna(method='ffill', inplace=True) - assert_panel_equal(p1, expected) - - # method='bfill' - p2 = Panel([[[0, np.nan], [2, 1]], [[10, np.nan], [12, 11]]], - items=['a', 'b'], minor_axis=['x', 'y'], - dtype=np.float64) - p2.fillna(method='bfill', inplace=True) - assert_panel_equal(p2, expected) + filled = self.panel.fillna(0) + assert np.isfinite(filled.values).all() + + filled = self.panel.fillna(method='backfill') + assert_frame_equal(filled['ItemA'], + self.panel['ItemA'].fillna(method='backfill')) + + panel = self.panel.copy() + panel['str'] = 'foo' + + filled = panel.fillna(method='backfill') + assert_frame_equal(filled['ItemA'], + panel['ItemA'].fillna(method='backfill')) + + empty = self.panel.reindex(items=[]) + filled = empty.fillna(0) + assert_panel_equal(filled, empty) + + pytest.raises(ValueError, self.panel.fillna) + pytest.raises(ValueError, self.panel.fillna, 5, method='ffill') + + pytest.raises(TypeError, self.panel.fillna, [1, 2]) + pytest.raises(TypeError, self.panel.fillna, (1, 2)) + + # limit not implemented when only value is specified + p = 
Panel(np.random.randn(3, 4, 5)) + p.iloc[0:2, 0:2, 0:2] = np.nan + pytest.raises(NotImplementedError, + lambda: p.fillna(999, limit=1)) + + # Test in place fillNA + # Expected result + expected = Panel([[[0, 1], [2, 1]], [[10, 11], [12, 11]]], + items=['a', 'b'], minor_axis=['x', 'y'], + dtype=np.float64) + # method='ffill' + p1 = Panel([[[0, 1], [2, np.nan]], [[10, 11], [12, np.nan]]], + items=['a', 'b'], minor_axis=['x', 'y'], + dtype=np.float64) + p1.fillna(method='ffill', inplace=True) + assert_panel_equal(p1, expected) + + # method='bfill' + p2 = Panel([[[0, np.nan], [2, 1]], [[10, np.nan], [12, 11]]], + items=['a', 'b'], minor_axis=['x', 'y'], + dtype=np.float64) + p2.fillna(method='bfill', inplace=True) + assert_panel_equal(p2, expected) def test_ffill_bfill(self): - with catch_warnings(record=True): - assert_panel_equal(self.panel.ffill(), - self.panel.fillna(method='ffill')) - assert_panel_equal(self.panel.bfill(), - self.panel.fillna(method='bfill')) + assert_panel_equal(self.panel.ffill(), + self.panel.fillna(method='ffill')) + assert_panel_equal(self.panel.bfill(), + self.panel.fillna(method='bfill')) def test_truncate_fillna_bug(self): - with catch_warnings(record=True): - # #1823 - result = self.panel.truncate(before=None, after=None, axis='items') + # #1823 + result = self.panel.truncate(before=None, after=None, axis='items') - # it works! - result.fillna(value=0.0) + # it works! + result.fillna(value=0.0) def test_swapaxes(self): - with catch_warnings(record=True): - result = self.panel.swapaxes('items', 'minor') - assert result.items is self.panel.minor_axis + result = self.panel.swapaxes('items', 'minor') + assert result.items is self.panel.minor_axis - result = self.panel.swapaxes('items', 'major') - assert result.items is self.panel.major_axis + result = self.panel.swapaxes('items', 'major') + assert result.items is self.panel.major_axis - result = self.panel.swapaxes('major', 'minor') - assert result.major_axis is self.panel.minor_axis + result = self.panel.swapaxes('major', 'minor') + assert result.major_axis is self.panel.minor_axis - panel = self.panel.copy() - result = panel.swapaxes('major', 'minor') - panel.values[0, 0, 1] = np.nan - expected = panel.swapaxes('major', 'minor') - assert_panel_equal(result, expected) + panel = self.panel.copy() + result = panel.swapaxes('major', 'minor') + panel.values[0, 0, 1] = np.nan + expected = panel.swapaxes('major', 'minor') + assert_panel_equal(result, expected) - # this should also work - result = self.panel.swapaxes(0, 1) - assert result.items is self.panel.major_axis + # this should also work + result = self.panel.swapaxes(0, 1) + assert result.items is self.panel.major_axis - # this works, but return a copy - result = self.panel.swapaxes('items', 'items') - assert_panel_equal(self.panel, result) - assert id(self.panel) != id(result) + # this works, but return a copy + result = self.panel.swapaxes('items', 'items') + assert_panel_equal(self.panel, result) + assert id(self.panel) != id(result) def test_transpose(self): - with catch_warnings(record=True): - result = self.panel.transpose('minor', 'major', 'items') - expected = self.panel.swapaxes('items', 'minor') - assert_panel_equal(result, expected) - - # test kwargs - result = self.panel.transpose(items='minor', major='major', - minor='items') - expected = self.panel.swapaxes('items', 'minor') - assert_panel_equal(result, expected) - - # text mixture of args - result = self.panel.transpose( - 'minor', major='major', minor='items') - expected = 
self.panel.swapaxes('items', 'minor') - assert_panel_equal(result, expected) - - result = self.panel.transpose('minor', - 'major', - minor='items') - expected = self.panel.swapaxes('items', 'minor') - assert_panel_equal(result, expected) - - # duplicate axes - with tm.assert_raises_regex(TypeError, - 'not enough/duplicate arguments'): - self.panel.transpose('minor', maj='major', minor='items') - - with tm.assert_raises_regex(ValueError, - 'repeated axis in transpose'): - self.panel.transpose('minor', 'major', major='minor', - minor='items') - - result = self.panel.transpose(2, 1, 0) - assert_panel_equal(result, expected) - - result = self.panel.transpose('minor', 'items', 'major') - expected = self.panel.swapaxes('items', 'minor') - expected = expected.swapaxes('major', 'minor') - assert_panel_equal(result, expected) - - result = self.panel.transpose(2, 0, 1) - assert_panel_equal(result, expected) - - pytest.raises(ValueError, self.panel.transpose, 0, 0, 1) + result = self.panel.transpose('minor', 'major', 'items') + expected = self.panel.swapaxes('items', 'minor') + assert_panel_equal(result, expected) + + # test kwargs + result = self.panel.transpose(items='minor', major='major', + minor='items') + expected = self.panel.swapaxes('items', 'minor') + assert_panel_equal(result, expected) + + # text mixture of args + result = self.panel.transpose( + 'minor', major='major', minor='items') + expected = self.panel.swapaxes('items', 'minor') + assert_panel_equal(result, expected) + + result = self.panel.transpose('minor', + 'major', + minor='items') + expected = self.panel.swapaxes('items', 'minor') + assert_panel_equal(result, expected) + + # duplicate axes + with tm.assert_raises_regex(TypeError, + 'not enough/duplicate arguments'): + self.panel.transpose('minor', maj='major', minor='items') + + with tm.assert_raises_regex(ValueError, + 'repeated axis in transpose'): + self.panel.transpose('minor', 'major', major='minor', + minor='items') + + result = self.panel.transpose(2, 1, 0) + assert_panel_equal(result, expected) + + result = self.panel.transpose('minor', 'items', 'major') + expected = self.panel.swapaxes('items', 'minor') + expected = expected.swapaxes('major', 'minor') + assert_panel_equal(result, expected) + + result = self.panel.transpose(2, 0, 1) + assert_panel_equal(result, expected) + + pytest.raises(ValueError, self.panel.transpose, 0, 0, 1) def test_transpose_copy(self): - with catch_warnings(record=True): - panel = self.panel.copy() - result = panel.transpose(2, 0, 1, copy=True) - expected = panel.swapaxes('items', 'minor') - expected = expected.swapaxes('major', 'minor') - assert_panel_equal(result, expected) + panel = self.panel.copy() + result = panel.transpose(2, 0, 1, copy=True) + expected = panel.swapaxes('items', 'minor') + expected = expected.swapaxes('major', 'minor') + assert_panel_equal(result, expected) - panel.values[0, 1, 1] = np.nan - assert notna(result.values[1, 0, 1]) + panel.values[0, 1, 1] = np.nan + assert notna(result.values[1, 0, 1]) def test_to_frame(self): - with catch_warnings(record=True): - # filtered - filtered = self.panel.to_frame() - expected = self.panel.to_frame().dropna(how='any') - assert_frame_equal(filtered, expected) - - # unfiltered - unfiltered = self.panel.to_frame(filter_observations=False) - assert_panel_equal(unfiltered.to_panel(), self.panel) - - # names - assert unfiltered.index.names == ('major', 'minor') - - # unsorted, round trip - df = self.panel.to_frame(filter_observations=False) - unsorted = 
df.take(np.random.permutation(len(df))) - pan = unsorted.to_panel() - assert_panel_equal(pan, self.panel) - - # preserve original index names - df = DataFrame(np.random.randn(6, 2), - index=[['a', 'a', 'b', 'b', 'c', 'c'], - [0, 1, 0, 1, 0, 1]], - columns=['one', 'two']) - df.index.names = ['foo', 'bar'] - df.columns.name = 'baz' - - rdf = df.to_panel().to_frame() - assert rdf.index.names == df.index.names - assert rdf.columns.names == df.columns.names + # filtered + filtered = self.panel.to_frame() + expected = self.panel.to_frame().dropna(how='any') + assert_frame_equal(filtered, expected) + + # unfiltered + unfiltered = self.panel.to_frame(filter_observations=False) + assert_panel_equal(unfiltered.to_panel(), self.panel) + + # names + assert unfiltered.index.names == ('major', 'minor') + + # unsorted, round trip + df = self.panel.to_frame(filter_observations=False) + unsorted = df.take(np.random.permutation(len(df))) + pan = unsorted.to_panel() + assert_panel_equal(pan, self.panel) + + # preserve original index names + df = DataFrame(np.random.randn(6, 2), + index=[['a', 'a', 'b', 'b', 'c', 'c'], + [0, 1, 0, 1, 0, 1]], + columns=['one', 'two']) + df.index.names = ['foo', 'bar'] + df.columns.name = 'baz' + + rdf = df.to_panel().to_frame() + assert rdf.index.names == df.index.names + assert rdf.columns.names == df.columns.names def test_to_frame_mixed(self): - with catch_warnings(record=True): - panel = self.panel.fillna(0) - panel['str'] = 'foo' - panel['bool'] = panel['ItemA'] > 0 - - lp = panel.to_frame() - wp = lp.to_panel() - assert wp['bool'].values.dtype == np.bool_ - # Previously, this was mutating the underlying - # index and changing its name - assert_frame_equal(wp['bool'], panel['bool'], check_names=False) - - # GH 8704 - # with categorical - df = panel.to_frame() - df['category'] = df['str'].astype('category') - - # to_panel - # TODO: this converts back to object - p = df.to_panel() - expected = panel.copy() - expected['category'] = 'foo' - assert_panel_equal(p, expected) + panel = self.panel.fillna(0) + panel['str'] = 'foo' + panel['bool'] = panel['ItemA'] > 0 + + lp = panel.to_frame() + wp = lp.to_panel() + assert wp['bool'].values.dtype == np.bool_ + # Previously, this was mutating the underlying + # index and changing its name + assert_frame_equal(wp['bool'], panel['bool'], check_names=False) + + # GH 8704 + # with categorical + df = panel.to_frame() + df['category'] = df['str'].astype('category') + + # to_panel + # TODO: this converts back to object + p = df.to_panel() + expected = panel.copy() + expected['category'] = 'foo' + assert_panel_equal(p, expected) def test_to_frame_multi_major(self): - with catch_warnings(record=True): - idx = MultiIndex.from_tuples( - [(1, 'one'), (1, 'two'), (2, 'one'), (2, 'two')]) - df = DataFrame([[1, 'a', 1], [2, 'b', 1], - [3, 'c', 1], [4, 'd', 1]], - columns=['A', 'B', 'C'], index=idx) - wp = Panel({'i1': df, 'i2': df}) - expected_idx = MultiIndex.from_tuples( - [ - (1, 'one', 'A'), (1, 'one', 'B'), - (1, 'one', 'C'), (1, 'two', 'A'), - (1, 'two', 'B'), (1, 'two', 'C'), - (2, 'one', 'A'), (2, 'one', 'B'), - (2, 'one', 'C'), (2, 'two', 'A'), - (2, 'two', 'B'), (2, 'two', 'C') - ], - names=[None, None, 'minor']) - expected = DataFrame({'i1': [1, 'a', 1, 2, 'b', 1, 3, - 'c', 1, 4, 'd', 1], - 'i2': [1, 'a', 1, 2, 'b', - 1, 3, 'c', 1, 4, 'd', 1]}, - index=expected_idx) - result = wp.to_frame() - assert_frame_equal(result, expected) - - wp.iloc[0, 0].iloc[0] = np.nan # BUG on setting. 
GH #5773 - result = wp.to_frame() - assert_frame_equal(result, expected[1:]) - - idx = MultiIndex.from_tuples( - [(1, 'two'), (1, 'one'), (2, 'one'), (np.nan, 'two')]) - df = DataFrame([[1, 'a', 1], [2, 'b', 1], - [3, 'c', 1], [4, 'd', 1]], - columns=['A', 'B', 'C'], index=idx) - wp = Panel({'i1': df, 'i2': df}) - ex_idx = MultiIndex.from_tuples([(1, 'two', 'A'), (1, 'two', 'B'), - (1, 'two', 'C'), - (1, 'one', 'A'), - (1, 'one', 'B'), - (1, 'one', 'C'), - (2, 'one', 'A'), - (2, 'one', 'B'), - (2, 'one', 'C'), - (np.nan, 'two', 'A'), - (np.nan, 'two', 'B'), - (np.nan, 'two', 'C')], - names=[None, None, 'minor']) - expected.index = ex_idx - result = wp.to_frame() - assert_frame_equal(result, expected) + idx = MultiIndex.from_tuples( + [(1, 'one'), (1, 'two'), (2, 'one'), (2, 'two')]) + df = DataFrame([[1, 'a', 1], [2, 'b', 1], + [3, 'c', 1], [4, 'd', 1]], + columns=['A', 'B', 'C'], index=idx) + wp = Panel({'i1': df, 'i2': df}) + expected_idx = MultiIndex.from_tuples( + [ + (1, 'one', 'A'), (1, 'one', 'B'), + (1, 'one', 'C'), (1, 'two', 'A'), + (1, 'two', 'B'), (1, 'two', 'C'), + (2, 'one', 'A'), (2, 'one', 'B'), + (2, 'one', 'C'), (2, 'two', 'A'), + (2, 'two', 'B'), (2, 'two', 'C') + ], + names=[None, None, 'minor']) + expected = DataFrame({'i1': [1, 'a', 1, 2, 'b', 1, 3, + 'c', 1, 4, 'd', 1], + 'i2': [1, 'a', 1, 2, 'b', + 1, 3, 'c', 1, 4, 'd', 1]}, + index=expected_idx) + result = wp.to_frame() + assert_frame_equal(result, expected) + + wp.iloc[0, 0].iloc[0] = np.nan # BUG on setting. GH #5773 + result = wp.to_frame() + assert_frame_equal(result, expected[1:]) + + idx = MultiIndex.from_tuples( + [(1, 'two'), (1, 'one'), (2, 'one'), (np.nan, 'two')]) + df = DataFrame([[1, 'a', 1], [2, 'b', 1], + [3, 'c', 1], [4, 'd', 1]], + columns=['A', 'B', 'C'], index=idx) + wp = Panel({'i1': df, 'i2': df}) + ex_idx = MultiIndex.from_tuples([(1, 'two', 'A'), (1, 'two', 'B'), + (1, 'two', 'C'), + (1, 'one', 'A'), + (1, 'one', 'B'), + (1, 'one', 'C'), + (2, 'one', 'A'), + (2, 'one', 'B'), + (2, 'one', 'C'), + (np.nan, 'two', 'A'), + (np.nan, 'two', 'B'), + (np.nan, 'two', 'C')], + names=[None, None, 'minor']) + expected.index = ex_idx + result = wp.to_frame() + assert_frame_equal(result, expected) def test_to_frame_multi_major_minor(self): - with catch_warnings(record=True): - cols = MultiIndex(levels=[['C_A', 'C_B'], ['C_1', 'C_2']], - labels=[[0, 0, 1, 1], [0, 1, 0, 1]]) - idx = MultiIndex.from_tuples([(1, 'one'), (1, 'two'), (2, 'one'), ( - 2, 'two'), (3, 'three'), (4, 'four')]) - df = DataFrame([[1, 2, 11, 12], [3, 4, 13, 14], - ['a', 'b', 'w', 'x'], - ['c', 'd', 'y', 'z'], [-1, -2, -3, -4], - [-5, -6, -7, -8]], columns=cols, index=idx) - wp = Panel({'i1': df, 'i2': df}) - - exp_idx = MultiIndex.from_tuples( - [(1, 'one', 'C_A', 'C_1'), (1, 'one', 'C_A', 'C_2'), - (1, 'one', 'C_B', 'C_1'), (1, 'one', 'C_B', 'C_2'), - (1, 'two', 'C_A', 'C_1'), (1, 'two', 'C_A', 'C_2'), - (1, 'two', 'C_B', 'C_1'), (1, 'two', 'C_B', 'C_2'), - (2, 'one', 'C_A', 'C_1'), (2, 'one', 'C_A', 'C_2'), - (2, 'one', 'C_B', 'C_1'), (2, 'one', 'C_B', 'C_2'), - (2, 'two', 'C_A', 'C_1'), (2, 'two', 'C_A', 'C_2'), - (2, 'two', 'C_B', 'C_1'), (2, 'two', 'C_B', 'C_2'), - (3, 'three', 'C_A', 'C_1'), (3, 'three', 'C_A', 'C_2'), - (3, 'three', 'C_B', 'C_1'), (3, 'three', 'C_B', 'C_2'), - (4, 'four', 'C_A', 'C_1'), (4, 'four', 'C_A', 'C_2'), - (4, 'four', 'C_B', 'C_1'), (4, 'four', 'C_B', 'C_2')], - names=[None, None, None, None]) - exp_val = [[1, 1], [2, 2], [11, 11], [12, 12], - [3, 3], [4, 4], - [13, 13], [14, 14], ['a', 'a'], - ['b', 
'b'], ['w', 'w'], - ['x', 'x'], ['c', 'c'], ['d', 'd'], [ - 'y', 'y'], ['z', 'z'], - [-1, -1], [-2, -2], [-3, -3], [-4, -4], - [-5, -5], [-6, -6], - [-7, -7], [-8, -8]] - result = wp.to_frame() - expected = DataFrame(exp_val, columns=['i1', 'i2'], index=exp_idx) - assert_frame_equal(result, expected) + cols = MultiIndex(levels=[['C_A', 'C_B'], ['C_1', 'C_2']], + labels=[[0, 0, 1, 1], [0, 1, 0, 1]]) + idx = MultiIndex.from_tuples([(1, 'one'), (1, 'two'), (2, 'one'), ( + 2, 'two'), (3, 'three'), (4, 'four')]) + df = DataFrame([[1, 2, 11, 12], [3, 4, 13, 14], + ['a', 'b', 'w', 'x'], + ['c', 'd', 'y', 'z'], [-1, -2, -3, -4], + [-5, -6, -7, -8]], columns=cols, index=idx) + wp = Panel({'i1': df, 'i2': df}) + + exp_idx = MultiIndex.from_tuples( + [(1, 'one', 'C_A', 'C_1'), (1, 'one', 'C_A', 'C_2'), + (1, 'one', 'C_B', 'C_1'), (1, 'one', 'C_B', 'C_2'), + (1, 'two', 'C_A', 'C_1'), (1, 'two', 'C_A', 'C_2'), + (1, 'two', 'C_B', 'C_1'), (1, 'two', 'C_B', 'C_2'), + (2, 'one', 'C_A', 'C_1'), (2, 'one', 'C_A', 'C_2'), + (2, 'one', 'C_B', 'C_1'), (2, 'one', 'C_B', 'C_2'), + (2, 'two', 'C_A', 'C_1'), (2, 'two', 'C_A', 'C_2'), + (2, 'two', 'C_B', 'C_1'), (2, 'two', 'C_B', 'C_2'), + (3, 'three', 'C_A', 'C_1'), (3, 'three', 'C_A', 'C_2'), + (3, 'three', 'C_B', 'C_1'), (3, 'three', 'C_B', 'C_2'), + (4, 'four', 'C_A', 'C_1'), (4, 'four', 'C_A', 'C_2'), + (4, 'four', 'C_B', 'C_1'), (4, 'four', 'C_B', 'C_2')], + names=[None, None, None, None]) + exp_val = [[1, 1], [2, 2], [11, 11], [12, 12], + [3, 3], [4, 4], + [13, 13], [14, 14], ['a', 'a'], + ['b', 'b'], ['w', 'w'], + ['x', 'x'], ['c', 'c'], ['d', 'd'], [ + 'y', 'y'], ['z', 'z'], + [-1, -1], [-2, -2], [-3, -3], [-4, -4], + [-5, -5], [-6, -6], + [-7, -7], [-8, -8]] + result = wp.to_frame() + expected = DataFrame(exp_val, columns=['i1', 'i2'], index=exp_idx) + assert_frame_equal(result, expected) def test_to_frame_multi_drop_level(self): - with catch_warnings(record=True): - idx = MultiIndex.from_tuples([(1, 'one'), (2, 'one'), (2, 'two')]) - df = DataFrame({'A': [np.nan, 1, 2]}, index=idx) - wp = Panel({'i1': df, 'i2': df}) - result = wp.to_frame() - exp_idx = MultiIndex.from_tuples( - [(2, 'one', 'A'), (2, 'two', 'A')], - names=[None, None, 'minor']) - expected = DataFrame({'i1': [1., 2], 'i2': [1., 2]}, index=exp_idx) - assert_frame_equal(result, expected) + idx = MultiIndex.from_tuples([(1, 'one'), (2, 'one'), (2, 'two')]) + df = DataFrame({'A': [np.nan, 1, 2]}, index=idx) + wp = Panel({'i1': df, 'i2': df}) + result = wp.to_frame() + exp_idx = MultiIndex.from_tuples( + [(2, 'one', 'A'), (2, 'two', 'A')], + names=[None, None, 'minor']) + expected = DataFrame({'i1': [1., 2], 'i2': [1., 2]}, index=exp_idx) + assert_frame_equal(result, expected) def test_to_panel_na_handling(self): - with catch_warnings(record=True): - df = DataFrame(np.random.randint(0, 10, size=20).reshape((10, 2)), - index=[[0, 0, 0, 0, 0, 0, 1, 1, 1, 1], - [0, 1, 2, 3, 4, 5, 2, 3, 4, 5]]) + df = DataFrame(np.random.randint(0, 10, size=20).reshape((10, 2)), + index=[[0, 0, 0, 0, 0, 0, 1, 1, 1, 1], + [0, 1, 2, 3, 4, 5, 2, 3, 4, 5]]) - panel = df.to_panel() - assert isna(panel[0].loc[1, [0, 1]]).all() + panel = df.to_panel() + assert isna(panel[0].loc[1, [0, 1]]).all() def test_to_panel_duplicates(self): # #2441 - with catch_warnings(record=True): - df = DataFrame({'a': [0, 0, 1], 'b': [1, 1, 1], 'c': [1, 2, 3]}) - idf = df.set_index(['a', 'b']) - tm.assert_raises_regex( - ValueError, 'non-uniquely indexed', idf.to_panel) + df = DataFrame({'a': [0, 0, 1], 'b': [1, 1, 1], 'c': [1, 2, 3]}) + idf = 
df.set_index(['a', 'b']) + tm.assert_raises_regex( + ValueError, 'non-uniquely indexed', idf.to_panel) def test_panel_dups(self): - with catch_warnings(record=True): - # GH 4960 - # duplicates in an index + # GH 4960 + # duplicates in an index - # items - data = np.random.randn(5, 100, 5) - no_dup_panel = Panel(data, items=list("ABCDE")) - panel = Panel(data, items=list("AACDE")) + # items + data = np.random.randn(5, 100, 5) + no_dup_panel = Panel(data, items=list("ABCDE")) + panel = Panel(data, items=list("AACDE")) - expected = no_dup_panel['A'] - result = panel.iloc[0] - assert_frame_equal(result, expected) + expected = no_dup_panel['A'] + result = panel.iloc[0] + assert_frame_equal(result, expected) - expected = no_dup_panel['E'] - result = panel.loc['E'] - assert_frame_equal(result, expected) + expected = no_dup_panel['E'] + result = panel.loc['E'] + assert_frame_equal(result, expected) - expected = no_dup_panel.loc[['A', 'B']] - expected.items = ['A', 'A'] - result = panel.loc['A'] - assert_panel_equal(result, expected) + expected = no_dup_panel.loc[['A', 'B']] + expected.items = ['A', 'A'] + result = panel.loc['A'] + assert_panel_equal(result, expected) - # major - data = np.random.randn(5, 5, 5) - no_dup_panel = Panel(data, major_axis=list("ABCDE")) - panel = Panel(data, major_axis=list("AACDE")) + # major + data = np.random.randn(5, 5, 5) + no_dup_panel = Panel(data, major_axis=list("ABCDE")) + panel = Panel(data, major_axis=list("AACDE")) - expected = no_dup_panel.loc[:, 'A'] - result = panel.iloc[:, 0] - assert_frame_equal(result, expected) + expected = no_dup_panel.loc[:, 'A'] + result = panel.iloc[:, 0] + assert_frame_equal(result, expected) - expected = no_dup_panel.loc[:, 'E'] - result = panel.loc[:, 'E'] - assert_frame_equal(result, expected) + expected = no_dup_panel.loc[:, 'E'] + result = panel.loc[:, 'E'] + assert_frame_equal(result, expected) - expected = no_dup_panel.loc[:, ['A', 'B']] - expected.major_axis = ['A', 'A'] - result = panel.loc[:, 'A'] - assert_panel_equal(result, expected) + expected = no_dup_panel.loc[:, ['A', 'B']] + expected.major_axis = ['A', 'A'] + result = panel.loc[:, 'A'] + assert_panel_equal(result, expected) - # minor - data = np.random.randn(5, 100, 5) - no_dup_panel = Panel(data, minor_axis=list("ABCDE")) - panel = Panel(data, minor_axis=list("AACDE")) + # minor + data = np.random.randn(5, 100, 5) + no_dup_panel = Panel(data, minor_axis=list("ABCDE")) + panel = Panel(data, minor_axis=list("AACDE")) - expected = no_dup_panel.loc[:, :, 'A'] - result = panel.iloc[:, :, 0] - assert_frame_equal(result, expected) + expected = no_dup_panel.loc[:, :, 'A'] + result = panel.iloc[:, :, 0] + assert_frame_equal(result, expected) - expected = no_dup_panel.loc[:, :, 'E'] - result = panel.loc[:, :, 'E'] - assert_frame_equal(result, expected) + expected = no_dup_panel.loc[:, :, 'E'] + result = panel.loc[:, :, 'E'] + assert_frame_equal(result, expected) - expected = no_dup_panel.loc[:, :, ['A', 'B']] - expected.minor_axis = ['A', 'A'] - result = panel.loc[:, :, 'A'] - assert_panel_equal(result, expected) + expected = no_dup_panel.loc[:, :, ['A', 'B']] + expected.minor_axis = ['A', 'A'] + result = panel.loc[:, :, 'A'] + assert_panel_equal(result, expected) def test_filter(self): pass def test_compound(self): - with catch_warnings(record=True): - compounded = self.panel.compound() + compounded = self.panel.compound() - assert_series_equal(compounded['ItemA'], - (1 + self.panel['ItemA']).product(0) - 1, - check_names=False) + 
assert_series_equal(compounded['ItemA'], + (1 + self.panel['ItemA']).product(0) - 1, + check_names=False) def test_shift(self): - with catch_warnings(record=True): - # major - idx = self.panel.major_axis[0] - idx_lag = self.panel.major_axis[1] - shifted = self.panel.shift(1) - assert_frame_equal(self.panel.major_xs(idx), - shifted.major_xs(idx_lag)) - - # minor - idx = self.panel.minor_axis[0] - idx_lag = self.panel.minor_axis[1] - shifted = self.panel.shift(1, axis='minor') - assert_frame_equal(self.panel.minor_xs(idx), - shifted.minor_xs(idx_lag)) - - # items - idx = self.panel.items[0] - idx_lag = self.panel.items[1] - shifted = self.panel.shift(1, axis='items') - assert_frame_equal(self.panel[idx], shifted[idx_lag]) - - # negative numbers, #2164 - result = self.panel.shift(-1) - expected = Panel({i: f.shift(-1)[:-1] - for i, f in self.panel.iteritems()}) - assert_panel_equal(result, expected) - - # mixed dtypes #6959 - data = [('item ' + ch, makeMixedDataFrame()) - for ch in list('abcde')] - data = dict(data) - mixed_panel = Panel.from_dict(data, orient='minor') - shifted = mixed_panel.shift(1) - assert_series_equal(mixed_panel.dtypes, shifted.dtypes) + # major + idx = self.panel.major_axis[0] + idx_lag = self.panel.major_axis[1] + shifted = self.panel.shift(1) + assert_frame_equal(self.panel.major_xs(idx), + shifted.major_xs(idx_lag)) + + # minor + idx = self.panel.minor_axis[0] + idx_lag = self.panel.minor_axis[1] + shifted = self.panel.shift(1, axis='minor') + assert_frame_equal(self.panel.minor_xs(idx), + shifted.minor_xs(idx_lag)) + + # items + idx = self.panel.items[0] + idx_lag = self.panel.items[1] + shifted = self.panel.shift(1, axis='items') + assert_frame_equal(self.panel[idx], shifted[idx_lag]) + + # negative numbers, #2164 + result = self.panel.shift(-1) + expected = Panel({i: f.shift(-1)[:-1] + for i, f in self.panel.iteritems()}) + assert_panel_equal(result, expected) + + # mixed dtypes #6959 + data = [('item ' + ch, makeMixedDataFrame()) + for ch in list('abcde')] + data = dict(data) + mixed_panel = Panel.from_dict(data, orient='minor') + shifted = mixed_panel.shift(1) + assert_series_equal(mixed_panel.dtypes, shifted.dtypes) def test_tshift(self): # PeriodIndex - with catch_warnings(record=True): - ps = tm.makePeriodPanel() - shifted = ps.tshift(1) - unshifted = shifted.tshift(-1) + ps = tm.makePeriodPanel() + shifted = ps.tshift(1) + unshifted = shifted.tshift(-1) - assert_panel_equal(unshifted, ps) + assert_panel_equal(unshifted, ps) - shifted2 = ps.tshift(freq='B') - assert_panel_equal(shifted, shifted2) + shifted2 = ps.tshift(freq='B') + assert_panel_equal(shifted, shifted2) - shifted3 = ps.tshift(freq=BDay()) - assert_panel_equal(shifted, shifted3) + shifted3 = ps.tshift(freq=BDay()) + assert_panel_equal(shifted, shifted3) - tm.assert_raises_regex(ValueError, 'does not match', - ps.tshift, freq='M') + tm.assert_raises_regex(ValueError, 'does not match', + ps.tshift, freq='M') - # DatetimeIndex - panel = make_test_panel() - shifted = panel.tshift(1) - unshifted = shifted.tshift(-1) + # DatetimeIndex + panel = make_test_panel() + shifted = panel.tshift(1) + unshifted = shifted.tshift(-1) - assert_panel_equal(panel, unshifted) + assert_panel_equal(panel, unshifted) - shifted2 = panel.tshift(freq=panel.major_axis.freq) - assert_panel_equal(shifted, shifted2) + shifted2 = panel.tshift(freq=panel.major_axis.freq) + assert_panel_equal(shifted, shifted2) - inferred_ts = Panel(panel.values, items=panel.items, - major_axis=Index(np.asarray(panel.major_axis)), - 
minor_axis=panel.minor_axis) - shifted = inferred_ts.tshift(1) - unshifted = shifted.tshift(-1) - assert_panel_equal(shifted, panel.tshift(1)) - assert_panel_equal(unshifted, inferred_ts) + inferred_ts = Panel(panel.values, items=panel.items, + major_axis=Index(np.asarray(panel.major_axis)), + minor_axis=panel.minor_axis) + shifted = inferred_ts.tshift(1) + unshifted = shifted.tshift(-1) + assert_panel_equal(shifted, panel.tshift(1)) + assert_panel_equal(unshifted, inferred_ts) - no_freq = panel.iloc[:, [0, 5, 7], :] - pytest.raises(ValueError, no_freq.tshift) + no_freq = panel.iloc[:, [0, 5, 7], :] + pytest.raises(ValueError, no_freq.tshift) def test_pct_change(self): - with catch_warnings(record=True): - df1 = DataFrame({'c1': [1, 2, 5], 'c2': [3, 4, 6]}) - df2 = df1 + 1 - df3 = DataFrame({'c1': [3, 4, 7], 'c2': [5, 6, 8]}) - wp = Panel({'i1': df1, 'i2': df2, 'i3': df3}) - # major, 1 - result = wp.pct_change() # axis='major' - expected = Panel({'i1': df1.pct_change(), - 'i2': df2.pct_change(), - 'i3': df3.pct_change()}) - assert_panel_equal(result, expected) - result = wp.pct_change(axis=1) - assert_panel_equal(result, expected) - # major, 2 - result = wp.pct_change(periods=2) - expected = Panel({'i1': df1.pct_change(2), - 'i2': df2.pct_change(2), - 'i3': df3.pct_change(2)}) - assert_panel_equal(result, expected) - # minor, 1 - result = wp.pct_change(axis='minor') - expected = Panel({'i1': df1.pct_change(axis=1), - 'i2': df2.pct_change(axis=1), - 'i3': df3.pct_change(axis=1)}) - assert_panel_equal(result, expected) - result = wp.pct_change(axis=2) - assert_panel_equal(result, expected) - # minor, 2 - result = wp.pct_change(periods=2, axis='minor') - expected = Panel({'i1': df1.pct_change(periods=2, axis=1), - 'i2': df2.pct_change(periods=2, axis=1), - 'i3': df3.pct_change(periods=2, axis=1)}) - assert_panel_equal(result, expected) - # items, 1 - result = wp.pct_change(axis='items') - expected = Panel( - {'i1': DataFrame({'c1': [np.nan, np.nan, np.nan], - 'c2': [np.nan, np.nan, np.nan]}), - 'i2': DataFrame({'c1': [1, 0.5, .2], - 'c2': [1. / 3, 0.25, 1. / 6]}), - 'i3': DataFrame({'c1': [.5, 1. / 3, 1. / 6], - 'c2': [.25, .2, 1. / 7]})}) - assert_panel_equal(result, expected) - result = wp.pct_change(axis=0) - assert_panel_equal(result, expected) - # items, 2 - result = wp.pct_change(periods=2, axis='items') - expected = Panel( - {'i1': DataFrame({'c1': [np.nan, np.nan, np.nan], - 'c2': [np.nan, np.nan, np.nan]}), - 'i2': DataFrame({'c1': [np.nan, np.nan, np.nan], - 'c2': [np.nan, np.nan, np.nan]}), - 'i3': DataFrame({'c1': [2, 1, .4], - 'c2': [2. / 3, .5, 1. 
/ 3]})}) - assert_panel_equal(result, expected) + df1 = DataFrame({'c1': [1, 2, 5], 'c2': [3, 4, 6]}) + df2 = df1 + 1 + df3 = DataFrame({'c1': [3, 4, 7], 'c2': [5, 6, 8]}) + wp = Panel({'i1': df1, 'i2': df2, 'i3': df3}) + # major, 1 + result = wp.pct_change() # axis='major' + expected = Panel({'i1': df1.pct_change(), + 'i2': df2.pct_change(), + 'i3': df3.pct_change()}) + assert_panel_equal(result, expected) + result = wp.pct_change(axis=1) + assert_panel_equal(result, expected) + # major, 2 + result = wp.pct_change(periods=2) + expected = Panel({'i1': df1.pct_change(2), + 'i2': df2.pct_change(2), + 'i3': df3.pct_change(2)}) + assert_panel_equal(result, expected) + # minor, 1 + result = wp.pct_change(axis='minor') + expected = Panel({'i1': df1.pct_change(axis=1), + 'i2': df2.pct_change(axis=1), + 'i3': df3.pct_change(axis=1)}) + assert_panel_equal(result, expected) + result = wp.pct_change(axis=2) + assert_panel_equal(result, expected) + # minor, 2 + result = wp.pct_change(periods=2, axis='minor') + expected = Panel({'i1': df1.pct_change(periods=2, axis=1), + 'i2': df2.pct_change(periods=2, axis=1), + 'i3': df3.pct_change(periods=2, axis=1)}) + assert_panel_equal(result, expected) + # items, 1 + result = wp.pct_change(axis='items') + expected = Panel( + {'i1': DataFrame({'c1': [np.nan, np.nan, np.nan], + 'c2': [np.nan, np.nan, np.nan]}), + 'i2': DataFrame({'c1': [1, 0.5, .2], + 'c2': [1. / 3, 0.25, 1. / 6]}), + 'i3': DataFrame({'c1': [.5, 1. / 3, 1. / 6], + 'c2': [.25, .2, 1. / 7]})}) + assert_panel_equal(result, expected) + result = wp.pct_change(axis=0) + assert_panel_equal(result, expected) + # items, 2 + result = wp.pct_change(periods=2, axis='items') + expected = Panel( + {'i1': DataFrame({'c1': [np.nan, np.nan, np.nan], + 'c2': [np.nan, np.nan, np.nan]}), + 'i2': DataFrame({'c1': [np.nan, np.nan, np.nan], + 'c2': [np.nan, np.nan, np.nan]}), + 'i3': DataFrame({'c1': [2, 1, .4], + 'c2': [2. / 3, .5, 1. 
/ 3]})}) + assert_panel_equal(result, expected) def test_round(self): - with catch_warnings(record=True): - values = [[[-3.2, 2.2], [0, -4.8213], [3.123, 123.12], - [-1566.213, 88.88], [-12, 94.5]], - [[-5.82, 3.5], [6.21, -73.272], [-9.087, 23.12], - [272.212, -99.99], [23, -76.5]]] - evalues = [[[float(np.around(i)) for i in j] for j in k] - for k in values] - p = Panel(values, items=['Item1', 'Item2'], - major_axis=date_range('1/1/2000', periods=5), - minor_axis=['A', 'B']) - expected = Panel(evalues, items=['Item1', 'Item2'], - major_axis=date_range('1/1/2000', periods=5), - minor_axis=['A', 'B']) - result = p.round() - assert_panel_equal(expected, result) + values = [[[-3.2, 2.2], [0, -4.8213], [3.123, 123.12], + [-1566.213, 88.88], [-12, 94.5]], + [[-5.82, 3.5], [6.21, -73.272], [-9.087, 23.12], + [272.212, -99.99], [23, -76.5]]] + evalues = [[[float(np.around(i)) for i in j] for j in k] + for k in values] + p = Panel(values, items=['Item1', 'Item2'], + major_axis=date_range('1/1/2000', periods=5), + minor_axis=['A', 'B']) + expected = Panel(evalues, items=['Item1', 'Item2'], + major_axis=date_range('1/1/2000', periods=5), + minor_axis=['A', 'B']) + result = p.round() + assert_panel_equal(expected, result) def test_numpy_round(self): - with catch_warnings(record=True): - values = [[[-3.2, 2.2], [0, -4.8213], [3.123, 123.12], - [-1566.213, 88.88], [-12, 94.5]], - [[-5.82, 3.5], [6.21, -73.272], [-9.087, 23.12], - [272.212, -99.99], [23, -76.5]]] - evalues = [[[float(np.around(i)) for i in j] for j in k] - for k in values] - p = Panel(values, items=['Item1', 'Item2'], - major_axis=date_range('1/1/2000', periods=5), - minor_axis=['A', 'B']) - expected = Panel(evalues, items=['Item1', 'Item2'], - major_axis=date_range('1/1/2000', periods=5), - minor_axis=['A', 'B']) - result = np.round(p) - assert_panel_equal(expected, result) - - msg = "the 'out' parameter is not supported" - tm.assert_raises_regex(ValueError, msg, np.round, p, out=p) - + values = [[[-3.2, 2.2], [0, -4.8213], [3.123, 123.12], + [-1566.213, 88.88], [-12, 94.5]], + [[-5.82, 3.5], [6.21, -73.272], [-9.087, 23.12], + [272.212, -99.99], [23, -76.5]]] + evalues = [[[float(np.around(i)) for i in j] for j in k] + for k in values] + p = Panel(values, items=['Item1', 'Item2'], + major_axis=date_range('1/1/2000', periods=5), + minor_axis=['A', 'B']) + expected = Panel(evalues, items=['Item1', 'Item2'], + major_axis=date_range('1/1/2000', periods=5), + minor_axis=['A', 'B']) + result = np.round(p) + assert_panel_equal(expected, result) + + msg = "the 'out' parameter is not supported" + tm.assert_raises_regex(ValueError, msg, np.round, p, out=p) + + # removing Panel before NumPy enforces, so just ignore + @pytest.mark.filterwarnings("ignore:Using a non-tuple:FutureWarning") def test_multiindex_get(self): - with catch_warnings(record=True): - ind = MultiIndex.from_tuples( - [('a', 1), ('a', 2), ('b', 1), ('b', 2)], - names=['first', 'second']) - wp = Panel(np.random.random((4, 5, 5)), - items=ind, - major_axis=np.arange(5), - minor_axis=np.arange(5)) - f1 = wp['a'] - f2 = wp.loc['a'] - assert_panel_equal(f1, f2) - - assert (f1.items == [1, 2]).all() - assert (f2.items == [1, 2]).all() - - MultiIndex.from_tuples([('a', 1), ('a', 2), ('b', 1)], - names=['first', 'second']) - + ind = MultiIndex.from_tuples( + [('a', 1), ('a', 2), ('b', 1), ('b', 2)], + names=['first', 'second']) + wp = Panel(np.random.random((4, 5, 5)), + items=ind, + major_axis=np.arange(5), + minor_axis=np.arange(5)) + f1 = wp['a'] + f2 = wp.loc['a'] + 
assert_panel_equal(f1, f2) + + assert (f1.items == [1, 2]).all() + assert (f2.items == [1, 2]).all() + + MultiIndex.from_tuples([('a', 1), ('a', 2), ('b', 1)], + names=['first', 'second']) + + @pytest.mark.filterwarnings("ignore:Using a non-tuple:FutureWarning") def test_multiindex_blocks(self): - with catch_warnings(record=True): - ind = MultiIndex.from_tuples([('a', 1), ('a', 2), ('b', 1)], - names=['first', 'second']) - wp = Panel(self.panel._data) - wp.items = ind - f1 = wp['a'] - assert (f1.items == [1, 2]).all() + ind = MultiIndex.from_tuples([('a', 1), ('a', 2), ('b', 1)], + names=['first', 'second']) + wp = Panel(self.panel._data) + wp.items = ind + f1 = wp['a'] + assert (f1.items == [1, 2]).all() - f1 = wp[('b', 1)] - assert (f1.columns == ['A', 'B', 'C', 'D']).all() + f1 = wp[('b', 1)] + assert (f1.columns == ['A', 'B', 'C', 'D']).all() def test_repr_empty(self): - with catch_warnings(record=True): - empty = Panel() - repr(empty) + empty = Panel() + repr(empty) + # ignore warning from us, because removing panel + @pytest.mark.filterwarnings("ignore:Using:FutureWarning") def test_rename(self): - with catch_warnings(record=True): - mapper = {'ItemA': 'foo', 'ItemB': 'bar', 'ItemC': 'baz'} + mapper = {'ItemA': 'foo', 'ItemB': 'bar', 'ItemC': 'baz'} - renamed = self.panel.rename_axis(mapper, axis=0) - exp = Index(['foo', 'bar', 'baz']) - tm.assert_index_equal(renamed.items, exp) + renamed = self.panel.rename_axis(mapper, axis=0) + exp = Index(['foo', 'bar', 'baz']) + tm.assert_index_equal(renamed.items, exp) - renamed = self.panel.rename_axis(str.lower, axis=2) - exp = Index(['a', 'b', 'c', 'd']) - tm.assert_index_equal(renamed.minor_axis, exp) + renamed = self.panel.rename_axis(str.lower, axis=2) + exp = Index(['a', 'b', 'c', 'd']) + tm.assert_index_equal(renamed.minor_axis, exp) - # don't copy - renamed_nocopy = self.panel.rename_axis(mapper, axis=0, copy=False) - renamed_nocopy['foo'] = 3. - assert (self.panel['ItemA'].values == 3).all() + # don't copy + renamed_nocopy = self.panel.rename_axis(mapper, axis=0, copy=False) + renamed_nocopy['foo'] = 3. 
+ assert (self.panel['ItemA'].values == 3).all() def test_get_attr(self): assert_frame_equal(self.panel['ItemA'], self.panel.ItemA) @@ -2195,13 +2132,12 @@ def test_get_attr(self): assert_frame_equal(self.panel['i'], self.panel.i) def test_from_frame_level1_unsorted(self): - with catch_warnings(record=True): - tuples = [('MSFT', 3), ('MSFT', 2), ('AAPL', 2), ('AAPL', 1), - ('MSFT', 1)] - midx = MultiIndex.from_tuples(tuples) - df = DataFrame(np.random.rand(5, 4), index=midx) - p = df.to_panel() - assert_frame_equal(p.minor_xs(2), df.xs(2, level=1).sort_index()) + tuples = [('MSFT', 3), ('MSFT', 2), ('AAPL', 2), ('AAPL', 1), + ('MSFT', 1)] + midx = MultiIndex.from_tuples(tuples) + df = DataFrame(np.random.rand(5, 4), index=midx) + p = df.to_panel() + assert_frame_equal(p.minor_xs(2), df.xs(2, level=1).sort_index()) def test_to_excel(self): try: @@ -2243,194 +2179,188 @@ def test_to_excel_xlsxwriter(self): recdf = reader.parse(str(item), index_col=0) assert_frame_equal(df, recdf) + @pytest.mark.filterwarnings("ignore:'.reindex:FutureWarning") def test_dropna(self): - with catch_warnings(record=True): - p = Panel(np.random.randn(4, 5, 6), major_axis=list('abcde')) - p.loc[:, ['b', 'd'], 0] = np.nan + p = Panel(np.random.randn(4, 5, 6), major_axis=list('abcde')) + p.loc[:, ['b', 'd'], 0] = np.nan - result = p.dropna(axis=1) - exp = p.loc[:, ['a', 'c', 'e'], :] - assert_panel_equal(result, exp) - inp = p.copy() - inp.dropna(axis=1, inplace=True) - assert_panel_equal(inp, exp) + result = p.dropna(axis=1) + exp = p.loc[:, ['a', 'c', 'e'], :] + assert_panel_equal(result, exp) + inp = p.copy() + inp.dropna(axis=1, inplace=True) + assert_panel_equal(inp, exp) - result = p.dropna(axis=1, how='all') - assert_panel_equal(result, p) + result = p.dropna(axis=1, how='all') + assert_panel_equal(result, p) - p.loc[:, ['b', 'd'], :] = np.nan - result = p.dropna(axis=1, how='all') - exp = p.loc[:, ['a', 'c', 'e'], :] - assert_panel_equal(result, exp) + p.loc[:, ['b', 'd'], :] = np.nan + result = p.dropna(axis=1, how='all') + exp = p.loc[:, ['a', 'c', 'e'], :] + assert_panel_equal(result, exp) - p = Panel(np.random.randn(4, 5, 6), items=list('abcd')) - p.loc[['b'], :, 0] = np.nan + p = Panel(np.random.randn(4, 5, 6), items=list('abcd')) + p.loc[['b'], :, 0] = np.nan - result = p.dropna() - exp = p.loc[['a', 'c', 'd']] - assert_panel_equal(result, exp) + result = p.dropna() + exp = p.loc[['a', 'c', 'd']] + assert_panel_equal(result, exp) - result = p.dropna(how='all') - assert_panel_equal(result, p) + result = p.dropna(how='all') + assert_panel_equal(result, p) - p.loc['b'] = np.nan - result = p.dropna(how='all') - exp = p.loc[['a', 'c', 'd']] - assert_panel_equal(result, exp) + p.loc['b'] = np.nan + result = p.dropna(how='all') + exp = p.loc[['a', 'c', 'd']] + assert_panel_equal(result, exp) def test_drop(self): - with catch_warnings(record=True): - df = DataFrame({"A": [1, 2], "B": [3, 4]}) - panel = Panel({"One": df, "Two": df}) + df = DataFrame({"A": [1, 2], "B": [3, 4]}) + panel = Panel({"One": df, "Two": df}) - def check_drop(drop_val, axis_number, aliases, expected): - try: - actual = panel.drop(drop_val, axis=axis_number) + def check_drop(drop_val, axis_number, aliases, expected): + try: + actual = panel.drop(drop_val, axis=axis_number) + assert_panel_equal(actual, expected) + for alias in aliases: + actual = panel.drop(drop_val, axis=alias) assert_panel_equal(actual, expected) - for alias in aliases: - actual = panel.drop(drop_val, axis=alias) - assert_panel_equal(actual, expected) - except 
AssertionError: - pprint_thing("Failed with axis_number %d and aliases: %s" % - (axis_number, aliases)) - raise - # Items - expected = Panel({"One": df}) - check_drop('Two', 0, ['items'], expected) - - pytest.raises(KeyError, panel.drop, 'Three') - - # errors = 'ignore' - dropped = panel.drop('Three', errors='ignore') - assert_panel_equal(dropped, panel) - dropped = panel.drop(['Two', 'Three'], errors='ignore') - expected = Panel({"One": df}) - assert_panel_equal(dropped, expected) - - # Major - exp_df = DataFrame({"A": [2], "B": [4]}, index=[1]) - expected = Panel({"One": exp_df, "Two": exp_df}) - check_drop(0, 1, ['major_axis', 'major'], expected) - - exp_df = DataFrame({"A": [1], "B": [3]}, index=[0]) - expected = Panel({"One": exp_df, "Two": exp_df}) - check_drop([1], 1, ['major_axis', 'major'], expected) - - # Minor - exp_df = df[['B']] - expected = Panel({"One": exp_df, "Two": exp_df}) - check_drop(["A"], 2, ['minor_axis', 'minor'], expected) - - exp_df = df[['A']] - expected = Panel({"One": exp_df, "Two": exp_df}) - check_drop("B", 2, ['minor_axis', 'minor'], expected) + except AssertionError: + pprint_thing("Failed with axis_number %d and aliases: %s" % + (axis_number, aliases)) + raise + # Items + expected = Panel({"One": df}) + check_drop('Two', 0, ['items'], expected) + + pytest.raises(KeyError, panel.drop, 'Three') + + # errors = 'ignore' + dropped = panel.drop('Three', errors='ignore') + assert_panel_equal(dropped, panel) + dropped = panel.drop(['Two', 'Three'], errors='ignore') + expected = Panel({"One": df}) + assert_panel_equal(dropped, expected) + + # Major + exp_df = DataFrame({"A": [2], "B": [4]}, index=[1]) + expected = Panel({"One": exp_df, "Two": exp_df}) + check_drop(0, 1, ['major_axis', 'major'], expected) + + exp_df = DataFrame({"A": [1], "B": [3]}, index=[0]) + expected = Panel({"One": exp_df, "Two": exp_df}) + check_drop([1], 1, ['major_axis', 'major'], expected) + + # Minor + exp_df = df[['B']] + expected = Panel({"One": exp_df, "Two": exp_df}) + check_drop(["A"], 2, ['minor_axis', 'minor'], expected) + + exp_df = df[['A']] + expected = Panel({"One": exp_df, "Two": exp_df}) + check_drop("B", 2, ['minor_axis', 'minor'], expected) def test_update(self): - with catch_warnings(record=True): - pan = Panel([[[1.5, np.nan, 3.], [1.5, np.nan, 3.], - [1.5, np.nan, 3.], - [1.5, np.nan, 3.]], - [[1.5, np.nan, 3.], [1.5, np.nan, 3.], - [1.5, np.nan, 3.], - [1.5, np.nan, 3.]]]) - - other = Panel( - [[[3.6, 2., np.nan], [np.nan, np.nan, 7]]], items=[1]) - - pan.update(other) - - expected = Panel([[[1.5, np.nan, 3.], [1.5, np.nan, 3.], - [1.5, np.nan, 3.], [1.5, np.nan, 3.]], - [[3.6, 2., 3], [1.5, np.nan, 7], - [1.5, np.nan, 3.], - [1.5, np.nan, 3.]]]) + pan = Panel([[[1.5, np.nan, 3.], [1.5, np.nan, 3.], + [1.5, np.nan, 3.], + [1.5, np.nan, 3.]], + [[1.5, np.nan, 3.], [1.5, np.nan, 3.], + [1.5, np.nan, 3.], + [1.5, np.nan, 3.]]]) - assert_panel_equal(pan, expected) + other = Panel( + [[[3.6, 2., np.nan], [np.nan, np.nan, 7]]], items=[1]) + + pan.update(other) + + expected = Panel([[[1.5, np.nan, 3.], [1.5, np.nan, 3.], + [1.5, np.nan, 3.], [1.5, np.nan, 3.]], + [[3.6, 2., 3], [1.5, np.nan, 7], + [1.5, np.nan, 3.], + [1.5, np.nan, 3.]]]) + + assert_panel_equal(pan, expected) def test_update_from_dict(self): - with catch_warnings(record=True): - pan = Panel({'one': DataFrame([[1.5, np.nan, 3], - [1.5, np.nan, 3], - [1.5, np.nan, 3.], - [1.5, np.nan, 3.]]), - 'two': DataFrame([[1.5, np.nan, 3.], - [1.5, np.nan, 3.], - [1.5, np.nan, 3.], - [1.5, np.nan, 3.]])}) - - other = 
{'two': DataFrame( - [[3.6, 2., np.nan], [np.nan, np.nan, 7]])} - - pan.update(other) - - expected = Panel( - {'one': DataFrame([[1.5, np.nan, 3.], - [1.5, np.nan, 3.], - [1.5, np.nan, 3.], - [1.5, np.nan, 3.]]), - 'two': DataFrame([[3.6, 2., 3], - [1.5, np.nan, 7], - [1.5, np.nan, 3.], - [1.5, np.nan, 3.]]) - } - ) - - assert_panel_equal(pan, expected) + pan = Panel({'one': DataFrame([[1.5, np.nan, 3], + [1.5, np.nan, 3], + [1.5, np.nan, 3.], + [1.5, np.nan, 3.]]), + 'two': DataFrame([[1.5, np.nan, 3.], + [1.5, np.nan, 3.], + [1.5, np.nan, 3.], + [1.5, np.nan, 3.]])}) + + other = {'two': DataFrame( + [[3.6, 2., np.nan], [np.nan, np.nan, 7]])} + + pan.update(other) + + expected = Panel( + {'one': DataFrame([[1.5, np.nan, 3.], + [1.5, np.nan, 3.], + [1.5, np.nan, 3.], + [1.5, np.nan, 3.]]), + 'two': DataFrame([[3.6, 2., 3], + [1.5, np.nan, 7], + [1.5, np.nan, 3.], + [1.5, np.nan, 3.]]) + } + ) + + assert_panel_equal(pan, expected) def test_update_nooverwrite(self): - with catch_warnings(record=True): - pan = Panel([[[1.5, np.nan, 3.], [1.5, np.nan, 3.], - [1.5, np.nan, 3.], - [1.5, np.nan, 3.]], - [[1.5, np.nan, 3.], [1.5, np.nan, 3.], - [1.5, np.nan, 3.], - [1.5, np.nan, 3.]]]) - - other = Panel( - [[[3.6, 2., np.nan], [np.nan, np.nan, 7]]], items=[1]) - - pan.update(other, overwrite=False) - - expected = Panel([[[1.5, np.nan, 3], [1.5, np.nan, 3], - [1.5, np.nan, 3.], [1.5, np.nan, 3.]], - [[1.5, 2., 3.], [1.5, np.nan, 3.], - [1.5, np.nan, 3.], - [1.5, np.nan, 3.]]]) + pan = Panel([[[1.5, np.nan, 3.], [1.5, np.nan, 3.], + [1.5, np.nan, 3.], + [1.5, np.nan, 3.]], + [[1.5, np.nan, 3.], [1.5, np.nan, 3.], + [1.5, np.nan, 3.], + [1.5, np.nan, 3.]]]) + + other = Panel( + [[[3.6, 2., np.nan], [np.nan, np.nan, 7]]], items=[1]) + + pan.update(other, overwrite=False) - assert_panel_equal(pan, expected) + expected = Panel([[[1.5, np.nan, 3], [1.5, np.nan, 3], + [1.5, np.nan, 3.], [1.5, np.nan, 3.]], + [[1.5, 2., 3.], [1.5, np.nan, 3.], + [1.5, np.nan, 3.], + [1.5, np.nan, 3.]]]) + + assert_panel_equal(pan, expected) def test_update_filtered(self): - with catch_warnings(record=True): - pan = Panel([[[1.5, np.nan, 3.], [1.5, np.nan, 3.], - [1.5, np.nan, 3.], - [1.5, np.nan, 3.]], - [[1.5, np.nan, 3.], [1.5, np.nan, 3.], - [1.5, np.nan, 3.], - [1.5, np.nan, 3.]]]) + pan = Panel([[[1.5, np.nan, 3.], [1.5, np.nan, 3.], + [1.5, np.nan, 3.], + [1.5, np.nan, 3.]], + [[1.5, np.nan, 3.], [1.5, np.nan, 3.], + [1.5, np.nan, 3.], + [1.5, np.nan, 3.]]]) - other = Panel( - [[[3.6, 2., np.nan], [np.nan, np.nan, 7]]], items=[1]) + other = Panel( + [[[3.6, 2., np.nan], [np.nan, np.nan, 7]]], items=[1]) - pan.update(other, filter_func=lambda x: x > 2) + pan.update(other, filter_func=lambda x: x > 2) - expected = Panel([[[1.5, np.nan, 3.], [1.5, np.nan, 3.], - [1.5, np.nan, 3.], [1.5, np.nan, 3.]], - [[1.5, np.nan, 3], [1.5, np.nan, 7], - [1.5, np.nan, 3.], [1.5, np.nan, 3.]]]) + expected = Panel([[[1.5, np.nan, 3.], [1.5, np.nan, 3.], + [1.5, np.nan, 3.], [1.5, np.nan, 3.]], + [[1.5, np.nan, 3], [1.5, np.nan, 7], + [1.5, np.nan, 3.], [1.5, np.nan, 3.]]]) - assert_panel_equal(pan, expected) + assert_panel_equal(pan, expected) def test_update_raise(self): - with catch_warnings(record=True): - pan = Panel([[[1.5, np.nan, 3.], [1.5, np.nan, 3.], - [1.5, np.nan, 3.], - [1.5, np.nan, 3.]], - [[1.5, np.nan, 3.], [1.5, np.nan, 3.], - [1.5, np.nan, 3.], - [1.5, np.nan, 3.]]]) + pan = Panel([[[1.5, np.nan, 3.], [1.5, np.nan, 3.], + [1.5, np.nan, 3.], + [1.5, np.nan, 3.]], + [[1.5, np.nan, 3.], [1.5, np.nan, 3.], + [1.5, 
np.nan, 3.], + [1.5, np.nan, 3.]]]) - pytest.raises(Exception, pan.update, *(pan, ), - **{'raise_conflict': True}) + pytest.raises(Exception, pan.update, *(pan, ), + **{'raise_conflict': True}) def test_all_any(self): assert (self.panel.all(axis=0).values == nanall( @@ -2456,6 +2386,7 @@ def test_sort_values(self): pytest.raises(NotImplementedError, self.panel.sort_values, 'ItemA') +@pytest.mark.filterwarnings("ignore:\\nPanel:FutureWarning") class TestPanelFrame(object): """ Check that conversions to and from Panel to DataFrame work. @@ -2467,90 +2398,82 @@ def setup_method(self, method): self.unfiltered_panel = panel.to_frame(filter_observations=False) def test_ops_differently_indexed(self): - with catch_warnings(record=True): - # trying to set non-identically indexed panel - wp = self.panel.to_panel() - wp2 = wp.reindex(major=wp.major_axis[:-1]) - lp2 = wp2.to_frame() + # trying to set non-identically indexed panel + wp = self.panel.to_panel() + wp2 = wp.reindex(major=wp.major_axis[:-1]) + lp2 = wp2.to_frame() - result = self.panel + lp2 - assert_frame_equal(result.reindex(lp2.index), lp2 * 2) + result = self.panel + lp2 + assert_frame_equal(result.reindex(lp2.index), lp2 * 2) - # careful, mutation - self.panel['foo'] = lp2['ItemA'] - assert_series_equal(self.panel['foo'].reindex(lp2.index), - lp2['ItemA'], - check_names=False) + # careful, mutation + self.panel['foo'] = lp2['ItemA'] + assert_series_equal(self.panel['foo'].reindex(lp2.index), + lp2['ItemA'], + check_names=False) def test_ops_scalar(self): - with catch_warnings(record=True): - result = self.panel.mul(2) - expected = DataFrame.__mul__(self.panel, 2) - assert_frame_equal(result, expected) + result = self.panel.mul(2) + expected = DataFrame.__mul__(self.panel, 2) + assert_frame_equal(result, expected) def test_combineFrame(self): - with catch_warnings(record=True): - wp = self.panel.to_panel() - result = self.panel.add(wp['ItemA'].stack(), axis=0) - assert_frame_equal(result.to_panel()['ItemA'], wp['ItemA'] * 2) + wp = self.panel.to_panel() + result = self.panel.add(wp['ItemA'].stack(), axis=0) + assert_frame_equal(result.to_panel()['ItemA'], wp['ItemA'] * 2) def test_combinePanel(self): - with catch_warnings(record=True): - wp = self.panel.to_panel() - result = self.panel.add(self.panel) - wide_result = result.to_panel() - assert_frame_equal(wp['ItemA'] * 2, wide_result['ItemA']) + wp = self.panel.to_panel() + result = self.panel.add(self.panel) + wide_result = result.to_panel() + assert_frame_equal(wp['ItemA'] * 2, wide_result['ItemA']) - # one item - result = self.panel.add(self.panel.filter(['ItemA'])) + # one item + result = self.panel.add(self.panel.filter(['ItemA'])) def test_combine_scalar(self): - with catch_warnings(record=True): - result = self.panel.mul(2) - expected = DataFrame(self.panel._data) * 2 - assert_frame_equal(result, expected) + result = self.panel.mul(2) + expected = DataFrame(self.panel._data) * 2 + assert_frame_equal(result, expected) def test_combine_series(self): - with catch_warnings(record=True): - s = self.panel['ItemA'][:10] - result = self.panel.add(s, axis=0) - expected = DataFrame.add(self.panel, s, axis=0) - assert_frame_equal(result, expected) + s = self.panel['ItemA'][:10] + result = self.panel.add(s, axis=0) + expected = DataFrame.add(self.panel, s, axis=0) + assert_frame_equal(result, expected) - s = self.panel.iloc[5] - result = self.panel + s - expected = DataFrame.add(self.panel, s, axis=1) - assert_frame_equal(result, expected) + s = self.panel.iloc[5] + result = self.panel + s + 
expected = DataFrame.add(self.panel, s, axis=1) + assert_frame_equal(result, expected) def test_operators(self): - with catch_warnings(record=True): - wp = self.panel.to_panel() - result = (self.panel + 1).to_panel() - assert_frame_equal(wp['ItemA'] + 1, result['ItemA']) + wp = self.panel.to_panel() + result = (self.panel + 1).to_panel() + assert_frame_equal(wp['ItemA'] + 1, result['ItemA']) def test_arith_flex_panel(self): - with catch_warnings(record=True): - ops = ['add', 'sub', 'mul', 'div', - 'truediv', 'pow', 'floordiv', 'mod'] - if not compat.PY3: - aliases = {} - else: - aliases = {'div': 'truediv'} - self.panel = self.panel.to_panel() - - for n in [np.random.randint(-50, -1), np.random.randint(1, 50), 0]: - for op in ops: - alias = aliases.get(op, op) - f = getattr(operator, alias) - exp = f(self.panel, n) - result = getattr(self.panel, op)(n) - assert_panel_equal(result, exp, check_panel_type=True) - - # rops - r_f = lambda x, y: f(y, x) - exp = r_f(self.panel, n) - result = getattr(self.panel, 'r' + op)(n) - assert_panel_equal(result, exp) + ops = ['add', 'sub', 'mul', 'div', + 'truediv', 'pow', 'floordiv', 'mod'] + if not compat.PY3: + aliases = {} + else: + aliases = {'div': 'truediv'} + self.panel = self.panel.to_panel() + + for n in [np.random.randint(-50, -1), np.random.randint(1, 50), 0]: + for op in ops: + alias = aliases.get(op, op) + f = getattr(operator, alias) + exp = f(self.panel, n) + result = getattr(self.panel, op)(n) + assert_panel_equal(result, exp, check_panel_type=True) + + # rops + r_f = lambda x, y: f(y, x) + exp = r_f(self.panel, n) + result = getattr(self.panel, 'r' + op)(n) + assert_panel_equal(result, exp) def test_sort(self): def is_sorted(arr): @@ -2573,44 +2496,43 @@ def test_to_sparse(self): self.panel.to_sparse) def test_truncate(self): - with catch_warnings(record=True): - dates = self.panel.index.levels[0] - start, end = dates[1], dates[5] + dates = self.panel.index.levels[0] + start, end = dates[1], dates[5] - trunced = self.panel.truncate(start, end).to_panel() - expected = self.panel.to_panel()['ItemA'].truncate(start, end) + trunced = self.panel.truncate(start, end).to_panel() + expected = self.panel.to_panel()['ItemA'].truncate(start, end) - # TODO truncate drops index.names - assert_frame_equal(trunced['ItemA'], expected, check_names=False) + # TODO truncate drops index.names + assert_frame_equal(trunced['ItemA'], expected, check_names=False) - trunced = self.panel.truncate(before=start).to_panel() - expected = self.panel.to_panel()['ItemA'].truncate(before=start) + trunced = self.panel.truncate(before=start).to_panel() + expected = self.panel.to_panel()['ItemA'].truncate(before=start) - # TODO truncate drops index.names - assert_frame_equal(trunced['ItemA'], expected, check_names=False) + # TODO truncate drops index.names + assert_frame_equal(trunced['ItemA'], expected, check_names=False) - trunced = self.panel.truncate(after=end).to_panel() - expected = self.panel.to_panel()['ItemA'].truncate(after=end) + trunced = self.panel.truncate(after=end).to_panel() + expected = self.panel.to_panel()['ItemA'].truncate(after=end) - # TODO truncate drops index.names - assert_frame_equal(trunced['ItemA'], expected, check_names=False) + # TODO truncate drops index.names + assert_frame_equal(trunced['ItemA'], expected, check_names=False) - # truncate on dates that aren't in there - wp = self.panel.to_panel() - new_index = wp.major_axis[::5] + # truncate on dates that aren't in there + wp = self.panel.to_panel() + new_index = wp.major_axis[::5] - wp2 = 
wp.reindex(major=new_index) + wp2 = wp.reindex(major=new_index) - lp2 = wp2.to_frame() - lp_trunc = lp2.truncate(wp.major_axis[2], wp.major_axis[-2]) + lp2 = wp2.to_frame() + lp_trunc = lp2.truncate(wp.major_axis[2], wp.major_axis[-2]) - wp_trunc = wp2.truncate(wp.major_axis[2], wp.major_axis[-2]) + wp_trunc = wp2.truncate(wp.major_axis[2], wp.major_axis[-2]) - assert_panel_equal(wp_trunc, lp_trunc.to_panel()) + assert_panel_equal(wp_trunc, lp_trunc.to_panel()) - # throw proper exception - pytest.raises(Exception, lp2.truncate, wp.major_axis[-2], - wp.major_axis[2]) + # throw proper exception + pytest.raises(Exception, lp2.truncate, wp.major_axis[-2], + wp.major_axis[2]) def test_axis_dummies(self): from pandas.core.reshape.reshape import make_axis_dummies @@ -2639,46 +2561,42 @@ def test_get_dummies(self): tm.assert_numpy_array_equal(dummies.values, minor_dummies.values) def test_mean(self): - with catch_warnings(record=True): - means = self.panel.mean(level='minor') + means = self.panel.mean(level='minor') - # test versus Panel version - wide_means = self.panel.to_panel().mean('major') - assert_frame_equal(means, wide_means) + # test versus Panel version + wide_means = self.panel.to_panel().mean('major') + assert_frame_equal(means, wide_means) def test_sum(self): - with catch_warnings(record=True): - sums = self.panel.sum(level='minor') + sums = self.panel.sum(level='minor') - # test versus Panel version - wide_sums = self.panel.to_panel().sum('major') - assert_frame_equal(sums, wide_sums) + # test versus Panel version + wide_sums = self.panel.to_panel().sum('major') + assert_frame_equal(sums, wide_sums) def test_count(self): - with catch_warnings(record=True): - index = self.panel.index + index = self.panel.index - major_count = self.panel.count(level=0)['ItemA'] - labels = index.labels[0] - for i, idx in enumerate(index.levels[0]): - assert major_count[i] == (labels == i).sum() + major_count = self.panel.count(level=0)['ItemA'] + labels = index.labels[0] + for i, idx in enumerate(index.levels[0]): + assert major_count[i] == (labels == i).sum() - minor_count = self.panel.count(level=1)['ItemA'] - labels = index.labels[1] - for i, idx in enumerate(index.levels[1]): - assert minor_count[i] == (labels == i).sum() + minor_count = self.panel.count(level=1)['ItemA'] + labels = index.labels[1] + for i, idx in enumerate(index.levels[1]): + assert minor_count[i] == (labels == i).sum() def test_join(self): - with catch_warnings(record=True): - lp1 = self.panel.filter(['ItemA', 'ItemB']) - lp2 = self.panel.filter(['ItemC']) + lp1 = self.panel.filter(['ItemA', 'ItemB']) + lp2 = self.panel.filter(['ItemC']) - joined = lp1.join(lp2) + joined = lp1.join(lp2) - assert len(joined.columns) == 3 + assert len(joined.columns) == 3 - pytest.raises(Exception, lp1.join, - self.panel.filter(['ItemB', 'ItemC'])) + pytest.raises(Exception, lp1.join, + self.panel.filter(['ItemB', 'ItemC'])) def test_panel_index(): From cf1ff63c0b9c848d42d20d8a240ace0a798eb3cc Mon Sep 17 00:00:00 2001 From: Tom Augspurger Date: Fri, 14 Sep 2018 09:10:46 -0500 Subject: [PATCH 17/38] more warnings --- pandas/tests/indexing/test_indexing.py | 16 +++++++++++++++- 1 file changed, 15 insertions(+), 1 deletion(-) diff --git a/pandas/tests/indexing/test_indexing.py b/pandas/tests/indexing/test_indexing.py index f64c50699461f..33b7c1b8154c7 100644 --- a/pandas/tests/indexing/test_indexing.py +++ b/pandas/tests/indexing/test_indexing.py @@ -6,7 +6,7 @@ import pytest import weakref -from warnings import catch_warnings +from warnings import 
catch_warnings, simplefilter from datetime import datetime from pandas.core.dtypes.common import ( @@ -419,11 +419,13 @@ def test_setitem_list(self): # ix with a list df = DataFrame(index=[0, 1], columns=[0]) with catch_warnings(record=True): + simplefilter("ignore") df.ix[1, 0] = [1, 2, 3] df.ix[1, 0] = [1, 2] result = DataFrame(index=[0, 1], columns=[0]) with catch_warnings(record=True): + simplefilter("ignore") result.ix[1, 0] = [1, 2] tm.assert_frame_equal(result, df) @@ -447,11 +449,13 @@ def view(self): df = DataFrame(index=[0, 1], columns=[0]) with catch_warnings(record=True): + simplefilter("ignore") df.ix[1, 0] = TO(1) df.ix[1, 0] = TO(2) result = DataFrame(index=[0, 1], columns=[0]) with catch_warnings(record=True): + simplefilter("ignore") result.ix[1, 0] = TO(2) tm.assert_frame_equal(result, df) @@ -459,6 +463,7 @@ def view(self): # remains object dtype even after setting it back df = DataFrame(index=[0, 1], columns=[0]) with catch_warnings(record=True): + simplefilter("ignore") df.ix[1, 0] = TO(1) df.ix[1, 0] = np.nan result = DataFrame(index=[0, 1], columns=[0]) @@ -629,6 +634,7 @@ def test_mixed_index_not_contains(self, index, val): def test_index_type_coercion(self): with catch_warnings(record=True): + simplefilter("ignore") # GH 11836 # if we have an index type and set it with something that looks @@ -760,16 +766,20 @@ def run_tests(df, rhs, right): left = df.copy() with catch_warnings(record=True): + # XXX: finer-filter here. + simplefilter("ignore") left.ix[s, l] = rhs tm.assert_frame_equal(left, right) left = df.copy() with catch_warnings(record=True): + simplefilter("ignore") left.ix[i, j] = rhs tm.assert_frame_equal(left, right) left = df.copy() with catch_warnings(record=True): + simplefilter("ignore") left.ix[r, c] = rhs tm.assert_frame_equal(left, right) @@ -821,6 +831,7 @@ def test_slice_with_zero_step_raises(self): tm.assert_raises_regex(ValueError, 'slice step cannot be zero', lambda: s.loc[::0]) with catch_warnings(record=True): + simplefilter("ignore") tm.assert_raises_regex(ValueError, 'slice step cannot be zero', lambda: s.ix[::0]) @@ -839,11 +850,13 @@ def test_indexing_dtypes_on_empty(self): # Check that .iloc and .ix return correct dtypes GH9983 df = DataFrame({'a': [1, 2, 3], 'b': ['b', 'b2', 'b3']}) with catch_warnings(record=True): + simplefilter("ignore") df2 = df.ix[[], :] assert df2.loc[:, 'a'].dtype == np.int64 tm.assert_series_equal(df2.loc[:, 'a'], df2.iloc[:, 0]) with catch_warnings(record=True): + simplefilter("ignore") tm.assert_series_equal(df2.loc[:, 'a'], df2.ix[:, 0]) def test_range_in_series_indexing(self): @@ -917,6 +930,7 @@ def test_no_reference_cycle(self): for name in ('loc', 'iloc', 'at', 'iat'): getattr(df, name) with catch_warnings(record=True): + simplefilter("ignore") getattr(df, 'ix') wr = weakref.ref(df) del df From a9a672d7d321af368815b5e4370101aea941c9a8 Mon Sep 17 00:00:00 2001 From: Tom Augspurger Date: Fri, 14 Sep 2018 09:13:45 -0500 Subject: [PATCH 18/38] lint --- pandas/conftest.py | 1 - pandas/core/dtypes/inference.py | 1 - pandas/core/series.py | 1 - pandas/tests/frame/test_constructors.py | 1 - pandas/tests/frame/test_convert_to.py | 1 - pandas/tests/groupby/test_groupby.py | 2 -- pandas/tests/sparse/frame/test_to_from_scipy.py | 1 - 7 files changed, 8 deletions(-) diff --git a/pandas/conftest.py b/pandas/conftest.py index 71b9a6dc2d9e5..28ec5dbee31e5 100644 --- a/pandas/conftest.py +++ b/pandas/conftest.py @@ -1,5 +1,4 @@ import os -import sys import importlib import pytest diff --git 
a/pandas/core/dtypes/inference.py b/pandas/core/dtypes/inference.py index 21ced504c5953..67f391615eedb 100644 --- a/pandas/core/dtypes/inference.py +++ b/pandas/core/dtypes/inference.py @@ -1,6 +1,5 @@ """ basic inference routines """ -import collections import re import numpy as np from numbers import Number diff --git a/pandas/core/series.py b/pandas/core/series.py index 83e3fdc5507b5..db6ddcf189c56 100644 --- a/pandas/core/series.py +++ b/pandas/core/series.py @@ -6,7 +6,6 @@ # pylint: disable=E1101,E1103 # pylint: disable=W0703,W0622,W0613,W0201 -import collections import warnings from textwrap import dedent diff --git a/pandas/tests/frame/test_constructors.py b/pandas/tests/frame/test_constructors.py index 9a65f63be6eaf..2f1c9e05a01b0 100644 --- a/pandas/tests/frame/test_constructors.py +++ b/pandas/tests/frame/test_constructors.py @@ -916,7 +916,6 @@ def test_constructor_list_of_lists(self): def test_constructor_sequence_like(self): # GH 3783 # collections.Squence like - import collections class DummyContainer(compat.Sequence): diff --git a/pandas/tests/frame/test_convert_to.py b/pandas/tests/frame/test_convert_to.py index 148888c3aea55..a0e23d256c25b 100644 --- a/pandas/tests/frame/test_convert_to.py +++ b/pandas/tests/frame/test_convert_to.py @@ -110,7 +110,6 @@ def test_to_records_with_multindex(self): def test_to_records_with_Mapping_type(self): import email from email.parser import Parser - import collections compat.Mapping.register(email.message.Message) diff --git a/pandas/tests/groupby/test_groupby.py b/pandas/tests/groupby/test_groupby.py index fa20620fc6db1..a2a9b44917862 100644 --- a/pandas/tests/groupby/test_groupby.py +++ b/pandas/tests/groupby/test_groupby.py @@ -1180,7 +1180,6 @@ def test_groupby_nat_exclude(): pytest.raises(KeyError, grouped.get_group, pd.NaT) - @pytest.mark.filterwarnings("ignore:\\nPanel:FutureWarning") def test_sparse_friendly(df): sdf = df[['C', 'D']].to_sparse() @@ -1201,7 +1200,6 @@ def _check_work(gp): # _check_work(panel.groupby(lambda x: x.month, axis=1)) - @pytest.mark.filterwarnings("ignore:\\nPanel:FutureWarning") def test_panel_groupby(): panel = tm.makePanel() diff --git a/pandas/tests/sparse/frame/test_to_from_scipy.py b/pandas/tests/sparse/frame/test_to_from_scipy.py index d60c2d423fa9f..a7f64bbe9a49f 100644 --- a/pandas/tests/sparse/frame/test_to_from_scipy.py +++ b/pandas/tests/sparse/frame/test_to_from_scipy.py @@ -1,6 +1,5 @@ import pytest import numpy as np -from warnings import catch_warnings from pandas.util import testing as tm from pandas import SparseDataFrame, SparseSeries from distutils.version import LooseVersion From da4961ae87b921aa8eabe21c76fa51a9d85ef341 Mon Sep 17 00:00:00 2001 From: Tom Augspurger Date: Fri, 14 Sep 2018 09:30:17 -0500 Subject: [PATCH 19/38] another --- pandas/tests/test_panel.py | 1 + 1 file changed, 1 insertion(+) diff --git a/pandas/tests/test_panel.py b/pandas/tests/test_panel.py index cbb75fba08015..51c779c6a97a3 100644 --- a/pandas/tests/test_panel.py +++ b/pandas/tests/test_panel.py @@ -90,6 +90,7 @@ def test_prod(self): self._check_stat_op('prod', np.prod, skipna_alternative=np.nanprod) @pytest.mark.filterwarnings("ignore:Invalid value:RuntimeWarning") + @pytest.mark.filterwarnings("ignore:All-NaN:RuntimeWarning") def test_median(self): def wrapper(x): if isna(x).any(): From bed003b913236988e1df5a8b9875c253708cc9ce Mon Sep 17 00:00:00 2001 From: Tom Augspurger Date: Fri, 14 Sep 2018 11:02:35 -0500 Subject: [PATCH 20/38] some more --- pandas/tests/indexing/common.py | 1 + 
pandas/tests/indexing/test_iloc.py | 12 +++++++++++- pandas/tests/indexing/test_loc.py | 3 ++- 3 files changed, 14 insertions(+), 2 deletions(-) diff --git a/pandas/tests/indexing/common.py b/pandas/tests/indexing/common.py index 653c5cda4b464..3ef6aba3c0fbb 100644 --- a/pandas/tests/indexing/common.py +++ b/pandas/tests/indexing/common.py @@ -169,6 +169,7 @@ def get_value(self, f, i, values=False): # v = v.__getitem__(a) # return v with catch_warnings(record=True): + filterwarnings("ignore", "\\n.ix", FutureWarning) return f.ix[i] def check_values(self, f, func, values=False): diff --git a/pandas/tests/indexing/test_iloc.py b/pandas/tests/indexing/test_iloc.py index 3dcfe6a68ad9f..ccbff49e2899a 100644 --- a/pandas/tests/indexing/test_iloc.py +++ b/pandas/tests/indexing/test_iloc.py @@ -2,7 +2,7 @@ import pytest -from warnings import catch_warnings +from warnings import catch_warnings, filterwarnings, simplefilter import numpy as np import pandas as pd @@ -388,45 +388,53 @@ def test_iloc_getitem_frame(self): result = df.iloc[2] with catch_warnings(record=True): + filterwarnings("ignore", "\\n.ix", FutureWarning) exp = df.ix[4] tm.assert_series_equal(result, exp) result = df.iloc[2, 2] with catch_warnings(record=True): + filterwarnings("ignore", "\\n.ix", FutureWarning) exp = df.ix[4, 4] assert result == exp # slice result = df.iloc[4:8] with catch_warnings(record=True): + filterwarnings("ignore", "\\n.ix", FutureWarning) expected = df.ix[8:14] tm.assert_frame_equal(result, expected) result = df.iloc[:, 2:3] with catch_warnings(record=True): + filterwarnings("ignore", "\\n.ix", FutureWarning) expected = df.ix[:, 4:5] tm.assert_frame_equal(result, expected) # list of integers result = df.iloc[[0, 1, 3]] with catch_warnings(record=True): + filterwarnings("ignore", "\\n.ix", FutureWarning) expected = df.ix[[0, 2, 6]] tm.assert_frame_equal(result, expected) result = df.iloc[[0, 1, 3], [0, 1]] with catch_warnings(record=True): + filterwarnings("ignore", "\\n.ix", FutureWarning) expected = df.ix[[0, 2, 6], [0, 2]] tm.assert_frame_equal(result, expected) # neg indices result = df.iloc[[-1, 1, 3], [-1, 1]] with catch_warnings(record=True): + filterwarnings("ignore", "\\n.ix", FutureWarning) expected = df.ix[[18, 2, 6], [6, 2]] tm.assert_frame_equal(result, expected) # dups indices result = df.iloc[[-1, -1, 1, 3], [-1, 1]] with catch_warnings(record=True): + filterwarnings("ignore", "\\n.ix", FutureWarning) expected = df.ix[[18, 18, 2, 6], [6, 2]] tm.assert_frame_equal(result, expected) @@ -434,6 +442,7 @@ def test_iloc_getitem_frame(self): s = Series(index=lrange(1, 5)) result = df.iloc[s.index] with catch_warnings(record=True): + filterwarnings("ignore", "\\n.ix", FutureWarning) expected = df.ix[[2, 4, 6, 8]] tm.assert_frame_equal(result, expected) @@ -609,6 +618,7 @@ def test_iloc_mask(self): # UserWarnings from reindex of a boolean mask with catch_warnings(record=True): + simplefilter("ignore", UserWarning) result = dict() for idx in [None, 'index', 'locs']: mask = (df.nums > 2).values diff --git a/pandas/tests/indexing/test_loc.py b/pandas/tests/indexing/test_loc.py index 2e52154d7679b..9fdcd8b4287a9 100644 --- a/pandas/tests/indexing/test_loc.py +++ b/pandas/tests/indexing/test_loc.py @@ -3,7 +3,7 @@ import itertools import pytest -from warnings import catch_warnings +from warnings import catch_warnings, filterwarnings import numpy as np import pandas as pd @@ -699,6 +699,7 @@ def test_loc_name(self): assert result == 'index_name' with catch_warnings(record=True): + filterwarnings("ignore", 
"\\n.ix", FutureWarning) result = df.ix[[0, 1]].index.name assert result == 'index_name' From 8bd87113f44250e9af09103451f02b07b92f0dcd Mon Sep 17 00:00:00 2001 From: Tom Augspurger Date: Fri, 14 Sep 2018 11:03:51 -0500 Subject: [PATCH 21/38] future->depr --- pandas/tests/indexing/test_iloc.py | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/pandas/tests/indexing/test_iloc.py b/pandas/tests/indexing/test_iloc.py index ccbff49e2899a..538d9706d54d6 100644 --- a/pandas/tests/indexing/test_iloc.py +++ b/pandas/tests/indexing/test_iloc.py @@ -388,53 +388,53 @@ def test_iloc_getitem_frame(self): result = df.iloc[2] with catch_warnings(record=True): - filterwarnings("ignore", "\\n.ix", FutureWarning) + filterwarnings("ignore", "\\n.ix", DeprecationWarning) exp = df.ix[4] tm.assert_series_equal(result, exp) result = df.iloc[2, 2] with catch_warnings(record=True): - filterwarnings("ignore", "\\n.ix", FutureWarning) + filterwarnings("ignore", "\\n.ix", DeprecationWarning) exp = df.ix[4, 4] assert result == exp # slice result = df.iloc[4:8] with catch_warnings(record=True): - filterwarnings("ignore", "\\n.ix", FutureWarning) + filterwarnings("ignore", "\\n.ix", DeprecationWarning) expected = df.ix[8:14] tm.assert_frame_equal(result, expected) result = df.iloc[:, 2:3] with catch_warnings(record=True): - filterwarnings("ignore", "\\n.ix", FutureWarning) + filterwarnings("ignore", "\\n.ix", DeprecationWarning) expected = df.ix[:, 4:5] tm.assert_frame_equal(result, expected) # list of integers result = df.iloc[[0, 1, 3]] with catch_warnings(record=True): - filterwarnings("ignore", "\\n.ix", FutureWarning) + filterwarnings("ignore", "\\n.ix", DeprecationWarning) expected = df.ix[[0, 2, 6]] tm.assert_frame_equal(result, expected) result = df.iloc[[0, 1, 3], [0, 1]] with catch_warnings(record=True): - filterwarnings("ignore", "\\n.ix", FutureWarning) + filterwarnings("ignore", "\\n.ix", DeprecationWarning) expected = df.ix[[0, 2, 6], [0, 2]] tm.assert_frame_equal(result, expected) # neg indices result = df.iloc[[-1, 1, 3], [-1, 1]] with catch_warnings(record=True): - filterwarnings("ignore", "\\n.ix", FutureWarning) + filterwarnings("ignore", "\\n.ix", DeprecationWarning) expected = df.ix[[18, 2, 6], [6, 2]] tm.assert_frame_equal(result, expected) # dups indices result = df.iloc[[-1, -1, 1, 3], [-1, 1]] with catch_warnings(record=True): - filterwarnings("ignore", "\\n.ix", FutureWarning) + filterwarnings("ignore", "\\n.ix", DeprecationWarning) expected = df.ix[[18, 18, 2, 6], [6, 2]] tm.assert_frame_equal(result, expected) @@ -442,7 +442,7 @@ def test_iloc_getitem_frame(self): s = Series(index=lrange(1, 5)) result = df.iloc[s.index] with catch_warnings(record=True): - filterwarnings("ignore", "\\n.ix", FutureWarning) + filterwarnings("ignore", "\\n.ix", DeprecationWarning) expected = df.ix[[2, 4, 6, 8]] tm.assert_frame_equal(result, expected) From ba46eef9e67ebba08933cffffbb8e34bf5fa545b Mon Sep 17 00:00:00 2001 From: Tom Augspurger Date: Fri, 14 Sep 2018 13:10:58 -0500 Subject: [PATCH 22/38] future->depr --- pandas/tests/indexing/common.py | 2 +- pandas/tests/indexing/test_loc.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/pandas/tests/indexing/common.py b/pandas/tests/indexing/common.py index 3ef6aba3c0fbb..127548bdaf106 100644 --- a/pandas/tests/indexing/common.py +++ b/pandas/tests/indexing/common.py @@ -169,7 +169,7 @@ def get_value(self, f, i, values=False): # v = v.__getitem__(a) # return v with catch_warnings(record=True): - 
filterwarnings("ignore", "\\n.ix", FutureWarning) + filterwarnings("ignore", "\\n.ix", DeprecationWarning) return f.ix[i] def check_values(self, f, func, values=False): diff --git a/pandas/tests/indexing/test_loc.py b/pandas/tests/indexing/test_loc.py index 9fdcd8b4287a9..9fa705f923c88 100644 --- a/pandas/tests/indexing/test_loc.py +++ b/pandas/tests/indexing/test_loc.py @@ -699,7 +699,7 @@ def test_loc_name(self): assert result == 'index_name' with catch_warnings(record=True): - filterwarnings("ignore", "\\n.ix", FutureWarning) + filterwarnings("ignore", "\\n.ix", DeprecationWarning) result = df.ix[[0, 1]].index.name assert result == 'index_name' From 3b0b9b071a47a145932d3fe412de32cd162e94e5 Mon Sep 17 00:00:00 2001 From: Tom Augspurger Date: Fri, 14 Sep 2018 14:15:24 -0500 Subject: [PATCH 23/38] silence --- pandas/tests/frame/test_analytics.py | 4 ++++ pandas/tests/groupby/aggregate/test_cython.py | 5 ++++- pandas/tests/groupby/test_groupby.py | 2 ++ 3 files changed, 10 insertions(+), 1 deletion(-) diff --git a/pandas/tests/frame/test_analytics.py b/pandas/tests/frame/test_analytics.py index 80ec26dca4a6c..52a52a1fd8752 100644 --- a/pandas/tests/frame/test_analytics.py +++ b/pandas/tests/frame/test_analytics.py @@ -549,6 +549,8 @@ def test_mean(self): def test_product(self): self._check_stat_op('product', np.prod) + # TODO: Ensure warning isn't emitted in the first place + @pytest.mark.filterwarnings("ignore:All-NaN:RuntimeWarning") def test_median(self): def wrapper(x): if isna(x).any(): @@ -1125,6 +1127,8 @@ def test_stats_mixed_type(self): self.mixed_frame.mean(1) self.mixed_frame.skew(1) + # TODO: Ensure warning isn't emitted in the first place + @pytest.mark.filterwarnings("ignore:All-NaN:RuntimeWarning") def test_median_corner(self): def wrapper(x): if isna(x).any(): diff --git a/pandas/tests/groupby/aggregate/test_cython.py b/pandas/tests/groupby/aggregate/test_cython.py index 48a45e93e1e8e..5cf3536f69218 100644 --- a/pandas/tests/groupby/aggregate/test_cython.py +++ b/pandas/tests/groupby/aggregate/test_cython.py @@ -25,7 +25,10 @@ 'var', 'sem', 'mean', - 'median', + pytest.param('median', + marks=[pytest.mark.filterwarnings( + "ignore:All-NaN:RuntimeWarning" + )]), 'prod', 'min', 'max', diff --git a/pandas/tests/groupby/test_groupby.py b/pandas/tests/groupby/test_groupby.py index a2a9b44917862..1f95abe31a089 100644 --- a/pandas/tests/groupby/test_groupby.py +++ b/pandas/tests/groupby/test_groupby.py @@ -1031,6 +1031,8 @@ def test_groupby_mixed_type_columns(): tm.assert_frame_equal(result, expected) +# TODO: Ensure warning isn't emitted in the first place +@pytest.mark.filterwarnings("ignore:All-NaN:RuntimeWarning") def test_cython_grouper_series_bug_noncontig(): arr = np.empty((100, 100)) arr.fill(np.nan) From e85d8d741e29436c45e5e0f0ba62c446c204fd0c Mon Sep 17 00:00:00 2001 From: Tom Augspurger Date: Fri, 14 Sep 2018 14:20:12 -0500 Subject: [PATCH 24/38] mean --- pandas/tests/groupby/test_groupby.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pandas/tests/groupby/test_groupby.py b/pandas/tests/groupby/test_groupby.py index 1f95abe31a089..483f814bc8383 100644 --- a/pandas/tests/groupby/test_groupby.py +++ b/pandas/tests/groupby/test_groupby.py @@ -1032,7 +1032,7 @@ def test_groupby_mixed_type_columns(): # TODO: Ensure warning isn't emitted in the first place -@pytest.mark.filterwarnings("ignore:All-NaN:RuntimeWarning") +@pytest.mark.filterwarnings("ignore:Mean of:RuntimeWarning") def test_cython_grouper_series_bug_noncontig(): arr = np.empty((100, 100)) 
arr.fill(np.nan) From a89721e662db3db1d39e94718fd66df60a838c76 Mon Sep 17 00:00:00 2001 From: Tom Augspurger Date: Sun, 16 Sep 2018 14:46:23 -0500 Subject: [PATCH 25/38] import again --- pandas/tests/api/test_types.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/pandas/tests/api/test_types.py b/pandas/tests/api/test_types.py index 41c54e6ac8328..5450d48ef9c51 100644 --- a/pandas/tests/api/test_types.py +++ b/pandas/tests/api/test_types.py @@ -60,9 +60,10 @@ def test_moved_infer_dtype(): # del from sys.modules to ensure we try to freshly load. # if this was imported from another test previously, we would # not see the warning, since the import is otherwise cached. - sys.modules.pop("pandas.lib", None) with tm.assert_produces_warning(FutureWarning): + import pandas.lib + e = pandas.lib.infer_dtype('foo') assert e is not None From 9813b4c13abe8d0ae909e9741e201d8cba0c336f Mon Sep 17 00:00:00 2001 From: Tom Augspurger Date: Sun, 16 Sep 2018 14:49:23 -0500 Subject: [PATCH 26/38] ignore both --- pandas/tests/groupby/aggregate/test_cython.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/pandas/tests/groupby/aggregate/test_cython.py b/pandas/tests/groupby/aggregate/test_cython.py index 5cf3536f69218..665aba6eb17f6 100644 --- a/pandas/tests/groupby/aggregate/test_cython.py +++ b/pandas/tests/groupby/aggregate/test_cython.py @@ -26,8 +26,10 @@ 'sem', 'mean', pytest.param('median', + # ignore mean of empty slice + # and all-NaN marks=[pytest.mark.filterwarnings( - "ignore:All-NaN:RuntimeWarning" + "ignore:RuntimeWarning" )]), 'prod', 'min', From b7f0198af65d68be740d3d615426c3b55c2a630b Mon Sep 17 00:00:00 2001 From: Tom Augspurger Date: Sun, 16 Sep 2018 14:50:00 -0500 Subject: [PATCH 27/38] fixed syntax --- pandas/tests/groupby/aggregate/test_cython.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pandas/tests/groupby/aggregate/test_cython.py b/pandas/tests/groupby/aggregate/test_cython.py index 665aba6eb17f6..d8a545b323674 100644 --- a/pandas/tests/groupby/aggregate/test_cython.py +++ b/pandas/tests/groupby/aggregate/test_cython.py @@ -29,7 +29,7 @@ # ignore mean of empty slice # and all-NaN marks=[pytest.mark.filterwarnings( - "ignore:RuntimeWarning" + "ignore::RuntimeWarning" )]), 'prod', 'min', From dfc767cf08114d890e9152f3e6bcb5f0fd38ba7c Mon Sep 17 00:00:00 2001 From: Tom Augspurger Date: Sun, 16 Sep 2018 15:37:33 -0500 Subject: [PATCH 28/38] Lint --- pandas/tests/api/test_types.py | 1 - 1 file changed, 1 deletion(-) diff --git a/pandas/tests/api/test_types.py b/pandas/tests/api/test_types.py index 5450d48ef9c51..ed80c1414dbaa 100644 --- a/pandas/tests/api/test_types.py +++ b/pandas/tests/api/test_types.py @@ -2,7 +2,6 @@ import sys import pytest -import pandas from pandas.api import types from pandas.util import testing as tm From 51f77b5eaaa93a33401f5da7401d162b77b4bccb Mon Sep 17 00:00:00 2001 From: Tom Augspurger Date: Mon, 17 Sep 2018 05:54:57 -0500 Subject: [PATCH 29/38] Fix docs --- doc/source/contributing.rst | 34 ++++++++++++++++++++++++++-------- 1 file changed, 26 insertions(+), 8 deletions(-) diff --git a/doc/source/contributing.rst b/doc/source/contributing.rst index 532621dc016c5..cdc510a6b3fa2 100644 --- a/doc/source/contributing.rst +++ b/doc/source/contributing.rst @@ -632,6 +632,14 @@ Otherwise, you need to do it manually: warnings.warn('Use new_func instead.', FutureWarning, stacklevel=2) new_func() +You'll also need to + +1. 
write a new test that asserts a warning is issued when calling with the deprecated argument +2. Update all of pandas existing tests and code to use the new argument + +See :ref:`contributing.warnings` for more. + + .. _contributing.ci: Testing With Continuous Integration @@ -859,20 +867,30 @@ preferred if the inputs or logic are simple, with Hypothesis tests reserved for cases with complex logic or where there are too many combinations of options or subtle interactions to test (or think of!) all of them. -.. _warnings: +.. _contributing.warnings: -Warnings -~~~~~~~~ +Testing Warnings +~~~~~~~~~~~~~~~~ -By default, pandas test suite will fail if any unhandled warnings are emitted. +By default, one of pandas CI workers will fail if any unhandled warnings are emitted. If your change involves checking that a warning is actually emitted, use -``tm.assert_produces_warning(ExpectedWarning)``. We prefer this to pytest's -``pytest.warns`` context manager because ours checks that the warning's stacklevel -is set correctly. +``tm.assert_produces_warning(ExpectedWarning)``. + + +.. code-block:: python + + with tm.assert_produces_warning(FutureWarning): + df.some_operation() + +We prefer this to the ``pytest.warns`` context manager because ours checks that the warning's +stacklevel is set correctly. The stacklevel is what ensure the *user's* file name and line number +is printed in the warning, rather than something internal to pandas. It represents the nubmer of +function calls from user code (e.g. ``df.some_operation()``) to the function that actually emits +the warning. If you have a test that would emit a warning, but you aren't actually testing the -warning it self (say because it's going to be removed in the future, or because we're +warning itself (say because it's going to be removed in the future, or because we're matching a 3rd-party library's behavior), then use ``pytest.mark.filterwarnings`` to ignore the error. From 5a1b8ee2f97dd63d5f4812dbb8d547a5de8cd770 Mon Sep 17 00:00:00 2001 From: Tom Augspurger Date: Mon, 17 Sep 2018 10:19:21 -0500 Subject: [PATCH 30/38] remove record --- doc/source/contributing.rst | 2 +- pandas/core/arrays/integer.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/doc/source/contributing.rst b/doc/source/contributing.rst index cdc510a6b3fa2..8d47a543729ba 100644 --- a/doc/source/contributing.rst +++ b/doc/source/contributing.rst @@ -885,7 +885,7 @@ If your change involves checking that a warning is actually emitted, use We prefer this to the ``pytest.warns`` context manager because ours checks that the warning's stacklevel is set correctly. The stacklevel is what ensure the *user's* file name and line number -is printed in the warning, rather than something internal to pandas. It represents the nubmer of +is printed in the warning, rather than something internal to pandas. It represents the number of function calls from user code (e.g. ``df.some_operation()``) to the function that actually emits the warning.
diff --git a/pandas/core/arrays/integer.py b/pandas/core/arrays/integer.py index 5ca0bed87c6d3..e58109a25e1a5 100644 --- a/pandas/core/arrays/integer.py +++ b/pandas/core/arrays/integer.py @@ -512,7 +512,7 @@ def cmp_method(self, other): # numpy will show a DeprecationWarning on invalid elementwise # comparisons, this will raise in the future - with warnings.catch_warnings(record=True): + with warnings.catch_warnings(): warnings.filterwarnings("ignore", "elementwise", FutureWarning) with np.errstate(all='ignore'): result = op(self._data, other) From 83cd9ae7afea11a7751a37dc9cca2ebf2bd12bca Mon Sep 17 00:00:00 2001 From: Tom Augspurger Date: Mon, 17 Sep 2018 10:56:49 -0500 Subject: [PATCH 31/38] fixups --- pandas/tests/extension/base/dtype.py | 5 ++--- pandas/tests/indexes/datetimes/test_tools.py | 9 ++++++--- pandas/tests/test_downstream.py | 12 +++++++----- 3 files changed, 15 insertions(+), 11 deletions(-) diff --git a/pandas/tests/extension/base/dtype.py b/pandas/tests/extension/base/dtype.py index ab17fa7f02b16..2ab01086d2c95 100644 --- a/pandas/tests/extension/base/dtype.py +++ b/pandas/tests/extension/base/dtype.py @@ -69,11 +69,10 @@ def test_check_dtype(self, data): expected = pd.Series([True, True, False, False], index=list('ABCD')) - with warnings.catch_warnings(): # XXX: This should probably be *fixed* not ignored. # See libops.scalar_compare - warnings.simplefilter("ignore", DeprecationWarning) - result = df.dtypes == str(dtype) + # warnings.simplefilter("ignore", DeprecationWarning) + result = df.dtypes == str(dtype) self.assert_series_equal(result, expected) expected = pd.Series([True, True, False, False], diff --git a/pandas/tests/indexes/datetimes/test_tools.py b/pandas/tests/indexes/datetimes/test_tools.py index 13d1d05833361..ce0bf93695b38 100644 --- a/pandas/tests/indexes/datetimes/test_tools.py +++ b/pandas/tests/indexes/datetimes/test_tools.py @@ -1589,15 +1589,18 @@ def units_from_epochs(): return list(range(5)) -@pytest.fixture(params=['timestamp', 'pydatetime', 'datetime64']) +@pytest.fixture(params=['timestamp', 'pydatetime', 'datetime64', 'str_1960']) def epochs(epoch_1960, request): - assert request.param in {'timestamp', 'pydatetime', 'datetime64'} + assert request.param in {'timestamp', 'pydatetime', 'datetime64', + "str_1960"} if request.param == 'timestamp': return epoch_1960 elif request.param == 'pydatetime': return epoch_1960.to_pydatetime() - else: + elif request.param == "datetime64": return epoch_1960.to_datetime64() + else: + return str(epoch_1960) @pytest.fixture diff --git a/pandas/tests/test_downstream.py b/pandas/tests/test_downstream.py index fe1f41e7eb606..abcfa4b320b22 100644 --- a/pandas/tests/test_downstream.py +++ b/pandas/tests/test_downstream.py @@ -63,7 +63,7 @@ def test_oo_optimizable(): @tm.network # Cython import warning -@pytest.mark.filterwarnings("ignore::ImportWarning") +@pytest.mark.filterwarnings("ignore:can't:ImportWarning") def test_statsmodels(): statsmodels = import_module('statsmodels') # noqa @@ -74,7 +74,7 @@ def test_statsmodels(): # Cython import warning -@pytest.mark.filterwarnings("ignore::ImportWarning") +@pytest.mark.filterwarnings("ignore:can't:ImportWarning") def test_scikit_learn(df): sklearn = import_module('sklearn') # noqa @@ -110,8 +110,10 @@ def test_pandas_datareader(): 'F', 'quandl', '2017-01-01', '2017-02-01') -# importing from pandas, Cython mport warning -@pytest.mark.filterwarnings("ignore") +# importing from pandas, Cython import warning +@pytest.mark.filterwarnings("ignore:The 
'warn':DeprecationWarning") +@pytest.mark.filterwarnings("ignore:pandas.util:DeprecationWarning") +@pytest.mark.filterwarnings("ignore:can't resolve:ImportWarning") def test_geopandas(): geopandas = import_module('geopandas') # noqa @@ -120,7 +122,7 @@ def test_geopandas(): # Cython import warning -@pytest.mark.filterwarnings("ignore") +@pytest.mark.filterwarnings("ignore:can't resolve:ImportWarning") def test_pyarrow(df): pyarrow = import_module('pyarrow') # noqa From d10a0ccc2a13c65d328a0279deafbe5fec23a1e1 Mon Sep 17 00:00:00 2001 From: Tom Augspurger Date: Mon, 17 Sep 2018 11:40:50 -0500 Subject: [PATCH 32/38] Filter again --- pandas/tests/extension/base/dtype.py | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/pandas/tests/extension/base/dtype.py b/pandas/tests/extension/base/dtype.py index 2ab01086d2c95..8d1f1cadcc23f 100644 --- a/pandas/tests/extension/base/dtype.py +++ b/pandas/tests/extension/base/dtype.py @@ -69,10 +69,12 @@ def test_check_dtype(self, data): expected = pd.Series([True, True, False, False], index=list('ABCD')) - # XXX: This should probably be *fixed* not ignored. - # See libops.scalar_compare - # warnings.simplefilter("ignore", DeprecationWarning) - result = df.dtypes == str(dtype) + # XXX: This should probably be *fixed* not ignored. + # See libops.scalar_compare + with warnings.catch_warnings(): + warnings.simplefilter("ignore", DeprecationWarning) + result = df.dtypes == str(dtype) + self.assert_series_equal(result, expected) expected = pd.Series([True, True, False, False], From cf60217a3cf5ba84fd0e77f12cbca56b51039f1c Mon Sep 17 00:00:00 2001 From: Tom Augspurger Date: Mon, 17 Sep 2018 14:07:17 -0500 Subject: [PATCH 33/38] print --- .circleci/config.yml | 8 ++++---- .travis.yml | 12 ++++++------ 2 files changed, 10 insertions(+), 10 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index e947f30d285cd..3d8d14475a9f1 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -32,7 +32,7 @@ jobs: ./ci/show_circle.sh - run: name: test - command: ./ci/run_circle.sh --skip-slow --skip-network + command: ./ci/run_circle.sh --skip-slow --skip-network -v -s # -------------------------------------------------------------------------- # 1. py36_locale @@ -66,7 +66,7 @@ jobs: ./ci/show_circle.sh - run: name: test - command: ./ci/run_circle.sh --skip-slow --skip-network + command: ./ci/run_circle.sh --skip-slow --skip-network -v -s # -------------------------------------------------------------------------- # 2. py36_locale_slow @@ -100,7 +100,7 @@ jobs: ./ci/show_circle.sh - run: name: test - command: ./ci/run_circle.sh --only-slow --skip-network + command: ./ci/run_circle.sh --only-slow --skip-network -v -s # -------------------------------------------------------------------------- # 3. 
py35_ascii @@ -134,7 +134,7 @@ jobs: ./ci/show_circle.sh - run: name: test - command: ./ci/run_circle.sh --skip-slow --skip-network + command: ./ci/run_circle.sh --skip-slow --skip-network -v -s workflows: diff --git a/.travis.yml b/.travis.yml index 76f4715a4abb2..03161c8bdeb5c 100644 --- a/.travis.yml +++ b/.travis.yml @@ -34,11 +34,11 @@ matrix: - os: osx language: generic env: - - JOB="3.5, OSX" ENV_FILE="ci/travis-35-osx.yaml" TEST_ARGS="--skip-slow --skip-network" + - JOB="3.5, OSX" ENV_FILE="ci/travis-35-osx.yaml" TEST_ARGS="--skip-slow --skip-network -v -s" - dist: trusty env: - - JOB="3.7" ENV_FILE="ci/travis-37.yaml" TEST_ARGS="--skip-slow --skip-network" + - JOB="3.7" ENV_FILE="ci/travis-37.yaml" TEST_ARGS="--skip-slow --skip-network -v -s" - dist: trusty env: @@ -49,14 +49,14 @@ matrix: - language-pack-zh-hans - dist: trusty env: - - JOB="2.7, lint" ENV_FILE="ci/travis-27.yaml" TEST_ARGS="--skip-slow" LINT=true + - JOB="2.7, lint" ENV_FILE="ci/travis-27.yaml" TEST_ARGS="--skip-slow -v -s" LINT=true addons: apt: packages: - python-gtk2 - dist: trusty env: - - JOB="3.6, coverage" ENV_FILE="ci/travis-36.yaml" TEST_ARGS="--skip-slow --skip-network" PANDAS_TESTING_MODE="deprecate" COVERAGE=true DOCTEST=true + - JOB="3.6, coverage" ENV_FILE="ci/travis-36.yaml" TEST_ARGS="--skip-slow --skip-network -v -s" PANDAS_TESTING_MODE="deprecate" COVERAGE=true DOCTEST=true # In allow_failures - dist: trusty env: @@ -64,7 +64,7 @@ matrix: # In allow_failures - dist: trusty env: - - JOB="3.7, NumPy dev" ENV_FILE="ci/travis-37-numpydev.yaml" TEST_ARGS="--skip-slow --skip-network -W error" PANDAS_TESTING_MODE="deprecate" + - JOB="3.7, NumPy dev" ENV_FILE="ci/travis-37-numpydev.yaml" TEST_ARGS="--skip-slow --skip-network -W error -v -s" PANDAS_TESTING_MODE="deprecate" addons: apt: packages: @@ -79,7 +79,7 @@ matrix: - JOB="3.6, slow" ENV_FILE="ci/travis-36-slow.yaml" SLOW=true - dist: trusty env: - - JOB="3.7, NumPy dev" ENV_FILE="ci/travis-37-numpydev.yaml" TEST_ARGS="--skip-slow --skip-network -W error" PANDAS_TESTING_MODE="deprecate" + - JOB="3.7, NumPy dev" ENV_FILE="ci/travis-37-numpydev.yaml" TEST_ARGS="--skip-slow --skip-network -W error -v -s" PANDAS_TESTING_MODE="deprecate" addons: apt: packages: From 61587ec0c9c53cdaa0a7e0893ceb7c46305b2c27 Mon Sep 17 00:00:00 2001 From: Tom Augspurger Date: Mon, 17 Sep 2018 14:50:12 -0500 Subject: [PATCH 34/38] Revert "print" This reverts commit cf60217a3cf5ba84fd0e77f12cbca56b51039f1c. --- .circleci/config.yml | 8 ++++---- .travis.yml | 12 ++++++------ 2 files changed, 10 insertions(+), 10 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 3d8d14475a9f1..e947f30d285cd 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -32,7 +32,7 @@ jobs: ./ci/show_circle.sh - run: name: test - command: ./ci/run_circle.sh --skip-slow --skip-network -v -s + command: ./ci/run_circle.sh --skip-slow --skip-network # -------------------------------------------------------------------------- # 1. py36_locale @@ -66,7 +66,7 @@ jobs: ./ci/show_circle.sh - run: name: test - command: ./ci/run_circle.sh --skip-slow --skip-network -v -s + command: ./ci/run_circle.sh --skip-slow --skip-network # -------------------------------------------------------------------------- # 2. 
py36_locale_slow @@ -100,7 +100,7 @@ jobs: ./ci/show_circle.sh - run: name: test - command: ./ci/run_circle.sh --only-slow --skip-network -v -s + command: ./ci/run_circle.sh --only-slow --skip-network # -------------------------------------------------------------------------- # 3. py35_ascii @@ -134,7 +134,7 @@ jobs: ./ci/show_circle.sh - run: name: test - command: ./ci/run_circle.sh --skip-slow --skip-network -v -s + command: ./ci/run_circle.sh --skip-slow --skip-network workflows: diff --git a/.travis.yml b/.travis.yml index 03161c8bdeb5c..76f4715a4abb2 100644 --- a/.travis.yml +++ b/.travis.yml @@ -34,11 +34,11 @@ matrix: - os: osx language: generic env: - - JOB="3.5, OSX" ENV_FILE="ci/travis-35-osx.yaml" TEST_ARGS="--skip-slow --skip-network -v -s" + - JOB="3.5, OSX" ENV_FILE="ci/travis-35-osx.yaml" TEST_ARGS="--skip-slow --skip-network" - dist: trusty env: - - JOB="3.7" ENV_FILE="ci/travis-37.yaml" TEST_ARGS="--skip-slow --skip-network -v -s" + - JOB="3.7" ENV_FILE="ci/travis-37.yaml" TEST_ARGS="--skip-slow --skip-network" - dist: trusty env: @@ -49,14 +49,14 @@ matrix: - language-pack-zh-hans - dist: trusty env: - - JOB="2.7, lint" ENV_FILE="ci/travis-27.yaml" TEST_ARGS="--skip-slow -v -s" LINT=true + - JOB="2.7, lint" ENV_FILE="ci/travis-27.yaml" TEST_ARGS="--skip-slow" LINT=true addons: apt: packages: - python-gtk2 - dist: trusty env: - - JOB="3.6, coverage" ENV_FILE="ci/travis-36.yaml" TEST_ARGS="--skip-slow --skip-network -v -s" PANDAS_TESTING_MODE="deprecate" COVERAGE=true DOCTEST=true + - JOB="3.6, coverage" ENV_FILE="ci/travis-36.yaml" TEST_ARGS="--skip-slow --skip-network" PANDAS_TESTING_MODE="deprecate" COVERAGE=true DOCTEST=true # In allow_failures - dist: trusty env: @@ -64,7 +64,7 @@ matrix: # In allow_failures - dist: trusty env: - - JOB="3.7, NumPy dev" ENV_FILE="ci/travis-37-numpydev.yaml" TEST_ARGS="--skip-slow --skip-network -W error -v -s" PANDAS_TESTING_MODE="deprecate" + - JOB="3.7, NumPy dev" ENV_FILE="ci/travis-37-numpydev.yaml" TEST_ARGS="--skip-slow --skip-network -W error" PANDAS_TESTING_MODE="deprecate" addons: apt: packages: @@ -79,7 +79,7 @@ matrix: - JOB="3.6, slow" ENV_FILE="ci/travis-36-slow.yaml" SLOW=true - dist: trusty env: - - JOB="3.7, NumPy dev" ENV_FILE="ci/travis-37-numpydev.yaml" TEST_ARGS="--skip-slow --skip-network -W error -v -s" PANDAS_TESTING_MODE="deprecate" + - JOB="3.7, NumPy dev" ENV_FILE="ci/travis-37-numpydev.yaml" TEST_ARGS="--skip-slow --skip-network -W error" PANDAS_TESTING_MODE="deprecate" addons: apt: packages: From cfb5ae2d4ac8b24b871da69f24e09960d8868cba Mon Sep 17 00:00:00 2001 From: Tom Augspurger Date: Mon, 17 Sep 2018 15:33:19 -0500 Subject: [PATCH 35/38] Close all the handles --- pandas/conftest.py | 10 ---------- pandas/io/common.py | 2 ++ pandas/tests/io/parser/compression.py | 26 +++++++++++--------------- pandas/tests/io/test_pickle.py | 9 ++++----- pandas/util/testing.py | 8 ++++++-- 5 files changed, 23 insertions(+), 32 deletions(-) diff --git a/pandas/conftest.py b/pandas/conftest.py index 28ec5dbee31e5..28c24fc8c0640 100644 --- a/pandas/conftest.py +++ b/pandas/conftest.py @@ -31,16 +31,6 @@ def pytest_addoption(parser): help="Fail if a test is skipped for missing data file.") -def pytest_collection_modifyitems(items): - # Make unhandled ResourceWarnings fail early to track down - # https://github.com/pandas-dev/pandas/issues/22675 - if PY3: - for item in items: - item.add_marker( - pytest.mark.filterwarnings("error::ResourceWarning") - ) - - def pytest_runtest_setup(item): if 'slow' in item.keywords 
and item.config.getoption("--skip-slow"): pytest.skip("skipping due to --skip-slow") diff --git a/pandas/io/common.py b/pandas/io/common.py index 69cb9ed46419c..405911eda7e9e 100644 --- a/pandas/io/common.py +++ b/pandas/io/common.py @@ -386,6 +386,8 @@ def _get_handle(path_or_buf, mode, encoding=None, compression=None, # ZIP Compression elif compression == 'zip': zf = BytesZipFile(path_or_buf, mode) + # Ensure the container is closed as well. + handles.append(zf) if zf.mode == 'w': f = zf elif zf.mode == 'r': diff --git a/pandas/tests/io/parser/compression.py b/pandas/tests/io/parser/compression.py index e4950af19ea95..5a28b6263f20f 100644 --- a/pandas/tests/io/parser/compression.py +++ b/pandas/tests/io/parser/compression.py @@ -30,9 +30,8 @@ def test_zip(self): expected = self.read_csv(self.csv1) with tm.ensure_clean('test_file.zip') as path: - tmp = zipfile.ZipFile(path, mode='w') - tmp.writestr('test_file', data) - tmp.close() + with zipfile.ZipFile(path, mode='w') as tmp: + tmp.writestr('test_file', data) result = self.read_csv(path, compression='zip') tm.assert_frame_equal(result, expected) @@ -47,10 +46,9 @@ def test_zip(self): with tm.ensure_clean('combined_zip.zip') as path: inner_file_names = ['test_file', 'second_file'] - tmp = zipfile.ZipFile(path, mode='w') - for file_name in inner_file_names: - tmp.writestr(file_name, data) - tmp.close() + with zipfile.ZipFile(path, mode='w') as tmp: + for file_name in inner_file_names: + tmp.writestr(file_name, data) tm.assert_raises_regex(ValueError, 'Multiple files', self.read_csv, path, compression='zip') @@ -60,8 +58,8 @@ def test_zip(self): compression='infer') with tm.ensure_clean() as path: - tmp = zipfile.ZipFile(path, mode='w') - tmp.close() + with zipfile.ZipFile(path, mode='w') as tmp: + pass tm.assert_raises_regex(ValueError, 'Zero files', self.read_csv, path, compression='zip') @@ -84,9 +82,8 @@ def test_other_compression(self, compress_type, compress_method, ext): expected = self.read_csv(self.csv1) with tm.ensure_clean() as path: - tmp = compress_method(path, mode='wb') - tmp.write(data) - tmp.close() + with compress_method(path, mode='wb') as tmp: + tmp.write(data) result = self.read_csv(path, compression=compress_type) tm.assert_frame_equal(result, expected) @@ -100,9 +97,8 @@ def test_other_compression(self, compress_type, compress_method, ext): tm.assert_frame_equal(result, expected) with tm.ensure_clean('test.{}'.format(ext)) as path: - tmp = compress_method(path, mode='wb') - tmp.write(data) - tmp.close() + with compress_method(path, mode='wb') as tmp: + tmp.write(data) result = self.read_csv(path, compression='infer') tm.assert_frame_equal(result, expected) diff --git a/pandas/tests/io/test_pickle.py b/pandas/tests/io/test_pickle.py index 97aed7671449c..a47c3c01fc80e 100644 --- a/pandas/tests/io/test_pickle.py +++ b/pandas/tests/io/test_pickle.py @@ -333,9 +333,9 @@ def compress_file(self, src_path, dest_path, compression): f = bz2.BZ2File(dest_path, "w") elif compression == 'zip': import zipfile - f = zipfile.ZipFile(dest_path, "w", - compression=zipfile.ZIP_DEFLATED) - f.write(src_path, os.path.basename(src_path)) + with zipfile.ZipFile(dest_path, "w", + compression=zipfile.ZIP_DEFLATED) as f: + f.write(src_path, os.path.basename(src_path)) elif compression == 'xz': lzma = pandas.compat.import_lzma() f = lzma.LZMAFile(dest_path, "w") @@ -344,9 +344,8 @@ def compress_file(self, src_path, dest_path, compression): raise ValueError(msg) if compression != "zip": - with open(src_path, "rb") as fh: + with open(src_path, 
"rb") as fh, f: f.write(fh.read()) - f.close() def test_write_explicit(self, compression, get_random_path): base = get_random_path diff --git a/pandas/util/testing.py b/pandas/util/testing.py index b3bf5b3e7a208..edd0b0aa82d23 100644 --- a/pandas/util/testing.py +++ b/pandas/util/testing.py @@ -205,8 +205,12 @@ def decompress_file(path, compression): msg = 'Unrecognized compression type: {}'.format(compression) raise ValueError(msg) - yield f - f.close() + try: + yield f + finally: + f.close() + if compression == "zip": + zip_file.close() def assert_almost_equal(left, right, check_dtype="equiv", From 2591677d06b5424e7c24b6dbb24aab4f44adbe0a Mon Sep 17 00:00:00 2001 From: Tom Augspurger Date: Tue, 18 Sep 2018 06:17:43 -0500 Subject: [PATCH 36/38] update contributing --- doc/source/contributing.rst | 23 ++++++++++++++++++----- 1 file changed, 18 insertions(+), 5 deletions(-) diff --git a/doc/source/contributing.rst b/doc/source/contributing.rst index 8d47a543729ba..da05f7a074707 100644 --- a/doc/source/contributing.rst +++ b/doc/source/contributing.rst @@ -894,15 +894,28 @@ warning itself (say because it's going to be removed in the future, or because w matching a 3rd-party library's behavior), then use ``pytest.mark.filterwarnings`` to ignore the error. -``` -@pytest.mark.filterwarnings("ignore:msg:category") -def test_thing(self): - ... -``` +.. code-block:: python + + @pytest.mark.filterwarnings("ignore:msg:category") + def test_thing(self): + ... If the test generates a warning of class ``category`` whose message starts with ``msg``, the warning will be ignored and the test will pass. +If you need finer-grained control, you can use Python's usual +`warnings module `__ +to control whether a warning is ignored / raised at different places within +a single test. + +.. code-block:: python + + with warch.catch_warnings(): + warnings.simplefilter("ignore", FutureWarning) + # Or use warnings.filterwarnings(...) + +Alternatively, consider breaking up the unit test. + Running the test suite ---------------------- From 5f0eefec33cd93393a391cef5c1dd203cd7eb0c9 Mon Sep 17 00:00:00 2001 From: Tom Augspurger Date: Tue, 18 Sep 2018 08:04:27 -0500 Subject: [PATCH 37/38] Docs * fixture docstrings * linter fails --- doc/source/contributing.rst | 2 +- pandas/tests/indexes/datetimes/test_tools.py | 17 ++++++++++++++++- 2 files changed, 17 insertions(+), 2 deletions(-) diff --git a/doc/source/contributing.rst b/doc/source/contributing.rst index da05f7a074707..65e151feeba67 100644 --- a/doc/source/contributing.rst +++ b/doc/source/contributing.rst @@ -887,7 +887,7 @@ We prefer this to the ``pytest.warns`` context manager because ours checks that stacklevel is set correctly. The stacklevel is what ensure the *user's* file name and line number is printed in the warning, rather than something internal to pandas. It represents the number of function calls from user code (e.g. ``df.some_operation()``) to the function that actually emits -the warning. +the warning. Our linter will fail the build if you use ``pytest.warns`` in a test. 
If you have a test that would emit a warning, but you aren't actually testing the warning itself (say because it's going to be removed in the future, or because we're diff --git a/pandas/tests/indexes/datetimes/test_tools.py b/pandas/tests/indexes/datetimes/test_tools.py index ce0bf93695b38..cc6db8f5854c8 100644 --- a/pandas/tests/indexes/datetimes/test_tools.py +++ b/pandas/tests/indexes/datetimes/test_tools.py @@ -1575,12 +1575,20 @@ def test_parsers_timezone_minute_offsets_roundtrip(self, cache, dt_string, @pytest.fixture(params=['D', 's', 'ms', 'us', 'ns']) def units(request): + """Day and some time units. + + * D + * s + * ms + * us + * ns + """ return request.param @pytest.fixture def epoch_1960(): - # for origin as 1960-01-01 + """Timestamp at 1960-01-01.""" return Timestamp('1960-01-01') @@ -1591,6 +1599,13 @@ def units_from_epochs(): @pytest.fixture(params=['timestamp', 'pydatetime', 'datetime64', 'str_1960']) def epochs(epoch_1960, request): + """Timestamp at 1960-01-01 in various forms. + + * pd.Timestamp + * datetime.datetime + * numpy.datetime64 + * str + """ assert request.param in {'timestamp', 'pydatetime', 'datetime64', "str_1960"} if request.param == 'timestamp': From 4990fc2d984b536e5c7c97ad55d4f550ae661867 Mon Sep 17 00:00:00 2001 From: Tom Augspurger Date: Tue, 18 Sep 2018 10:02:49 -0500 Subject: [PATCH 38/38] Fixed merge conflict --- pandas/tests/sparse/frame/test_frame.py | 26 ++++++++++++------------- 1 file changed, 12 insertions(+), 14 deletions(-) diff --git a/pandas/tests/sparse/frame/test_frame.py b/pandas/tests/sparse/frame/test_frame.py index e6cc47eaa4f0c..5e5a341ca76d6 100644 --- a/pandas/tests/sparse/frame/test_frame.py +++ b/pandas/tests/sparse/frame/test_frame.py @@ -973,25 +973,23 @@ def _check(frame, orig): @pytest.mark.filterwarnings("ignore:\\nPanel:FutureWarning") def test_stack_sparse_frame(self, float_frame, float_frame_int_kind, float_frame_fill0, float_frame_fill2): - with catch_warnings(record=True): + def _check(frame): + dense_frame = frame.to_dense() # noqa - def _check(frame): - dense_frame = frame.to_dense() # noqa + wp = Panel.from_dict({'foo': frame}) + from_dense_lp = wp.to_frame() - wp = Panel.from_dict({'foo': frame}) - from_dense_lp = wp.to_frame() + from_sparse_lp = spf.stack_sparse_frame(frame) - from_sparse_lp = spf.stack_sparse_frame(frame) + tm.assert_numpy_array_equal(from_dense_lp.values, + from_sparse_lp.values) - tm.assert_numpy_array_equal(from_dense_lp.values, - from_sparse_lp.values) + _check(float_frame) + _check(float_frame_int_kind) - _check(float_frame) - _check(float_frame_int_kind) - - # for now - pytest.raises(Exception, _check, float_frame_fill0) - pytest.raises(Exception, _check, float_frame_fill2) + # for now + pytest.raises(Exception, _check, float_frame_fill0) + pytest.raises(Exception, _check, float_frame_fill2) def test_transpose(self, float_frame, float_frame_int_kind, float_frame_dense,