diff --git a/pandas/plotting/_matplotlib/converter.py b/pandas/plotting/_matplotlib/converter.py
index 8260684c02ea6..11c59a0903440 100644
--- a/pandas/plotting/_matplotlib/converter.py
+++ b/pandas/plotting/_matplotlib/converter.py
@@ -387,23 +387,6 @@ def __call__(self):
             return []
 
         # We need to cap at the endpoints of valid datetime
-
-        # FIXME: dont leave commented-out
-        # TODO(wesm) unused?
-        # if dmin > dmax:
-        #     dmax, dmin = dmin, dmax
-        # delta = relativedelta(dmax, dmin)
-        # try:
-        #     start = dmin - delta
-        # except ValueError:
-        #     start = _from_ordinal(1.0)
-
-        # try:
-        #     stop = dmax + delta
-        # except ValueError:
-        #     # The magic number!
-        #     stop = _from_ordinal(3652059.9999999)
-
         nmax, nmin = dates.date2num((dmax, dmin))
 
         num = (nmax - nmin) * 86400 * 1000
@@ -449,27 +432,7 @@ def autoscale(self):
         """
         Set the view limits to include the data range.
         """
-        dmin, dmax = self.datalim_to_dt()
-        if dmin > dmax:
-            dmax, dmin = dmin, dmax
-
         # We need to cap at the endpoints of valid datetime
-
-        # FIXME: dont leave commented-out
-        # TODO(wesm): unused?
-
-        # delta = relativedelta(dmax, dmin)
-        # try:
-        #     start = dmin - delta
-        # except ValueError:
-        #     start = _from_ordinal(1.0)
-
-        # try:
-        #     stop = dmax + delta
-        # except ValueError:
-        #     # The magic number!
-        #     stop = _from_ordinal(3652059.9999999)
-
         dmin, dmax = self.datalim_to_dt()
 
         vmin = dates.date2num(dmin)
diff --git a/pandas/tests/extension/test_boolean.py b/pandas/tests/extension/test_boolean.py
index 0b8b334e53d68..50b72698629bb 100644
--- a/pandas/tests/extension/test_boolean.py
+++ b/pandas/tests/extension/test_boolean.py
@@ -356,7 +356,5 @@ class TestUnaryOps(base.BaseUnaryOpsTests):
     pass
 
 
-# FIXME: dont leave commented-out
-# TODO parsing not yet supported
-# class TestParsing(base.BaseParsingTests):
-#     pass
+class TestParsing(base.BaseParsingTests):
+    pass
diff --git a/pandas/tests/indexes/multi/test_indexing.py b/pandas/tests/indexes/multi/test_indexing.py
index a10bf6b6aa11a..4cc67986ad065 100644
--- a/pandas/tests/indexes/multi/test_indexing.py
+++ b/pandas/tests/indexes/multi/test_indexing.py
@@ -519,14 +519,15 @@ def test_get_loc_duplicates(self):
         result = index.get_loc(2)
         expected = slice(0, 4)
         assert result == expected
-        # FIXME: dont leave commented-out
-        # pytest.raises(Exception, index.get_loc, 2)
 
         index = Index(["c", "a", "a", "b", "b"])
         rs = index.get_loc("c")
         xp = 0
         assert rs == xp
 
+        with pytest.raises(KeyError):
+            index.get_loc(2)
+
     def test_get_loc_level(self):
         index = MultiIndex(
             levels=[Index(np.arange(4)), Index(np.arange(4)), Index(np.arange(4))],
diff --git a/pandas/tests/indexes/multi/test_take.py b/pandas/tests/indexes/multi/test_take.py
index 85043ff8812af..f8e7632c91ab2 100644
--- a/pandas/tests/indexes/multi/test_take.py
+++ b/pandas/tests/indexes/multi/test_take.py
@@ -11,9 +11,6 @@ def test_take(idx):
     expected = idx[indexer]
     assert result.equals(expected)
 
-    # FIXME: Remove Commented Code
-    # if not isinstance(idx,
-    #                   (DatetimeIndex, PeriodIndex, TimedeltaIndex)):
     # GH 10791
     msg = "'MultiIndex' object has no attribute 'freq'"
     with pytest.raises(AttributeError, match=msg):
diff --git a/pandas/tests/indexing/test_indexing.py b/pandas/tests/indexing/test_indexing.py
index 294e3e27c4df5..51a7aa9bb586b 100644
--- a/pandas/tests/indexing/test_indexing.py
+++ b/pandas/tests/indexing/test_indexing.py
@@ -809,8 +809,7 @@ def test_range_in_series_indexing(self, size):
     @pytest.mark.parametrize(
         "slc",
         [
-            # FIXME: dont leave commented-out
-            # pd.IndexSlice[:, :],
+            pd.IndexSlice[:, :],
             pd.IndexSlice[:, 1],
             pd.IndexSlice[1, :],
             pd.IndexSlice[[1], [1]],
diff --git a/pandas/tests/io/test_sql.py b/pandas/tests/io/test_sql.py
index 8c424e07601b8..382d4e611c837 100644
--- a/pandas/tests/io/test_sql.py
+++ b/pandas/tests/io/test_sql.py
@@ -1130,8 +1130,6 @@ def setup_method(self, load_iris_data):
         self.conn.close()
         self.conn = self.__engine
         self.pandasSQL = sql.SQLDatabase(self.__engine)
-        # FIXME: dont leave commented-out
-        # super().teardown_method(method)
 
 
 @pytest.mark.single
diff --git a/pandas/tests/reductions/test_reductions.py b/pandas/tests/reductions/test_reductions.py
index 235aa8e4aa922..f6e0d2f0c1751 100644
--- a/pandas/tests/reductions/test_reductions.py
+++ b/pandas/tests/reductions/test_reductions.py
@@ -942,10 +942,8 @@ def test_timedelta64_analytics(self):
         s2 = Series(pd.date_range("20120102", periods=3))
         expected = Series(s2 - s1)
 
-        # FIXME: don't leave commented-out code
-        # this fails as numpy returns timedelta64[us]
-        # result = np.abs(s1-s2)
-        # assert_frame_equal(result,expected)
+        result = np.abs(s1 - s2)
+        tm.assert_series_equal(result, expected)
 
         result = (s1 - s2).abs()
         tm.assert_series_equal(result, expected)
diff --git a/pandas/tests/series/test_constructors.py b/pandas/tests/series/test_constructors.py
index effb324298c95..36a581a8ca492 100644
--- a/pandas/tests/series/test_constructors.py
+++ b/pandas/tests/series/test_constructors.py
@@ -1323,18 +1323,22 @@ def test_convert_non_ns(self):
         # convert from a numpy array of non-ns datetime64
         # note that creating a numpy datetime64 is in LOCAL time!!!!
         # seems to work for M8[D], but not for M8[s]
+        # TODO: is the above comment still accurate/needed?
 
-        s = Series(
-            np.array(["2013-01-01", "2013-01-02", "2013-01-03"], dtype="datetime64[D]")
+        arr = np.array(
+            ["2013-01-01", "2013-01-02", "2013-01-03"], dtype="datetime64[D]"
         )
-        tm.assert_series_equal(s, Series(date_range("20130101", periods=3, freq="D")))
-
-        # FIXME: dont leave commented-out
-        # s = Series(np.array(['2013-01-01 00:00:01','2013-01-01
-        # 00:00:02','2013-01-01 00:00:03'],dtype='datetime64[s]'))
+        ser = Series(arr)
+        expected = Series(date_range("20130101", periods=3, freq="D"))
+        tm.assert_series_equal(ser, expected)
 
-        # tm.assert_series_equal(s,date_range('20130101
-        # 00:00:01',period=3,freq='s'))
+        arr = np.array(
+            ["2013-01-01 00:00:01", "2013-01-01 00:00:02", "2013-01-01 00:00:03"],
+            dtype="datetime64[s]",
+        )
+        ser = Series(arr)
+        expected = Series(date_range("20130101 00:00:01", periods=3, freq="s"))
+        tm.assert_series_equal(ser, expected)
 
     @pytest.mark.parametrize(
         "index",