
CLN: address FIXME comments #33881

Merged: 3 commits, Apr 30, 2020

37 changes: 0 additions & 37 deletions pandas/plotting/_matplotlib/converter.py
@@ -387,23 +387,6 @@ def __call__(self):
return []

# We need to cap at the endpoints of valid datetime

# FIXME: dont leave commented-out
# TODO(wesm) unused?
# if dmin > dmax:
# dmax, dmin = dmin, dmax
# delta = relativedelta(dmax, dmin)
# try:
# start = dmin - delta
# except ValueError:
# start = _from_ordinal(1.0)

# try:
# stop = dmax + delta
# except ValueError:
# # The magic number!
# stop = _from_ordinal(3652059.9999999)

nmax, nmin = dates.date2num((dmax, dmin))

num = (nmax - nmin) * 86400 * 1000
@@ -449,27 +432,7 @@ def autoscale(self):
"""
Set the view limits to include the data range.
"""
dmin, dmax = self.datalim_to_dt()
if dmin > dmax:
dmax, dmin = dmin, dmax

# We need to cap at the endpoints of valid datetime

# FIXME: dont leave commented-out
# TODO(wesm): unused?

# delta = relativedelta(dmax, dmin)
# try:
# start = dmin - delta
# except ValueError:
# start = _from_ordinal(1.0)

# try:
# stop = dmax + delta
# except ValueError:
# # The magic number!
# stop = _from_ordinal(3652059.9999999)

dmin, dmax = self.datalim_to_dt()

vmin = dates.date2num(dmin)
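
For context on the kept lines in converter.py: matplotlib's dates.date2num converts datetimes to floating-point days, so the kept expression (nmax - nmin) * 86400 * 1000 is the data span in milliseconds. A minimal standalone sketch (not pandas code; the dates are made up):

# Sketch of the span arithmetic kept in converter.py:
# date2num() returns floating-point days, so the difference
# times 86400 * 1000 is the span in milliseconds.
from datetime import datetime

from matplotlib import dates

dmin, dmax = datetime(2020, 1, 1), datetime(2020, 1, 2, 12)
nmin, nmax = dates.date2num((dmin, dmax))
span_ms = (nmax - nmin) * 86400 * 1000
print(span_ms)  # 1.5 days -> 129600000.0 milliseconds
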
6 changes: 2 additions & 4 deletions pandas/tests/extension/test_boolean.py
@@ -356,7 +356,5 @@ class TestUnaryOps(base.BaseUnaryOpsTests):
pass


# FIXME: dont leave commented-out
# TODO parsing not yet supported
# class TestParsing(base.BaseParsingTests):
# pass
class TestParsing(base.BaseParsingTests):
pass
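
For context: base.BaseParsingTests exercises read_csv with the extension dtype, which this file now runs for the nullable boolean dtype. A rough sketch of that behaviour, assuming "True"/"False" string values as in the base tests:

# Sketch (not the pandas test itself): round-trip boolean-looking strings
# through read_csv into the nullable "boolean" extension dtype.
from io import StringIO

import pandas as pd

csv = StringIO("flag\nTrue\nFalse\nTrue\n")
df = pd.read_csv(csv, dtype={"flag": "boolean"})
print(df["flag"].dtype)  # boolean (pandas nullable BooleanDtype)
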
5 changes: 3 additions & 2 deletions pandas/tests/indexes/multi/test_indexing.py
@@ -519,14 +519,15 @@ def test_get_loc_duplicates(self):
result = index.get_loc(2)
expected = slice(0, 4)
assert result == expected
# FIXME: dont leave commented-out
# pytest.raises(Exception, index.get_loc, 2)

index = Index(["c", "a", "a", "b", "b"])
rs = index.get_loc("c")
xp = 0
assert rs == xp

with pytest.raises(KeyError):
index.get_loc(2)

def test_get_loc_level(self):
index = MultiIndex(
levels=[Index(np.arange(4)), Index(np.arange(4)), Index(np.arange(4))],
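
The commented-out pytest.raises call is replaced by an explicit assertion that a missing label raises KeyError. A small sketch mirroring the test's flat Index:

import pandas as pd
import pytest

idx = pd.Index(["c", "a", "a", "b", "b"])
assert idx.get_loc("c") == 0   # unique label -> integer position

with pytest.raises(KeyError):
    idx.get_loc(2)             # label not in the index -> KeyError
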
3 changes: 0 additions & 3 deletions pandas/tests/indexes/multi/test_take.py
@@ -11,9 +11,6 @@ def test_take(idx):
expected = idx[indexer]
assert result.equals(expected)

# FIXME: Remove Commented Code
# if not isinstance(idx,
# (DatetimeIndex, PeriodIndex, TimedeltaIndex)):
# GH 10791
msg = "'MultiIndex' object has no attribute 'freq'"
with pytest.raises(AttributeError, match=msg):
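
The kept assertion only checks that MultiIndex lacks a freq attribute (the removed comment referred to datetime-like indexes, which do have one). A short sketch with a hypothetical index standing in for the idx fixture:

import numpy as np
import pandas as pd
import pytest

# Hypothetical two-level index; the real test uses the `idx` fixture.
mi = pd.MultiIndex.from_product([[0, 1], ["a", "b"]])
result = mi.take(np.array([1, 0, 3]))  # positional take works as usual

msg = "'MultiIndex' object has no attribute 'freq'"
with pytest.raises(AttributeError, match=msg):
    mi.freq
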
3 changes: 1 addition & 2 deletions pandas/tests/indexing/test_indexing.py
@@ -809,8 +809,7 @@ def test_range_in_series_indexing(self, size):
@pytest.mark.parametrize(
"slc",
[
# FIXME: dont leave commented-out
# pd.IndexSlice[:, :],
pd.IndexSlice[:, :],
pd.IndexSlice[:, 1],
pd.IndexSlice[1, :],
pd.IndexSlice[[1], [1]],
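
Re-enabling pd.IndexSlice[:, :] adds the select-everything case to the parametrized slices above. A brief illustration of what these IndexSlice keys do against a hypothetical MultiIndex frame (not the test's data):

import pandas as pd

df = pd.DataFrame(
    {"x": range(4)},
    index=pd.MultiIndex.from_product([[1, 2], [1, 2]]),  # hypothetical frame
)
print(df.loc[pd.IndexSlice[:, :], :])  # the re-enabled case: select everything
print(df.loc[pd.IndexSlice[:, 1], :])  # rows where the second level equals 1
print(df.loc[pd.IndexSlice[1, :], :])  # rows where the first level equals 1
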
2 changes: 0 additions & 2 deletions pandas/tests/io/test_sql.py
@@ -1130,8 +1130,6 @@ def setup_method(self, load_iris_data):
self.conn.close()
self.conn = self.__engine
self.pandasSQL = sql.SQLDatabase(self.__engine)
# FIXME: dont leave commented-out
# super().teardown_method(method)


@pytest.mark.single
6 changes: 2 additions & 4 deletions pandas/tests/reductions/test_reductions.py
@@ -942,10 +942,8 @@ def test_timedelta64_analytics(self):
s2 = Series(pd.date_range("20120102", periods=3))
expected = Series(s2 - s1)

# FIXME: don't leave commented-out code
# this fails as numpy returns timedelta64[us]
# result = np.abs(s1-s2)
# assert_frame_equal(result,expected)
result = np.abs(s1 - s2)
tm.assert_series_equal(result, expected)

result = (s1 - s2).abs()
tm.assert_series_equal(result, expected)
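
The re-enabled lines assert that np.abs on a timedelta64[ns] Series matches Series.abs; the removed comment dated from when NumPy returned timedelta64[us] here. A minimal sketch of the equivalence, with made-up dates:

import numpy as np
import pandas as pd

s1 = pd.Series(pd.date_range("20120101", periods=3))
s2 = pd.Series(pd.date_range("20120102", periods=3))

diff = s1 - s2                                  # timedelta64[ns], each value -1 days
assert (np.abs(diff) == diff.abs()).all()       # ufunc and method agree
assert np.abs(diff).dtype == "timedelta64[ns]"  # nanosecond resolution is preserved
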
22 changes: 13 additions & 9 deletions pandas/tests/series/test_constructors.py
@@ -1323,18 +1323,22 @@ def test_convert_non_ns(self):
# convert from a numpy array of non-ns datetime64
# note that creating a numpy datetime64 is in LOCAL time!!!!
# seems to work for M8[D], but not for M8[s]
# TODO: is the above comment still accurate/needed?

s = Series(
np.array(["2013-01-01", "2013-01-02", "2013-01-03"], dtype="datetime64[D]")
arr = np.array(
["2013-01-01", "2013-01-02", "2013-01-03"], dtype="datetime64[D]"
)
tm.assert_series_equal(s, Series(date_range("20130101", periods=3, freq="D")))

# FIXME: dont leave commented-out
# s = Series(np.array(['2013-01-01 00:00:01','2013-01-01
# 00:00:02','2013-01-01 00:00:03'],dtype='datetime64[s]'))
ser = Series(arr)
expected = Series(date_range("20130101", periods=3, freq="D"))
tm.assert_series_equal(ser, expected)

# tm.assert_series_equal(s,date_range('20130101
# 00:00:01',period=3,freq='s'))
arr = np.array(
["2013-01-01 00:00:01", "2013-01-01 00:00:02", "2013-01-01 00:00:03"],
dtype="datetime64[s]",
)
ser = Series(arr)
expected = Series(date_range("20130101 00:00:01", periods=3, freq="s"))
tm.assert_series_equal(ser, expected)

@pytest.mark.parametrize(
"index",
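
The rewritten test builds Series directly from datetime64[D] and datetime64[s] arrays. On the pandas versions this PR targets, both are converted to datetime64[ns] at construction; the sketch below assumes that behaviour (newer pandas may keep a coarser unit instead):

import numpy as np
import pandas as pd

arr = np.array(["2013-01-01", "2013-01-02", "2013-01-03"], dtype="datetime64[D]")
ser = pd.Series(arr)
expected = pd.Series(pd.date_range("20130101", periods=3, freq="D"))

print(ser.dtype)                # datetime64[ns] on the pandas this PR targets
print((ser == expected).all())  # element-wise values agree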