Skip to content

Commit 7c0257f

Browse files
author
MarcoGorelli
committed
adjust some tests
1 parent d058996 commit 7c0257f

File tree

13 files changed

+43
-33
lines changed

13 files changed

+43
-33
lines changed

pandas/tests/apply/test_frame_apply.py

+2-1
Original file line numberDiff line numberDiff line change
@@ -836,7 +836,8 @@ def test_with_dictlike_columns_with_datetime():
836836
df["author"] = ["X", "Y", "Z"]
837837
df["publisher"] = ["BBC", "NBC", "N24"]
838838
df["date"] = pd.to_datetime(
839-
["17-10-2010 07:15:30", "13-05-2011 08:20:35", "15-01-2013 09:09:09"]
839+
["17-10-2010 07:15:30", "13-05-2011 08:20:35", "15-01-2013 09:09:09"],
840+
dayfirst=True,
840841
)
841842
result = df.apply(lambda x: {}, axis=1)
842843
expected = Series([{}, {}, {}])

pandas/tests/arrays/sparse/test_array.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -351,7 +351,7 @@ def test_nbytes_block(self):
351351
assert result == 24
352352

353353
def test_asarray_datetime64(self):
354-
s = SparseArray(pd.to_datetime(["2012", None, None, "2013"]))
354+
s = SparseArray(pd.to_datetime(["2012-01-01", None, None, "2013-01-01"]))
355355
np.asarray(s)
356356

357357
def test_density(self):

pandas/tests/frame/methods/test_drop.py

+5-5
Original file line numberDiff line numberDiff line change
@@ -405,11 +405,11 @@ def test_drop_level_nonunique_datetime(self):
405405
idx = Index([2, 3, 4, 4, 5], name="id")
406406
idxdt = pd.to_datetime(
407407
[
408-
"201603231400",
409-
"201603231500",
410-
"201603231600",
411-
"201603231600",
412-
"201603231700",
408+
"2016-03-23 14:00",
409+
"2016-03-23 15:00",
410+
"2016-03-23 16:00",
411+
"2016-03-23 16:00",
412+
"2016-03-23 17:00",
413413
]
414414
)
415415
df = DataFrame(np.arange(10).reshape(5, 2), columns=list("ab"), index=idx)

pandas/tests/frame/methods/test_to_csv.py

+9-6
Original file line numberDiff line numberDiff line change
@@ -27,7 +27,7 @@
2727

2828
class TestDataFrameToCSV:
2929
def read_csv(self, path, **kwargs):
30-
params = {"index_col": 0, "parse_dates": True}
30+
params = {"index_col": 0}
3131
params.update(**kwargs)
3232

3333
return read_csv(path, **params)
@@ -46,17 +46,17 @@ def test_to_csv_from_csv1(self, float_frame, datetime_frame):
4646
# freq does not roundtrip
4747
datetime_frame.index = datetime_frame.index._with_freq(None)
4848
datetime_frame.to_csv(path)
49-
recons = self.read_csv(path)
49+
recons = self.read_csv(path, parse_dates=True)
5050
tm.assert_frame_equal(datetime_frame, recons)
5151

5252
datetime_frame.to_csv(path, index_label="index")
53-
recons = self.read_csv(path, index_col=None)
53+
recons = self.read_csv(path, index_col=None, parse_dates=True)
5454

5555
assert len(recons.columns) == len(datetime_frame.columns) + 1
5656

5757
# no index
5858
datetime_frame.to_csv(path, index=False)
59-
recons = self.read_csv(path, index_col=None)
59+
recons = self.read_csv(path, index_col=None, parse_dates=True)
6060
tm.assert_almost_equal(datetime_frame.values, recons.values)
6161

6262
# corner case
@@ -514,7 +514,10 @@ def test_to_csv_multiindex(self, float_frame, datetime_frame):
514514
tsframe.index = MultiIndex.from_arrays(new_index)
515515

516516
tsframe.to_csv(path, index_label=["time", "foo"])
517-
recons = self.read_csv(path, index_col=[0, 1])
517+
with tm.assert_produces_warning(
518+
UserWarning, match="Could not infer format"
519+
):
520+
recons = self.read_csv(path, index_col=[0, 1], parse_dates=True)
518521

519522
# TODO to_csv drops column name
520523
tm.assert_frame_equal(tsframe, recons, check_names=False)
@@ -1056,7 +1059,7 @@ def test_to_csv_date_format(self, datetime_frame):
10561059

10571060
# test NaTs
10581061
nat_index = to_datetime(
1059-
["NaT"] * 10 + ["2000-01-01", "1/1/2000", "1-1-2000"]
1062+
["NaT"] * 10 + ["2000-01-01", "2000-01-01", "2000-01-01"]
10601063
)
10611064
nat_frame = DataFrame({"A": nat_index}, index=nat_index)
10621065
nat_frame.to_csv(path, date_format="%Y-%m-%d")

pandas/tests/groupby/test_function.py

+4-4
Original file line numberDiff line numberDiff line change
@@ -510,7 +510,7 @@ def test_idxmin_idxmax_returns_int_types(func, values, numeric_only):
510510
"name": ["A", "A", "B", "B"],
511511
"c_int": [1, 2, 3, 4],
512512
"c_float": [4.02, 3.03, 2.04, 1.05],
513-
"c_date": ["2019", "2018", "2016", "2017"],
513+
"c_date": ["2019-01-01", "2018-01-01", "2016-01-01", "2017-01-01"],
514514
}
515515
)
516516
df["c_date"] = pd.to_datetime(df["c_date"])
@@ -717,7 +717,7 @@ def test_max_nan_bug():
717717
-05-06,2013-05-06 00:00:00,,log.log
718718
-05-07,2013-05-07 00:00:00,OE,xlsx"""
719719

720-
df = pd.read_csv(StringIO(raw), parse_dates=[0])
720+
df = pd.read_csv(StringIO(raw), parse_dates=[1])
721721
gb = df.groupby("Date")
722722
r = gb[["File"]].max()
723723
e = gb["File"].max().to_frame()
@@ -880,8 +880,8 @@ def test_cummin(dtypes_for_minmax):
880880
tm.assert_frame_equal(result, expected)
881881

882882
# GH 15561
883-
df = DataFrame({"a": [1], "b": pd.to_datetime(["2001"])})
884-
expected = Series(pd.to_datetime("2001"), index=[0], name="b")
883+
df = DataFrame({"a": [1], "b": pd.to_datetime(["2001-01-01"])})
884+
expected = Series(pd.to_datetime("2001-01-01"), index=[0], name="b")
885885

886886
result = df.groupby("a")["b"].cummin()
887887
tm.assert_series_equal(expected, result)

pandas/tests/groupby/transform/test_transform.py

+2-1
Original file line numberDiff line numberDiff line change
@@ -1070,7 +1070,8 @@ def demean_rename(x):
10701070
@pytest.mark.parametrize("func", [min, max, np.min, np.max, "first", "last"])
10711071
def test_groupby_transform_timezone_column(func):
10721072
# GH 24198
1073-
ts = pd.to_datetime("now", utc=True).tz_convert("Asia/Singapore")
1073+
with tm.assert_produces_warning(UserWarning, match="Could not infer format"):
1074+
ts = pd.to_datetime("now", utc=True).tz_convert("Asia/Singapore")
10741075
result = DataFrame({"end_time": [ts], "id": [1]})
10751076
result["max_end_time"] = result.groupby("id").end_time.transform(func)
10761077
expected = DataFrame([[ts, 1, ts]], columns=["end_time", "id", "max_end_time"])

pandas/tests/io/json/test_json_table_schema.py

+5-5
Original file line numberDiff line numberDiff line change
@@ -145,11 +145,11 @@ def test_as_json_table_type_bool_data(self, bool_type):
145145
@pytest.mark.parametrize(
146146
"date_data",
147147
[
148-
pd.to_datetime(["2016"]),
149-
pd.to_datetime(["2016"], utc=True),
150-
pd.Series(pd.to_datetime(["2016"])),
151-
pd.Series(pd.to_datetime(["2016"], utc=True)),
152-
pd.period_range("2016", freq="A", periods=3),
148+
pd.to_datetime(["2016-01-01"]),
149+
pd.to_datetime(["2016-01-01"], utc=True),
150+
pd.Series(pd.to_datetime(["2016-01-01"])),
151+
pd.Series(pd.to_datetime(["2016-01-01"], utc=True)),
152+
pd.period_range("2016-01-01", freq="A", periods=3),
153153
],
154154
)
155155
def test_as_json_table_type_date_data(self, date_data):

pandas/tests/io/test_sql.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -1384,7 +1384,7 @@ def test_sqlalchemy_type_mapping(self):
13841384

13851385
# Test Timestamp objects (no datetime64 because of timezone) (GH9085)
13861386
df = DataFrame(
1387-
{"time": to_datetime(["201412120154", "201412110254"], utc=True)}
1387+
{"time": to_datetime(["2014-12-12 01:54", "2014-12-11 02:54"], utc=True)}
13881388
)
13891389
db = sql.SQLDatabase(self.conn)
13901390
table = sql.SQLTable("test_type", db, frame=df)

pandas/tests/plotting/test_converter.py

+2-2
Original file line numberDiff line numberDiff line change
@@ -161,8 +161,8 @@ def dtc(self):
161161
return converter.DatetimeConverter()
162162

163163
def test_convert_accepts_unicode(self, dtc):
164-
r1 = dtc.convert("12:22", None, None)
165-
r2 = dtc.convert("12:22", None, None)
164+
r1 = dtc.convert("2000-01-01 12:22", None, None)
165+
r2 = dtc.convert("2000-01-01 12:22", None, None)
166166
assert r1 == r2, "DatetimeConverter.convert should accept unicode"
167167

168168
def test_conversion(self, dtc):

pandas/tests/reductions/test_reductions.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -193,7 +193,7 @@ def test_same_tz_min_max_axis_1(self, op, expected_col):
193193
def test_numpy_reduction_with_tz_aware_dtype(self, tz_aware_fixture, func):
194194
# GH 15552
195195
tz = tz_aware_fixture
196-
arg = pd.to_datetime(["2019"]).tz_localize(tz)
196+
arg = pd.to_datetime(["2019-01-01"]).tz_localize(tz)
197197
expected = Series(arg)
198198
result = getattr(np, func)(expected, expected)
199199
tm.assert_series_equal(result, expected)

pandas/tests/reshape/merge/test_merge.py

+4-1
Original file line numberDiff line numberDiff line change
@@ -2425,7 +2425,10 @@ def test_merge_datetime_upcast_dtype():
24252425
# https://github.com/pandas-dev/pandas/issues/31208
24262426
df1 = DataFrame({"x": ["a", "b", "c"], "y": ["1", "2", "4"]})
24272427
df2 = DataFrame(
2428-
{"y": ["1", "2", "3"], "z": pd.to_datetime(["2000", "2001", "2002"])}
2428+
{
2429+
"y": ["1", "2", "3"],
2430+
"z": pd.to_datetime(["2000-01-01", "2001-01-01", "2002-01-01"]),
2431+
}
24292432
)
24302433
result = merge(df1, df2, how="left", on="y")
24312434
expected = DataFrame(

pandas/tests/series/methods/test_nlargest.py

+4-2
Original file line numberDiff line numberDiff line change
@@ -43,9 +43,11 @@ def s_main_dtypes():
4343
"""
4444
df = pd.DataFrame(
4545
{
46-
"datetime": pd.to_datetime(["2003", "2002", "2001", "2002", "2005"]),
46+
"datetime": pd.to_datetime(
47+
["2003-01-01", "2002-01-01", "2001-01-01", "2002-01-01", "2005-01-01"]
48+
),
4749
"datetimetz": pd.to_datetime(
48-
["2003", "2002", "2001", "2002", "2005"]
50+
["2003-01-01", "2002-01-01", "2001-01-01", "2002-01-01", "2005-01-01"]
4951
).tz_localize("US/Eastern"),
5052
"timedelta": pd.to_timedelta(["3d", "2d", "1d", "2d", "5d"]),
5153
}

pandas/tests/series/test_ufunc.py

+3-3
Original file line numberDiff line numberDiff line change
@@ -257,9 +257,9 @@ def __add__(self, other):
257257
pd.array([1, 3, 2], dtype="Int64"),
258258
pd.array([1, 3, 2], dtype="Float32"),
259259
pd.array([1, 10, 2], dtype="Sparse[int]"),
260-
pd.to_datetime(["2000", "2010", "2001"]),
261-
pd.to_datetime(["2000", "2010", "2001"]).tz_localize("CET"),
262-
pd.to_datetime(["2000", "2010", "2001"]).to_period(freq="D"),
260+
pd.to_datetime(["2000-01-01", "2010-01-01", "2001-01-01"]),
261+
pd.to_datetime(["2000-01-01", "2010-01-01", "2001-01-01"]).tz_localize("CET"),
262+
pd.to_datetime(["2000-01-01", "2010-01-01", "2001-01-01"]).to_period(freq="D"),
263263
pd.to_timedelta(["1 Day", "3 Days", "2 Days"]),
264264
pd.IntervalIndex([pd.Interval(0, 1), pd.Interval(2, 3), pd.Interval(1, 2)]),
265265
],

0 commit comments

Comments
 (0)