|
12 | 12 | import pandas.util.testing as tm
|
13 | 13 |
|
14 | 14 |
|
15 |
class TestParseISO8601(object):
    # Exercises only the dedicated ISO-8601 parser; flexibility in
    # separators and leading zeros is intentional.  Timestamp
    # construction itself falls back to dateutil for anything else.

    @pytest.mark.parametrize('date_str, exp', [
        ('2011-01-02', datetime(2011, 1, 2)),
        ('2011-1-2', datetime(2011, 1, 2)),
        ('2011-01', datetime(2011, 1, 1)),
        ('2011-1', datetime(2011, 1, 1)),
        ('2011 01 02', datetime(2011, 1, 2)),
        ('2011.01.02', datetime(2011, 1, 2)),
        ('2011/01/02', datetime(2011, 1, 2)),
        ('2011\\01\\02', datetime(2011, 1, 2)),
        ('2013-01-01 05:30:00', datetime(2013, 1, 1, 5, 30)),
        ('2013-1-1 5:30:00', datetime(2013, 1, 1, 5, 30))])
    def test_parsers_iso8601(self, date_str, exp):
        # GH#12060
        parsed = tslib._test_parse_iso8601(date_str)
        assert parsed == exp

    @pytest.mark.parametrize(
        'date_str',
        ['2011-01/02', '2011^11^11',
         '201401', '201111', '200101',
         # mixed separated and unseparated
         '2005-0101', '200501-01',
         '20010101 12:3456',
         '20010101 1234:56',
         # HHMMSS must have two digits in
         # each component if unseparated
         '20010101 1', '20010101 123',
         '20010101 12345', '20010101 12345Z',
         # wrong separator for HHMMSS
         '2001-01-01 12-34-56'])
    def test_parsers_iso8601_invalid(self, date_str):
        # Separators must all match - YYYYMM alone is not valid.
        with pytest.raises(ValueError):
            tslib._test_parse_iso8601(date_str)
55 |
@pytest.mark.parametrize("data,expected", [
    (["01-01-2013", "01-02-2013"],
     ["2013-01-01T00:00:00.000000000-0000",
      "2013-01-02T00:00:00.000000000-0000"]),
    (["Mon Sep 16 2013", "Tue Sep 17 2013"],
     ["2013-09-16T00:00:00.000000000-0000",
      "2013-09-17T00:00:00.000000000-0000"])
])
def test_parsing_valid_dates(data, expected):
    # Valid date strings are converted to nanosecond-resolution
    # datetime64 values by array_to_datetime.
    values = np.array(data, dtype=object)
    result, _ = tslib.array_to_datetime(values)

    tm.assert_numpy_array_equal(
        result, np_array_datetime64_compat(expected, dtype="M8[ns]"))
| 29 | + |
| 30 | + |
@pytest.mark.parametrize("dt_string, expected_tz", [
    ["01-01-2013 08:00:00+08:00", 480],
    ["2013-01-01T08:00:00.000000000+0800", 480],
    ["2012-12-31T16:00:00.000000000-0800", -480],
    ["12-31-2012 23:00:00-01:00", -60]
])
def test_parsing_timezone_offsets(dt_string, expected_tz):
    # Each of these strings denotes the same instant once its
    # offset is applied, so all should parse to the same values.
    expected, _ = tslib.array_to_datetime(
        np.array(["01-01-2013 00:00:00"], dtype=object))

    result, result_tz = tslib.array_to_datetime(
        np.array([dt_string], dtype=object))

    tm.assert_numpy_array_equal(result, expected)
    # pytz.FixedOffset memoizes its instances, which is why an
    # identity ("is") check against a fresh construction works here.
    assert result_tz is pytz.FixedOffset(expected_tz)
| 48 | + |
| 49 | + |
def test_parsing_non_iso_timezone_offset():
    # A non-ISO date with an explicit +0000 offset parses to the
    # plain UTC datetime and a FixedOffset(0) timezone.
    arr = np.array(["01-01-2013T00:00:00.000000000+0000"], dtype=object)
    result, result_tz = tslib.array_to_datetime(arr)

    expected = np.array([np.datetime64("2013-01-01 00:00:00.000000000")])
    tm.assert_numpy_array_equal(result, expected)
    assert result_tz is pytz.FixedOffset(0)
| 59 | + |
| 60 | + |
def test_parsing_different_timezone_offsets():
    # see gh-17697
    #
    # Mixed offsets cannot be represented by one fixed timezone, so
    # the result stays object dtype and no common tz is returned.
    values = np.array(
        ["2015-11-18 15:30:00+05:30", "2015-11-18 15:30:00+06:30"],
        dtype=object)

    result, result_tz = tslib.array_to_datetime(values)
    expected = np.array([datetime(2015, 11, 18, 15, 30,
                                  tzinfo=tzoffset(None, 19800)),
                         datetime(2015, 11, 18, 15, 30,
                                  tzinfo=tzoffset(None, 23400))],
                        dtype=object)

    tm.assert_numpy_array_equal(result, expected)
    assert result_tz is None
| 75 | + |
| 76 | + |
@pytest.mark.parametrize("data", [
    ["-352.737091", "183.575577"],
    ["1", "2", "3", "4", "5"]
])
def test_number_looking_strings_not_into_datetime(data):
    # see gh-4601
    #
    # Strings that merely look numeric are not datetimes, so with
    # errors="ignore" the input comes back unchanged.
    arr = np.array(data, dtype=object)
    result, _ = tslib.array_to_datetime(arr, errors="ignore")

    tm.assert_numpy_array_equal(result, arr)
| 90 | + |
| 91 | + |
@pytest.mark.parametrize("invalid_date", [
    date(1000, 1, 1),
    datetime(1000, 1, 1),
    "1000-01-01",
    "Jan 1, 1000",
    np.datetime64("1000-01-01")])
@pytest.mark.parametrize("errors", ["coerce", "raise"])
def test_coerce_outside_ns_bounds(invalid_date, errors):
    # Dates outside the representable datetime64[ns] range either
    # raise or become iNaT, depending on the errors mode.
    arr = np.array([invalid_date], dtype="object")
    kwargs = dict(values=arr, errors=errors)

    if errors == "raise":
        msg = "Out of bounds nanosecond timestamp"

        with pytest.raises(ValueError, match=msg):
            tslib.array_to_datetime(**kwargs)
    else:  # coerce.
        result, _ = tslib.array_to_datetime(**kwargs)
        expected = np.array([iNaT], dtype="M8[ns]")

        tm.assert_numpy_array_equal(result, expected)
def test_coerce_outside_ns_bounds_one_valid():
    # Coercion is per-element: the out-of-bounds year-1000 date
    # becomes iNaT while the in-bounds date still converts.
    arr = np.array(["1/1/1000", "1/1/2000"], dtype=object)
    result, _ = tslib.array_to_datetime(arr, errors="coerce")

    expected = np_array_datetime64_compat(
        [iNaT, "2000-01-01T00:00:00.000000000-0000"], dtype="M8[ns]")

    tm.assert_numpy_array_equal(result, expected)
145 | 123 |
@pytest.mark.parametrize("errors", ["ignore", "coerce"])
def test_coerce_of_invalid_datetimes(errors):
    # Mixed valid and invalid date strings: "ignore" returns the
    # input untouched, "coerce" converts what it can and maps the
    # invalid entries to iNaT.
    arr = np.array(["01-01-2013", "not_a_date", "1"], dtype=object)
    kwargs = dict(values=arr, errors=errors)

    if errors == "ignore":
        # Without coercing, the presence of any invalid
        # dates prevents any values from being converted.
        result, _ = tslib.array_to_datetime(**kwargs)
        tm.assert_numpy_array_equal(result, arr)
    else:  # coerce.
        # With coercing, the invalid dates becomes iNaT
        #
        # Fix: use the kwargs built above instead of re-spelling the
        # arguments (the original left `kwargs` dead in this branch).
        result, _ = tslib.array_to_datetime(**kwargs)
        expected = ["2013-01-01T00:00:00.000000000-0000",
                    iNaT,
                    iNaT]

        tm.assert_numpy_array_equal(
            result,
            np_array_datetime64_compat(expected, dtype="M8[ns]"))
def test_to_datetime_barely_out_of_bounds():
    # see gh-19382, gh-19529
    #
    # This instant is close enough to the datetime64[ns] upper bound
    # that merely dropping the nanoseconds would make it in-bounds,
    # so the parser must still reject it.
    arr = np.array(["2262-04-11 23:47:16.854775808"], dtype=object)
    msg = "Out of bounds nanosecond timestamp: 2262-04-11 23:47:16"

    with pytest.raises(tslib.OutOfBoundsDatetime, match=msg):
        tslib.array_to_datetime(arr)
0 commit comments