Skip to content

BUG: read_json converted date strings with Z to UTC #26170

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 4 commits on May 7, 2019
Merged
Show file tree
Hide file tree
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 2 additions & 0 deletions doc/source/whatsnew/v0.25.0.rst
Original file line number Diff line number Diff line change
Expand Up @@ -290,6 +290,7 @@ Timezones
- Bug in :func:`Series.at` where setting :class:`Timestamp` with timezone raises ``TypeError`` (:issue:`25506`)
- Bug in :func:`DataFrame.update` when updating with timezone aware data would return timezone naive data (:issue:`25807`)
- Bug in :func:`to_datetime` where an uninformative ``RuntimeError`` was raised when passing a naive :class:`Timestamp` with datetime strings with mixed UTC offsets (:issue:`25978`)
- Bug in :func:`to_datetime` with ``unit='ns'`` would drop timezone information from the parsed argument (:issue:`26168`)

Numeric
^^^^^^^
Expand Down Expand Up @@ -368,6 +369,7 @@ I/O
- Improved :meth:`pandas.read_stata` and :class:`pandas.io.stata.StataReader` to read incorrectly formatted 118 format files saved by Stata (:issue:`25960`)
- Fixed bug in loading objects from S3 that contain ``#`` characters in the URL (:issue:`25945`)
- Adds ``use_bqstorage_api`` parameter to :func:`read_gbq` to speed up downloads of large data frames. This feature requires version 0.10.0 of the ``pandas-gbq`` library as well as the ``google-cloud-bigquery-storage`` and ``fastavro`` libraries. (:issue:`26104`)
- Bug in :func:`read_json` where date strings with ``Z`` were not converted to a UTC timezone (:issue:`26168`)

Plotting
^^^^^^^^
Expand Down
2 changes: 1 addition & 1 deletion pandas/_libs/tslib.pyx
Original file line number Diff line number Diff line change
Expand Up @@ -330,7 +330,7 @@ def array_with_unit_to_datetime(ndarray values, object unit,
if unit == 'ns':
if issubclass(values.dtype.type, np.integer):
return values.astype('M8[ns]')
return array_to_datetime(values.astype(object), errors=errors)[0]
return array_to_datetime(values.astype(object), errors=errors)

m = cast_from_unit(None, unit)

Expand Down
24 changes: 18 additions & 6 deletions pandas/core/tools/datetimes.py
Original file line number Diff line number Diff line change
Expand Up @@ -200,19 +200,31 @@ def _convert_listlike_datetimes(arg, box, format, name=None, tz=None,
if format is not None:
raise ValueError("cannot specify both format and unit")
arg = getattr(arg, 'values', arg)
tz_parsed = None
result = tslib.array_with_unit_to_datetime(arg, unit,
errors=errors)
# GH 25546:
# Capture tz information from array_with_unit_to_datetime
if isinstance(result, tuple):
result, tz_parsed = result
if box:
if errors == 'ignore':
from pandas import Index
result = Index(result, name=name)
# GH 23758: We may still need to localize the result with tz
try:
return result.tz_localize(tz)
except AttributeError:
if not isinstance(result, DatetimeIndex):
return result

return DatetimeIndex(result, tz=tz, name=name)
else:
result = DatetimeIndex(result, name=name)
# GH 23758: We may still need to localize the result with tz
# Apply tz_parsed first (from arg) and then tz (from caller)
if tz_parsed is not None:
# result will be naive but in UTC
result = result.tz_localize('UTC').tz_convert(tz_parsed)
if tz is not None:
if result.tz is None:
result = result.tz_localize(tz)
else:
result = result.tz_convert(tz)
return result
elif getattr(arg, 'ndim', 1) > 1:
raise TypeError('arg must be a string, datetime, list, tuple, '
Expand Down
13 changes: 13 additions & 0 deletions pandas/tests/indexes/datetimes/test_tools.py
Original file line number Diff line number Diff line change
Expand Up @@ -1968,3 +1968,16 @@ def test_processing_order(self):
result = pd.to_datetime(300 * 365, unit='D', origin='1870-01-01')
expected = Timestamp('2169-10-20 00:00:00')
assert result == expected

@pytest.mark.parametrize('offset,utc,exp', [
    ["Z", True, "2019-01-01T00:00:00.000Z"],
    ["Z", None, "2019-01-01T00:00:00.000Z"],
    ["-01:00", True, "2019-01-01T01:00:00.000Z"],
    ["-01:00", None, "2019-01-01T00:00:00.000-01:00"],
])
def test_arg_tz_ns_unit(self, offset, utc, exp):
    # GH 25546: unit='ns' must not drop a tz offset carried by the
    # string itself; the parsed result should match parsing ``exp``.
    timestamp = "2019-01-01T00:00:00.000" + offset
    parsed = to_datetime([timestamp], unit='ns', utc=utc)
    tm.assert_index_equal(parsed, to_datetime([exp]))
29 changes: 24 additions & 5 deletions pandas/tests/io/json/test_pandas.py
Original file line number Diff line number Diff line change
Expand Up @@ -763,7 +763,10 @@ def test_w_date(date, date_unit=None):
else:
json = df.to_json(date_format='iso')
result = read_json(json)
assert_frame_equal(result, df)
expected = df.copy()
expected.index = expected.index.tz_localize('UTC')
expected['date'] = expected['date'].dt.tz_localize('UTC')
assert_frame_equal(result, expected)

test_w_date('20130101 20:43:42.123')
test_w_date('20130101 20:43:42', date_unit='s')
Expand All @@ -785,7 +788,10 @@ def test_w_date(date, date_unit=None):
else:
json = ts.to_json(date_format='iso')
result = read_json(json, typ='series')
assert_series_equal(result, ts)
expected = ts.copy()
expected.index = expected.index.tz_localize('UTC')
expected = expected.dt.tz_localize('UTC')
assert_series_equal(result, expected)

test_w_date('20130101 20:43:42.123')
test_w_date('20130101 20:43:42', date_unit='s')
Expand Down Expand Up @@ -881,11 +887,15 @@ def test_round_trip_exception_(self):

@network
@pytest.mark.single
@pytest.mark.parametrize('field,dtype', [
    ['created_at', pd.DatetimeTZDtype(tz='UTC')],
    ['closed_at', 'datetime64[ns]'],
    ['updated_at', pd.DatetimeTZDtype(tz='UTC')]
])
def test_url(self, field, dtype):
    # GH 26168: ISO date strings ending in 'Z' now parse as UTC-aware,
    # so created_at/updated_at come back tz-aware; closed_at is expected
    # to stay naive here (presumably its values carry no offset in this
    # sample — NOTE(review): confirm against the live API payload).
    url = 'https://api.github.com/repos/pandas-dev/pandas/issues?per_page=5'  # noqa
    result = read_json(url, convert_dates=True)
    assert result[field].dtype == dtype

def test_timedelta(self):
converter = lambda x: pd.to_timedelta(x, unit='ms')
Expand Down Expand Up @@ -1299,3 +1309,12 @@ def test_index_false_from_json_to_json(self, orient, index):
dfjson = expected.to_json(orient=orient, index=index)
result = read_json(dfjson, orient=orient)
assert_frame_equal(result, expected)

def test_read_timezone_information(self):
# GH 25546
result = read_json('{"2019-01-01T11:00:00.000Z":88}',
typ='series', orient='index')
expected = Series([88],
index=DatetimeIndex(['2019-01-01 11:00:00'],
tz='UTC'))
assert_series_equal(result, expected)