From 2b62da0b99d4b63498153e0cd8774621f1a651b6 Mon Sep 17 00:00:00 2001
From: suzutomato
Date: Mon, 16 Dec 2019 22:54:24 -0800
Subject: [PATCH 01/10] BUG: func 'to_pickle' and 'read_pickle' where not accepting URL GH30299

---
 doc/source/whatsnew/v1.0.0.rst |  2 +-
 pandas/io/pickle.py            | 65 +++++++++++++++++++++++++---------
 2 files changed, 50 insertions(+), 17 deletions(-)

diff --git a/doc/source/whatsnew/v1.0.0.rst b/doc/source/whatsnew/v1.0.0.rst
index c288a008777cf..7c754effacdc3 100755
--- a/doc/source/whatsnew/v1.0.0.rst
+++ b/doc/source/whatsnew/v1.0.0.rst
@@ -797,7 +797,7 @@ I/O
 - Bug in :func:`read_json` where default encoding was not set to ``utf-8`` (:issue:`29565`)
 - Bug in :class:`PythonParser` where str and bytes were being mixed when dealing with the decimal field (:issue:`29650`)
 - :meth:`read_gbq` now accepts ``progress_bar_type`` to display progress bar while the data downloads. (:issue:`29857`)
--
+- Bug in :func: 'to_pickle' and :func: 'read_pickle' where not accepting URL (:issue:'30299')
 
 Plotting
 ^^^^^^^^
diff --git a/pandas/io/pickle.py b/pandas/io/pickle.py
index 0a0ccedd78f00..51d9019976134 100644
--- a/pandas/io/pickle.py
+++ b/pandas/io/pickle.py
@@ -1,13 +1,24 @@
 """ pickle compat """
 import pickle
+from typing import Any, Optional
 import warnings
 
 from pandas.compat import pickle_compat as pc
 
-from pandas.io.common import _get_handle, _stringify_path
+from pandas._typing import FilePathOrBuffer
 
+from pandas.io.common import (
+    _get_handle,
+    get_filepath_or_buffer as _get_filepath_or_buffer,
+)
 
-def to_pickle(obj, path, compression="infer", protocol=pickle.HIGHEST_PROTOCOL):
+
+def to_pickle(
+    obj: Any,
+    filepath_or_buffer: FilePathOrBuffer,
+    compression: Optional[str] = "infer",
+    protocol: int = pickle.HIGHEST_PROTOCOL,
+):
     """
     Pickle (serialize) object to file.
 
@@ -16,10 +27,12 @@ def to_pickle(obj, path, compression="infer", protocol=pickle.HIGHEST_PROTOCOL):
     obj : any object
         Any python object.
     path : str
-        File path where the pickled object will be stored.
+        File path, URL, or buffer where the pickled object will be stored.
     compression : {'infer', 'gzip', 'bz2', 'zip', 'xz', None}, default 'infer'
-        A string representing the compression to use in the output file. By
-        default, infers from the file extension in specified path.
+        If 'infer' and 'path_or_url' is path-like, then detect compression from
+        the following extensions: '.gz', '.bz2', '.zip', or '.xz' (otherwise no
+        compression) If 'infer' and 'path_or_url' is not path-like, then use
+        None (= no decompression).
     protocol : int
         Int which indicates which protocol should be used by the pickler,
         default HIGHEST_PROTOCOL (see [1], paragraph 12.1.2). The possible
@@ -63,8 +76,12 @@ def to_pickle(obj, path, compression="infer", protocol=pickle.HIGHEST_PROTOCOL):
     >>> import os
     >>> os.remove("./dummy.pkl")
     """
-    path = _stringify_path(path)
-    f, fh = _get_handle(path, "wb", compression=compression, is_text=False)
+    fp_or_buf, _, compression, should_close = _get_filepath_or_buffer(
+        filepath_or_buffer, compression=compression, mode="wb"
+    )
+    if not isinstance(fp_or_buf, str) and compression == "infer":
+        compression = None
+    f, fh = _get_handle(fp_or_buf, "wb", compression=compression, is_text=False)
     if protocol < 0:
         protocol = pickle.HIGHEST_PROTOCOL
     try:
@@ -73,9 +90,16 @@ def to_pickle(obj, path, compression="infer", protocol=pickle.HIGHEST_PROTOCOL):
         f.close()
         for _f in fh:
             _f.close()
+    if should_close:
+        try:
+            fp_or_buf.close()
+        except ValueError:
+            pass
 
 
-def read_pickle(path, compression="infer"):
+def read_pickle(
+    filepath_or_buffer: FilePathOrBuffer, compression: Optional[str] = "infer"
+):
     """
     Load pickled pandas object (or any object) from file.
@@ -86,13 +110,13 @@
 
     Parameters
     ----------
-    path : str
-        File path where the pickled object will be loaded.
+    filepath_or_buffer : str
+        File path, URL, or buffer where the pickled object will be loaded from.
     compression : {'infer', 'gzip', 'bz2', 'zip', 'xz', None}, default 'infer'
-        For on-the-fly decompression of on-disk data. If 'infer', then use
-        gzip, bz2, xz or zip if path ends in '.gz', '.bz2', '.xz',
-        or '.zip' respectively, and no decompression otherwise.
-        Set to None for no decompression.
+        If 'infer' and 'path_or_url' is path-like, then detect compression from
+        the following extensions: '.gz', '.bz2', '.zip', or '.xz' (otherwise no
+        compression) If 'infer' and 'path_or_url' is not path-like, then use
+        None (= no decompression).
 
     Returns
     -------
@@ -134,8 +158,12 @@ def read_pickle(path, compression="infer"):
     >>> import os
     >>> os.remove("./dummy.pkl")
     """
-    path = _stringify_path(path)
-    f, fh = _get_handle(path, "rb", compression=compression, is_text=False)
+    fp_or_buf, _, compression, should_close = _get_filepath_or_buffer(
+        filepath_or_buffer, compression=compression
+    )
+    if not isinstance(fp_or_buf, str) and compression == "infer":
+        compression = None
+    f, fh = _get_handle(fp_or_buf, "rb", compression=compression, is_text=False)
 
     # 1) try standard library Pickle
     # 2) try pickle_compat (older pandas version) to handle subclass changes
@@ -159,3 +187,8 @@ def read_pickle(path, compression="infer"):
         f.close()
         for _f in fh:
             _f.close()
+    if should_close:
+        try:
+            fp_or_buf.close()
+        except ValueError:
+            pass

From e5b8edd4fdb74763ca725fb1c56f0225afd2ee7d Mon Sep 17 00:00:00 2001
From: suzutomato
Date: Wed, 25 Dec 2019 01:34:30 -0800
Subject: [PATCH 02/10] Update pickle.py to accept URLs

---
 pandas/io/pickle.py | 15 +++++++--------
 1 file changed, 7 insertions(+), 8 deletions(-)

diff --git a/pandas/io/pickle.py b/pandas/io/pickle.py
index 51d9019976134..4e043077dca58 100644
--- a/pandas/io/pickle.py
+++ b/pandas/io/pickle.py
@@ -7,10 +7,7 @@
 
 from pandas._typing import FilePathOrBuffer
 
-from pandas.io.common import (
-    _get_handle,
-    get_filepath_or_buffer as _get_filepath_or_buffer,
-)
+from pandas.io.common import _get_handle, get_filepath_or_buffer
 
 
 def to_pickle(
@@ -26,8 +23,9 @@ def to_pickle(
     ----------
     obj : any object
         Any python object.
-    path : str
+    filepath_or_buffer : str, path object or file-like object
         File path, URL, or buffer where the pickled object will be stored.
+        URL has to be either of S3 or GCS.
     compression : {'infer', 'gzip', 'bz2', 'zip', 'xz', None}, default 'infer'
         If 'infer' and 'path_or_url' is path-like, then detect compression from
         the following extensions: '.gz', '.bz2', '.zip', or '.xz' (otherwise no
@@ -76,7 +74,7 @@ def to_pickle(
     >>> import os
     >>> os.remove("./dummy.pkl")
     """
-    fp_or_buf, _, compression, should_close = _get_filepath_or_buffer(
+    fp_or_buf, _, compression, should_close = get_filepath_or_buffer(
         filepath_or_buffer, compression=compression, mode="wb"
     )
     if not isinstance(fp_or_buf, str) and compression == "infer":
@@ -110,8 +108,9 @@ def read_pickle(
 
     Parameters
     ----------
-    filepath_or_buffer : str
+    filepath_or_buffer : str, path object or file-like object
         File path, URL, or buffer where the pickled object will be loaded from.
+        URL is not limited to S3 and GCS.
     compression : {'infer', 'gzip', 'bz2', 'zip', 'xz', None}, default 'infer'
         If 'infer' and 'path_or_url' is path-like, then detect compression from
         the following extensions: '.gz', '.bz2', '.zip', or '.xz' (otherwise no
@@ -158,7 +157,7 @@ def read_pickle(
     >>> import os
     >>> os.remove("./dummy.pkl")
     """
-    fp_or_buf, _, compression, should_close = _get_filepath_or_buffer(
+    fp_or_buf, _, compression, should_close = get_filepath_or_buffer(
         filepath_or_buffer, compression=compression
     )
     if not isinstance(fp_or_buf, str) and compression == "infer":

From 3de08a11b27e048d9edf95b0f114c0665540137d Mon Sep 17 00:00:00 2001
From: suzutomato
Date: Wed, 25 Dec 2019 01:35:03 -0800
Subject: [PATCH 03/10] Add tests for Buffer and URL I/O

---
 pandas/tests/io/test_pickle.py | 90 ++++++++++++++++++++++++++++++++++
 1 file changed, 90 insertions(+)

diff --git a/pandas/tests/io/test_pickle.py b/pandas/tests/io/test_pickle.py
index 3be966edef080..1592a306bce38 100644
--- a/pandas/tests/io/test_pickle.py
+++ b/pandas/tests/io/test_pickle.py
@@ -22,6 +22,7 @@
 import pytest
 
 from pandas.compat import _get_lzma_file, _import_lzma, is_platform_little_endian
+import pandas.util._test_decorators as td
 
 import pandas as pd
 from pandas import Index
@@ -390,3 +391,92 @@ def test_unicode_decode_error():
     # just test the columns are correct since the values are random
     excols = pd.Index(["a", "b", "c"])
     tm.assert_index_equal(df.columns, excols)
+
+
+# ---------------------
+# tests for buffer I/O
+# ---------------------
+
+
+def test_pickle_buffer_roundtrip():
+    with tm.ensure_clean() as path:
+        df = tm.makeDataFrame()
+        df.to_pickle(open(path, "wb"))
+        result = pd.read_pickle(open(path, "rb"))
+        tm.assert_frame_equal(df, result)
+
+
+# ---------------------
+# tests for URL I/O
+# ---------------------
+
+
+@pytest.mark.parametrize("mockurl", ["http://url.com", "ftp://test.com"])
+def test_pickle_generalurl_read(monkeypatch, mockurl):
+    def python_pickler(obj, path):
+        with open(path, "wb") as fh:
+            pickle.dump(obj, fh, protocol=-1)
+
+    class MockReadResponse:
+        def __init__(self, path):
+            self.file = open(path, "rb")
+            self.headers = {"Content-Encoding": None}
+
+        def read(self):
+            return self.file.read()
+
+        def close(self):
+            return self.file.close()
+
+    with tm.ensure_clean() as path:
+
+        def mock_urlopen_read(*args, **kwargs):
+            return MockReadResponse(path)
+
+        df = tm.makeDataFrame()
+        python_pickler(df, path)
+        monkeypatch.setattr("urllib.request.urlopen", mock_urlopen_read)
+        result = pd.read_pickle(mockurl)
+        tm.assert_frame_equal(df, result)
+
+
+@td.skip_if_no("gcsfs")
+@pytest.mark.parametrize("mockurl", ["gs://gcs.com", "gcs://gcs.com"])
+def test_pickle_gcsurl_roundtrip(monkeypatch, mockurl):
+    with tm.ensure_clean() as path:
+
+        class MockGCSFileSystem:
+            def __init__(self, *args, **kwargs):
+                pass
+
+            def open(self, *args):
+                mode = args[1] or None
+                f = open(path, mode)
+                return f
+
+        monkeypatch.setattr("gcsfs.GCSFileSystem", MockGCSFileSystem)
+        df = tm.makeDataFrame()
+        df.to_pickle(mockurl)
+        result = pd.read_pickle(mockurl)
+        tm.assert_frame_equal(df, result)
+
+
+@td.skip_if_no("s3fs")
+@pytest.mark.parametrize("mockurl", ["s3://s3.com", "s3n://s3.com", "s3a://s3.com"])
+def test_pickle_s3url_roundtrip(monkeypatch, mockurl):
+    with tm.ensure_clean() as path:
+
+        class MockS3FileSystem:
+            def __init__(self, *args, **kwargs):
+                pass
+
+            def open(self, *args):
+                mode = args[1] or None
+                f = open(path, mode)
+                return f
+
+        monkeypatch.setattr("s3fs.S3FileSystem", MockS3FileSystem)
+        df = tm.makeDataFrame()
+        df.to_pickle(mockurl)
+        result = pd.read_pickle(mockurl)
+        tm.assert_frame_equal(df, result)

From 325170d5299afc7c3f56f9155400cf202cb8c664 Mon Sep 17 00:00:00 2001
From: suzutomato
Date: Wed, 25 Dec 2019 01:35:38 -0800
Subject: [PATCH 04/10] Update what's new for pickle updates

---
 doc/source/whatsnew/v1.0.0.rst | 5 +++--
 1 file changed, 3 insertions(+), 2 deletions(-)

diff --git a/doc/source/whatsnew/v1.0.0.rst b/doc/source/whatsnew/v1.0.0.rst
index 7c754effacdc3..7c27af4a74f01 100755
--- a/doc/source/whatsnew/v1.0.0.rst
+++ b/doc/source/whatsnew/v1.0.0.rst
@@ -681,6 +681,7 @@ Categorical
   same type as if one used the :meth:`.str.` / :meth:`.dt.` on a :class:`Series` of that type. E.g. when accessing :meth:`Series.dt.tz_localize` on a
   :class:`Categorical` with duplicate entries, the accessor was skipping duplicates (:issue:`27952`)
 - Bug in :meth:`DataFrame.replace` and :meth:`Series.replace` that would give incorrect results on categorical data (:issue:`26988`)
+- Bug where calling :meth:`Categorical.min` or :meth:`Categorical.max` on an empty Categorical would raise a numpy exception (:issue:`30227`)
 
 
 Datetimelike
@@ -797,7 +798,7 @@ I/O
 - Bug in :func:`read_json` where default encoding was not set to ``utf-8`` (:issue:`29565`)
 - Bug in :class:`PythonParser` where str and bytes were being mixed when dealing with the decimal field (:issue:`29650`)
 - :meth:`read_gbq` now accepts ``progress_bar_type`` to display progress bar while the data downloads. (:issue:`29857`)
-- Bug in :func: 'to_pickle' and :func: 'read_pickle' where not accepting URL (:issue:'30299')
+- Bug in :func: 'to_pickle' and :func: 'read_pickle' where not accepting URL (:issue:'30163')
 
 Plotting
 ^^^^^^^^
@@ -862,7 +863,7 @@ ExtensionArray
 - Bug in :class:`arrays.PandasArray` when setting a scalar string (:issue:`28118`, :issue:`28150`).
 - Bug where nullable integers could not be compared to strings (:issue:`28930`)
--
+- Bug where :class:`DataFrame` constructor raised ValueError with list-like data and ``dtype`` specified (:issue:`30280`)
 
 
 Other

From 4c0c1cb96d2bc43d3a0ca8515d235970ca0afc28 Mon Sep 17 00:00:00 2001
From: suzutomato
Date: Wed, 25 Dec 2019 11:43:17 -0800
Subject: [PATCH 05/10] Update type hints in util/testing to align with pickle.py

---
 pandas/util/testing.py | 12 ++++++------
 1 file changed, 6 insertions(+), 6 deletions(-)

diff --git a/pandas/util/testing.py b/pandas/util/testing.py
index c31cddc102afb..dc2be33937d7e 100644
--- a/pandas/util/testing.py
+++ b/pandas/util/testing.py
@@ -8,7 +8,7 @@
 from shutil import rmtree
 import string
 import tempfile
-from typing import List, Optional, Union, cast
+from typing import Any, List, Optional, Union, cast
 import warnings
 import zipfile
 
@@ -22,7 +22,7 @@
 )
 
 import pandas._libs.testing as _testing
-from pandas._typing import FrameOrSeries
+from pandas._typing import FilePathOrBuffer
 from pandas.compat import _get_lzma_file, _import_lzma
 
 from pandas.core.dtypes.common import (
@@ -101,15 +101,15 @@ def reset_display_options():
     pd.reset_option("^display.", silent=True)
 
 
-def round_trip_pickle(obj: FrameOrSeries, path: Optional[str] = None) -> FrameOrSeries:
+def round_trip_pickle(obj: Any, path: FilePathOrBuffer = ""):
     """
     Pickle an object and then read it again.
 
     Parameters
    ----------
-    obj : pandas object
+    obj : any object
        The object to pickle and then re-read.
-    path : str, default None
+    path : str, path object or file-like object, default None
        The path where the pickled object is written and then read.
 
    Returns
@@ -117,7 +117,7 @@
    pandas object
        The original object that was pickled and then re-read.
    """
-    if path is None:
+    if not path:
        path = f"__{rands(10)}__.pickle"
    with ensure_clean(path) as path:
        pd.to_pickle(obj, path)
        return pd.read_pickle(path)

From dac735803683ecba6954072594942c6f0fde8841 Mon Sep 17 00:00:00 2001
From: suzutomato
Date: Wed, 25 Dec 2019 11:58:52 -0800
Subject: [PATCH 06/10] Add an internal var to testing.py to negate type hint issue

---
 pandas/util/testing.py | 17 ++++++++++-------
 1 file changed, 10 insertions(+), 7 deletions(-)

diff --git a/pandas/util/testing.py b/pandas/util/testing.py
index dc2be33937d7e..4a88becea24bf 100644
--- a/pandas/util/testing.py
+++ b/pandas/util/testing.py
@@ -22,7 +22,7 @@
 )
 
 import pandas._libs.testing as _testing
-from pandas._typing import FilePathOrBuffer
+from pandas._typing import FilePathOrBuffer, FrameOrSeries
 from pandas.compat import _get_lzma_file, _import_lzma
 
 from pandas.core.dtypes.common import (
@@ -101,7 +101,9 @@ def reset_display_options():
     pd.reset_option("^display.", silent=True)
 
 
-def round_trip_pickle(obj: Any, path: FilePathOrBuffer = ""):
+def round_trip_pickle(
+    obj: Any, path: Optional[FilePathOrBuffer] = None
+) -> FrameOrSeries:
     """
     Pickle an object and then read it again.
 
@@ -117,11 +119,12 @@
    pandas object
        The original object that was pickled and then re-read.
    """
-    if not path:
-        path = f"__{rands(10)}__.pickle"
-    with ensure_clean(path) as path:
-        pd.to_pickle(obj, path)
-        return pd.read_pickle(path)
+    _path = path
+    if _path is None:
+        _path = f"__{rands(10)}__.pickle"
+    with ensure_clean(_path) as path:
+        pd.to_pickle(obj, _path)
+        return pd.read_pickle(_path)
 
 
 def round_trip_pathlib(writer, reader, path: Optional[str] = None):

From 4362af9d8f3f2450fae49fe53a38b7cc1f4e2acd Mon Sep 17 00:00:00 2001
From: suzutomato
Date: Sat, 4 Jan 2020 14:25:55 -0800
Subject: [PATCH 07/10] Use with for open() in test_pickle.py, add versionchanged directives to pickle.py, move the updates to other enhancement in v1.0.0.rst

---
 doc/source/whatsnew/v1.0.0.rst | 3 +--
 pandas/io/pickle.py            | 6 ++++--
 pandas/tests/io/test_pickle.py | 6 ++++--
 3 files changed, 9 insertions(+), 6 deletions(-)

diff --git a/doc/source/whatsnew/v1.0.0.rst b/doc/source/whatsnew/v1.0.0.rst
index 10a475e91ed16..639ef658c3ce3 100755
--- a/doc/source/whatsnew/v1.0.0.rst
+++ b/doc/source/whatsnew/v1.0.0.rst
@@ -227,7 +227,7 @@ Other enhancements
 - Added new writer for exporting Stata dta files in version 118, ``StataWriter118``. This format supports exporting strings containing Unicode characters (:issue:`23573`)
 - :meth:`Series.map` now accepts ``collections.abc.Mapping`` subclasses as a mapper (:issue:`29733`)
 - The ``pandas.datetime`` class is now deprecated. Import from ``datetime`` instead (:issue:`30296`)
-
+- :func: 'to_pickle' and :func: 'read_pickle' now accept URL (:issue:'30163')
 
 
 Build Changes
@@ -934,7 +934,6 @@ I/O
 - :meth:`read_gbq` now accepts ``progress_bar_type`` to display progress bar while the data downloads. (:issue:`29857`)
 - Bug in :func:`pandas.io.json.json_normalize` where a missing value in the location specified by `record_path` would raise a ``TypeError`` (:issue:`30148`)
 - :func:`read_excel` now accepts binary data (:issue:`15914`)
-- Bug in :func: 'to_pickle' and :func: 'read_pickle' where not accepting URL (:issue:'30163')
 
 Plotting
 ^^^^^^^^
diff --git a/pandas/io/pickle.py b/pandas/io/pickle.py
index 4c192fcc5e195..8651d09f7b40b 100644
--- a/pandas/io/pickle.py
+++ b/pandas/io/pickle.py
@@ -24,7 +24,8 @@ def to_pickle(
         Any python object.
     filepath_or_buffer : str, path object or file-like object
         File path, URL, or buffer where the pickled object will be stored.
-        URL has to be either of S3 or GCS.
+        .. versionchanged:: 1.0.0
+           Accept URL. URL has to be of S3 or GCS.
     compression : {'infer', 'gzip', 'bz2', 'zip', 'xz', None}, default 'infer'
         If 'infer' and 'path_or_url' is path-like, then detect compression from
         the following extensions: '.gz', '.bz2', '.zip', or '.xz' (otherwise no
@@ -109,7 +110,8 @@ def read_pickle(
     ----------
     filepath_or_buffer : str, path object or file-like object
         File path, URL, or buffer where the pickled object will be loaded from.
-        URL is not limited to S3 and GCS.
+        .. versionchanged:: 1.0.0
+           Accept URL. URL is not limited to S3 and GCS.
     compression : {'infer', 'gzip', 'bz2', 'zip', 'xz', None}, default 'infer'
         If 'infer' and 'path_or_url' is path-like, then detect compression from
         the following extensions: '.gz', '.bz2', '.zip', or '.xz' (otherwise no
diff --git a/pandas/tests/io/test_pickle.py b/pandas/tests/io/test_pickle.py
index cbf35e68351bd..9c81fba9073e0 100644
--- a/pandas/tests/io/test_pickle.py
+++ b/pandas/tests/io/test_pickle.py
@@ -401,8 +401,10 @@ def test_unicode_decode_error():
 def test_pickle_buffer_roundtrip():
     with tm.ensure_clean() as path:
         df = tm.makeDataFrame()
-        df.to_pickle(open(path, "wb"))
-        result = pd.read_pickle(open(path, "rb"))
+        with open(path, "wb") as fh:
+            df.to_pickle(fh)
+        with open(path, "rb") as fh:
+            result = pd.read_pickle(fh)
         tm.assert_frame_equal(df, result)

From 839f3db1ace7970a83b12ad81e191e69797d558b Mon Sep 17 00:00:00 2001
From: Tomoyuki Suzuki
Date: Sat, 4 Jan 2020 15:10:01 -0800
Subject: [PATCH 08/10] Correct quotation marks

---
 doc/source/whatsnew/v1.0.0.rst | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/doc/source/whatsnew/v1.0.0.rst b/doc/source/whatsnew/v1.0.0.rst
index 639ef658c3ce3..22d5ff0b2c791 100755
--- a/doc/source/whatsnew/v1.0.0.rst
+++ b/doc/source/whatsnew/v1.0.0.rst
@@ -227,7 +227,7 @@ Other enhancements
 - Added new writer for exporting Stata dta files in version 118, ``StataWriter118``. This format supports exporting strings containing Unicode characters (:issue:`23573`)
 - :meth:`Series.map` now accepts ``collections.abc.Mapping`` subclasses as a mapper (:issue:`29733`)
 - The ``pandas.datetime`` class is now deprecated. Import from ``datetime`` instead (:issue:`30296`)
-- :func: 'to_pickle' and :func: 'read_pickle' now accept URL (:issue:'30163')
+- :func:`to_pickle` and :func:`read_pickle` now accept URL (:issue:`30163`)
 
 
 Build Changes

From 8595fdb4f05d801081cd848f5782b9b16c146ce5 Mon Sep 17 00:00:00 2001
From: Tomoyuki Suzuki
Date: Sat, 4 Jan 2020 15:56:17 -0800
Subject: [PATCH 09/10] Add line breaks around versionchanged directives

---
 pandas/io/pickle.py | 4 ++++
 1 file changed, 4 insertions(+)

diff --git a/pandas/io/pickle.py b/pandas/io/pickle.py
index 8651d09f7b40b..01621b88b4cc2 100644
--- a/pandas/io/pickle.py
+++ b/pandas/io/pickle.py
@@ -24,8 +24,10 @@ def to_pickle(
         Any python object.
     filepath_or_buffer : str, path object or file-like object
         File path, URL, or buffer where the pickled object will be stored.
+
         .. versionchanged:: 1.0.0
           Accept URL. URL has to be of S3 or GCS.
+
     compression : {'infer', 'gzip', 'bz2', 'zip', 'xz', None}, default 'infer'
         If 'infer' and 'path_or_url' is path-like, then detect compression from
         the following extensions: '.gz', '.bz2', '.zip', or '.xz' (otherwise no
@@ -110,8 +112,10 @@ def read_pickle(
     ----------
     filepath_or_buffer : str, path object or file-like object
         File path, URL, or buffer where the pickled object will be loaded from.
+
        .. versionchanged:: 1.0.0
           Accept URL. URL is not limited to S3 and GCS.
+
     compression : {'infer', 'gzip', 'bz2', 'zip', 'xz', None}, default 'infer'
         If 'infer' and 'path_or_url' is path-like, then detect compression from
         the following extensions: '.gz', '.bz2', '.zip', or '.xz' (otherwise no

From ed96d592e406b768ca26c014f61587e118c841a0 Mon Sep 17 00:00:00 2001
From: Tomoyuki Suzuki
Date: Sat, 4 Jan 2020 16:32:33 -0800
Subject: [PATCH 10/10] Update pickle.py

---
 pandas/io/pickle.py | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/pandas/io/pickle.py b/pandas/io/pickle.py
index 01621b88b4cc2..e51f24b551f31 100644
--- a/pandas/io/pickle.py
+++ b/pandas/io/pickle.py
@@ -24,10 +24,10 @@ def to_pickle(
         Any python object.
     filepath_or_buffer : str, path object or file-like object
         File path, URL, or buffer where the pickled object will be stored.
-
+
         .. versionchanged:: 1.0.0
           Accept URL. URL has to be of S3 or GCS.
-
+
     compression : {'infer', 'gzip', 'bz2', 'zip', 'xz', None}, default 'infer'
         If 'infer' and 'path_or_url' is path-like, then detect compression from
         the following extensions: '.gz', '.bz2', '.zip', or '.xz' (otherwise no
@@ -112,10 +112,10 @@ def read_pickle(
     ----------
     filepath_or_buffer : str, path object or file-like object
         File path, URL, or buffer where the pickled object will be loaded from.
-
+
        .. versionchanged:: 1.0.0
          Accept URL. URL is not limited to S3 and GCS.
-
+
     compression : {'infer', 'gzip', 'bz2', 'zip', 'xz', None}, default 'infer'
         If 'infer' and 'path_or_url' is path-like, then detect compression from
        the following extensions: '.gz', '.bz2', '.zip', or '.xz' (otherwise no