diff --git a/doc/source/whatsnew/v2.2.0.rst b/doc/source/whatsnew/v2.2.0.rst
index d8b63a6d1395d..430f61ee6827b 100644
--- a/doc/source/whatsnew/v2.2.0.rst
+++ b/doc/source/whatsnew/v2.2.0.rst
@@ -98,6 +98,7 @@ Deprecations
 - Deprecated allowing non-keyword arguments in :meth:`DataFrame.to_json` except ``path_or_buf``. (:issue:`54229`)
 - Deprecated allowing non-keyword arguments in :meth:`DataFrame.to_latex` except ``buf``. (:issue:`54229`)
 - Deprecated allowing non-keyword arguments in :meth:`DataFrame.to_markdown` except ``buf``. (:issue:`54229`)
+- Deprecated allowing non-keyword arguments in :meth:`DataFrame.to_parquet` except ``path``. (:issue:`54229`)
 - Deprecated allowing non-keyword arguments in :meth:`DataFrame.to_pickle` except ``path``. (:issue:`54229`)
 - Deprecated allowing non-keyword arguments in :meth:`DataFrame.to_string` except ``buf``. (:issue:`54229`)
 - Deprecated not passing a tuple to :class:`DataFrameGroupBy.get_group` or :class:`SeriesGroupBy.get_group` when grouping by a length-1 list-like (:issue:`25971`)
diff --git a/pandas/core/frame.py b/pandas/core/frame.py
index 05c0db0c09376..2bbab10be45ad 100644
--- a/pandas/core/frame.py
+++ b/pandas/core/frame.py
@@ -2878,6 +2878,9 @@ def to_parquet(
     ) -> None:
         ...
 
+    @deprecate_nonkeyword_arguments(
+        version="3.0", allowed_args=["self", "path"], name="to_parquet"
+    )
     @doc(storage_options=_shared_docs["storage_options"])
     def to_parquet(
         self,
diff --git a/pandas/tests/io/test_parquet.py b/pandas/tests/io/test_parquet.py
index a4c6cfcf9fe0e..9182e4c4e7674 100644
--- a/pandas/tests/io/test_parquet.py
+++ b/pandas/tests/io/test_parquet.py
@@ -359,6 +359,20 @@ def test_cross_engine_fp_pa(df_cross_compat, pa, fp):
     tm.assert_frame_equal(result, df[["a", "d"]])
 
 
+def test_parquet_pos_args_deprecation(engine):
+    # GH-54229
+    df = pd.DataFrame({"a": [1, 2, 3]})
+    msg = (
+        r"Starting with pandas version 3.0 all arguments of to_parquet except for the "
+        r"argument 'path' will be keyword-only."
+    )
+    with tm.ensure_clean() as path:
+        with tm.assert_produces_warning(
+            FutureWarning, match=msg, check_stacklevel=False
+        ):
+            df.to_parquet(path, engine)
+
+
 class Base:
     def check_error_on_write(self, df, engine, exc, err_msg):
         # check that we are raising the exception on writing
@@ -998,7 +1012,7 @@ def test_filter_row_groups(self, pa):
         pytest.importorskip("pyarrow")
         df = pd.DataFrame({"a": list(range(0, 3))})
         with tm.ensure_clean() as path:
-            df.to_parquet(path, pa)
+            df.to_parquet(path, engine=pa)
             result = read_parquet(
                 path, pa, filters=[("a", "==", 0)], use_legacy_dataset=False
             )
@@ -1011,7 +1025,7 @@ def test_read_parquet_manager(self, pa, using_array_manager):
         )
 
         with tm.ensure_clean() as path:
-            df.to_parquet(path, pa)
+            df.to_parquet(path, engine=pa)
             result = read_parquet(path, pa)
         if using_array_manager:
             assert isinstance(result._mgr, pd.core.internals.ArrayManager)
@@ -1177,7 +1191,7 @@ def test_filter_row_groups(self, fp):
         d = {"a": list(range(0, 3))}
         df = pd.DataFrame(d)
         with tm.ensure_clean() as path:
-            df.to_parquet(path, fp, compression=None, row_group_offsets=1)
+            df.to_parquet(path, engine=fp, compression=None, row_group_offsets=1)
             result = read_parquet(path, fp, filters=[("a", "==", 0)])
         assert len(result) == 1