diff --git a/doc/source/whatsnew/v3.0.0.rst b/doc/source/whatsnew/v3.0.0.rst
index 81c3f88f7e8ad..35be14cd033e4 100644
--- a/doc/source/whatsnew/v3.0.0.rst
+++ b/doc/source/whatsnew/v3.0.0.rst
@@ -107,8 +107,11 @@ Removal of prior version deprecations/changes
 - Removed ``DataFrameGroupBy.grouper`` and ``SeriesGroupBy.grouper`` (:issue:`56521`)
 - Removed ``axis`` argument from :meth:`DataFrame.groupby`, :meth:`Series.groupby`, :meth:`DataFrame.rolling`, :meth:`Series.rolling`, :meth:`DataFrame.resample`, and :meth:`Series.resample` (:issue:`51203`)
 - Removed ``axis`` argument from all groupby operations (:issue:`50405`)
+- Removed ``pandas.io.sql.execute`` (:issue:`50185`)
 - Removed deprecated argument ``obj`` in :meth:`.DataFrameGroupBy.get_group` and :meth:`.SeriesGroupBy.get_group` (:issue:`53545`)
 - Removed the ``ArrayManager`` (:issue:`55043`)
+- Removed unused arguments ``*args`` and ``**kwargs`` in :class:`Resampler` methods (:issue:`50977`)
+- Unrecognized timezones when parsing strings to datetimes now raise a ``ValueError`` (:issue:`51477`)
 
 .. ---------------------------------------------------------------------------
 .. _whatsnew_300.performance:
diff --git a/pandas/_libs/tslibs/parsing.pyx b/pandas/_libs/tslibs/parsing.pyx
index d0872a509c440..1e544a9927086 100644
--- a/pandas/_libs/tslibs/parsing.pyx
+++ b/pandas/_libs/tslibs/parsing.pyx
@@ -733,15 +733,10 @@ cdef datetime dateutil_parse(
         )
     elif res.tzname is not None:
         # e.g. "1994 Jan 15 05:16 FOO" where FOO is not recognized
-        # GH#18702
-        warnings.warn(
+        # GH#18702, GH#50235 enforced in 3.0
+        raise ValueError(
             f'Parsed string "{timestr}" included an un-recognized timezone '
-            f'"{res.tzname}". Dropping unrecognized timezones is deprecated; '
-            "in a future version this will raise. Instead pass the string "
-            "without the timezone, then use .tz_localize to convert to a "
-            "recognized timezone.",
-            FutureWarning,
-            stacklevel=find_stack_level()
+            f'"{res.tzname}".'
         )
 
     out_bestunit[0] = attrname_to_npy_unit[reso]
diff --git a/pandas/core/resample.py b/pandas/core/resample.py
index bf5b7e5906180..8c65020e38a6d 100644
--- a/pandas/core/resample.py
+++ b/pandas/core/resample.py
@@ -27,7 +27,6 @@
 )
 from pandas._libs.tslibs.dtypes import freq_to_period_freqstr
 from pandas._typing import NDFrameT
-from pandas.compat.numpy import function as nv
 from pandas.errors import AbstractMethodError
 from pandas.util._decorators import (
     Appender,
@@ -1156,8 +1155,6 @@ def sum(
         self,
         numeric_only: bool = False,
         min_count: int = 0,
-        *args,
-        **kwargs,
     ):
         """
         Compute sum of group values.
@@ -1195,8 +1192,6 @@ def sum(
         2023-02-01    7
         Freq: MS, dtype: int64
         """
-        maybe_warn_args_and_kwargs(type(self), "sum", args, kwargs)
-        nv.validate_resampler_func("sum", args, kwargs)
         return self._downsample("sum", numeric_only=numeric_only, min_count=min_count)
 
     @final
@@ -1204,8 +1199,6 @@ def prod(
         self,
         numeric_only: bool = False,
         min_count: int = 0,
-        *args,
-        **kwargs,
     ):
         """
         Compute prod of group values.
@@ -1243,8 +1236,6 @@ def prod(
         2023-02-01    12
         Freq: MS, dtype: int64
         """
-        maybe_warn_args_and_kwargs(type(self), "prod", args, kwargs)
-        nv.validate_resampler_func("prod", args, kwargs)
         return self._downsample("prod", numeric_only=numeric_only, min_count=min_count)
 
     @final
@@ -1252,8 +1243,6 @@ def min(
         self,
         numeric_only: bool = False,
         min_count: int = 0,
-        *args,
-        **kwargs,
     ):
         """
         Compute min value of group.
@@ -1277,9 +1266,6 @@ def min(
         2023-02-01    3
         Freq: MS, dtype: int64
         """
-
-        maybe_warn_args_and_kwargs(type(self), "min", args, kwargs)
-        nv.validate_resampler_func("min", args, kwargs)
         return self._downsample("min", numeric_only=numeric_only, min_count=min_count)
 
     @final
@@ -1287,8 +1273,6 @@ def max(
         self,
         numeric_only: bool = False,
         min_count: int = 0,
-        *args,
-        **kwargs,
     ):
         """
         Compute max value of group.
@@ -1312,8 +1296,6 @@ def max(
         2023-02-01    4
         Freq: MS, dtype: int64
         """
-        maybe_warn_args_and_kwargs(type(self), "max", args, kwargs)
-        nv.validate_resampler_func("max", args, kwargs)
         return self._downsample("max", numeric_only=numeric_only, min_count=min_count)
 
     @final
@@ -1323,11 +1305,7 @@ def first(
         numeric_only: bool = False,
         min_count: int = 0,
         skipna: bool = True,
-        *args,
-        **kwargs,
     ):
-        maybe_warn_args_and_kwargs(type(self), "first", args, kwargs)
-        nv.validate_resampler_func("first", args, kwargs)
         return self._downsample(
             "first", numeric_only=numeric_only, min_count=min_count, skipna=skipna
         )
@@ -1339,28 +1317,20 @@ def last(
         numeric_only: bool = False,
         min_count: int = 0,
         skipna: bool = True,
-        *args,
-        **kwargs,
     ):
-        maybe_warn_args_and_kwargs(type(self), "last", args, kwargs)
-        nv.validate_resampler_func("last", args, kwargs)
         return self._downsample(
             "last", numeric_only=numeric_only, min_count=min_count, skipna=skipna
         )
 
     @final
     @doc(GroupBy.median)
-    def median(self, numeric_only: bool = False, *args, **kwargs):
-        maybe_warn_args_and_kwargs(type(self), "median", args, kwargs)
-        nv.validate_resampler_func("median", args, kwargs)
+    def median(self, numeric_only: bool = False):
         return self._downsample("median", numeric_only=numeric_only)
 
     @final
     def mean(
         self,
         numeric_only: bool = False,
-        *args,
-        **kwargs,
     ):
         """
         Compute mean of groups, excluding missing values.
@@ -1395,8 +1365,6 @@ def mean(
         2023-02-01    3.5
         Freq: MS, dtype: float64
         """
-        maybe_warn_args_and_kwargs(type(self), "mean", args, kwargs)
-        nv.validate_resampler_func("mean", args, kwargs)
         return self._downsample("mean", numeric_only=numeric_only)
 
     @final
@@ -1404,8 +1372,6 @@ def std(
         self,
         ddof: int = 1,
         numeric_only: bool = False,
-        *args,
-        **kwargs,
     ):
         """
         Compute standard deviation of groups, excluding missing values.
@@ -1443,8 +1409,6 @@ def std(
         2023-02-01    2.645751
         Freq: MS, dtype: float64
         """
-        maybe_warn_args_and_kwargs(type(self), "std", args, kwargs)
-        nv.validate_resampler_func("std", args, kwargs)
         return self._downsample("std", ddof=ddof, numeric_only=numeric_only)
 
     @final
@@ -1452,8 +1416,6 @@ def var(
         self,
         ddof: int = 1,
         numeric_only: bool = False,
-        *args,
-        **kwargs,
     ):
         """
         Compute variance of groups, excluding missing values.
@@ -1497,8 +1459,6 @@ def var(
         2023-02-01    4.666667
         Freq: MS, dtype: float64
         """
-        maybe_warn_args_and_kwargs(type(self), "var", args, kwargs)
-        nv.validate_resampler_func("var", args, kwargs)
         return self._downsample("var", ddof=ddof, numeric_only=numeric_only)
 
     @final
@@ -1507,23 +1467,12 @@ def sem(
         self,
         ddof: int = 1,
         numeric_only: bool = False,
-        *args,
-        **kwargs,
     ):
-        maybe_warn_args_and_kwargs(type(self), "sem", args, kwargs)
-        nv.validate_resampler_func("sem", args, kwargs)
         return self._downsample("sem", ddof=ddof, numeric_only=numeric_only)
 
     @final
     @doc(GroupBy.ohlc)
-    def ohlc(
-        self,
-        *args,
-        **kwargs,
-    ):
-        maybe_warn_args_and_kwargs(type(self), "ohlc", args, kwargs)
-        nv.validate_resampler_func("ohlc", args, kwargs)
-
+    def ohlc(self):
         ax = self.ax
         obj = self._obj_with_exclusions
         if len(ax) == 0:
@@ -1544,13 +1493,7 @@ def ohlc(
 
     @final
     @doc(SeriesGroupBy.nunique)
-    def nunique(
-        self,
-        *args,
-        **kwargs,
-    ):
-        maybe_warn_args_and_kwargs(type(self), "nunique", args, kwargs)
-        nv.validate_resampler_func("nunique", args, kwargs)
+    def nunique(self):
         return self._downsample("nunique")
 
     @final
@@ -2874,40 +2817,6 @@ def _asfreq_compat(index: DatetimeIndex | PeriodIndex | TimedeltaIndex, freq):
     return new_index
 
 
-def maybe_warn_args_and_kwargs(cls, kernel: str, args, kwargs) -> None:
-    """
-    Warn for deprecation of args and kwargs in resample functions.
-
-    Parameters
-    ----------
-    cls : type
-        Class to warn about.
-    kernel : str
-        Operation name.
-    args : tuple or None
-        args passed by user. Will be None if and only if kernel does not have args.
-    kwargs : dict or None
-        kwargs passed by user. Will be None if and only if kernel does not have kwargs.
-    """
-    warn_args = args is not None and len(args) > 0
-    warn_kwargs = kwargs is not None and len(kwargs) > 0
-    if warn_args and warn_kwargs:
-        msg = "args and kwargs"
-    elif warn_args:
-        msg = "args"
-    elif warn_kwargs:
-        msg = "kwargs"
-    else:
-        return
-    warnings.warn(
-        f"Passing additional {msg} to {cls.__name__}.{kernel} has "
-        "no impact on the result and is deprecated. This will "
-        "raise a TypeError in a future version of pandas.",
-        category=FutureWarning,
-        stacklevel=find_stack_level(),
-    )
-
-
 def _apply(
     grouped: GroupBy, how: Callable, *args, include_groups: bool, **kwargs
 ) -> DataFrame:
diff --git a/pandas/io/sql.py b/pandas/io/sql.py
index 4e0ddd0f56ba8..b4330c717d368 100644
--- a/pandas/io/sql.py
+++ b/pandas/io/sql.py
@@ -235,37 +235,6 @@ def _wrap_result_adbc(
     return df
 
 
-def execute(sql, con, params=None):
-    """
-    Execute the given SQL query using the provided connection object.
-
-    Parameters
-    ----------
-    sql : string
-        SQL query to be executed.
-    con : SQLAlchemy connection or sqlite3 connection
-        If a DBAPI2 object, only sqlite3 is supported.
-    params : list or tuple, optional, default: None
-        List of parameters to pass to execute method.
-
-    Returns
-    -------
-    Results Iterable
-    """
-    warnings.warn(
-        "`pandas.io.sql.execute` is deprecated and "
-        "will be removed in the future version.",
-        FutureWarning,
-        stacklevel=find_stack_level(),
-    )  # GH50185
-    sqlalchemy = import_optional_dependency("sqlalchemy", errors="ignore")
-
-    if sqlalchemy is not None and isinstance(con, (str, sqlalchemy.engine.Engine)):
-        raise TypeError("pandas.io.sql.execute requires a connection")  # GH50185
-    with pandasSQL_builder(con, need_transaction=True) as pandas_sql:
-        return pandas_sql.execute(sql, params)
-
-
 # -----------------------------------------------------------------------------
 # -- Read and write to DataFrames
diff --git a/pandas/tests/io/test_sql.py b/pandas/tests/io/test_sql.py
index d86b80691190d..8f15467084cf3 100644
--- a/pandas/tests/io/test_sql.py
+++ b/pandas/tests/io/test_sql.py
@@ -1487,26 +1487,6 @@ def test_read_view_sqlite(sqlite_buildin):
     tm.assert_frame_equal(result, expected)
 
 
-def test_execute_typeerror(sqlite_engine_iris):
-    with pytest.raises(TypeError, match="pandas.io.sql.execute requires a connection"):
-        with tm.assert_produces_warning(
-            FutureWarning,
-            match="`pandas.io.sql.execute` is deprecated and "
-            "will be removed in the future version.",
-        ):
-            sql.execute("select * from iris", sqlite_engine_iris)
-
-
-def test_execute_deprecated(sqlite_conn_iris):
-    # GH50185
-    with tm.assert_produces_warning(
-        FutureWarning,
-        match="`pandas.io.sql.execute` is deprecated and "
-        "will be removed in the future version.",
-    ):
-        sql.execute("select * from iris", sqlite_conn_iris)
-
-
 def flavor(conn_name):
     if "postgresql" in conn_name:
         return "postgresql"
diff --git a/pandas/tests/resample/test_resample_api.py b/pandas/tests/resample/test_resample_api.py
index 17c286c4651e6..bf0543ffcc4bb 100644
--- a/pandas/tests/resample/test_resample_api.py
+++ b/pandas/tests/resample/test_resample_api.py
@@ -5,7 +5,6 @@
 import pytest
 
 from pandas._libs import lib
-from pandas.errors import UnsupportedFunctionCall
 
 import pandas as pd
 from pandas import (
@@ -987,46 +986,6 @@ def test_series_downsample_method(method, numeric_only, expected_data):
     tm.assert_series_equal(result, expected)
 
 
-@pytest.mark.parametrize(
-    "method, raises",
-    [
-        ("sum", True),
-        ("prod", True),
-        ("min", True),
-        ("max", True),
-        ("first", False),
-        ("last", False),
-        ("median", False),
-        ("mean", True),
-        ("std", True),
-        ("var", True),
-        ("sem", False),
-        ("ohlc", False),
-        ("nunique", False),
-    ],
-)
-def test_args_kwargs_depr(method, raises):
-    index = date_range("20180101", periods=3, freq="h")
-    df = Series([2, 4, 6], index=index)
-    resampled = df.resample("30min")
-    args = ()
-
-    func = getattr(resampled, method)
-
-    error_msg = "numpy operations are not valid with resample."
-    error_msg_type = "too many arguments passed in"
-    warn_msg = f"Passing additional args to DatetimeIndexResampler.{method}"
-
-    if raises:
-        with tm.assert_produces_warning(FutureWarning, match=warn_msg):
-            with pytest.raises(UnsupportedFunctionCall, match=error_msg):
-                func(*args, 1, 2, 3, 4)
-    else:
-        with tm.assert_produces_warning(FutureWarning, match=warn_msg):
-            with pytest.raises(TypeError, match=error_msg_type):
-                func(*args, 1, 2, 3, 4)
-
-
 def test_resample_empty():
     # GH#52484
     df = DataFrame(
diff --git a/pandas/tests/tools/test_to_datetime.py b/pandas/tests/tools/test_to_datetime.py
index 9d364c2f86ac5..0cbdba874c5f7 100644
--- a/pandas/tests/tools/test_to_datetime.py
+++ b/pandas/tests/tools/test_to_datetime.py
@@ -3672,20 +3672,17 @@ def test_to_datetime_mixed_not_necessarily_iso8601_coerce(errors, expected):
     tm.assert_index_equal(result, expected)
 
 
-def test_ignoring_unknown_tz_deprecated():
+def test_unknown_tz_raises():
     # GH#18702, GH#51476
     dtstr = "2014 Jan 9 05:15 FAKE"
-    msg = 'un-recognized timezone "FAKE". Dropping unrecognized timezones is deprecated'
-    with tm.assert_produces_warning(FutureWarning, match=msg):
-        res = Timestamp(dtstr)
-    assert res == Timestamp(dtstr[:-5])
-
-    with tm.assert_produces_warning(FutureWarning):
-        res = to_datetime(dtstr)
-    assert res == to_datetime(dtstr[:-5])
-    with tm.assert_produces_warning(FutureWarning):
-        res = to_datetime([dtstr])
-    tm.assert_index_equal(res, to_datetime([dtstr[:-5]]))
+    msg = '.*un-recognized timezone "FAKE".'
+    with pytest.raises(ValueError, match=msg):
+        Timestamp(dtstr)
+
+    with pytest.raises(ValueError, match=msg):
+        to_datetime(dtstr)
+    with pytest.raises(ValueError, match=msg):
+        to_datetime([dtstr])
 
 
 def test_from_numeric_arrow_dtype(any_numeric_ea_dtype):
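
The snippets below are illustrative sketches of the user-visible behavior enforced by this patch; they are not part of the diff itself. First, the timezone-parsing change: strings carrying a timezone abbreviation that dateutil does not recognize now raise instead of warning. The error text here is taken from the new message in ``parsing.pyx``, and the workaround is the one the old ``FutureWarning`` recommended (drop the timezone, then ``.tz_localize``).

import pandas as pd

dtstr = "2014 Jan 9 05:15 FAKE"  # "FAKE" is not a recognized timezone

try:
    pd.Timestamp(dtstr)
except ValueError as err:
    # Parsed string "2014 Jan 9 05:15 FAKE" included an un-recognized timezone "FAKE".
    print(err)

# Old-warning workaround: parse the string without the timezone, then localize
# explicitly to a zone you do recognize.
ts = pd.Timestamp(dtstr[:-5]).tz_localize("UTC")
print(ts)  # 2014-01-09 05:15:00+00:00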
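Next, a sketch of the slimmed-down ``Resampler`` reductions. Keyword usage is unchanged; stray positional arguments, which previously went through the now-removed numpy-compat validation, simply fail with a ``TypeError`` from the narrower signature (the exact wording of that error depends on the Python version).

import pandas as pd

idx = pd.date_range("2018-01-01", periods=3, freq="h")
ser = pd.Series([2, 4, 6], index=idx)
resampled = ser.resample("30min")

# Supported keywords keep working exactly as before.
print(resampled.sum(numeric_only=False, min_count=0))

# Extra positional arguments are no longer swallowed by *args/**kwargs.
try:
    resampled.sum(1, 2, 3, 4)
except TypeError as err:
    print(err)  # e.g. "sum() takes from 1 to 3 positional arguments but 5 were given"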
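Finally, one possible migration path for code that relied on the removed ``pandas.io.sql.execute``: hand raw statements to the driver (or SQLAlchemy) connection directly, and keep using ``pandas.read_sql`` for anything that should come back as a ``DataFrame``. The table and column names below are made up for the example.

import sqlite3

import pandas as pd

con = sqlite3.connect(":memory:")
con.execute("CREATE TABLE iris (sepal_length REAL, species TEXT)")
con.execute("INSERT INTO iris VALUES (5.1, 'setosa')")
con.commit()

# Raw statement: use the DBAPI connection's own execute().
rows = con.execute("SELECT * FROM iris").fetchall()
print(rows)  # [(5.1, 'setosa')]

# Query into a DataFrame: read_sql is unaffected by the removal.
df = pd.read_sql("SELECT * FROM iris", con)
print(df)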