diff --git a/pandas/_libs/index.pyx b/pandas/_libs/index.pyx
index cc0174f795ebe..f968e879498b2 100644
--- a/pandas/_libs/index.pyx
+++ b/pandas/_libs/index.pyx
@@ -490,11 +490,11 @@ cdef class ObjectEngine(IndexEngine):
 cdef class DatetimeEngine(Int64Engine):
 
     cdef:
-        NPY_DATETIMEUNIT reso
+        NPY_DATETIMEUNIT _creso
 
     def __init__(self, ndarray values):
         super().__init__(values.view("i8"))
-        self.reso = get_unit_from_dtype(values.dtype)
+        self._creso = get_unit_from_dtype(values.dtype)
 
     cdef int64_t _unbox_scalar(self, scalar) except? -1:
         # NB: caller is responsible for ensuring tzawareness compat
@@ -502,12 +502,12 @@ cdef class DatetimeEngine(Int64Engine):
         if scalar is NaT:
             return NaT.value
         elif isinstance(scalar, _Timestamp):
-            if scalar._creso == self.reso:
+            if scalar._creso == self._creso:
                 return scalar.value
             else:
                 # Note: caller is responsible for catching potential ValueError
                 #  from _as_creso
-                return (<_Timestamp>scalar)._as_creso(self.reso, round_ok=False).value
+                return (<_Timestamp>scalar)._as_creso(self._creso, round_ok=False).value
         raise TypeError(scalar)
 
     def __contains__(self, val: object) -> bool:
@@ -570,12 +570,12 @@ cdef class TimedeltaEngine(DatetimeEngine):
         if scalar is NaT:
             return NaT.value
         elif isinstance(scalar, _Timedelta):
-            if scalar._creso == self.reso:
+            if scalar._creso == self._creso:
                 return scalar.value
             else:
                 # Note: caller is responsible for catching potential ValueError
                 #  from _as_creso
-                return (<_Timedelta>scalar)._as_creso(self.reso, round_ok=False).value
+                return (<_Timedelta>scalar)._as_creso(self._creso, round_ok=False).value
         raise TypeError(scalar)
 
 
diff --git a/pandas/_libs/tslibs/timestamps.pyx b/pandas/_libs/tslibs/timestamps.pyx
index 30ead1d4e3142..2e7b336e3536a 100644
--- a/pandas/_libs/tslibs/timestamps.pyx
+++ b/pandas/_libs/tslibs/timestamps.pyx
@@ -2099,10 +2099,10 @@ default 'raise'
             value = tz_localize_to_utc_single(self.value, tz,
                                               ambiguous=ambiguous,
                                               nonexistent=nonexistent,
-                                              reso=self._creso)
+                                              creso=self._creso)
         elif tz is None:
             # reset tz
-            value = tz_convert_from_utc_single(self.value, self.tz, reso=self._creso)
+            value = tz_convert_from_utc_single(self.value, self.tz, creso=self._creso)
 
         else:
             raise TypeError(
@@ -2245,7 +2245,7 @@ default 'raise'
             fold = self.fold
 
         if tzobj is not None:
-            value = tz_convert_from_utc_single(value, tzobj, reso=self._creso)
+            value = tz_convert_from_utc_single(value, tzobj, creso=self._creso)
 
         # setup components
         pandas_datetime_to_datetimestruct(value, self._creso, &dts)
diff --git a/pandas/_libs/tslibs/tzconversion.pxd b/pandas/_libs/tslibs/tzconversion.pxd
index 3a6a6f4e10035..1b95899e5c037 100644
--- a/pandas/_libs/tslibs/tzconversion.pxd
+++ b/pandas/_libs/tslibs/tzconversion.pxd
@@ -9,14 +9,14 @@ from pandas._libs.tslibs.np_datetime cimport NPY_DATETIMEUNIT
 
 
 cpdef int64_t tz_convert_from_utc_single(
-    int64_t utc_val, tzinfo tz, NPY_DATETIMEUNIT reso=*
+    int64_t utc_val, tzinfo tz, NPY_DATETIMEUNIT creso=*
 ) except? -1
 cdef int64_t tz_localize_to_utc_single(
     int64_t val,
     tzinfo tz,
     object ambiguous=*,
     object nonexistent=*,
-    NPY_DATETIMEUNIT reso=*,
+    NPY_DATETIMEUNIT creso=*,
 ) except? -1
 
 
diff --git a/pandas/_libs/tslibs/tzconversion.pyi b/pandas/_libs/tslibs/tzconversion.pyi
index fab73f96b0dfb..a354765a348ec 100644
--- a/pandas/_libs/tslibs/tzconversion.pyi
+++ b/pandas/_libs/tslibs/tzconversion.pyi
@@ -10,12 +10,12 @@ from pandas._typing import npt
 
 # tz_convert_from_utc_single exposed for testing
 def tz_convert_from_utc_single(
-    val: np.int64, tz: tzinfo, reso: int = ...
+    val: np.int64, tz: tzinfo, creso: int = ...
 ) -> np.int64: ...
 def tz_localize_to_utc(
     vals: npt.NDArray[np.int64],
     tz: tzinfo | None,
     ambiguous: str | bool | Iterable[bool] | None = ...,
     nonexistent: str | timedelta | np.timedelta64 | None = ...,
-    reso: int = ...,  # NPY_DATETIMEUNIT
+    creso: int = ...,  # NPY_DATETIMEUNIT
 ) -> npt.NDArray[np.int64]: ...
diff --git a/pandas/_libs/tslibs/tzconversion.pyx b/pandas/_libs/tslibs/tzconversion.pyx
index 953ba10993973..e2812178a2b43 100644
--- a/pandas/_libs/tslibs/tzconversion.pyx
+++ b/pandas/_libs/tslibs/tzconversion.pyx
@@ -54,7 +54,7 @@ cdef const int64_t[::1] _deltas_placeholder = np.array([], dtype=np.int64)
 cdef class Localizer:
     # cdef:
     #    tzinfo tz
-    #    NPY_DATETIMEUNIT _reso
+    #    NPY_DATETIMEUNIT _creso
     #    bint use_utc, use_fixed, use_tzlocal, use_dst, use_pytz
     #    ndarray trans
     #    Py_ssize_t ntrans
@@ -64,9 +64,9 @@ cdef class Localizer:
 
     @cython.initializedcheck(False)
     @cython.boundscheck(False)
-    def __cinit__(self, tzinfo tz, NPY_DATETIMEUNIT reso):
+    def __cinit__(self, tzinfo tz, NPY_DATETIMEUNIT creso):
         self.tz = tz
-        self._creso = reso
+        self._creso = creso
         self.use_utc = self.use_tzlocal = self.use_fixed = False
         self.use_dst = self.use_pytz = False
         self.ntrans = -1  # placeholder
@@ -82,22 +82,22 @@ cdef class Localizer:
         else:
             trans, deltas, typ = get_dst_info(tz)
 
-            if reso != NPY_DATETIMEUNIT.NPY_FR_ns:
+            if creso != NPY_DATETIMEUNIT.NPY_FR_ns:
                 # NB: using floordiv here is implicitly assuming we will
                 #  never see trans or deltas that are not an integer number
                 #  of seconds.
                 # TODO: avoid these np.array calls
-                if reso == NPY_DATETIMEUNIT.NPY_FR_us:
+                if creso == NPY_DATETIMEUNIT.NPY_FR_us:
                     trans = np.array(trans) // 1_000
                     deltas = np.array(deltas) // 1_000
-                elif reso == NPY_DATETIMEUNIT.NPY_FR_ms:
+                elif creso == NPY_DATETIMEUNIT.NPY_FR_ms:
                     trans = np.array(trans) // 1_000_000
                     deltas = np.array(deltas) // 1_000_000
-                elif reso == NPY_DATETIMEUNIT.NPY_FR_s:
+                elif creso == NPY_DATETIMEUNIT.NPY_FR_s:
                     trans = np.array(trans) // 1_000_000_000
                     deltas = np.array(deltas) // 1_000_000_000
                 else:
-                    raise NotImplementedError(reso)
+                    raise NotImplementedError(creso)
 
             self.trans = trans
             self.ntrans = self.trans.shape[0]
@@ -121,7 +121,7 @@ cdef class Localizer:
             return utc_val
         elif self.use_tzlocal:
             return utc_val + _tz_localize_using_tzinfo_api(
-                utc_val, self.tz, to_utc=False, reso=self._creso, fold=fold
+                utc_val, self.tz, to_utc=False, creso=self._creso, fold=fold
             )
         elif self.use_fixed:
             return utc_val + self.delta
@@ -140,7 +140,7 @@ cdef int64_t tz_localize_to_utc_single(
     tzinfo tz,
     object ambiguous=None,
    object nonexistent=None,
-    NPY_DATETIMEUNIT reso=NPY_DATETIMEUNIT.NPY_FR_ns,
+    NPY_DATETIMEUNIT creso=NPY_DATETIMEUNIT.NPY_FR_ns,
 ) except? -1:
     """See tz_localize_to_utc.__doc__"""
     cdef:
@@ -155,18 +155,18 @@ cdef int64_t tz_localize_to_utc_single(
         return val
 
     elif is_tzlocal(tz) or is_zoneinfo(tz):
-        return val - _tz_localize_using_tzinfo_api(val, tz, to_utc=True, reso=reso)
+        return val - _tz_localize_using_tzinfo_api(val, tz, to_utc=True, creso=creso)
 
     elif is_fixed_offset(tz):
         _, deltas, _ = get_dst_info(tz)
         delta = deltas[0]
         # TODO: de-duplicate with Localizer.__init__
-        if reso != NPY_DATETIMEUNIT.NPY_FR_ns:
-            if reso == NPY_DATETIMEUNIT.NPY_FR_us:
+        if creso != NPY_DATETIMEUNIT.NPY_FR_ns:
+            if creso == NPY_DATETIMEUNIT.NPY_FR_us:
                 delta = delta // 1000
-            elif reso == NPY_DATETIMEUNIT.NPY_FR_ms:
+            elif creso == NPY_DATETIMEUNIT.NPY_FR_ms:
                 delta = delta // 1_000_000
-            elif reso == NPY_DATETIMEUNIT.NPY_FR_s:
+            elif creso == NPY_DATETIMEUNIT.NPY_FR_s:
                 delta = delta // 1_000_000_000
 
         return val - delta
@@ -177,7 +177,7 @@ cdef int64_t tz_localize_to_utc_single(
         tz,
         ambiguous=ambiguous,
         nonexistent=nonexistent,
-        reso=reso,
+        creso=creso,
     )[0]
 
 
@@ -188,7 +188,7 @@ def tz_localize_to_utc(
     tzinfo tz,
     object ambiguous=None,
     object nonexistent=None,
-    NPY_DATETIMEUNIT reso=NPY_DATETIMEUNIT.NPY_FR_ns,
+    NPY_DATETIMEUNIT creso=NPY_DATETIMEUNIT.NPY_FR_ns,
 ):
     """
     Localize tzinfo-naive i8 to given time zone (using pytz). If
@@ -216,7 +216,7 @@ timedelta-like}
     nonexistent : {None, "NaT", "shift_forward", "shift_backward", "raise", \
 timedelta-like}
         How to handle non-existent times when converting wall times to UTC
-    reso : NPY_DATETIMEUNIT, default NPY_FR_ns
+    creso : NPY_DATETIMEUNIT, default NPY_FR_ns
 
     Returns
     -------
@@ -236,8 +236,8 @@ timedelta-like}
         bint shift_forward = False, shift_backward = False
         bint fill_nonexist = False
         str stamp
-        Localizer info = Localizer(tz, reso=reso)
-        int64_t pph = periods_per_day(reso) // 24
+        Localizer info = Localizer(tz, creso=creso)
+        int64_t pph = periods_per_day(creso) // 24
 
     # Vectorized version of DstTzInfo.localize
     if info.use_utc:
@@ -252,7 +252,7 @@ timedelta-like}
                 result[i] = NPY_NAT
             else:
                 result[i] = v - _tz_localize_using_tzinfo_api(
-                    v, tz, to_utc=True, reso=reso
+                    v, tz, to_utc=True, creso=creso
                 )
         return result.base  # to return underlying ndarray
 
@@ -294,7 +294,7 @@ timedelta-like}
         shift_backward = True
     elif PyDelta_Check(nonexistent):
         from .timedeltas import delta_to_nanoseconds
-        shift_delta = delta_to_nanoseconds(nonexistent, reso=reso)
+        shift_delta = delta_to_nanoseconds(nonexistent, reso=creso)
     elif nonexistent not in ('raise', None):
         msg = ("nonexistent must be one of {'NaT', 'raise', 'shift_forward', "
                "shift_backwards} or a timedelta object")
@@ -303,13 +303,13 @@ timedelta-like}
     # Determine whether each date lies left of the DST transition (store in
     # result_a) or right of the DST transition (store in result_b)
     result_a, result_b =_get_utc_bounds(
-        vals, info.tdata, info.ntrans, info.deltas, reso=reso
+        vals, info.tdata, info.ntrans, info.deltas, creso=creso
     )
 
     # silence false-positive compiler warning
    dst_hours = np.empty(0, dtype=np.int64)
     if infer_dst:
-        dst_hours = _get_dst_hours(vals, result_a, result_b, reso=reso)
+        dst_hours = _get_dst_hours(vals, result_a, result_b, creso=creso)
 
     # Pre-compute delta_idx_offset that will be used if we go down non-existent
     # paths.
@@ -348,7 +348,7 @@ timedelta-like}
                     # TODO: test with non-nano; parametrize test_dt_round_tz_ambiguous
                     result[i] = NPY_NAT
                 else:
-                    stamp = _render_tstamp(val, reso=reso)
+                    stamp = _render_tstamp(val, creso=creso)
                     raise pytz.AmbiguousTimeError(
                         f"Cannot infer dst time from {stamp}, try using the "
                         "'ambiguous' argument"
@@ -386,7 +386,7 @@ timedelta-like}
             elif fill_nonexist:
                 result[i] = NPY_NAT
             else:
-                stamp = _render_tstamp(val, reso=reso)
+                stamp = _render_tstamp(val, creso=creso)
                 raise pytz.NonExistentTimeError(stamp)
 
     return result.base  # .base to get underlying ndarray
@@ -422,10 +422,10 @@ cdef inline Py_ssize_t bisect_right_i8(int64_t *data,
     return left
 
 
-cdef inline str _render_tstamp(int64_t val, NPY_DATETIMEUNIT reso):
+cdef inline str _render_tstamp(int64_t val, NPY_DATETIMEUNIT creso):
     """ Helper function to render exception messages"""
     from pandas._libs.tslibs.timestamps import Timestamp
-    ts = Timestamp._from_value_and_reso(val, reso, None)
+    ts = Timestamp._from_value_and_reso(val, creso, None)
     return str(ts)
 
 
@@ -434,7 +434,7 @@ cdef _get_utc_bounds(
     int64_t* tdata,
     Py_ssize_t ntrans,
     const int64_t[::1] deltas,
-    NPY_DATETIMEUNIT reso,
+    NPY_DATETIMEUNIT creso,
 ):
     # Determine whether each date lies left of the DST transition (store in
     # result_a) or right of the DST transition (store in result_b)
@@ -444,7 +444,7 @@ cdef _get_utc_bounds(
         Py_ssize_t i, n = vals.size
         int64_t val, v_left, v_right
         Py_ssize_t isl, isr, pos_left, pos_right
-        int64_t ppd = periods_per_day(reso)
+        int64_t ppd = periods_per_day(creso)
 
     result_a = cnp.PyArray_EMPTY(vals.ndim, vals.shape, cnp.NPY_INT64, 0)
     result_b = cnp.PyArray_EMPTY(vals.ndim, vals.shape, cnp.NPY_INT64, 0)
@@ -486,11 +486,11 @@ cdef _get_utc_bounds(
 
 @cython.boundscheck(False)
 cdef ndarray[int64_t] _get_dst_hours(
-    # vals, reso only needed here to potential render an exception message
+    # vals, creso only needed here to potential render an exception message
     const int64_t[:] vals,
     ndarray[int64_t] result_a,
     ndarray[int64_t] result_b,
-    NPY_DATETIMEUNIT reso,
+    NPY_DATETIMEUNIT creso,
 ):
     cdef:
         Py_ssize_t i, n = vals.shape[0]
@@ -519,7 +519,7 @@ cdef ndarray[int64_t] _get_dst_hours(
 
     if trans_idx.size == 1:
         # see test_tz_localize_to_utc_ambiguous_infer
-        stamp = _render_tstamp(vals[trans_idx[0]], reso=reso)
+        stamp = _render_tstamp(vals[trans_idx[0]], creso=creso)
         raise pytz.AmbiguousTimeError(
             f"Cannot infer dst time from {stamp} as there "
             "are no repeated times"
@@ -541,7 +541,7 @@ cdef ndarray[int64_t] _get_dst_hours(
         delta = np.diff(result_a[grp])
         if grp.size == 1 or np.all(delta > 0):
             # see test_tz_localize_to_utc_ambiguous_infer
-            stamp = _render_tstamp(vals[grp[0]], reso=reso)
+            stamp = _render_tstamp(vals[grp[0]], creso=creso)
             raise pytz.AmbiguousTimeError(stamp)
 
         # Find the index for the switch and pull from a for dst and b
@@ -567,7 +567,7 @@ cdef ndarray[int64_t] _get_dst_hours(
 # Timezone Conversion
 
 cpdef int64_t tz_convert_from_utc_single(
-    int64_t utc_val, tzinfo tz, NPY_DATETIMEUNIT reso=NPY_DATETIMEUNIT.NPY_FR_ns
+    int64_t utc_val, tzinfo tz, NPY_DATETIMEUNIT creso=NPY_DATETIMEUNIT.NPY_FR_ns
 ) except? -1:
     """
     Convert the val (in i8) from UTC to tz
@@ -578,14 +578,14 @@ cpdef int64_t tz_convert_from_utc_single(
     ----------
     utc_val : int64
     tz : tzinfo
-    reso : NPY_DATETIMEUNIT, default NPY_FR_ns
+    creso : NPY_DATETIMEUNIT, default NPY_FR_ns
 
     Returns
     -------
     converted: int64
     """
     cdef:
-        Localizer info = Localizer(tz, reso=reso)
+        Localizer info = Localizer(tz, creso=creso)
         Py_ssize_t pos
 
     # Note: caller is responsible for ensuring utc_val != NPY_NAT
@@ -598,7 +598,7 @@ cdef int64_t _tz_localize_using_tzinfo_api(
     int64_t val,
     tzinfo tz,
     bint to_utc=True,
-    NPY_DATETIMEUNIT reso=NPY_DATETIMEUNIT.NPY_FR_ns,
+    NPY_DATETIMEUNIT creso=NPY_DATETIMEUNIT.NPY_FR_ns,
     bint* fold=NULL,
 ) except? -1:
     """
@@ -613,7 +613,7 @@ cdef int64_t _tz_localize_using_tzinfo_api(
     tz : tzinfo
     to_utc : bint
         True if converting _to_ UTC, False if going the other direction.
-    reso : NPY_DATETIMEUNIT
+    creso : NPY_DATETIMEUNIT
     fold : bint*, default NULL
         pointer to fold: whether datetime ends up in a fold or not
         after adjustment.
@@ -633,9 +633,9 @@ cdef int64_t _tz_localize_using_tzinfo_api(
         datetime dt
         int64_t delta
         timedelta td
-        int64_t pps = periods_per_second(reso)
+        int64_t pps = periods_per_second(creso)
 
-    pandas_datetime_to_datetimestruct(val, reso, &dts)
+    pandas_datetime_to_datetimestruct(val, creso, &dts)
 
     # datetime_new is cython-optimized constructor
     if not to_utc:
diff --git a/pandas/_libs/tslibs/vectorized.pyx b/pandas/_libs/tslibs/vectorized.pyx
index c1784c53a7857..6a6b156af3dc4 100644
--- a/pandas/_libs/tslibs/vectorized.pyx
+++ b/pandas/_libs/tslibs/vectorized.pyx
@@ -56,7 +56,7 @@ def tz_convert_from_utc(ndarray stamps, tzinfo tz, NPY_DATETIMEUNIT reso=NPY_FR_
     ndarray[int64]
     """
     cdef:
-        Localizer info = Localizer(tz, reso=reso)
+        Localizer info = Localizer(tz, creso=reso)
         int64_t utc_val, local_val
         Py_ssize_t pos, i, n = stamps.size
 
@@ -131,7 +131,7 @@ def ints_to_pydatetime(
     ndarray[object] of type specified by box
     """
     cdef:
-        Localizer info = Localizer(tz, reso=reso)
+        Localizer info = Localizer(tz, creso=reso)
         int64_t utc_val, local_val
         Py_ssize_t i, n = stamps.size
         Py_ssize_t pos = -1  # unused, avoid not-initialized warning
@@ -234,7 +234,7 @@ def get_resolution(
 ) -> Resolution:
     # stamps is int64_t, any ndim
     cdef:
-        Localizer info = Localizer(tz, reso=reso)
+        Localizer info = Localizer(tz, creso=reso)
         int64_t utc_val, local_val
         Py_ssize_t i, n = stamps.size
         Py_ssize_t pos = -1  # unused, avoid not-initialized warning
@@ -286,7 +286,7 @@ cpdef ndarray normalize_i8_timestamps(ndarray stamps, tzinfo tz, NPY_DATETIMEUNI
     result : int64 ndarray of converted of normalized nanosecond timestamps
     """
     cdef:
-        Localizer info = Localizer(tz, reso=reso)
+        Localizer info = Localizer(tz, creso=reso)
         int64_t utc_val, local_val, res_val
         Py_ssize_t i, n = stamps.size
         Py_ssize_t pos = -1  # unused, avoid not-initialized warning
@@ -333,7 +333,7 @@ def is_date_array_normalized(ndarray stamps, tzinfo tz, NPY_DATETIMEUNIT reso) -
     is_normalized : bool
         True if all stamps are normalized
     """
     cdef:
-        Localizer info = Localizer(tz, reso=reso)
+        Localizer info = Localizer(tz, creso=reso)
         int64_t utc_val, local_val
         Py_ssize_t i, n = stamps.size
         Py_ssize_t pos = -1  # unused, avoid not-initialized warning
@@ -364,7 +364,7 @@ def dt64arr_to_periodarr(
 ):
     # stamps is int64_t, arbitrary ndim
     cdef:
-        Localizer info = Localizer(tz, reso=reso)
+        Localizer info = Localizer(tz, creso=reso)
         Py_ssize_t i, n = stamps.size
         Py_ssize_t pos = -1  # unused, avoid not-initialized warning
         int64_t utc_val, local_val, res_val
diff --git a/pandas/core/arrays/datetimes.py b/pandas/core/arrays/datetimes.py
index 9ecd9473c903b..3337fd5f3cbd6 100644
--- a/pandas/core/arrays/datetimes.py
+++ b/pandas/core/arrays/datetimes.py
@@ -1021,7 +1021,7 @@ def tz_localize(
                 tz,
                 ambiguous=ambiguous,
                 nonexistent=nonexistent,
-                reso=self._creso,
+                creso=self._creso,
             )
             new_dates = new_dates.view(f"M8[{self._unit}]")
         dtype = tz_to_dtype(tz, unit=self._unit)
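
Note on the rename (not part of the patch above): the one function here that the updated tzconversion.pyi stub marks as "exposed for testing" is tz_convert_from_utc_single, and after this patch its resolution keyword is spelled creso rather than reso. The sketch below is a minimal sanity check under two assumptions that are not stated in the diff: that you are running a build containing this patch, and that NPY_DATETIMEUNIT.NPY_FR_ns corresponds to the integer 10 (the stub only types creso as int).

# Hypothetical post-patch sanity check -- illustrative only, not part of the patch.
from datetime import timedelta, timezone

import numpy as np

from pandas._libs.tslibs.tzconversion import tz_convert_from_utc_single

NPY_FR_NS = 10  # assumed numeric value of NPY_DATETIMEUNIT.NPY_FR_ns

# 2022-01-01 00:00:00 UTC expressed as nanoseconds since the epoch.
utc_val = np.int64(1_640_995_200_000_000_000)
tz = timezone(timedelta(hours=1))  # fixed +01:00 offset

# The keyword is now `creso`; the old `reso=` spelling is no longer accepted.
local_val = tz_convert_from_utc_single(utc_val, tz, creso=NPY_FR_NS)
assert local_val - utc_val == 3_600_000_000_000  # one hour, in nanoseconds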