@@ -60,7 +60,7 @@
 from pandas.core.dtypes.cast import (
     convert_dtypes,
     maybe_box_native,
-    maybe_cast_pointwise_result,
+    maybe_cast_result,
     validate_numeric_casting,
 )
 from pandas.core.dtypes.common import (
@@ -100,7 +100,6 @@
 import pandas.core.common as com
 from pandas.core.construction import (
     create_series_with_explicit_dtype,
-    ensure_wrapped_if_datetimelike,
     extract_array,
     is_empty_data,
     sanitize_array,
@@ -113,15 +112,15 @@
 from pandas.core.indexes.accessors import CombinedDatetimelikeProperties
 from pandas.core.indexes.api import (
     CategoricalIndex,
-    DatetimeIndex,
     Float64Index,
     Index,
     MultiIndex,
-    PeriodIndex,
-    TimedeltaIndex,
     ensure_index,
 )
 import pandas.core.indexes.base as ibase
+from pandas.core.indexes.datetimes import DatetimeIndex
+from pandas.core.indexes.period import PeriodIndex
+from pandas.core.indexes.timedeltas import TimedeltaIndex
 from pandas.core.indexing import check_bool_indexer
 from pandas.core.internals import (
     SingleArrayManager,
@@ -864,7 +863,7 @@ def take(self, indices, axis=0, is_copy=None, **kwargs) -> Series:
         result = self._constructor(new_values, index=new_index, fastpath=True)
         return result.__finalize__(self, method="take")
 
-    def _take_with_is_copy(self, indices, axis=0) -> Series:
+    def _take_with_is_copy(self, indices, axis=0):
         """
         Internal version of the `take` method that sets the `_is_copy`
         attribute to keep track of the parent dataframe (using in indexing
@@ -1020,7 +1019,7 @@ def _get_value(self, label, takeable: bool = False):
         loc = self.index.get_loc(label)
         return self.index._get_values_for_loc(self, loc, label)
 
-    def __setitem__(self, key, value) -> None:
+    def __setitem__(self, key, value):
         key = com.apply_if_callable(key, self)
         cacher_needs_updating = self._check_is_chained_assignment_possible()
 
@@ -1059,7 +1058,7 @@ def __setitem__(self, key, value) -> None:
         if cacher_needs_updating:
             self._maybe_update_cacher()
 
-    def _set_with_engine(self, key, value) -> None:
+    def _set_with_engine(self, key, value):
         # fails with AttributeError for IntervalIndex
         loc = self.index._engine.get_loc(key)
         # error: Argument 1 to "validate_numeric_casting" has incompatible type
@@ -1095,15 +1094,15 @@ def _set_with(self, key, value):
         else:
             self.loc[key] = value
 
-    def _set_labels(self, key, value) -> None:
+    def _set_labels(self, key, value):
         key = com.asarray_tuplesafe(key)
         indexer: np.ndarray = self.index.get_indexer(key)
         mask = indexer == -1
         if mask.any():
             raise KeyError(f"{key[mask]} not in index")
         self._set_values(indexer, value)
 
-    def _set_values(self, key, value) -> None:
+    def _set_values(self, key, value):
         if isinstance(key, Series):
             key = key._values
 
@@ -1892,7 +1891,7 @@ def count(self, level=None):
         2
         """
         if level is None:
-            return notna(self._values).sum().astype("int64")
+            return notna(self._values).sum()
         else:
             warnings.warn(
                 "Using the level keyword in DataFrame and Series aggregations is "
@@ -1994,12 +1993,15 @@ def unique(self) -> ArrayLike:
         ['2016-01-01 00:00:00-05:00']
         Length: 1, dtype: datetime64[ns, US/Eastern]
 
-        An Categorical will return categories in the order of
-        appearance and with the same dtype.
+        An unordered Categorical will return categories in the order of
+        appearance.
 
         >>> pd.Series(pd.Categorical(list('baabc'))).unique()
         ['b', 'a', 'c']
-        Categories (3, object): ['a', 'b', 'c']
+        Categories (3, object): ['b', 'a', 'c']
+
+        An ordered Categorical preserves the category ordering.
+
         >>> pd.Series(pd.Categorical(list('baabc'), categories=list('abc'),
         ... ordered=True)).unique()
         ['b', 'a', 'c']
@@ -2754,15 +2756,13 @@ def __rmatmul__(self, other):
         return self.dot(np.transpose(other))
 
     @doc(base.IndexOpsMixin.searchsorted, klass="Series")
-    def searchsorted(self, value, side="left", sorter=None) -> np.ndarray:
+    def searchsorted(self, value, side="left", sorter=None):
         return algorithms.searchsorted(self._values, value, side=side, sorter=sorter)
 
     # -------------------------------------------------------------------
     # Combination
 
-    def append(
-        self, to_append, ignore_index: bool = False, verify_integrity: bool = False
-    ):
+    def append(self, to_append, ignore_index=False, verify_integrity=False):
         """
         Concatenate two or more Series.
 
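For reference, `Series.searchsorted` (whose signature is touched above) returns the insertion position(s) that keep the sorted Series sorted — a scalar for scalar input, an integer array otherwise. A minimal sketch with assumed data:

>>> import pandas as pd
>>> ser = pd.Series([1, 2, 3])
>>> ser.searchsorted(2)
1
>>> ser.searchsorted([0, 4])
array([0, 3])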
@@ -2846,7 +2846,7 @@ def append(
             to_concat, ignore_index=ignore_index, verify_integrity=verify_integrity
         )
 
-    def _binop(self, other: Series, func, level=None, fill_value=None):
+    def _binop(self, other, func, level=None, fill_value=None):
         """
         Perform generic binary operation with optional fill value.
 
@@ -2873,7 +2873,7 @@ def _binop(self, other: Series, func, level=None, fill_value=None):
         if not self.index.equals(other.index):
             this, other = self.align(other, level=level, join="outer", copy=False)
 
-        this_vals, other_vals = ops.fill_binop(this._values, other._values, fill_value)
+        this_vals, other_vals = ops.fill_binop(this.values, other.values, fill_value)
 
         with np.errstate(all="ignore"):
             result = func(this_vals, other_vals)
@@ -3071,24 +3071,22 @@ def combine(self, other, func, fill_value=None) -> Series:
             # so do this element by element
             new_index = self.index.union(other.index)
             new_name = ops.get_op_result_name(self, other)
-            new_values = np.empty(len(new_index), dtype=object)
-            for i, idx in enumerate(new_index):
+            new_values = []
+            for idx in new_index:
                 lv = self.get(idx, fill_value)
                 rv = other.get(idx, fill_value)
                 with np.errstate(all="ignore"):
-                    new_values[i] = func(lv, rv)
+                    new_values.append(func(lv, rv))
         else:
             # Assume that other is a scalar, so apply the function for
             # each element in the Series
             new_index = self.index
-            new_values = np.empty(len(new_index), dtype=object)
             with np.errstate(all="ignore"):
-                new_values[:] = [func(lv, other) for lv in self._values]
+                new_values = [func(lv, other) for lv in self._values]
             new_name = self.name
 
-        # try_float=False is to match _aggregate_series_pure_python
-        npvalues = lib.maybe_convert_objects(new_values, try_float=False)
-        res_values = maybe_cast_pointwise_result(npvalues, self.dtype, same_dtype=False)
+        res_values = sanitize_array(new_values, None)
+        res_values = maybe_cast_result(res_values, self.dtype, same_dtype=False)
         return self._constructor(res_values, index=new_index, name=new_name)
 
     def combine_first(self, other) -> Series:
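A minimal sketch of the elementwise path changed above: `Series.combine` applies `func` label by label over the aligned indexes and then infers a dtype for the collected results (illustrative data, not part of the patch):

>>> import pandas as pd
>>> s1 = pd.Series([1, 2, 3])
>>> s2 = pd.Series([0, 3, 4])
>>> s1.combine(s2, max)
0    1
1    3
2    4
dtype: int64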
@@ -3611,7 +3609,7 @@ def argsort(self, axis=0, kind="quicksort", order=None) -> Series:
 
         Returns
         -------
-        Series[np.intp]
+        Series
             Positions of values within the sort order with -1 indicating
             nan values.
 
@@ -3732,7 +3730,7 @@ def nlargest(self, n=5, keep="first") -> Series:
         """
         return algorithms.SelectNSeries(self, n=n, keep=keep).nlargest()
 
-    def nsmallest(self, n: int = 5, keep: str = "first") -> Series:
+    def nsmallest(self, n=5, keep="first") -> Series:
         """
         Return the smallest `n` elements.
 
@@ -3944,7 +3942,7 @@ def explode(self, ignore_index: bool = False) -> Series:
 
         return self._constructor(values, index=index, name=self.name)
 
-    def unstack(self, level=-1, fill_value=None) -> DataFrame:
+    def unstack(self, level=-1, fill_value=None):
         """
         Unstack, also known as pivot, Series with MultiIndex to produce DataFrame.
 
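For reference, a minimal sketch of `Series.unstack` turning the innermost index level into DataFrame columns (assumes a two-level MultiIndex; example data is made up):

>>> import pandas as pd
>>> s = pd.Series([1, 2, 3, 4],
...               index=pd.MultiIndex.from_product([['one', 'two'], ['a', 'b']]))
>>> s.unstack(level=-1)
     a  b
one  1  2
two  3  4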
@@ -4169,8 +4167,7 @@ def apply(
             Python function or NumPy ufunc to apply.
         convert_dtype : bool, default True
             Try to find better dtype for elementwise function results. If
-            False, leave as dtype=object. Note that the dtype is always
-            preserved for extension array dtypes, such as Categorical.
+            False, leave as dtype=object.
         args : tuple
             Positional arguments passed to func after the series value.
         **kwargs
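To make the `convert_dtype` description above concrete, a small sketch (illustrative data; dtypes as inferred by pandas):

>>> import pandas as pd
>>> s = pd.Series([1, 2, 3])
>>> s.apply(lambda x: x * 2).dtype
dtype('int64')
>>> s.apply(lambda x: x * 2, convert_dtype=False).dtype
dtype('O')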
@@ -4190,7 +4187,7 @@ def apply(
         Notes
         -----
         Functions that mutate the passed object can produce unexpected
-        behavior or errors and are not supported. See :ref:`gotchas.udf-mutation`
+        behavior or errors and are not supported. See :ref:`udf-mutation`
         for more details.
 
         Examples
@@ -4297,11 +4294,7 @@ def _reduce(
             with np.errstate(all="ignore"):
                 return op(delegate, skipna=skipna, **kwds)
 
-    def _reindex_indexer(
-        self, new_index: Index | None, indexer: np.ndarray | None, copy: bool
-    ) -> Series:
-        # Note: new_index is None iff indexer is None
-        # if not None, indexer is np.intp
+    def _reindex_indexer(self, new_index, indexer, copy):
         if indexer is None:
             if copy:
                 return self.copy()
@@ -4319,9 +4312,8 @@ def _needs_reindex_multi(self, axes, method, level) -> bool:
         """
         return False
 
-    # error: Cannot determine type of 'align'
     @doc(
-        NDFrame.align,  # type: ignore[has-type]
+        NDFrame.align,
         klass=_shared_doc_kwargs["klass"],
         axes_single_arg=_shared_doc_kwargs["axes_single_arg"],
     )
@@ -4473,9 +4465,8 @@ def set_axis(self, labels, axis: Axis = ..., inplace: bool = ...) -> Series | None:
     def set_axis(self, labels, axis: Axis = 0, inplace: bool = False):
         return super().set_axis(labels, axis=axis, inplace=inplace)
 
-    # error: Cannot determine type of 'reindex'
     @doc(
-        NDFrame.reindex,  # type: ignore[has-type]
+        NDFrame.reindex,
         klass=_shared_doc_kwargs["klass"],
         axes=_shared_doc_kwargs["axes"],
         optional_labels=_shared_doc_kwargs["optional_labels"],
@@ -4705,8 +4696,7 @@ def fillna(
     ) -> Series | None:
         ...
 
-    # error: Cannot determine type of 'fillna'
-    @doc(NDFrame.fillna, **_shared_doc_kwargs)  # type: ignore[has-type]
+    @doc(NDFrame.fillna, **_shared_doc_kwargs)
     def fillna(
         self,
         value=None,
@@ -4752,9 +4742,8 @@ def pop(self, item: Hashable) -> Any:
         """
         return super().pop(item=item)
 
-    # error: Cannot determine type of 'replace'
     @doc(
-        NDFrame.replace,  # type: ignore[has-type]
+        NDFrame.replace,
         klass=_shared_doc_kwargs["klass"],
         inplace=_shared_doc_kwargs["inplace"],
         replace_iloc=_shared_doc_kwargs["replace_iloc"],
@@ -4802,8 +4791,7 @@ def _replace_single(self, to_replace, method: str, inplace: bool, limit):
 
         return result
 
-    # error: Cannot determine type of 'shift'
-    @doc(NDFrame.shift, klass=_shared_doc_kwargs["klass"])  # type: ignore[has-type]
+    @doc(NDFrame.shift, klass=_shared_doc_kwargs["klass"])
    def shift(self, periods=1, freq=None, axis=0, fill_value=None) -> Series:
         return super().shift(
             periods=periods, freq=freq, axis=axis, fill_value=fill_value
@@ -5038,23 +5026,19 @@ def _convert_dtypes(
             result = input_series.copy()
         return result
 
-    # error: Cannot determine type of 'isna'
-    @doc(NDFrame.isna, klass=_shared_doc_kwargs["klass"])  # type: ignore[has-type]
+    @doc(NDFrame.isna, klass=_shared_doc_kwargs["klass"])
     def isna(self) -> Series:
         return generic.NDFrame.isna(self)
 
-    # error: Cannot determine type of 'isna'
-    @doc(NDFrame.isna, klass=_shared_doc_kwargs["klass"])  # type: ignore[has-type]
+    @doc(NDFrame.isna, klass=_shared_doc_kwargs["klass"])
     def isnull(self) -> Series:
         return super().isnull()
 
-    # error: Cannot determine type of 'notna'
-    @doc(NDFrame.notna, klass=_shared_doc_kwargs["klass"])  # type: ignore[has-type]
+    @doc(NDFrame.notna, klass=_shared_doc_kwargs["klass"])
     def notna(self) -> Series:
         return super().notna()
 
-    # error: Cannot determine type of 'notna'
-    @doc(NDFrame.notna, klass=_shared_doc_kwargs["klass"])  # type: ignore[has-type]
+    @doc(NDFrame.notna, klass=_shared_doc_kwargs["klass"])
     def notnull(self) -> Series:
         return super().notnull()
 
@@ -5149,8 +5133,7 @@ def dropna(self, axis=0, inplace=False, how=None):
     # ----------------------------------------------------------------------
     # Time series-oriented methods
 
-    # error: Cannot determine type of 'asfreq'
-    @doc(NDFrame.asfreq, **_shared_doc_kwargs)  # type: ignore[has-type]
+    @doc(NDFrame.asfreq, **_shared_doc_kwargs)
     def asfreq(
         self,
         freq,
@@ -5167,8 +5150,7 @@ def asfreq(
             fill_value=fill_value,
         )
 
-    # error: Cannot determine type of 'resample'
-    @doc(NDFrame.resample, **_shared_doc_kwargs)  # type: ignore[has-type]
+    @doc(NDFrame.resample, **_shared_doc_kwargs)
     def resample(
         self,
         rule,
@@ -5313,8 +5295,6 @@ def _arith_method(self, other, op):
 
         lvalues = self._values
         rvalues = extract_array(other, extract_numpy=True, extract_range=True)
-        rvalues = ops.maybe_prepare_scalar_for_op(rvalues, lvalues.shape)
-        rvalues = ensure_wrapped_if_datetimelike(rvalues)
 
         with np.errstate(all="ignore"):
             result = ops.arithmetic_op(lvalues, rvalues, op)