@@ -1501,7 +1501,6 @@ def _cython_agg_general(
        #  that goes through SeriesGroupBy

        data = self._get_data_to_aggregate(numeric_only=numeric_only, name=how)
-        is_ser = data.ndim == 1

        def array_func(values: ArrayLike) -> ArrayLike:
            try:
@@ -1523,16 +1522,12 @@ def array_func(values: ArrayLike) -> ArrayLike:
            return result

        new_mgr = data.grouped_reduce(array_func)
-
        res = self._wrap_agged_manager(new_mgr)
-        if is_ser:
-            if self.as_index:
-                res.index = self.grouper.result_index
-            else:
-                res = self._insert_inaxis_grouper(res)
-            return self._reindex_output(res)
-        else:
-            return res
+        out = self._wrap_aggregated_output(res)
+        if data.ndim == 2:
+            # TODO: don't special-case DataFrame vs Series
+            out = out.infer_objects(copy=False)
+        return out

    def _cython_transform(
        self, how: str, numeric_only: bool = False, axis: AxisInt = 0, **kwargs
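Both hunks above converge on a single wrapping pipeline: grouped_reduce produces a manager, _wrap_agged_manager turns it back into a Series or DataFrame, and _wrap_aggregated_output attaches the group index, so _cython_agg_general no longer branches on is_ser. A minimal sketch of the user-visible behavior this path serves (illustrative only; the sample frame and expected shapes are assumptions, not taken from this diff):

import pandas as pd

df = pd.DataFrame({"key": ["a", "a", "b"], "val": [1, 2, 3]})

# DataFrameGroupBy reduction: data.ndim == 2, result is a DataFrame
frame_res = df.groupby("key").sum()

# SeriesGroupBy reduction: data.ndim == 1, same wrapping path, result is a Series
series_res = df.groupby("key")["val"].sum()

print(frame_res)   # one row per group, "key" as the index
print(series_res)  # same values as frame_res["val"]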
@@ -1793,19 +1788,14 @@ def hfunc(bvalues: ArrayLike) -> ArrayLike:
            return counted

        new_mgr = data.grouped_reduce(hfunc)
+        new_obj = self._wrap_agged_manager(new_mgr)

        # If we are grouping on categoricals we want unobserved categories to
        # return zero, rather than the default of NaN which the reindexing in
-        # _wrap_agged_manager() returns. GH 35028
+        # _wrap_aggregated_output() returns. GH 35028
        # e.g. test_dataframe_groupby_on_2_categoricals_when_observed_is_false
        with com.temp_setattr(self, "observed", True):
-            result = self._wrap_agged_manager(new_mgr)
-
-            if result.ndim == 1:
-                if self.as_index:
-                    result.index = self.grouper.result_index
-                else:
-                    result = self._insert_inaxis_grouper(result)
+            result = self._wrap_aggregated_output(new_obj)

        return self._reindex_output(result, fill_value=0)

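The comment in this hunk points at GH 35028: when grouping on categoricals with observed=False, count() should report 0 for unobserved categories rather than NaN, which is why the reindex below the with-block uses fill_value=0. A small illustrative sketch of that behavior (the expected output is inferred from the issue, not from this diff):

import pandas as pd

cat = pd.Categorical(["a", "a"], categories=["a", "b"])
df = pd.DataFrame({"key": cat, "val": [1, 2]})

# "b" is never observed, but with observed=False it still appears in the
# result, counted as 0 rather than NaN
counts = df.groupby("key", observed=False)["val"].count()
print(counts)
# expected:
# key
# a    2
# b    0
# Name: val, dtype: int64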
@@ -2790,9 +2780,7 @@ def blk_func(values: ArrayLike) -> ArrayLike:
        mgr = obj._mgr
        res_mgr = mgr.apply(blk_func)

-        new_obj = obj._constructor(res_mgr)
-        if isinstance(new_obj, Series):
-            new_obj.name = obj.name
+        new_obj = self._wrap_agged_manager(res_mgr)

        if self.axis == 1:
            # Only relevant for DataFrameGroupBy
@@ -3197,15 +3185,10 @@ def blk_func(values: ArrayLike) -> ArrayLike:
                out = out.reshape(ncols, ngroups * nqs)

            return post_processor(out, inference, result_mask, orig_vals)

-        obj = self._obj_with_exclusions
-        is_ser = obj.ndim == 1
        data = self._get_data_to_aggregate(numeric_only=numeric_only, name="quantile")
        res_mgr = data.grouped_reduce(blk_func)

-        if is_ser:
-            res = self._wrap_agged_manager(res_mgr)
-        else:
-            res = obj._constructor(res_mgr)
+        res = self._wrap_agged_manager(res_mgr)

        if orig_scalar:
            # Avoid expensive MultiIndex construction
@@ -3652,19 +3635,12 @@ def blk_func(values: ArrayLike) -> ArrayLike:
3652
3635
3653
3636
return result .T
3654
3637
3655
- obj = self ._obj_with_exclusions
3656
-
3657
3638
# Operate block-wise instead of column-by-column
3658
- is_ser = obj .ndim == 1
3659
3639
mgr = self ._get_data_to_aggregate (numeric_only = numeric_only , name = how )
3660
3640
3661
3641
res_mgr = mgr .grouped_reduce (blk_func )
3662
3642
3663
- if is_ser :
3664
- out = self ._wrap_agged_manager (res_mgr )
3665
- else :
3666
- out = obj ._constructor (res_mgr )
3667
-
3643
+ out = self ._wrap_agged_manager (res_mgr )
3668
3644
return self ._wrap_aggregated_output (out )
3669
3645
3670
3646
@final
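Across these hunks the removed branches did the same two jobs by hand: set res.index = self.grouper.result_index when as_index=True, or call _insert_inaxis_grouper when as_index=False. That is the behavior _wrap_aggregated_output centralizes. A short sketch of the two output shapes involved (illustrative; the sample data is an assumption):

import pandas as pd

df = pd.DataFrame({"key": ["a", "b", "a"], "x": [1.0, 2.0, 3.0]})

# as_index=True (default): the group labels become the result index
print(df.groupby("key").mean())

# as_index=False: the group labels are kept as a regular column instead
print(df.groupby("key", as_index=False).mean())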