@@ -46,7 +46,6 @@
 
 from pandas.core.dtypes.cast import (
     find_common_type,
-    maybe_cast_result_dtype,
     maybe_downcast_numeric,
 )
 from pandas.core.dtypes.common import (
@@ -58,7 +57,6 @@
     is_interval_dtype,
     is_numeric_dtype,
     is_scalar,
-    needs_i8_conversion,
 )
 from pandas.core.dtypes.missing import (
     isna,
@@ -1104,13 +1102,11 @@ def _cython_agg_manager(
 
         using_array_manager = isinstance(data, ArrayManager)
 
-        def cast_agg_result(result, values: ArrayLike, how: str) -> ArrayLike:
+        def cast_agg_result(
+            result: ArrayLike, values: ArrayLike, how: str
+        ) -> ArrayLike:
             # see if we can cast the values to the desired dtype
             # this may not be the original dtype
-            assert not isinstance(result, DataFrame)
-
-            dtype = maybe_cast_result_dtype(values.dtype, how)
-            result = maybe_downcast_numeric(result, dtype)
 
             if isinstance(values, Categorical) and isinstance(result, np.ndarray):
                 # If the Categorical op didn't raise, it is dtype-preserving
@@ -1125,6 +1121,7 @@ def cast_agg_result(result, values: ArrayLike, how: str) -> ArrayLike:
             ):
                 # We went through a SeriesGroupByPath and need to reshape
                 # GH#32223 includes case with IntegerArray values
+                # We only get here with values.dtype == object
                 result = result.reshape(1, -1)
                 # test_groupby_duplicate_columns gets here with
                 # result.dtype == int64, values.dtype=object, how="min"
@@ -1140,8 +1137,11 @@ def py_fallback(values: ArrayLike) -> ArrayLike:
 
             # call our grouper again with only this block
             if values.ndim == 1:
+                # We only get here with ExtensionArray
+
                 obj = Series(values)
             else:
+                # We only get here with values.dtype == object
                 # TODO special case not needed with ArrayManager
                 obj = DataFrame(values.T)
                 if obj.shape[1] == 1:
@@ -1193,7 +1193,8 @@ def array_func(values: ArrayLike) -> ArrayLike:
 
                 result = py_fallback(values)
 
-            return cast_agg_result(result, values, how)
+                return cast_agg_result(result, values, how)
+            return result
 
         # TypeError -> we may have an exception in trying to aggregate
         # continue and exclude the block
@@ -1366,11 +1367,7 @@ def _wrap_applied_output_series(
 
         # if we have date/time like in the original, then coerce dates
         # as we are stacking can easily have object dtypes here
-        so = self._selected_obj
-        if so.ndim == 2 and so.dtypes.apply(needs_i8_conversion).any():
-            result = result._convert(datetime=True)
-        else:
-            result = result._convert(datetime=True)
+        result = result._convert(datetime=True)
 
         if not self.as_index:
             self._insert_inaxis_grouper_inplace(result)
@@ -1507,7 +1504,7 @@ def _choose_path(self, fast_path: Callable, slow_path: Callable, group: DataFram
         try:
             res_fast = fast_path(group)
         except AssertionError:
-            raise
+            raise  # pragma: no cover
         except Exception:
             # GH#29631 For user-defined function, we can't predict what may be
             # raised; see test_transform.test_transform_fastpath_raises