Commit 1574b0e

REF: dispatch Series.quantile to DataFrame, remove ScalarBlock (#24606)

jbrockmendel authored and jreback committed
1 parent 6249355 · commit 1574b0e

5 files changed: +24 -49 lines
pandas/core/internals/__init__.py (+1, -2)

@@ -5,8 +5,7 @@
     make_block,  # io.pytables, io.packers
     FloatBlock, IntBlock, ComplexBlock, BoolBlock, ObjectBlock,
     TimeDeltaBlock, DatetimeBlock, DatetimeTZBlock,
-    CategoricalBlock, ExtensionBlock, ScalarBlock,
-    Block)
+    CategoricalBlock, ExtensionBlock, Block)
 from .managers import (  # noqa:F401
     BlockManager, SingleBlockManager,
     create_block_manager_from_arrays, create_block_manager_from_blocks,

pandas/core/internals/blocks.py (+6, -35)

@@ -222,12 +222,6 @@ def make_block(self, values, placement=None, ndim=None):
 
         return make_block(values, placement=placement, ndim=ndim)
 
-    def make_block_scalar(self, values):
-        """
-        Create a ScalarBlock
-        """
-        return ScalarBlock(values)
-
     def make_block_same_class(self, values, placement=None, ndim=None,
                               dtype=None):
         """ Wrap given values in a block of same type as self. """

@@ -1468,13 +1462,15 @@ def quantile(self, qs, interpolation='linear', axis=0):
             else:
                 # create the array of na_values
                 # 2d len(values) * len(qs)
-                result = np.repeat(np.array([self._na_value] * len(qs)),
+                result = np.repeat(np.array([self.fill_value] * len(qs)),
                                    len(values)).reshape(len(values),
                                                         len(qs))
         else:
-            mask = isna(self.values)
+            # asarray needed for Sparse, see GH#24600
+            # TODO: Why self.values and not values?
+            mask = np.asarray(isna(self.values))
             result = nanpercentile(values, np.array(qs) * 100,
-                                   axis=axis, na_value=self._na_value,
+                                   axis=axis, na_value=self.fill_value,
                                    mask=mask, ndim=self.ndim,
                                    interpolation=interpolation)
 
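The hunk above is the heart of the block-level change: NA positions are masked (via np.asarray(isna(...))) before percentiles are taken, and the block's fill_value stands in for all-NA data. Below is a minimal NumPy sketch of that masked-percentile idea; the helper name masked_percentile and its signature are illustrative assumptions, not code from this commit.

```python
import numpy as np

def masked_percentile(values, qs, mask, fill_value=np.nan):
    """Row-wise percentiles, ignoring masked (NA) positions.

    values/mask are 2D with one row per column; qs are fractions in
    [0, 1]. Rows that are entirely NA get fill_value (an assumption
    mirroring the block's fill_value in the hunk above).
    """
    out = np.empty((len(values), len(qs)), dtype=np.float64)
    for i, (row, row_mask) in enumerate(zip(values, mask)):
        valid = row[~row_mask]
        if len(valid) == 0:
            out[i] = fill_value  # all-NA row -> fill value for every quantile
        else:
            out[i] = np.percentile(valid, np.array(qs) * 100)
    return out

arr = np.array([[1.0, np.nan, 3.0],
                [np.nan, np.nan, np.nan]])
print(masked_percentile(arr, [0.25, 0.75], np.isnan(arr)))
# [[1.5 2.5]
#  [nan nan]]
```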

@@ -1490,8 +1486,6 @@ def quantile(self, qs, interpolation='linear', axis=0):
 
         ndim = getattr(result, 'ndim', None) or 0
         result = self._try_coerce_result(result)
-        if lib.is_scalar(result):
-            return self.make_block_scalar(result)
         return make_block(result,
                           placement=np.arange(len(result)),
                           ndim=ndim)

@@ -1534,29 +1528,6 @@ def _replace_coerce(self, to_replace, value, inplace=True, regex=False,
         return self
 
 
-class ScalarBlock(Block):
-    """
-    a scalar compat Block
-    """
-    __slots__ = ['_mgr_locs', 'values', 'ndim']
-
-    def __init__(self, values):
-        self.ndim = 0
-        self.mgr_locs = [0]
-        self.values = values
-
-    @property
-    def dtype(self):
-        return type(self.values)
-
-    @property
-    def shape(self):
-        return tuple([0])
-
-    def __len__(self):
-        return 0
-
-
 class NonConsolidatableMixIn(object):
     """ hold methods for the nonconsolidatable blocks """
     _can_consolidate = False

@@ -2675,7 +2646,7 @@ def convert(self, *args, **kwargs):
 
         if args:
             raise NotImplementedError
-        by_item = True if 'by_item' not in kwargs else kwargs['by_item']
+        by_item = kwargs.get('by_item', True)
 
         new_inputs = ['coerce', 'datetime', 'numeric', 'timedelta']
         new_style = False

pandas/core/internals/managers.py (+6, -9)

@@ -425,6 +425,10 @@ def quantile(self, axis=0, consolidate=True, transposed=False,
             Block Manager (new object)
         """
 
+        # Series dispatches to DataFrame for quantile, which allows us to
+        # simplify some of the code here and in the blocks
+        assert self.ndim >= 2
+
         if consolidate:
             self._consolidate_inplace()
 

@@ -449,6 +453,7 @@ def get_axe(block, qs, axes):
 
         # note that some DatetimeTZ, Categorical are always ndim==1
         ndim = {b.ndim for b in blocks}
+        assert 0 not in ndim, ndim
 
         if 2 in ndim:
 

@@ -474,15 +479,7 @@ def get_axe(block, qs, axes):
 
             return self.__class__(blocks, new_axes)
 
-        # 0 ndim
-        if 0 in ndim and 1 not in ndim:
-            values = np.array([b.values for b in blocks])
-            if len(values) == 1:
-                return values.item()
-            blocks = [make_block(values, ndim=1)]
-            axes = Index([ax[0] for ax in axes])
-
-        # single block
+        # single block, i.e. ndim == {1}
        values = _concat._concat_compat([b.values for b in blocks])
 
         # compute the orderings of our original data
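For reference, the 2D-only assumption the manager now makes corresponds to the public behavior sketched below; this is an illustrative usage example, not code from the commit.

```python
import pandas as pd

df = pd.DataFrame({"a": [1.0, 2.0, 3.0], "b": [4.0, 5.0, 6.0]})

print(df.quantile(0.5))           # scalar q -> Series indexed by column
print(df.quantile([0.25, 0.75]))  # list q -> DataFrame indexed by the quantiles
```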

pandas/core/series.py (+10, -2)

@@ -1987,15 +1987,23 @@ def quantile(self, q=0.5, interpolation='linear'):
 
         self._check_percentile(q)
 
-        result = self._data.quantile(qs=q, interpolation=interpolation)
+        # We dispatch to DataFrame so that core.internals only has to worry
+        # about 2D cases.
+        df = self.to_frame()
+
+        result = df.quantile(q=q, interpolation=interpolation,
+                             numeric_only=False)
+        if result.ndim == 2:
+            result = result.iloc[:, 0]
 
         if is_list_like(q):
+            result.name = self.name
             return self._constructor(result,
                                      index=Float64Index(q),
                                      name=self.name)
         else:
             # scalar
-            return result
+            return result.iloc[0]
 
     def corr(self, other, method='pearson', min_periods=None):
         """
pandas/tests/resample/test_base.py (+1, -1)

@@ -218,5 +218,5 @@ def test_resample_quantile_all_ts(series):
     q = 0.75
     freq = 'H'
     result = s.resample(freq).quantile(q)
-    expected = s.resample(freq).agg(lambda x: x.quantile(q))
+    expected = s.resample(freq).agg(lambda x: x.quantile(q)).rename(s.name)
     tm.assert_series_equal(result, expected)
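The added .rename(s.name) reflects that the resampled quantile now carries the original Series name through, while the lambda-based aggregation drops it. A rough sketch of the behavior the test asserts, using an assumed toy index and frequency rather than the fixtures from the test suite:

```python
import numpy as np
import pandas as pd

idx = pd.date_range("2019-01-01", periods=6, freq="min")
s = pd.Series(np.arange(6, dtype=float), index=idx, name="ts")

result = s.resample("3min").quantile(0.75)
print(result.name)  # 'ts' -- the quantile result keeps the Series name
```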
