
REF: rename _data->_mgr #33054

Merged: 11 commits, Apr 6, 2020
4 changes: 2 additions & 2 deletions pandas/_libs/properties.pyx
@@ -56,10 +56,10 @@ cdef class AxisProperty:
list axes

if obj is None:
- # Only instances have _data, not classes
+ # Only instances have _mgr, not classes
return self
else:
- axes = obj._data.axes
+ axes = obj._mgr.axes
return axes[self.axis]

def __set__(self, obj, value):
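For orientation, a minimal sketch of what the descriptor above now reads (illustrative only, not part of the patch; `_mgr` is the internal BlockManager, not public API, and the example frame is my own):

import pandas as pd

df = pd.DataFrame({"a": [1, 2, 3]})
# AxisProperty resolves df.index / df.columns by indexing into the manager's axes list.
print(df._mgr.axes)                      # [Index(['a'], dtype='object'), RangeIndex(start=0, stop=3, step=1)]
print(df.index.equals(df._mgr.axes[1]))  # True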
8 changes: 4 additions & 4 deletions pandas/_libs/reduction.pyx
@@ -127,7 +127,7 @@ cdef class Reducer:
name = labels[i]

object.__setattr__(
- cached_typ._data._block, 'values', chunk)
+ cached_typ._mgr._block, 'values', chunk)
object.__setattr__(cached_typ, 'name', name)
res = self.f(cached_typ)
else:
@@ -180,8 +180,8 @@ cdef class _BaseGrouper:
# to a 1-d ndarray like datetime / timedelta / period.
object.__setattr__(cached_ityp, '_index_data', islider.buf)
cached_ityp._engine.clear_mapping()
- object.__setattr__(cached_typ._data._block, 'values', vslider.buf)
- object.__setattr__(cached_typ._data._block, 'mgr_locs',
+ object.__setattr__(cached_typ._mgr._block, 'values', vslider.buf)
+ object.__setattr__(cached_typ._mgr._block, 'mgr_locs',
slice(len(vslider.buf)))
object.__setattr__(cached_typ, '_index', cached_ityp)
object.__setattr__(cached_typ, 'name', self.name)
@@ -551,7 +551,7 @@ cdef class BlockSlider:
self.dummy = frame[:0]
self.index = self.dummy.index

- self.blocks = [b.values for b in self.dummy._data.blocks]
+ self.blocks = [b.values for b in self.dummy._mgr.blocks]

for x in self.blocks:
util.set_array_not_contiguous(x)
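A rough sketch of the attribute the slider walks after the rename (my own example; block layout and consolidation are implementation details that can differ between pandas versions):

import pandas as pd

df = pd.DataFrame({"i": [1, 2], "x": [1.5, 2.5], "y": [0.5, 1.5]})
# One block per dtype; each block's values is a 2-D array of shape (n_block_columns, n_rows).
for blk in df._mgr.blocks:
    print(blk.dtype, blk.values.shape)   # e.g. int64 (1, 2) and float64 (2, 2)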
4 changes: 2 additions & 2 deletions pandas/_libs/src/ujson/python/objToJSON.c
@@ -279,7 +279,7 @@ static PyObject *get_sub_attr(PyObject *obj, char *attr, char *subAttr) {
}

static int is_simple_frame(PyObject *obj) {
PyObject *check = get_sub_attr(obj, "_data", "is_mixed_type");
PyObject *check = get_sub_attr(obj, "_mgr", "is_mixed_type");
int ret = (check == Py_False);

if (!check) {
@@ -760,7 +760,7 @@ void PdBlock_iterBegin(JSOBJ _obj, JSONTypeContext *tc) {
goto BLKRET;
}

blocks = get_sub_attr(obj, "_data", "blocks");
blocks = get_sub_attr(obj, "_mgr", "blocks");
if (!blocks) {
GET_TC(tc)->iterNext = NpyArr_iterNextNone;
goto BLKRET;
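A hedged Python-side sketch of the manager attributes the JSON serializer probes here (`is_mixed_type` and `blocks`), for a standard BlockManager-backed frame as of this PR; attribute behaviour may shift in later versions:

import pandas as pd

homogeneous = pd.DataFrame({"a": [1, 2], "b": [3, 4]})
mixed = pd.DataFrame({"a": [1, 2], "b": ["x", "y"]})
print(homogeneous._mgr.is_mixed_type)   # False -> is_simple_frame() fast path
print(mixed._mgr.is_mixed_type)         # True  -> per-block iteration over _mgr.blocks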
2 changes: 1 addition & 1 deletion pandas/core/algorithms.py
@@ -1645,7 +1645,7 @@ def take_nd(
if arr.flags.f_contiguous and axis == arr.ndim - 1:
# minor tweak that can make an order-of-magnitude difference
# for dataframes initialized directly from 2-d ndarrays
- # (s.t. df.values is c-contiguous and df._data.blocks[0] is its
+ # (s.t. df.values is c-contiguous and df._mgr.blocks[0] is its
# f-contiguous transpose)
out = np.empty(out_shape, dtype=dtype, order="F")
else:
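The comment above is about memory layout; a small check that illustrates it, assuming a frame built directly from a 2-D ndarray as the comment describes (whether the block keeps a transposed view of the input can vary by pandas version):

import numpy as np
import pandas as pd

arr = np.arange(6).reshape(3, 2)           # C-contiguous input
df = pd.DataFrame(arr)
block_values = df._mgr.blocks[0].values    # stored in (n_cols, n_rows) layout
print(df.values.flags["C_CONTIGUOUS"])     # True
print(block_values.flags["F_CONTIGUOUS"])  # True when the block holds the transposed view the comment mentions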
2 changes: 1 addition & 1 deletion pandas/core/apply.py
@@ -165,7 +165,7 @@ def get_result(self):
# ufunc
elif isinstance(self.f, np.ufunc):
with np.errstate(all="ignore"):
- results = self.obj._data.apply("apply", func=self.f)
+ results = self.obj._mgr.apply("apply", func=self.f)
return self.obj._constructor(
data=results, index=self.index, columns=self.columns, copy=False
)
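The branch above is the one hit when a bare NumPy ufunc is handed to DataFrame.apply; a short usage sketch of that public entry point (example data is mine; the public behaviour is unchanged by the rename):

import numpy as np
import pandas as pd

df = pd.DataFrame({"a": [1.0, 4.0], "b": [9.0, 16.0]})
# np.sqrt is a ufunc, so get_result() dispatches it block-wise through the manager.
print(df.apply(np.sqrt))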
32 changes: 16 additions & 16 deletions pandas/core/frame.py
@@ -431,7 +431,7 @@ def __init__(
dtype = self._validate_dtype(dtype)

if isinstance(data, DataFrame):
- data = data._data
+ data = data._mgr

if isinstance(data, BlockManager):
mgr = self._init_mgr(
@@ -590,10 +590,10 @@ def _is_homogeneous_type(self) -> bool:
... "B": np.array([1, 2], dtype=np.int64)})._is_homogeneous_type
False
"""
- if self._data.any_extension_types:
- return len({block.dtype for block in self._data.blocks}) == 1
+ if self._mgr.any_extension_types:
+ return len({block.dtype for block in self._mgr.blocks}) == 1
else:
- return not self._data.is_mixed_type
+ return not self._mgr.is_mixed_type

# ----------------------------------------------------------------------
# Rendering Methods
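A quick illustration of the private property whose internals changed above, following the pattern of its own docstring (the exact frames are my own):

import numpy as np
import pandas as pd

print(pd.DataFrame({"A": [1, 2], "B": [3, 4]})._is_homogeneous_type)       # True: a single int64 block
print(pd.DataFrame({"A": [1, 2], "B": [1.5, 2.5]})._is_homogeneous_type)   # False: int64 and float64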
@@ -2537,7 +2537,7 @@ def _ixs(self, i: int, axis: int = 0):
"""
# irow
if axis == 0:
- new_values = self._data.fast_xs(i)
+ new_values = self._mgr.fast_xs(i)

# if we are a copy, mark as such
copy = isinstance(new_values, np.ndarray) and new_values.base is None
@@ -2554,7 +2554,7 @@ def _ixs(self, i: int, axis: int = 0):
else:
label = self.columns[i]

- values = self._data.iget(i)
+ values = self._mgr.iget(i)
result = self._box_col_values(values, label)

# this is a cached value, mark it so
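For context, a hedged sketch of public calls that typically land in _ixs (the exact internal routing is version-dependent; the example frame is mine):

import pandas as pd

df = pd.DataFrame({"a": [1, 2, 3], "b": [4.0, 5.0, 6.0]})
print(df.iloc[1])      # positional row lookup, served by _ixs(1, axis=0) via _mgr.fast_xs
print(df.iloc[:, 0])   # positional column lookup, served by _ixs(0, axis=1) via _mgr.iget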
@@ -2865,7 +2865,7 @@ def _ensure_valid_index(self, value):
"and a value that cannot be converted to a Series"
) from err

- self._data = self._data.reindex_axis(
+ self._mgr = self._mgr.reindex_axis(
value.index.copy(), axis=1, fill_value=np.nan
)

@@ -3313,7 +3313,7 @@ def insert(self, loc, column, value, allow_duplicates=False) -> None:
"""
self._ensure_valid_index(value)
value = self._sanitize_column(column, value, broadcast=False)
- self._data.insert(loc, column, value, allow_duplicates=allow_duplicates)
+ self._mgr.insert(loc, column, value, allow_duplicates=allow_duplicates)

def assign(self, **kwargs) -> "DataFrame":
r"""
@@ -3494,7 +3494,7 @@ def reindexer(value):
@property
def _series(self):
return {
- item: Series(self._data.iget(idx), index=self.index, name=item)
+ item: Series(self._mgr.iget(idx), index=self.index, name=item)
for idx, item in enumerate(self.columns)
}

@@ -4403,7 +4403,7 @@ def _maybe_casted_values(index, labels=None):
values_dtype = values.dtype

if issubclass(values_type, DatetimeLikeArray):
- values = values._data
+ values = values._data  # TODO: can we de-kludge yet?

if mask.any():
values, _ = maybe_upcast_putmask(values, mask, np.nan)
@@ -4787,7 +4787,7 @@ def sort_values(
k, kind=kind, ascending=ascending, na_position=na_position
)

- new_data = self._data.take(
+ new_data = self._mgr.take(
indexer, axis=self._get_block_manager_axis(axis), verify=False
)

@@ -4922,7 +4922,7 @@ def sort_index(
)

baxis = self._get_block_manager_axis(axis)
- new_data = self._data.take(indexer, axis=baxis, verify=False)
+ new_data = self._mgr.take(indexer, axis=baxis, verify=False)

# reconstruct axis if needed
new_data.axes[baxis] = new_data.axes[baxis]._sort_levels_monotonic()
@@ -6661,7 +6661,7 @@ def diff(self, periods: int = 1, axis: Axis = 0) -> "DataFrame":
5 NaN NaN NaN
"""
bm_axis = self._get_block_manager_axis(axis)
- new_data = self._data.diff(n=periods, axis=bm_axis)
+ new_data = self._mgr.diff(n=periods, axis=bm_axis)
return self._constructor(new_data)

# ----------------------------------------------------------------------
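Usage of diff() is unchanged; a minimal example of the public behaviour that now delegates to _mgr.diff (example data is mine):

import pandas as pd

df = pd.DataFrame({"a": [1, 3, 6], "b": [1, 1, 1]})
# First row is NaN, then element-wise differences per column.
print(df.diff())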
@@ -7855,7 +7855,7 @@ def count(self, axis=0, level=None, numeric_only=False):
if len(frame._get_axis(axis)) == 0:
result = Series(0, index=frame._get_agg_axis(axis))
else:
- if frame._is_mixed_type or frame._data.any_extension_types:
+ if frame._is_mixed_type or frame._mgr.any_extension_types:
# the or any_extension_types is really only hit for single-
# column frames with an extension array
result = notna(frame).sum(axis=axis)
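A sketch of the corner case the comment above calls out, using a nullable extension dtype purely for illustration (the dtype choice and data are mine, not from the patch):

import pandas as pd

frame = pd.DataFrame({"a": pd.array([1, None, 3], dtype="Int64")})
# Single extension-array column: _is_mixed_type is False, but
# _mgr.any_extension_types is True, so count() takes the notna().sum() path.
print(frame.count())   # a    2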
@@ -7979,7 +7979,7 @@ def blk_func(values):

# After possibly _get_data and transposing, we are now in the
# simple case where we can use BlockManager._reduce
- res = df._data.reduce(blk_func)
+ res = df._mgr.reduce(blk_func)
assert isinstance(res, dict)
if len(res):
assert len(res) == max(list(res.keys())) + 1, res.keys()
@@ -8421,7 +8421,7 @@ def quantile(self, q=0.5, axis=0, numeric_only=True, interpolation="linear"):
return self._constructor([], index=q, columns=cols)
return self._constructor_sliced([], index=cols, name=q, dtype=np.float64)

- result = data._data.quantile(
+ result = data._mgr.quantile(
qs=q, axis=1, interpolation=interpolation, transposed=is_transposed
)

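To close the loop, a sanity sketch showing that the public method above is untouched by the internal rename (illustrative only; example data is mine):

import pandas as pd

df = pd.DataFrame({"a": [1.0, 2.0, 3.0], "b": [4.0, 5.0, 6.0]})
print(df.quantile(0.5))   # a    2.0
                          # b    5.0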