Skip to content

REF: rename `_data` --> `_mgr` (#27254)

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Closed
wants to merge 2 commits into from
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 2 additions & 2 deletions pandas/_libs/properties.pyx
Original file line number Diff line number Diff line change
Expand Up @@ -57,10 +57,10 @@ cdef class AxisProperty:
list axes

if obj is None:
# Only instances have _data, not classes
# Only instances have _mgr, not classes
return self
else:
axes = obj._data.axes
axes = obj._mgr.axes
return axes[self.axis]

def __set__(self, obj, value):
Expand Down
8 changes: 4 additions & 4 deletions pandas/_libs/reduction.pyx
Original file line number Diff line number Diff line change
Expand Up @@ -148,7 +148,7 @@ cdef class Reducer:
object.__setattr__(cached_typ, 'index', self.index)

object.__setattr__(
cached_typ._data._block, 'values', chunk)
cached_typ._mgr._block, 'values', chunk)
object.__setattr__(cached_typ, 'name', name)
res = self.f(cached_typ)
else:
Expand Down Expand Up @@ -279,7 +279,7 @@ cdef class SeriesBinGrouper:
object.__setattr__(cached_ityp, '_index_data', islider.buf)
cached_ityp._engine.clear_mapping()
object.__setattr__(
cached_typ._data._block, 'values', vslider.buf)
cached_typ._mgr._block, 'values', vslider.buf)
object.__setattr__(cached_typ, '_index', cached_ityp)
object.__setattr__(cached_typ, 'name', name)

Expand Down Expand Up @@ -405,7 +405,7 @@ cdef class SeriesGrouper:
object.__setattr__(cached_ityp, '_data', islider.buf)
cached_ityp._engine.clear_mapping()
object.__setattr__(
cached_typ._data._block, 'values', vslider.buf)
cached_typ._mgr._block, 'values', vslider.buf)
object.__setattr__(cached_typ, '_index', cached_ityp)
object.__setattr__(cached_typ, 'name', name)

Expand Down Expand Up @@ -577,7 +577,7 @@ cdef class BlockSlider:
self.dummy = frame[:0]
self.index = self.dummy.index

self.blocks = [b.values for b in self.dummy._data.blocks]
self.blocks = [b.values for b in self.dummy._mgr.blocks]

for x in self.blocks:
util.set_array_not_contiguous(x)
Expand Down
4 changes: 2 additions & 2 deletions pandas/_libs/src/ujson/python/objToJSON.c
Original file line number Diff line number Diff line change
Expand Up @@ -326,7 +326,7 @@ static PyObject *get_sub_attr(PyObject *obj, char *attr, char *subAttr) {
}

static int is_simple_frame(PyObject *obj) {
PyObject *check = get_sub_attr(obj, "_data", "is_mixed_type");
PyObject *check = get_sub_attr(obj, "_mgr", "is_mixed_type");
int ret = (check == Py_False);

if (!check) {
Expand Down Expand Up @@ -984,7 +984,7 @@ void PdBlock_iterBegin(JSOBJ _obj, JSONTypeContext *tc) {
goto BLKRET;
}

blocks = get_sub_attr(obj, "_data", "blocks");
blocks = get_sub_attr(obj, "_mgr", "blocks");
if (!blocks) {
GET_TC(tc)->iterNext = NpyArr_iterNextNone;
goto BLKRET;
Expand Down
2 changes: 1 addition & 1 deletion pandas/core/algorithms.py
Original file line number Diff line number Diff line change
Expand Up @@ -1710,7 +1710,7 @@ def take_nd(
if arr.flags.f_contiguous and axis == arr.ndim - 1:
# minor tweak that can make an order-of-magnitude difference
# for dataframes initialized directly from 2-d ndarrays
# (s.t. df.values is c-contiguous and df._data.blocks[0] is its
# (s.t. df.values is c-contiguous and df._mgr.blocks[0] is its
# f-contiguous transpose)
out = np.empty(out_shape, dtype=dtype, order="F")
else:
Expand Down
2 changes: 1 addition & 1 deletion pandas/core/apply.py
Original file line number Diff line number Diff line change
Expand Up @@ -166,7 +166,7 @@ def get_result(self):
# ufunc
elif isinstance(self.f, np.ufunc):
with np.errstate(all="ignore"):
results = self.obj._data.apply("apply", func=self.f)
results = self.obj._mgr.apply("apply", func=self.f)
return self.obj._constructor(
data=results, index=self.index, columns=self.columns, copy=False
)
Expand Down
2 changes: 1 addition & 1 deletion pandas/core/dtypes/generic.py
Original file line number Diff line number Diff line change
Expand Up @@ -80,7 +80,7 @@ def _check(cls, inst):

class _ABCGeneric(type):
def __instancecheck__(cls, inst):
return hasattr(inst, "_data")
return hasattr(inst, "_mgr")


ABCGeneric = _ABCGeneric("ABCGeneric", tuple(), {})
4 changes: 2 additions & 2 deletions pandas/core/dtypes/missing.py
Original file line number Diff line number Diff line change
Expand Up @@ -144,7 +144,7 @@ def _isna_new(obj):
):
return _isna_ndarraylike(obj)
elif isinstance(obj, ABCGeneric):
return obj._constructor(obj._data.isna(func=isna))
return obj._constructor(obj._mgr.isna(func=isna))
elif isinstance(obj, list):
return _isna_ndarraylike(np.asarray(obj, dtype=object))
elif hasattr(obj, "__array__"):
Expand Down Expand Up @@ -172,7 +172,7 @@ def _isna_old(obj):
elif isinstance(obj, (ABCSeries, np.ndarray, ABCIndexClass)):
return _isna_ndarraylike_old(obj)
elif isinstance(obj, ABCGeneric):
return obj._constructor(obj._data.isna(func=_isna_old))
return obj._constructor(obj._mgr.isna(func=_isna_old))
elif isinstance(obj, list):
return _isna_ndarraylike_old(np.asarray(obj, dtype=object))
elif hasattr(obj, "__array__"):
Expand Down
40 changes: 20 additions & 20 deletions pandas/core/frame.py
Original file line number Diff line number Diff line change
Expand Up @@ -395,7 +395,7 @@ def __init__(self, data=None, index=None, columns=None, dtype=None, copy=False):
dtype = self._validate_dtype(dtype)

if isinstance(data, DataFrame):
data = data._data
data = data._mgr

if isinstance(data, BlockManager):
mgr = self._init_mgr(
Expand Down Expand Up @@ -545,10 +545,10 @@ def _is_homogeneous_type(self):
... "B": np.array([1, 2], dtype=np.int64)})._is_homogeneous_type
False
"""
if self._data.any_extension_types:
return len({block.dtype for block in self._data.blocks}) == 1
if self._mgr.any_extension_types:
return len({block.dtype for block in self._mgr.blocks}) == 1
else:
return not self._data.is_mixed_type
return not self._mgr.is_mixed_type

# ----------------------------------------------------------------------
# Rendering Methods
Expand Down Expand Up @@ -2521,7 +2521,7 @@ def _sizeof_fmt(num, size_qualifier):
else:
_verbose_repr()

counts = self._data.get_dtype_counts()
counts = self._mgr.get_dtype_counts()
dtypes = ["{k}({kk:d})".format(k=k[0], kk=k[1]) for k in sorted(counts.items())]
lines.append("dtypes: {types}".format(types=", ".join(dtypes)))

Expand Down Expand Up @@ -2755,7 +2755,7 @@ def _unpickle_frame_compat(self, state): # pragma: no cover
columns = com._unpickle_array(cols)

index = com._unpickle_array(idx)
self._data = self._init_dict(series, index, columns, None)
self._mgr = self._init_dict(series, index, columns, None)

def _unpickle_matrix_compat(self, state): # pragma: no cover
# old unpickling
Expand All @@ -2772,7 +2772,7 @@ def _unpickle_matrix_compat(self, state): # pragma: no cover

dm = dm.join(objects)

self._data = dm._data
self._mgr = dm._mgr

# ----------------------------------------------------------------------
# Getting and setting elements
Expand Down Expand Up @@ -2905,7 +2905,7 @@ def _ixs(self, i, axis=0):
result = self.take(i, axis=axis)
copy = True
else:
new_values = self._data.fast_xs(i)
new_values = self._mgr.fast_xs(i)
if is_scalar(new_values):
return new_values

Expand Down Expand Up @@ -2939,7 +2939,7 @@ def _ixs(self, i, axis=0):
# as the index (iow a not found value), iget returns
# a 0-len ndarray. This is effectively catching
# a numpy error (as numpy should really raise)
values = self._data.iget(i)
values = self._mgr.iget(i)

if index_len and not len(values):
values = np.array([np.nan] * index_len, dtype=object)
Expand Down Expand Up @@ -3538,7 +3538,7 @@ def _ensure_valid_index(self, value):
"Series"
)

self._data = self._data.reindex_axis(
self._mgr = self._mgr.reindex_axis(
value.index.copy(), axis=1, fill_value=np.nan
)

Expand Down Expand Up @@ -3581,7 +3581,7 @@ def insert(self, loc, column, value, allow_duplicates=False):
"""
self._ensure_valid_index(value)
value = self._sanitize_column(column, value, broadcast=False)
self._data.insert(loc, column, value, allow_duplicates=allow_duplicates)
self._mgr.insert(loc, column, value, allow_duplicates=allow_duplicates)

def assign(self, **kwargs):
r"""
Expand Down Expand Up @@ -3780,7 +3780,7 @@ def reindexer(value):
@property
def _series(self):
return {
item: Series(self._data.iget(idx), index=self.index, name=item)
item: Series(self._mgr.iget(idx), index=self.index, name=item)
for idx, item in enumerate(self.columns)
}

Expand Down Expand Up @@ -4903,7 +4903,7 @@ def drop_duplicates(self, subset=None, keep="first", inplace=False):

if inplace:
inds, = (-duplicated)._ndarray_values.nonzero()
new_data = self._data.take(inds)
new_data = self._mgr.take(inds)
self._update_inplace(new_data)
else:
return self[-duplicated]
Expand Down Expand Up @@ -5007,7 +5007,7 @@ def sort_values(
k, kind=kind, ascending=ascending, na_position=na_position
)

new_data = self._data.take(
new_data = self._mgr.take(
indexer, axis=self._get_block_manager_axis(axis), verify=False
)

Expand Down Expand Up @@ -5084,7 +5084,7 @@ def sort_index(
)

baxis = self._get_block_manager_axis(axis)
new_data = self._data.take(indexer, axis=baxis, verify=False)
new_data = self._mgr.take(indexer, axis=baxis, verify=False)

# reconstruct axis if needed
new_data.axes[baxis] = new_data.axes[baxis]._sort_levels_monotonic()
Expand Down Expand Up @@ -6527,7 +6527,7 @@ def diff(self, periods=1, axis=0):
5 NaN NaN NaN
"""
bm_axis = self._get_block_manager_axis(axis)
new_data = self._data.diff(n=periods, axis=bm_axis)
new_data = self._mgr.diff(n=periods, axis=bm_axis)
return self._constructor(new_data)

# ----------------------------------------------------------------------
Expand Down Expand Up @@ -7754,7 +7754,7 @@ def count(self, axis=0, level=None, numeric_only=False):
if len(frame._get_axis(axis)) == 0:
result = Series(0, index=frame._get_agg_axis(axis))
else:
if frame._is_mixed_type or frame._data.any_extension_types:
if frame._is_mixed_type or frame._mgr.any_extension_types:
# the or any_extension_types is really only hit for single-
# column frames with an extension array
result = notna(frame).sum(axis=axis)
Expand Down Expand Up @@ -8209,7 +8209,7 @@ def quantile(self, q=0.5, axis=0, numeric_only=True, interpolation="linear"):
if is_transposed:
data = data.T

result = data._data.quantile(
result = data._mgr.quantile(
qs=q, axis=1, interpolation=interpolation, transposed=is_transposed
)

Expand Down Expand Up @@ -8243,7 +8243,7 @@ def to_timestamp(self, freq=None, how="start", axis=0, copy=True):
-------
DataFrame with DatetimeIndex
"""
new_data = self._data
new_data = self._mgr
if copy:
new_data = new_data.copy()

Expand Down Expand Up @@ -8275,7 +8275,7 @@ def to_period(self, freq=None, axis=0, copy=True):
-------
TimeSeries with PeriodIndex
"""
new_data = self._data
new_data = self._mgr
if copy:
new_data = new_data.copy()

Expand Down
Loading