Skip to content

Commit ed8b22f

Browse files
committed
BUG: 2D ndarray of dtype 'object' is always copied upon construction (pandas-dev#39263)
1 parent edbd450 commit ed8b22f

File tree

4 files changed

+51
-30
lines changed

4 files changed

+51
-30
lines changed

doc/source/whatsnew/v1.3.0.rst

+1
Original file line number | Diff line number | Diff line change
@@ -231,6 +231,7 @@ Datetimelike
231231
- Bug in :meth:`DatetimeIndex.intersection`, :meth:`DatetimeIndex.symmetric_difference`, :meth:`PeriodIndex.intersection`, :meth:`PeriodIndex.symmetric_difference` always returning object-dtype when operating with :class:`CategoricalIndex` (:issue:`38741`)
232232
- Bug in :meth:`Series.where` incorrectly casting ``datetime64`` values to ``int64`` (:issue:`37682`)
233233
- Bug in :class:`Categorical` incorrectly typecasting ``datetime`` object to ``Timestamp`` (:issue:`38878`)
234+
- Bug in :func:`DataFrame` constructor unnecessarily copying 2D object arrays (:issue:`39263`)
234235

235236
Timedelta
236237
^^^^^^^^^

pandas/core/internals/construction.py

+2-30
Original file line number | Diff line number | Diff line change
@@ -37,7 +37,6 @@
3737
is_integer_dtype,
3838
is_list_like,
3939
is_named_tuple,
40-
is_object_dtype,
4140
)
4241
from pandas.core.dtypes.generic import (
4342
ABCDataFrame,
@@ -59,7 +58,7 @@
5958
)
6059
from pandas.core.internals.managers import (
6160
create_block_manager_from_arrays,
62-
create_block_manager_from_blocks,
61+
create_block_manager_from_array,
6362
)
6463

6564
if TYPE_CHECKING:
@@ -232,34 +231,7 @@ def init_ndarray(values, index, columns, dtype: Optional[DtypeObj], copy: bool):
232231
)
233232
values = values.T
234233

235-
# if we don't have a dtype specified, then try to convert objects
236-
# on the entire block; this is to convert if we have datetimelike's
237-
# embedded in an object type
238-
if dtype is None and is_object_dtype(values.dtype):
239-
240-
if values.ndim == 2 and values.shape[0] != 1:
241-
# transpose and separate blocks
242-
243-
dvals_list = [maybe_infer_to_datetimelike(row) for row in values]
244-
for n in range(len(dvals_list)):
245-
if isinstance(dvals_list[n], np.ndarray):
246-
dvals_list[n] = dvals_list[n].reshape(1, -1)
247-
248-
from pandas.core.internals.blocks import make_block
249-
250-
# TODO: What about re-joining object columns?
251-
block_values = [
252-
make_block(dvals_list[n], placement=[n], ndim=2)
253-
for n in range(len(dvals_list))
254-
]
255-
256-
else:
257-
datelike_vals = maybe_infer_to_datetimelike(values)
258-
block_values = [datelike_vals]
259-
else:
260-
block_values = [values]
261-
262-
return create_block_manager_from_blocks(block_values, [columns, index])
234+
return create_block_manager_from_array(values, [columns, index], dtype)
263235

264236

265237
def init_dict(data: Dict, index, columns, dtype: Optional[DtypeObj] = None):

pandas/core/internals/managers.py

+40
Original file line number | Diff line number | Diff line change
@@ -28,12 +28,14 @@
2828
find_common_type,
2929
infer_dtype_from_scalar,
3030
maybe_promote,
31+
maybe_infer_to_datetimelike,
3132
)
3233
from pandas.core.dtypes.common import (
3334
DT64NS_DTYPE,
3435
is_dtype_equal,
3536
is_extension_array_dtype,
3637
is_list_like,
38+
is_object_dtype,
3739
)
3840
from pandas.core.dtypes.concat import concat_compat
3941
from pandas.core.dtypes.dtypes import ExtensionDtype
@@ -1679,6 +1681,37 @@ def create_block_manager_from_arrays(
16791681
raise construction_error(len(arrays), arrays[0].shape, axes, e)
16801682

16811683

1684+
def create_block_manager_from_array(
1685+
array, axes: List[Index], dtype: Optional[Dtype] = None
1686+
) -> BlockManager:
1687+
assert isinstance(axes, list)
1688+
assert all(isinstance(x, Index) for x in axes)
1689+
1690+
# ensure we don't have any PandasArrays when we call get_block_type
1691+
# Note: just calling extract_array breaks tests that patch PandasArray._typ.
1692+
array = array if not isinstance(array, ABCPandasArray) else array.to_numpy()
1693+
1694+
try:
1695+
if dtype is None and is_object_dtype(array.dtype):
1696+
maybe_datetime = [
1697+
maybe_infer_to_datetimelike(instance) for instance in array
1698+
]
1699+
if not all(
1700+
is_dtype_equal(instance.dtype, array.dtype)
1701+
for instance in maybe_datetime
1702+
):
1703+
blocks = _form_blocks(maybe_datetime, axes[0], axes)
1704+
else:
1705+
blocks = [make_block(array, slice(0, len(axes[0])))]
1706+
else:
1707+
blocks = [make_block(array, slice(0, len(axes[0])), dtype=dtype)]
1708+
mgr = BlockManager(blocks, axes)
1709+
mgr._consolidate_inplace()
1710+
return mgr
1711+
except ValueError as e:
1712+
raise construction_error(array.shape[0], array.shape[1:], axes, e)
1713+
1714+
16821715
def construction_error(tot_items, block_shape, axes, e=None):
16831716
""" raise a helpful message about our construction """
16841717
passed = tuple(map(int, [tot_items] + list(block_shape)))
@@ -1706,6 +1739,13 @@ def construction_error(tot_items, block_shape, axes, e=None):
17061739
def _form_blocks(arrays, names: Index, axes) -> List[Block]:
17071740
# put "leftover" items in float bucket, where else?
17081741
# generalize?
1742+
1743+
if len(arrays) != len(names):
1744+
raise ValueError(
1745+
f"Number of arrays ({len(arrays)}) "
1746+
f"does not match index length ({len(names)})"
1747+
)
1748+
17091749
items_dict: DefaultDict[str, List] = defaultdict(list)
17101750
extra_locs = []
17111751

pandas/tests/frame/test_constructors.py

+8
Original file line number | Diff line number | Diff line change
@@ -2267,6 +2267,14 @@ def test_nested_dict_construction(self):
22672267
)
22682268
tm.assert_frame_equal(result, expected)
22692269

2270+
def test_object_array_does_not_copy(self):
2271+
a = np.array(["a", "b"], dtype="object")
2272+
b = np.array([["a", "b"], ["c", "d"]], dtype="object")
2273+
df = DataFrame(a)
2274+
assert np.shares_memory(df.values, a)
2275+
df2 = DataFrame(b)
2276+
assert np.shares_memory(df2.values, b)
2277+
22702278
def test_from_tzaware_object_array(self):
22712279
# GH#26825 2D object array of tzaware timestamps should not raise
22722280
dti = date_range("2016-04-05 04:30", periods=3, tz="UTC")

0 commit comments

Comments (0)