Skip to content

BUG: bug in cache updating when consolidating #10264 #10272

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 1 commit into from
Jun 5, 2015
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 3 additions & 0 deletions doc/source/whatsnew/v0.16.2.txt
Original file line number Diff line number Diff line change
Expand Up @@ -59,6 +59,9 @@ Bug Fixes
- Bug in ``Categorical`` repr with ``display.width`` of ``None`` in Python 3 (:issue:`10087`)

- Bug in ``groupby.apply`` aggregation for ``Categorical`` not preserving categories (:issue:`10138`)

- Bug in cache updating when consolidating (:issue:`10264`)

- Bug in ``mean()`` where integer dtypes can overflow (:issue:`10172`)
- Bug where Panel.from_dict does not set dtype when specified (:issue:`10058`)
- Bug in ``Index.union`` raises ``AttributeError`` when passing array-likes. (:issue:`10149`)
Expand Down
7 changes: 5 additions & 2 deletions pandas/core/generic.py
Original file line number Diff line number Diff line change
Expand Up @@ -1352,6 +1352,7 @@ def take(self, indices, axis=0, convert=True, is_copy=True):
taken : type of caller
"""

self._consolidate_inplace()
new_data = self._data.take(indices,
axis=self._get_block_manager_axis(axis),
convert=True, verify=True)
Expand Down Expand Up @@ -2128,8 +2129,10 @@ def _protect_consolidate(self, f):
return result

def _consolidate_inplace(self):
f = lambda: self._data.consolidate()
self._data = self._protect_consolidate(f)
""" we are inplace consolidating; return None """
def f():
self._data = self._data.consolidate()
self._protect_consolidate(f)

def consolidate(self, inplace=False):
"""
Expand Down
3 changes: 3 additions & 0 deletions pandas/core/indexing.py
Original file line number Diff line number Diff line change
Expand Up @@ -369,6 +369,7 @@ def _setitem_with_indexer(self, indexer, value):
# we can directly set the series here
# as we select a slice indexer on the mi
idx = index._convert_slice_indexer(idx)
obj._consolidate_inplace()
obj = obj.copy()
obj._data = obj._data.setitem(indexer=tuple([idx]), value=value)
self.obj[item] = obj
Expand Down Expand Up @@ -396,6 +397,7 @@ def setter(item, v):
s = v
else:
# set the item, possibly having a dtype change
s._consolidate_inplace()
s = s.copy()
s._data = s._data.setitem(indexer=pi, value=v)
s._maybe_update_cacher(clear=True)
Expand Down Expand Up @@ -492,6 +494,7 @@ def can_do_equal_len():
self.obj._check_is_chained_assignment_possible()

# actually do the set
self.obj._consolidate_inplace()
self.obj._data = self.obj._data.setitem(indexer=indexer, value=value)
self.obj._maybe_update_cacher(clear=True)

Expand Down
2 changes: 1 addition & 1 deletion pandas/core/internals.py
Original file line number Diff line number Diff line change
Expand Up @@ -2830,13 +2830,13 @@ def consolidate(self):
return self

bm = self.__class__(self.blocks, self.axes)
bm._is_consolidated = False
bm._consolidate_inplace()
return bm

def _consolidate_inplace(self):
    """Merge the manager's blocks in place via ``_consolidate``.

    No-op when already consolidated; otherwise rebuilds ``self.blocks``
    and refreshes the consolidation bookkeeping flags and the
    block-number/location lookup tables.
    """
    if not self.is_consolidated():
        self.blocks = tuple(_consolidate(self.blocks))

        # mark the new state as consolidated and known, then rebuild
        # the blkno/blkloc maps so item lookups match the new blocks
        self._is_consolidated = True
        self._known_consolidated = True
        self._rebuild_blknos_and_blklocs()
Expand Down
12 changes: 12 additions & 0 deletions pandas/tests/test_indexing.py
Original file line number Diff line number Diff line change
Expand Up @@ -3527,6 +3527,18 @@ def test_cache_updating(self):
result = df.loc[(0,0),'z']
self.assertEqual(result, 2)

# 10264
df = DataFrame(np.zeros((5,5),dtype='int64'),columns=['a','b','c','d','e'],index=range(5))
df['f'] = 0
df.f.values[3] = 1
y = df.iloc[np.arange(2,len(df))]
df.f.values[3] = 2
expected = DataFrame(np.zeros((5,6),dtype='int64'),columns=['a','b','c','d','e','f'],index=range(5))
expected.at[3,'f'] = 2
assert_frame_equal(df, expected)
expected = Series([0,0,0,2,0],name='f')
assert_series_equal(df.f, expected)

def test_slice_consolidate_invalidate_item_cache(self):

# this is chained assignment, but will 'work'
Expand Down