Skip to content

Commit 080c71e

Browse files
author
Carlos Souza
committed
BUG: fix replace of a numeric value by a string
1 parent 8b463cb commit 080c71e

File tree

3 files changed

+18
-9
lines changed

3 files changed

+18
-9
lines changed

doc/source/whatsnew/v0.20.0.txt

+2
Original file line numberDiff line numberDiff line change
@@ -985,3 +985,5 @@ Bug Fixes
985985
- Bug in ``pd.melt()`` where passing a tuple value for ``value_vars`` caused a ``TypeError`` (:issue:`15348`)
986986
- Bug in ``.eval()`` which caused multiline evals to fail with local variables not on the first line (:issue:`15342`)
987987
- Bug in ``pd.read_msgpack`` which did not allow to load dataframe with an index of type ``CategoricalIndex`` (:issue:`15487`)
988+
989+
- Bug in ``Series.replace`` which replaced a numeric value by a string (:issue:`15743`)

pandas/core/missing.py

+8-7
Original file line numberDiff line numberDiff line change
@@ -21,11 +21,16 @@ def mask_missing(arr, values_to_mask):
2121
Return a masking array of same size/shape as arr
2222
with entries equaling any member of values_to_mask set to True
2323
"""
24-
if not isinstance(values_to_mask, (list, np.ndarray)):
24+
if isinstance(values_to_mask, np.ndarray):
25+
mask_type = values_to_mask.dtype.type
26+
elif isinstance(values_to_mask, list):
27+
mask_type = type(values_to_mask[0])
28+
else:
29+
mask_type = type(values_to_mask)
2530
values_to_mask = [values_to_mask]
2631

2732
try:
28-
values_to_mask = np.array(values_to_mask, dtype=arr.dtype)
33+
values_to_mask = np.array(values_to_mask, dtype=mask_type)
2934
except Exception:
3035
values_to_mask = np.array(values_to_mask, dtype=object)
3136

@@ -409,7 +414,7 @@ def interpolate_2d(values, method='pad', axis=0, limit=None, fill_value=None,
409414
if axis != 0: # pragma: no cover
410415
raise AssertionError("cannot interpolate on a ndim == 1 with "
411416
"axis != 0")
412-
values = values.reshape(tuple((1, ) + values.shape))
417+
values = values.reshape(tuple((1,) + values.shape))
413418

414419
if fill_value is None:
415420
mask = None
@@ -447,7 +452,6 @@ def wrapper(arr, mask, limit=None):
447452

448453

449454
def pad_1d(values, limit=None, mask=None, dtype=None):
450-
451455
if dtype is None:
452456
dtype = values.dtype
453457
_method = None
@@ -472,7 +476,6 @@ def pad_1d(values, limit=None, mask=None, dtype=None):
472476

473477

474478
def backfill_1d(values, limit=None, mask=None, dtype=None):
475-
476479
if dtype is None:
477480
dtype = values.dtype
478481
_method = None
@@ -498,7 +501,6 @@ def backfill_1d(values, limit=None, mask=None, dtype=None):
498501

499502

500503
def pad_2d(values, limit=None, mask=None, dtype=None):
501-
502504
if dtype is None:
503505
dtype = values.dtype
504506
_method = None
@@ -528,7 +530,6 @@ def pad_2d(values, limit=None, mask=None, dtype=None):
528530

529531

530532
def backfill_2d(values, limit=None, mask=None, dtype=None):
531-
532533
if dtype is None:
533534
dtype = values.dtype
534535
_method = None

pandas/tests/series/test_replace.py

+8-2
Original file line numberDiff line numberDiff line change
@@ -10,7 +10,6 @@
1010

1111

1212
class TestSeriesReplace(TestData, tm.TestCase):
13-
1413
def test_replace(self):
1514
N = 100
1615
ser = pd.Series(np.random.randn(N))
@@ -101,7 +100,7 @@ def test_replace_gh5319(self):
101100
expected = ser.copy()
102101
expected.loc[2] = pd.Timestamp('20120101')
103102
result = ser.replace({pd.Timestamp('20130103'):
104-
pd.Timestamp('20120101')})
103+
pd.Timestamp('20120101')})
105104
tm.assert_series_equal(result, expected)
106105
result = ser.replace(pd.Timestamp('20130103'),
107106
pd.Timestamp('20120101'))
@@ -227,3 +226,10 @@ def test_replace_with_empty_dictlike(self):
227226
s = pd.Series(list('abcd'))
228227
tm.assert_series_equal(s, s.replace(dict()))
229228
tm.assert_series_equal(s, s.replace(pd.Series([])))
229+
230+
def test_replace_string_with_nan(self):
231+
# GH 15743
232+
s = pd.Series([1, 2, 3])
233+
result = s.replace('2', np.nan)
234+
expected = pd.Series([1, 2, 3])
235+
tm.assert_series_equal(expected, result)

0 commit comments

Comments
 (0)