Commit 5bb5e33

ZhuBaohe authored and jreback committed
DOC: correct merge_asof example (#19737)
1 parent c086a51 commit 5bb5e33

File tree: 2 files changed (+26 −7 lines)

pandas/core/reshape/merge.py (+2 −2)
@@ -457,8 +457,8 @@ def merge_asof(left, right, on=None,
                           time ticker   price quantity     bid     ask
     0 2016-05-25 13:30:00.023   MSFT   51.95       75     NaN     NaN
     1 2016-05-25 13:30:00.038   MSFT   51.95      155   51.97   51.98
-    2 2016-05-25 13:30:00.048   GOOG  720.77      100  720.50  720.93
-    3 2016-05-25 13:30:00.048   GOOG  720.92      100  720.50  720.93
+    2 2016-05-25 13:30:00.048   GOOG  720.77      100     NaN     NaN
+    3 2016-05-25 13:30:00.048   GOOG  720.92      100     NaN     NaN
     4 2016-05-25 13:30:00.048   AAPL   98.00      100     NaN     NaN

     See also
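
The corrected rows reflect what merge_asof actually returns here: with allow_exact_matches=False, a quote stamped at exactly the trade time is not an eligible match, and the only nearby GOOG quote shares the 13:30:00.048 timestamp, so both GOOG trades pick up NaN for bid and ask. A minimal sketch of that behaviour, using hypothetical one-row frames rather than the docstring's full trades/quotes data:

    import pandas as pd

    # Hypothetical one-row frames, not the docstring's full example data.
    trades = pd.DataFrame({'time': pd.to_datetime(['2016-05-25 13:30:00.048']),
                           'ticker': ['GOOG'],
                           'price': [720.77]})
    quotes = pd.DataFrame({'time': pd.to_datetime(['2016-05-25 13:30:00.048']),
                           'ticker': ['GOOG'],
                           'bid': [720.50],
                           'ask': [720.93]})

    # The quote at the identical timestamp is excluded by
    # allow_exact_matches=False, and no strictly earlier quote exists within
    # the 10ms tolerance, so bid and ask come back as NaN, matching the
    # corrected docstring rows.
    out = pd.merge_asof(trades, quotes, on='time', by='ticker',
                        tolerance=pd.Timedelta('10ms'),
                        allow_exact_matches=False)
    print(out[['bid', 'ask']])  # both NaN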

pandas/tests/reshape/merge/test_merge_asof.py (+24 −5)
@@ -92,11 +92,30 @@ def test_examples2(self):
                       by='ticker',
                       tolerance=pd.Timedelta('2ms'))

-        pd.merge_asof(trades, quotes,
-                      on='time',
-                      by='ticker',
-                      tolerance=pd.Timedelta('10ms'),
-                      allow_exact_matches=False)
+        expected = pd.DataFrame({
+            'time': pd.to_datetime(['20160525 13:30:00.023',
+                                    '20160525 13:30:00.038',
+                                    '20160525 13:30:00.048',
+                                    '20160525 13:30:00.048',
+                                    '20160525 13:30:00.048']),
+            'ticker': ['MSFT', 'MSFT', 'GOOG', 'GOOG', 'AAPL'],
+            'price': [51.95, 51.95,
+                      720.77, 720.92, 98.00],
+            'quantity': [75, 155,
+                         100, 100, 100],
+            'bid': [np.nan, 51.97, np.nan,
+                    np.nan, np.nan],
+            'ask': [np.nan, 51.98, np.nan,
+                    np.nan, np.nan]},
+            columns=['time', 'ticker', 'price', 'quantity',
+                     'bid', 'ask'])
+
+        result = pd.merge_asof(trades, quotes,
+                               on='time',
+                               by='ticker',
+                               tolerance=pd.Timedelta('10ms'),
+                               allow_exact_matches=False)
+        assert_frame_equal(result, expected)

     def test_examples3(self):
         """ doc-string examples """