
Commit b97d6c8

ENH pandas-dev#3715: add documentation and tests
1 parent 8f0c5e6 commit b97d6c8

File tree

4 files changed: +66 -1 lines changed

doc/source/whatsnew/v0.20.0.txt (+1, -1)

@@ -49,7 +49,7 @@ Other enhancements
 ^^^^^^^^^^^^^^^^^^

 - ``pd.read_excel`` now preserves sheet order when using ``sheetname=None`` (:issue:`9930`)
-- ``DataFrame.asfreq()`` now accepts a fill_value option to fill missing values during resampling (:issue:`3715`).
+- ``DataFrame.asfreq()`` now accepts a ``fill_value`` option to fill missing values during resampling (:issue:`3715`).


 .. _whatsnew_0200.api_breaking:

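For reference, a minimal usage sketch of the new keyword (assuming a pandas build that includes this change); it mirrors the docstring example added to ``pandas/core/generic.py`` below:

>>> import pandas as pd
>>> index = pd.date_range('1/1/2000', periods=4, freq='T')
>>> df = pd.DataFrame({'s': [0.0, None, 2.0, 3.0]}, index=index)
>>> df.asfreq(freq='30S', fill_value=9.0)  # new 30S rows get 9.0; the pre-existing NaN stays NaN
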
pandas/core/generic.py (+39)

@@ -4020,6 +4020,45 @@ def asfreq(self, freq, method=None, how=None, normalize=False,
         -------
         converted : type of caller

+        Examples
+        --------
+
+        Start by creating a series with 4 one-minute timestamps.
+
+        >>> index = pd.date_range('1/1/2000', periods=4, freq='T')
+        >>> series = pd.Series([0.0, None, 2.0, 3.0], index=index)
+        >>> df = pd.DataFrame({'s':series})
+        >>> df
+                               s
+        2000-01-01 00:00:00  0.0
+        2000-01-01 00:01:00  NaN
+        2000-01-01 00:02:00  2.0
+        2000-01-01 00:03:00  3.0
+
+        Upsample the series into 30-second bins.
+
+        >>> df.asfreq(freq='30S')
+                               s
+        2000-01-01 00:00:00  0.0
+        2000-01-01 00:00:30  NaN
+        2000-01-01 00:01:00  NaN
+        2000-01-01 00:01:30  NaN
+        2000-01-01 00:02:00  2.0
+        2000-01-01 00:02:30  NaN
+        2000-01-01 00:03:00  3.0
+
+        Upsample again, providing a ``fill_value``.
+
+        >>> df.asfreq(freq='30S', fill_value=9.0)
+                               s
+        2000-01-01 00:00:00  0.0
+        2000-01-01 00:00:30  9.0
+        2000-01-01 00:01:00  NaN
+        2000-01-01 00:01:30  9.0
+        2000-01-01 00:02:00  2.0
+        2000-01-01 00:02:30  9.0
+        2000-01-01 00:03:00  3.0
+
         Notes
         -----
         To learn more about the frequency strings, please see `this link

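One behaviour worth noting from the example above: ``fill_value`` is applied only to the rows introduced by the new frequency, so the pre-existing ``NaN`` at ``2000-01-01 00:01:00`` remains ``NaN``. A minimal sketch of the distinction against chaining ``fillna`` (which would also overwrite that pre-existing gap):

>>> import pandas as pd
>>> index = pd.date_range('1/1/2000', periods=4, freq='T')
>>> df = pd.DataFrame({'s': [0.0, None, 2.0, 3.0]}, index=index)
>>> df.asfreq('30S', fill_value=9.0).loc['2000-01-01 00:01:00', 's']  # pre-existing gap kept
nan
>>> df.asfreq('30S').fillna(9.0).loc['2000-01-01 00:01:00', 's']      # every NaN filled
9.0
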
pandas/tests/frame/test_timeseries.py (+20)

@@ -323,6 +323,26 @@ def test_asfreq_datetimeindex(self):
         ts = df['A'].asfreq('B')
         tm.assertIsInstance(ts.index, DatetimeIndex)

+    def test_asfreq_fillvalue(self):
+        # test for fill value during upsampling, related to issue 3715
+
+        # setup
+        rng = pd.date_range('1/1/2016', periods=10, freq='2S')
+        ts = pd.Series(np.arange(len(rng)), index=rng)
+        df = pd.DataFrame({'one': ts})
+
+        # insert pre-existing missing value
+        df.loc['2016-01-01 00:00:08', 'one'] = None
+
+        actual_df = df.asfreq(freq='1S', fill_value=9.0)
+        expected_df = df.asfreq(freq='1S').fillna(9.0)
+        expected_df.loc['2016-01-01 00:00:08', 'one'] = None
+        assert_frame_equal(expected_df, actual_df)
+
+        expected_series = ts.asfreq(freq='1S').fillna(9.0)
+        actual_series = ts.asfreq(freq='1S', fill_value=9.0)
+        assert_series_equal(expected_series, actual_series)
+
     def test_first_last_valid(self):
         N = len(self.frame.index)
         mat = randn(N)

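A detail the new test leans on (a sketch for illustration, not part of the commit): the ``None`` is written into ``df`` only, since the ``DataFrame`` constructor copies the dict input, so ``ts`` keeps no pre-existing gap. That is why the expected DataFrame needs the ``expected_df.loc['2016-01-01 00:00:08', 'one'] = None`` reset while the expected Series does not.

>>> import numpy as np
>>> import pandas as pd
>>> rng = pd.date_range('1/1/2016', periods=10, freq='2S')
>>> ts = pd.Series(np.arange(len(rng)), index=rng)
>>> df = pd.DataFrame({'one': ts})  # copies the data, so ts is unaffected below
>>> df.loc['2016-01-01 00:00:08', 'one'] = None
>>> ts.isnull().any()               # the Series itself still has no missing values
False
>>> df['one'].isnull().any()        # only the frame has the pre-existing gap
True
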
pandas/tseries/tests/test_resample.py (+6)

@@ -362,6 +362,8 @@ def test_fillna(self):
         r.fillna(0)

     def test_fill_value(self):
+        # test for fill value during resampling, issue 3715
+
         # setup
         rng = pd.date_range('1/1/2016', periods=10, freq='2S')
         ts = pd.Series(np.arange(len(rng)), index=rng)

@@ -378,6 +380,10 @@ def test_fill_value(self):

         assert_frame_equal(expected_df, actual_df)

+        expected_series = ts.asfreq(freq='1S').fillna(9.0)
+        actual_series = ts.asfreq(freq='1S', fill_value=9.0)
+        assert_series_equal(expected_series, actual_series)
+
     def test_apply_without_aggregation(self):

         # both resample and groupby should work w/o aggregation
