Skip to content

Commit 7842072

Browse files
Add time values as sampler stats for NUTS (pymc-devs#3986)
* Add time values as sampler stats for NUTS * Use float time counters for nuts stats * Add timing sampler stats to release notes * Improve doc of time related sampler stats Co-authored-by: Alexandre ANDORRA <[email protected]>
1 parent 8560f1e commit 7842072

File tree

5 files changed

+38
-5
lines changed

5 files changed

+38
-5
lines changed

RELEASE-NOTES.md

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -4,6 +4,9 @@
44
### Documentation
55
- Notebook on [multilevel modeling](https://docs.pymc.io/notebooks/multilevel_modeling.html) has been rewritten to showcase ArviZ and xarray usage for inference result analysis (see [#3963](https://github.com/pymc-devs/pymc3/pull/3963))
66

7+
### New features
8+
- Add sampler stats `process_time_diff`, `perf_counter_diff` and `perf_counter_start`, that record wall and CPU times for each NUTS and HMC sample (see [#3986](https://github.com/pymc-devs/pymc3/pull/3986)).
9+
710
## PyMC3 3.9.2 (24 June 2020)
811
### Maintenance
912
- Warning added in GP module when `input_dim` is lower than the number of columns in `X` to compute the covariance function (see [#3974](https://github.com/pymc-devs/pymc3/pull/3974)).

pymc3/step_methods/hmc/base_hmc.py

Lines changed: 14 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -13,6 +13,7 @@
1313
# limitations under the License.
1414

1515
from collections import namedtuple
16+
import time
1617

1718
import numpy as np
1819
import logging
@@ -132,6 +133,9 @@ def _hamiltonian_step(self, start, p0, step_size):
132133

133134
def astep(self, q0):
134135
"""Perform a single HMC iteration."""
136+
perf_start = time.perf_counter()
137+
process_start = time.process_time()
138+
135139
p0 = self.potential.random()
136140
start = self.integrator.compute_state(q0, p0)
137141

@@ -166,6 +170,9 @@ def astep(self, q0):
166170

167171
hmc_step = self._hamiltonian_step(start, p0, step_size)
168172

173+
perf_end = time.perf_counter()
174+
process_end = time.process_time()
175+
169176
self.step_adapt.update(hmc_step.accept_stat, adapt_step)
170177
self.potential.update(hmc_step.end.q, hmc_step.end.q_grad, self.tune)
171178
if hmc_step.divergence_info:
@@ -191,7 +198,13 @@ def astep(self, q0):
191198
if not self.tune:
192199
self._samples_after_tune += 1
193200

194-
stats = {"tune": self.tune, "diverging": bool(hmc_step.divergence_info)}
201+
stats = {
202+
"tune": self.tune,
203+
"diverging": bool(hmc_step.divergence_info),
204+
"perf_counter_diff": perf_end - perf_start,
205+
"process_time_diff": process_end - process_start,
206+
"perf_counter_start": perf_start,
207+
}
195208

196209
stats.update(hmc_step.stats)
197210
stats.update(self.step_adapt.stats())

pymc3/step_methods/hmc/hmc.py

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -48,6 +48,9 @@ class HamiltonianMC(BaseHMC):
4848
'path_length': np.float64,
4949
'accepted': np.bool,
5050
'model_logp': np.float64,
51+
'process_time_diff': np.float64,
52+
'perf_counter_diff': np.float64,
53+
'perf_counter_start': np.float64,
5154
}]
5255

5356
def __init__(self, vars=None, path_length=2., max_steps=1024, **kwargs):

pymc3/step_methods/hmc/nuts.py

Lines changed: 10 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -72,6 +72,13 @@ class NUTS(BaseHMC):
7272
samples, the step size is set to this value. This should converge
7373
during tuning.
7474
- `model_logp`: The model log-likelihood for this sample.
75+
- `process_time_diff`: The time it took to draw the sample, as defined
76+
by the python standard library `time.process_time`. This counts all
77+
the CPU time, including worker processes in BLAS and OpenMP.
78+
- `perf_counter_diff`: The time it took to draw the sample, as defined
79+
by the python standard library `time.perf_counter` (wall time).
80+
- `perf_counter_start`: The value of `time.perf_counter` at the beginning
81+
of the computation of the draw.
7582
7683
References
7784
----------
@@ -96,6 +103,9 @@ class NUTS(BaseHMC):
96103
"energy": np.float64,
97104
"max_energy_error": np.float64,
98105
"model_logp": np.float64,
106+
"process_time_diff": np.float64,
107+
"perf_counter_diff": np.float64,
108+
"perf_counter_start": np.float64,
99109
}
100110
]
101111

pymc3/tests/test_step.py

Lines changed: 8 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -979,7 +979,7 @@ def test_linalg(self, caplog):
979979

980980
def test_sampler_stats(self):
981981
with Model() as model:
982-
x = Normal("x", mu=0, sigma=1)
982+
Normal("x", mu=0, sigma=1)
983983
trace = sample(draws=10, tune=1, chains=1)
984984

985985
# Assert stats exist and have the correct shape.
@@ -995,14 +995,18 @@ def test_sampler_stats(self):
995995
"step_size_bar",
996996
"tree_size",
997997
"tune",
998+
"perf_counter_diff",
999+
"perf_counter_start",
1000+
"process_time_diff",
9981001
}
9991002
assert trace.stat_names == expected_stat_names
10001003
for varname in trace.stat_names:
10011004
assert trace.get_sampler_stats(varname).shape == (10,)
10021005

10031006
# Assert model logp is computed correctly: computing post-sampling
10041007
# and tracking while sampling should give same results.
1005-
model_logp_ = np.array(
1006-
[model.logp(trace.point(i, chain=c)) for c in trace.chains for i in range(len(trace))]
1007-
)
1008+
model_logp_ = np.array([
1009+
model.logp(trace.point(i, chain=c))
1010+
for c in trace.chains for i in range(len(trace))
1011+
])
10081012
assert (trace.model_logp == model_logp_).all()

0 commit comments

Comments (0)