
Numpy docs hmc #2405


Merged
5 commits merged on Jul 26, 2017
9 changes: 6 additions & 3 deletions pymc3/step_methods/hmc/base_hmc.py
@@ -1,19 +1,22 @@
import numpy as np

from pymc3.model import modelcontext, Point
from pymc3.step_methods import arraystep
from .quadpotential import quad_potential, QuadPotentialDiagAdapt
from pymc3.step_methods.hmc import integration
from pymc3.theanof import inputvars, floatX
from pymc3.tuning import guess_scaling
import numpy as np
from .quadpotential import quad_potential, QuadPotentialDiagAdapt


class BaseHMC(arraystep.GradientSharedStep):
"""Superclass to implement Hamiltonian/hybrid monte carlo."""

default_blocked = True

def __init__(self, vars=None, scaling=None, step_scale=0.25, is_cov=False,
model=None, blocked=True, potential=None,
integrator="leapfrog", dtype=None, **theano_kwargs):
"""Superclass to implement Hamiltonian/hybrid monte carlo
"""Set up Hamiltonian samplers with common structures.

Parameters
----------
16 changes: 10 additions & 6 deletions pymc3/step_methods/hmc/hmc.py
@@ -1,13 +1,9 @@
'''
Created on Mar 7, 2011
import numpy as np

@author: johnsalvatier
'''
from ..arraystep import metrop_select, Competence
from .base_hmc import BaseHMC
from pymc3.vartypes import discrete_types
from pymc3.theanof import floatX
Member
NumPy should be imported before local imports per PEP 8.

Member Author
I updated this here -- it is actually a pylint check (wrong-import-order) that I can maybe try adding tomorrow, but that is a ton of violations!
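For context, the grouping that PEP 8 (and pylint's wrong-import-order check) asks for is: standard library first, then third-party packages, then local or relative imports, each group separated by a blank line. A small illustration built from imports that appear in this PR:

# standard library
import warnings
from collections import namedtuple

# third-party
import numpy as np

# local / relative
from pymc3.theanof import floatX
from .base_hmc import BaseHMC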

import numpy as np


__all__ = ['HamiltonianMC']
@@ -22,11 +18,17 @@ def unif(step_size, elow=.85, ehigh=1.15):


class HamiltonianMC(BaseHMC):
R"""A sampler for continuous variables based on Hamiltonian mechanics.

See NUTS sampler for automatically tuned stopping time and step size scaling.
"""

name = 'hmc'
default_blocked = True

def __init__(self, vars=None, path_length=2., step_rand=unif, **kwargs):
"""
"""Set up the Hamiltonian Monte Carlo sampler.

Parameters
----------
vars : list of theano variables
@@ -57,6 +59,7 @@ def __init__(self, vars=None, path_length=2., step_rand=unif, **kwargs):
self.step_rand = step_rand

def astep(self, q0):
"""Perform a single HMC iteration."""
e = floatX(self.step_rand(self.step_size))
n_steps = int(self.path_length / e)

@@ -76,6 +79,7 @@ def astep(self, q0):

@staticmethod
def competence(var):
"""Check how appropriate this class is for sampling a random variable."""
if var.dtype in discrete_types:
return Competence.INCOMPATIBLE
return Competence.COMPATIBLE
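The astep method above draws a jittered step size e via step_rand each iteration and then takes int(path_length / e) leapfrog steps. A usage sketch, assuming the standard pymc3 sampling API; the model is illustrative and the path_length shown is just the documented default:

import pymc3 as pm

with pm.Model():
    x = pm.Normal('x', mu=0., sd=1.)
    # fixed trajectory length; NUTS (later in this PR) tunes path length and step size automatically
    step = pm.HamiltonianMC(path_length=2., step_scale=0.25)
    trace = pm.sample(1000, step=step)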
21 changes: 21 additions & 0 deletions pymc3/step_methods/hmc/integration.py
@@ -8,7 +8,10 @@


class CpuLeapfrogIntegrator(object):
"""Optimized leapfrog integration using numpy."""

def __init__(self, ndim, potential, logp_dlogp_func):
"""Leapfrog integrator using CPU."""
self._ndim = ndim
self._potential = potential
self._logp_dlogp_func = logp_dlogp_func
@@ -19,6 +22,7 @@ def __init__(self, ndim, potential, logp_dlogp_func):
% (self._potential.dtype, self._dtype))

def compute_state(self, q, p):
"""Compute Hamiltonian functions using a position and momentum."""
if q.dtype != self._dtype or p.dtype != self._dtype:
raise ValueError('Invalid dtype. Must be %s' % self._dtype)
logp, dlogp = self._logp_dlogp_func(q)
@@ -28,6 +32,23 @@ def compute_state(self, q, p):
return State(q, p, v, dlogp, energy)

def step(self, epsilon, state, out=None):
"""Leapfrog integrator step.

Half a momentum update, full position update, half momentum update.

Parameters
----------
epsilon: float, > 0
step scale
state: State namedtuple,
current position data
out: (optional) State namedtuple,
preallocated arrays to write to in place

Returns
-------
None if `out` is provided, else a State namedtuple
"""
pot = self._potential
axpy = linalg.blas.get_blas_funcs('axpy', dtype=self._dtype)

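For readers who want the scheme the new docstrings describe in isolation, here is a plain-NumPy sketch of the energy computation and of one leapfrog step (half momentum update, full position update, half momentum update). The names grad_logp and inv_mass_diag are illustrative stand-ins, not the module's API, which works through the potential object and logp_dlogp_func:

import numpy as np

def energy(q, p, logp, inv_mass_diag):
    # Hamiltonian: kinetic term from the momentum minus the log posterior
    return 0.5 * np.dot(p, inv_mass_diag * p) - logp(q)

def leapfrog_step(q, p, grad_logp, epsilon, inv_mass_diag):
    p = p + 0.5 * epsilon * grad_logp(q)   # half momentum update
    q = q + epsilon * inv_mass_diag * p    # full position update (velocity = M^-1 p)
    p = p + 0.5 * epsilon * grad_logp(q)   # half momentum update at the new position
    return q, p

# example: standard normal target, identity mass matrix
logp = lambda q: -0.5 * np.sum(q ** 2)
grad_logp = lambda q: -q
q, p = leapfrog_step(np.zeros(3), np.ones(3), grad_logp, 0.1, np.ones(3))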
18 changes: 11 additions & 7 deletions pymc3/step_methods/hmc/nuts.py
@@ -1,17 +1,17 @@
from collections import namedtuple
import warnings

import numpy as np
import numpy.random as nr
from scipy import stats, linalg
import six

from ..arraystep import Competence
from pymc3.exceptions import SamplingError
from .base_hmc import BaseHMC
from pymc3.theanof import floatX
from pymc3.vartypes import continuous_types

import numpy as np
import numpy.random as nr
from scipy import stats, linalg
import six

__all__ = ['NUTS']


@@ -28,7 +28,7 @@ class NUTS(BaseHMC):
sample. A detailed description can be found at [1], "Algorithm 6:
Efficient No-U-Turn Sampler with Dual Averaging".

Nuts provides a number of statistics that can be accessed with
NUTS provides a number of statistics that can be accessed with
`trace.get_sampler_stats`:

- `mean_tree_accept`: The mean acceptance probability for the tree
@@ -70,6 +70,7 @@ class NUTS(BaseHMC):
.. [1] Hoffman, Matthew D., & Gelman, Andrew. (2011). The No-U-Turn Sampler:
Adaptively Setting Path Lengths in Hamiltonian Monte Carlo.
"""

name = 'nuts'

default_blocked = True
@@ -92,7 +93,8 @@ def __init__(self, vars=None, Emax=1000, target_accept=0.8,
max_treedepth=10, on_error='summary',
early_max_treedepth=8,
**kwargs):
R"""
R"""Set up the No-U-Turn sampler.

Parameters
----------
vars : list of Theano variables, default all continuous vars
@@ -171,6 +173,7 @@ def __init__(self, vars=None, Emax=1000, target_accept=0.8,
self.report = NutsReport(on_error, max_treedepth, target_accept)

def astep(self, q0):
"""Perform a single NUTS iteration."""
p0 = self.potential.random()
start = self.integrator.compute_state(q0, p0)

@@ -229,6 +232,7 @@ def astep(self, q0):

@staticmethod
def competence(var):
"""Check how appropriate this class is for sampling a random variable."""
if var.dtype in continuous_types:
return Competence.IDEAL
return Competence.INCOMPATIBLE
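As an illustration of the trace.get_sampler_stats access pattern mentioned in the NUTS class docstring above, a sketch assuming a trace produced by pm.sample; the model is illustrative and only the mean_tree_accept statistic shown here is confirmed by the docstring excerpt:

import pymc3 as pm

with pm.Model():
    x = pm.Normal('x', mu=0., sd=1.)
    trace = pm.sample(1000)   # NUTS is selected for continuous variables (Competence.IDEAL)

# per-sample statistics recorded by the step method
accept = trace.get_sampler_stats('mean_tree_accept')
print('mean acceptance probability:', accept.mean())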