
Update examples #2254


Merged: 6 commits, Jun 12, 2017
pymc3/examples/arbitrary_stochastic.py (22 changes: 12 additions & 10 deletions)

```diff
@@ -4,23 +4,25 @@


 def build_model():
+    # data
+    failure = np.array([0., 1.])
+    value = np.array([1., 0.])
+
+    # custom log-likelihood
+    def logp(failure, value):
+        return tt.sum(failure * tt.log(lam) - lam * value)
+
+    # model
     with pm.Model() as model:
-        lam = pm.Exponential('lam', 1)
-        failure = np.array([0, 1])
-        value = np.array([1, 0])
-
-        def logp(failure, value):
-            return tt.sum(failure * np.log(lam) - lam * value)
+        lam = pm.Exponential('lam', 1.)
         pm.DensityDist('x', logp, observed={'failure': failure, 'value': value})
     return model


 def run(n_samples=3000):
     model = build_model()
-    start = model.test_point
-    h = pm.find_hessian(start, model=model)
-    step = pm.Metropolis(model.vars, h, blocked=True, model=model)
-    trace = pm.sample(n_samples, step=step, start=start, model=model)
+    with model:
+        trace = pm.sample(n_samples)
     return trace

 if __name__ == "__main__":
```
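A note for readers of this hunk: the substantive fix is `np.log` to `tt.log`; `lam` is a Theano random variable, so the log must be the symbolic op for the graph (and its gradients) to be built correctly. Below is a minimal, self-contained sketch of the `DensityDist` pattern the example now uses, where the keys of the `observed` dict are forwarded to the custom `logp` as keyword arguments. Data and priors are taken from the example itself; the draw count is arbitrary:

```python
import numpy as np
import pymc3 as pm
import theano.tensor as tt

# toy data from the example: exponential model with censoring indicators
failure = np.array([0., 1.])
value = np.array([1., 0.])

def logp(failure, value):
    # symbolic tt.log, not np.log: lam is a Theano variable, and this
    # expression must stay in the graph so the sampler can differentiate it
    return tt.sum(failure * tt.log(lam) - lam * value)

with pm.Model() as model:
    lam = pm.Exponential('lam', 1.)
    # DensityDist forwards observed={'failure': ..., 'value': ...}
    # to logp as keyword arguments
    pm.DensityDist('x', logp, observed={'failure': failure, 'value': value})
    trace = pm.sample(1000)
```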
pymc3/examples/arma_example.py (23 changes: 11 additions & 12 deletions)

```diff
@@ -1,10 +1,10 @@
-from pymc3 import Normal, sample, Model, plots, Potential, variational, HalfCauchy
+import pymc3 as pm
 from theano import scan, shared

 import numpy as np
 """
 ARMA example
-It is interesting to note just how much more compact this is that the original STAN example
+It is interesting to note just how much more compact this is than the original STAN example

 The original implementation is in the STAN documentation by Gelman et al and is reproduced below
```
```diff
@@ -52,11 +52,11 @@

 def build_model():
     y = shared(np.array([15, 10, 16, 11, 9, 11, 10, 18], dtype=np.float32))
-    with Model() as arma_model:
-        sigma = HalfCauchy('sigma', 5)
-        theta = Normal('theta', 0, sd=2)
-        phi = Normal('phi', 0, sd=2)
-        mu = Normal('mu', 0, sd=10)
+    with pm.Model() as arma_model:
+        sigma = pm.HalfCauchy('sigma', 5.)
+        theta = pm.Normal('theta', 0., sd=2.)
+        phi = pm.Normal('phi', 0., sd=2.)
+        mu = pm.Normal('mu', 0., sd=10.)

         err0 = y[0] - (mu + phi * mu)
```
```diff
@@ -69,19 +69,18 @@ def calc_next(last_y, this_y, err, mu, phi, theta):
                       outputs_info=[err0],
                       non_sequences=[mu, phi, theta])

-        Potential('like', Normal.dist(0, sd=sigma).logp(err))
-        variational.advi(n=2000)
+        pm.Potential('like', pm.Normal.dist(0, sd=sigma).logp(err))
     return arma_model


 def run(n_samples=1000):
     model = build_model()
     with model:
-        trace = sample(draws=n_samples)
+        trace = pm.sample(draws=n_samples)

     burn = n_samples // 10
-    plots.traceplot(trace[burn:])
-    plots.forestplot(trace[burn:])
+    pm.plots.traceplot(trace[burn:])
+    pm.plots.forestplot(trace[burn:])


 if __name__ == '__main__':
```
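The hunk above trims `build_model` down to model definition only: the stray `variational.advi(n=2000)` call is dropped, since fitting belongs in `run`, and the innovations' Gaussian log-density is attached with `pm.Potential`. For orientation, here is a sketch of the Theano `scan` recursion those `outputs_info`/`non_sequences` lines configure; only the signature of `calc_next` is visible in the diff, so its body below is an assumption based on the standard ARMA(1,1) innovation:

```python
import numpy as np
import theano.tensor as tt
from theano import scan, shared

y = shared(np.array([15, 10, 16, 11, 9, 11, 10, 18], dtype=np.float32))

# placeholders standing in for the model's random variables
mu, phi, theta = tt.dscalars('mu', 'phi', 'theta')

def calc_next(last_y, this_y, err, mu, phi, theta):
    # assumed body: one-step ARMA(1,1) prediction and its residual
    nu_t = mu + phi * last_y + theta * err
    return this_y - nu_t

err0 = y[0] - (mu + phi * mu)  # initial innovation, as in the example
err, _ = scan(fn=calc_next,
              # taps=[-1, 0] hands scan both y[t-1] and y[t] at each step
              sequences=dict(input=y, taps=[-1, 0]),
              outputs_info=[err0],
              non_sequences=[mu, phi, theta])
# pm.Potential('like', pm.Normal.dist(0, sd=sigma).logp(err)) then adds
# the innovations' log-density to the joint, as in the diff
```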
pymc3/examples/baseball.py (45 changes: 23 additions & 22 deletions)

```diff
@@ -5,33 +5,34 @@

 import pymc3 as pm
 import numpy as np
-import theano
-
-data = np.loadtxt(pm.get_data('efron-morris-75-data.tsv'), delimiter="\t", skiprows=1, usecols=(2,3))
-
-atBats = data[:,0].astype(theano.config.floatX)
-hits = data[:,1].astype(theano.config.floatX)
-
-N = len( hits )
-
-model = pm.Model()
-
-# we want to bound the kappa below
-BoundedKappa = pm.Bound( pm.Pareto, lower=1.0 )
-
-with model:
-    phi = pm.Uniform( 'phi', lower=0.0, upper=1.0 )
-    kappa = BoundedKappa( 'kappa', alpha=1.0001, m=1.5 )
-    thetas = pm.Beta( 'thetas', alpha=phi*kappa, beta=(1.0-phi)*kappa, shape=N )
-    ys = pm.Binomial( 'ys', n=atBats, p=thetas, observed=hits )
-
-def run( n=100000 ):
+def build_model():
+    data = np.loadtxt(pm.get_data('efron-morris-75-data.tsv'), delimiter="\t",
+                      skiprows=1, usecols=(2,3))
+
+    atBats = pm.floatX(data[:,0])
```
**Review comment (Member)** on `atBats = pm.floatX(data[:,0])`: Avoid camelCase

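If the suggested rename were applied, a snake_case version of the two data lines might read (hypothetical follow-up, not part of this commit):

```python
# hypothetical rename per the review comment
at_bats = pm.floatX(data[:, 0])
hits = pm.floatX(data[:, 1])
```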
```diff
+    hits = pm.floatX(data[:,1])
+
+    N = len(hits)
+
+    # we want to bound the kappa below
+    BoundedKappa = pm.Bound(pm.Pareto, lower=1.0)
+
+    with pm.Model() as model:
+        phi = pm.Uniform('phi', lower=0.0, upper=1.0)
+        kappa = BoundedKappa('kappa', alpha=1.0001, m=1.5)
+        thetas = pm.Beta('thetas', alpha=phi*kappa, beta=(1.0-phi)*kappa, shape=N)
+        ys = pm.Binomial('ys', n=atBats, p=thetas, observed=hits)
+    return model
+
+def run(n=2000):
+    model = build_model()
     with model:
         # initialize NUTS() with ADVI under the hood
-        trace = pm.sample( n )
+        trace = pm.sample(n, nuts_kwargs={'target_accept':.99})
```
**Review comment (Member)** on `trace = pm.sample(n, nuts_kwargs={'target_accept':.99})`: Wow, does it require this to converge?

**Reply (Member, author):** it's fine without, but there is a divergent warning.

**Reply (Member):** No problem; just surprised.

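For context on that exchange: NUTS warns when transitions diverge, and raising `target_accept` makes step-size adaptation more conservative, which usually removes divergences at some cost in speed. A sketch of how one might confirm the warning the author mentions, assuming the `'diverging'` sampler statistic this era of PyMC3 records:

```python
model = build_model()
with model:
    # defaults (target_accept=0.8) are fine here per the thread,
    # but may report divergent transitions
    trace = pm.sample(2000)

# persistently nonzero counts are the usual cue to raise target_accept,
# e.g. pm.sample(n, nuts_kwargs={'target_accept': .99}) as in the diff
print('divergent transitions:', trace['diverging'].sum())
```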

```diff

     # drop some first samples as burnin
-    pm.traceplot( trace[1000:] )
+    pm.traceplot(trace[1000:])

 if __name__ == '__main__':
     run()
```
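A closing note on the model these edits preserve: the Beta prior on `thetas` uses the mean-concentration parameterization, with `phi` as the shared batting mean and `kappa` as the concentration. A quick illustrative check of that identity (numbers arbitrary):

```python
# alpha = phi * kappa and beta = (1 - phi) * kappa imply
# E[theta] = alpha / (alpha + beta) = phi, while kappa controls how
# tightly the per-player thetas cluster around phi
phi, kappa = 0.25, 50.0
alpha, beta = phi * kappa, (1.0 - phi) * kappa
assert abs(alpha / (alpha + beta) - phi) < 1e-12
```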