from pymc3 import (Normal, sample, Model, traceplot, plots, Potential,
                   variational, HalfCauchy)
from theano import scan, shared
import numpy as np

"""
ARMA example

It is interesting to note just how much more compact this is than the
original Stan example.

The original implementation is in the Stan documentation by Gelman et al.
and is reproduced below.

Example from Stan (slightly altered):

data {
  int<lower=1> T;            // num observations
  real y[T];                 // observed outputs
}
parameters {
  real mu;                   // mean coeff
  real phi;                  // autoregression coeff
  real theta;                // moving avg coeff
  real<lower=0> sigma;       // noise scale
}
model {
  vector[T] nu;              // prediction for time t
  vector[T] err;             // error for time t
  nu[1] <- mu + phi * mu;    // assume err[0] == 0
  err[1] <- y[1] - nu[1];
  for (t in 2:T) {
    nu[t] <- mu + phi * y[t-1] + theta * err[t-1];
    err[t] <- y[t] - nu[t];
  }
  // priors
  mu ~ normal(0,10);
  phi ~ normal(0,2);
  theta ~ normal(0,2);
  sigma ~ cauchy(0,5);
  // likelihood
  err ~ normal(0,sigma);
}

Ported to PyMC3 by Peadar Coyle and Chris Fonnesbeck (c) 2016.
"""
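
# For intuition, the error recursion above can be written in plain NumPy.
# This is a minimal illustrative sketch (the helper is not part of the
# original port, and any parameter values passed in are arbitrary):
def arma_errors_numpy(y_obs, mu_, phi_, theta_):
    # As in the Stan code: assume the pre-series error is 0 and the
    # pre-series value of y is the mean mu
    err = np.empty(len(y_obs))
    err[0] = y_obs[0] - (mu_ + phi_ * mu_)
    for i in range(1, len(y_obs)):
        nu_i = mu_ + phi_ * y_obs[i - 1] + theta_ * err[i - 1]
        err[i] = y_obs[i] - nu_i
    return err
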

t = np.array([1, 2, 4, 5, 6, 8, 19, 18, 12])

y = shared(np.array([15, 10, 16, 11, 9, 11, 10, 18], dtype=np.float32))


with Model() as arma_model:

    # Priors matching the Stan example
    sigma = HalfCauchy('sigma', 5)
    theta = Normal('theta', 0, sd=2)
    phi = Normal('phi', 0, sd=2)
    mu = Normal('mu', 0, sd=10)

    # First error term, assuming the pre-series error is 0 and the
    # pre-series value of y is the mean mu (as in the Stan code above)
    err0 = y[0] - (mu + phi*mu)

    def calc_next(last_y, this_y, err, mu, phi, theta):
        # One step of the ARMA(1,1) recursion from the Stan model:
        #   nu[t] = mu + phi*y[t-1] + theta*err[t-1]
        #   err[t] = y[t] - nu[t]
        nu_t = mu + phi*last_y + theta*err
        return this_y - nu_t

    # taps=[-1, 0] hands (y[t-1], y[t]) to calc_next at each step, and
    # outputs_info seeds the recursion with err0
    err, _ = scan(fn=calc_next,
                  sequences=dict(input=y, taps=[-1, 0]),
                  outputs_info=[err0],
                  non_sequences=[mu, phi, theta])

    # err is a deterministic function of y and the parameters, so the Stan
    # likelihood err ~ normal(0, sigma) enters as a Potential
    like = Potential('like', Normal.dist(0, sd=sigma).logp(err))
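
    # An equivalent formulation (a sketch, not what the port above uses)
    # would condition an observed Normal directly on the computed errors:
    #
    #     Normal('like', mu=0, sd=sigma, observed=err)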

with arma_model:
    # Fit with ADVI, yielding approximate posterior means, standard
    # deviations, and the ELBO trace
    means, sds, elbo = variational.advi(n=2000)

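# Inspecting the variational fit (a sketch; assumes this PyMC3 version's
# ADVIFit result, where the means and sds are dicts keyed by variable name):
#
#     print(means['phi'], sds['phi'])
#     print(elbo[-1])
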

def run(n=1000):
    if n == "short":
        n = 50
    with arma_model:
        trace = sample(n)

    burn = n // 10  # integer division so the slice index stays an int

    traceplot(trace[burn:])
    plots.summary(trace[burn:])
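
# Usage: run() draws 1000 samples by default and discards the first tenth
# as burn-in; run("short") is a quick 50-sample smoke test.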

if __name__ == '__main__':
    run()