
Commit 32a2eb7

refactored GARCH and added Mv(Gaussian/StudentT)RandomWalk (#1603)
* refactored GARCH and added Mv(Gaussian/StudentT)RandomWalk
* refactored garch logp
* added docs
* fix typo
* even more typos
* better description for tau
1 parent 889b50e commit 32a2eb7

File tree

1 file changed (+90, -6 lines)


pymc3/distributions/timeseries.py

Lines changed: 90 additions & 6 deletions
@@ -1,10 +1,18 @@
 import theano.tensor as tt
 from theano import scan
 
+from .multivariate import get_tau_cov, MvNormal, MvStudentT
 from .continuous import Normal, Flat
 from .distribution import Continuous
 
-__all__ = ['AR1', 'GaussianRandomWalk', 'GARCH11', 'EulerMaruyama']
+__all__ = [
+    'AR1',
+    'GaussianRandomWalk',
+    'GARCH11',
+    'EulerMaruyama',
+    'MvGaussianRandomWalk',
+    'MvStudentTRandomWalk'
+]
 
 
 class AR1(Continuous):
@@ -108,7 +116,8 @@ def __init__(self, omega=None, alpha_1=None, beta_1=None,
         self.initial_vol = initial_vol
         self.mean = 0
 
-    def _get_volatility(self, x):
+    def get_volatility(self, x):
+        x = x[:-1]
 
         def volatility_update(x, vol, w, a, b):
             return tt.sqrt(w + a * tt.square(x) + b * tt.square(vol))
@@ -118,12 +127,11 @@ def volatility_update(x, vol, w, a, b):
                       outputs_info=[self.initial_vol],
                       non_sequences=[self.omega, self.alpha_1,
                                      self.beta_1])
-        return vol
+        return tt.concatenate(self.initial_vol, vol)
 
     def logp(self, x):
-        vol = self._get_volatility(x[:-1])
-        return (Normal.dist(0., sd=self.initial_vol).logp(x[0]) +
-                tt.sum(Normal.dist(0, sd=vol).logp(x[1:])))
+        vol = self.get_volatility(x)
+        return tt.sum(Normal.dist(0, sd=vol).logp(x))
 
 
 class EulerMaruyama(Continuous):
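
For orientation, here is a minimal NumPy sketch (not part of the commit) of what the refactored GARCH11 code computes: get_volatility starts the conditional volatility at initial_vol and then applies sigma_t = sqrt(omega + alpha_1 * x_{t-1}^2 + beta_1 * sigma_{t-1}^2), and the new logp scores every observation against a zero-mean Normal with that volatility. The helper names below are illustrative, and treating omega, alpha_1, beta_1 and initial_vol as plain scalars is an assumption made only for this sketch.

import numpy as np

def garch11_volatility(x, omega, alpha_1, beta_1, initial_vol):
    # Conditional volatility series: sigma_0 = initial_vol,
    # sigma_t = sqrt(omega + alpha_1 * x_{t-1}^2 + beta_1 * sigma_{t-1}^2)
    vol = np.empty(len(x))
    vol[0] = initial_vol
    for t in range(1, len(x)):
        vol[t] = np.sqrt(omega
                         + alpha_1 * x[t - 1] ** 2
                         + beta_1 * vol[t - 1] ** 2)
    return vol

def garch11_logp(x, omega, alpha_1, beta_1, initial_vol):
    # Matches the refactored logp: every x_t ~ Normal(0, sd=sigma_t)
    vol = garch11_volatility(x, omega, alpha_1, beta_1, initial_vol)
    return np.sum(-0.5 * np.log(2 * np.pi) - np.log(vol) - 0.5 * (x / vol) ** 2)
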
@@ -151,3 +159,79 @@ def logp(self, x):
         mu = xt + self.dt * f
         sd = tt.sqrt(self.dt) * g
         return tt.sum(Normal.dist(mu=mu, sd=sd).logp(x[1:]))
+
+
+class MvGaussianRandomWalk(Continuous):
+    """
+    Multivariate Random Walk with Normal innovations
+
+    Parameters
+    ----------
+    mu : tensor
+        innovation drift, defaults to 0.0
+    cov : tensor
+        pos def matrix, innovation covariance matrix
+    tau : tensor
+        pos def matrix, innovation precision (alternative to specifying cov)
+    init : distribution
+        distribution for initial value (Defaults to Flat())
+    """
+    def __init__(self, mu=0., cov=None, tau=None, init=Flat.dist(),
+                 *args, **kwargs):
+        super(MvGaussianRandomWalk, self).__init__(*args, **kwargs)
+        tau, cov = get_tau_cov(mu, tau=tau, cov=cov)
+        self.tau = tau
+        self.cov = cov
+        self.mu = mu
+        self.init = init
+        self.mean = 0.
+
+    def logp(self, x):
+        tau = self.tau
+        mu = self.mu
+        init = self.init
+
+        x_im1 = x[:-1]
+        x_i = x[1:]
+
+        innov_like = MvNormal.dist(mu=x_im1 + mu, tau=tau).logp(x_i)
+        return init.logp(x[0]) + tt.sum(innov_like)
+
+
+class MvStudentTRandomWalk(Continuous):
+    """
+    Multivariate Random Walk with StudentT innovations
+
+    Parameters
+    ----------
+    nu : degrees of freedom
+    mu : tensor
+        innovation drift, defaults to 0.0
+    cov : tensor
+        pos def matrix, innovation covariance matrix
+    tau : tensor
+        pos def matrix, innovation precision (alternative to specifying cov)
+    init : distribution
+        distribution for initial value (Defaults to Flat())
+    """
+    def __init__(self, nu, mu=0., cov=None, tau=None, init=Flat.dist(),
+                 *args, **kwargs):
+        super(MvStudentTRandomWalk, self).__init__(*args, **kwargs)
+        tau, cov = get_tau_cov(mu, tau=tau, cov=cov)
+        self.tau = tau
+        self.cov = cov
+        self.mu = mu
+        self.nu = nu
+        self.init = init
+        self.mean = 0.
+
+    def logp(self, x):
+        cov = self.cov
+        mu = self.mu
+        nu = self.nu
+        init = self.init
+
+        x_im1 = x[:-1]
+        x_i = x[1:]
+        innov_like = MvStudentT.dist(nu, cov, mu=x_im1 + mu).logp(x_i)
+        return init.logp(x[0]) + tt.sum(innov_like)
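
Both new distributions share the same likelihood structure: the first point is scored by init, and every later point is treated as an innovation around the previous one, i.e. logp(x) = init.logp(x[0]) + sum_t MvNormal(mu=x[t-1] + mu, tau=tau).logp(x[t]), with MvStudentT taking the place of MvNormal in the heavy-tailed variant; tau is the precision alternative to cov, resolved by get_tau_cov. Below is a hypothetical usage sketch, not taken from the commit: it assumes the two classes are re-exported at the pymc3 package level (consistent with the updated __all__) and that they follow the usual (n_steps, n_dims) shape convention.

import numpy as np
import pymc3 as pm

n_steps, n_dims = 100, 3
cov = 0.5 * np.eye(n_dims)          # innovation covariance, positive definite

with pm.Model():
    # Gaussian random walk: x[t] ~ MvNormal(x[t-1] + mu, cov)
    x = pm.MvGaussianRandomWalk('x', mu=np.zeros(n_dims), cov=cov,
                                shape=(n_steps, n_dims))

    # Heavy-tailed variant: innovations follow a multivariate Student-T
    y = pm.MvStudentTRandomWalk('y', nu=5, mu=np.zeros(n_dims), cov=cov,
                                shape=(n_steps, n_dims))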
