
Commit 81773f3

Simplify timeseries code, delegate to Mv
1 parent 452be39, commit 81773f3

1 file changed: +7 -61 lines changed


pymc3/distributions/timeseries.py

Lines changed: 7 additions & 61 deletions
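The refactor below builds the multivariate innovation distribution once in __init__ and scores the walk by summing its log density over the increments, instead of constructing a fresh MvNormal with a shifted mean inside logp. That rewrite leans on translation invariance of the density: scoring x_i against a mean of x_im1 + mu is the same as scoring the increment x_i - x_im1 against a fixed mean mu. A small NumPy/SciPy check of that identity (illustrative only, not part of the commit):

import numpy as np
from scipy.stats import multivariate_normal

mu = np.array([0.1, -0.2])                   # innovation drift
cov = np.array([[1.0, 0.3], [0.3, 2.0]])     # innovation covariance
rng = np.random.RandomState(0)
x_prev, x_curr = rng.randn(2), rng.randn(2)  # two consecutive states

# Old style: density of x_curr under a normal centred at x_prev + mu.
lhs = multivariate_normal.logpdf(x_curr, mean=x_prev + mu, cov=cov)
# New style: density of the increment under a fixed innovation distribution.
rhs = multivariate_normal.logpdf(x_curr - x_prev, mean=mu, cov=cov)
assert np.isclose(lhs, rhs)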
@@ -3,7 +3,6 @@
 
 from pymc3.util import get_variable_name
 from .continuous import get_tau_sd, Normal, Flat
-from .dist_math import Cholesky
 from . import multivariate
 from . import distribution
 
@@ -280,48 +279,8 @@ def _repr_latex_(self, name=None, dist=None):
                                 get_variable_name(dt))
 
 
-class _CovSet():
-    R"""
-    Convenience class to set Covariance, Inverse Covariance and Cholesky
-    descomposition of Covariance marrices.
-    """
-    def __initCov__(self, cov=None, tau=None, chol=None, lower=True):
-        if all([val is None for val in [cov, tau, chol]]):
-            raise ValueError('One of cov, tau or chol arguments must be provided.')
-
-        self.cov = self.tau = self.chol_cov = None
-
-        cholesky = Cholesky(nofail=True, lower=True)
-        if cov is not None:
-            self.k = cov.shape[0]
-            self._cov_type = 'cov'
-            cov = tt.as_tensor_variable(cov)
-            if cov.ndim != 2:
-                raise ValueError('cov must be two dimensional.')
-            self.chol_cov = cholesky(cov)
-            self.cov = cov
-            self._n = self.cov.shape[-1]
-        elif tau is not None:
-            self.k = tau.shape[0]
-            self._cov_type = 'tau'
-            tau = tt.as_tensor_variable(tau)
-            if tau.ndim != 2:
-                raise ValueError('tau must be two dimensional.')
-            self.chol_tau = cholesky(tau)
-            self.tau = tau
-            self._n = self.tau.shape[-1]
-        else:
-            if chol is not None and not lower:
-                chol = chol.T
-            self.k = chol.shape[0]
-            self._cov_type = 'chol'
-            if chol.ndim != 2:
-                raise ValueError('chol must be two dimensional.')
-            self.chol_cov = tt.as_tensor_variable(chol)
-            self._n = self.chol_cov.shape[-1]
 
-
-class MvGaussianRandomWalk(distribution.Continuous, _CovSet):
+class MvGaussianRandomWalk(distribution.Continuous):
     R"""
     Multivariate Random Walk with Normal innovations
 
@@ -346,19 +305,18 @@ class MvGaussianRandomWalk(distribution.Continuous, _CovSet):
     def __init__(self, mu=0., cov=None, tau=None, chol=None, lower=True, init=Flat.dist(),
                  *args, **kwargs):
         super(MvGaussianRandomWalk, self).__init__(*args, **kwargs)
-        super(MvGaussianRandomWalk, self).__initCov__(cov, tau, chol, lower)
 
         self.mu = mu = tt.as_tensor_variable(mu)
         self.init = init
         self.mean = tt.as_tensor_variable(0.)
+        self.innovArgs = (self.mu, cov, tau, chol, lower)
+        self.innov = multivariate.MvNormal.dist(*self.innovArgs)
 
     def logp(self, x):
         x_im1 = x[:-1]
         x_i = x[1:]
 
-        innov_like = multivariate.MvNormal.dist(mu=x_im1 + self.mu, cov=self.cov,
-                                                tau=self.tau, chol=self.chol_cov).logp(x_i)
-        return self.init.logp(x[0]) + tt.sum(innov_like)
+        return self.init.logp(x[0]) + self.innov.logp_sum(x_i - x_im1)
 
     def _repr_latex_(self, name=None, dist=None):
         if dist is None:
@@ -371,7 +329,7 @@ def _repr_latex_(self, name=None, dist=None):
                                 get_variable_name(cov))
 
 
-class MvStudentTRandomWalk(distribution.Continuous, _CovSet):
+class MvStudentTRandomWalk(MvGaussianRandomWalk):
     R"""
     Multivariate Random Walk with StudentT innovations
 
@@ -389,22 +347,10 @@ class MvStudentTRandomWalk(distribution.Continuous, _CovSet):
     init : distribution
         distribution for initial value (Defaults to Flat())
     """
-    def __init__(self, nu, mu=0., cov=None, tau=None, chol=None, lower=True, init=Flat.dist(),
-                 *args, **kwargs):
+    def __init__(self, nu, *args, **kwargs):
         super(MvStudentTRandomWalk, self).__init__(*args, **kwargs)
-        super(MvStudentTRandomWalk, self).__initCov__(cov, tau, chol, lower)
-        self.mu = mu = tt.as_tensor_variable(mu)
         self.nu = nu = tt.as_tensor_variable(nu)
-        self.init = init
-        self.mean = tt.as_tensor_variable(0.)
-
-    def logp(self, x):
-        x_im1 = x[:-1]
-        x_i = x[1:]
-        innov_like = multivariate.MvStudentT.dist(self.nu, mu=x_im1 + self.mu,
-                                                  cov=self.cov, tau=self.tau,
-                                                  chol=self.chol_cov).logp(x_i)
-        return self.init.logp(x[0]) + tt.sum(innov_like)
+        self.innov = multivariate.MvStudentT.dist(self.nu, *self.innovArgs)
 
     def _repr_latex_(self, name=None, dist=None):
         if dist is None:
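For context, a minimal usage sketch of the two random walks after this change (a sketch only, assuming PyMC3 at this revision with NumPy available; the shapes and parameter values are made up for illustration):

import numpy as np
import pymc3 as pm

with pm.Model():
    # 50 steps of a 3-dimensional Gaussian random walk; the MvNormal
    # innovation distribution is now built once in __init__ and reused
    # by logp via logp_sum on the increments.
    walk = pm.MvGaussianRandomWalk('walk', mu=np.zeros(3), cov=np.eye(3),
                                   shape=(50, 3))

    # Heavier-tailed variant; after this commit it only adds nu and swaps
    # the innovation distribution for an MvStudentT.
    twalk = pm.MvStudentTRandomWalk('twalk', nu=5, mu=np.zeros(3),
                                    cov=np.eye(3), shape=(50, 3))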

0 commit comments
