from pymc3.util import get_variable_name
from .continuous import get_tau_sd, Normal, Flat
- from .dist_math import Cholesky
from . import multivariate
from . import distribution

@@ -280,48 +279,8 @@ def _repr_latex_(self, name=None, dist=None):
                get_variable_name(dt))


- class _CovSet():
-     R"""
-     Convenience class to set Covariance, Inverse Covariance and Cholesky
-     descomposition of Covariance marrices.
-     """
-     def __initCov__(self, cov=None, tau=None, chol=None, lower=True):
-         if all([val is None for val in [cov, tau, chol]]):
-             raise ValueError('One of cov, tau or chol arguments must be provided.')
-
-         self.cov = self.tau = self.chol_cov = None
-
-         cholesky = Cholesky(nofail=True, lower=True)
-         if cov is not None:
-             self.k = cov.shape[0]
-             self._cov_type = 'cov'
-             cov = tt.as_tensor_variable(cov)
-             if cov.ndim != 2:
-                 raise ValueError('cov must be two dimensional.')
-             self.chol_cov = cholesky(cov)
-             self.cov = cov
-             self._n = self.cov.shape[-1]
-         elif tau is not None:
-             self.k = tau.shape[0]
-             self._cov_type = 'tau'
-             tau = tt.as_tensor_variable(tau)
-             if tau.ndim != 2:
-                 raise ValueError('tau must be two dimensional.')
-             self.chol_tau = cholesky(tau)
-             self.tau = tau
-             self._n = self.tau.shape[-1]
-         else:
-             if chol is not None and not lower:
-                 chol = chol.T
-             self.k = chol.shape[0]
-             self._cov_type = 'chol'
-             if chol.ndim != 2:
-                 raise ValueError('chol must be two dimensional.')
-             self.chol_cov = tt.as_tensor_variable(chol)
-             self._n = self.chol_cov.shape[-1]

-
- class MvGaussianRandomWalk(distribution.Continuous, _CovSet):
+ class MvGaussianRandomWalk(distribution.Continuous):
     R"""
     Multivariate Random Walk with Normal innovations

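The `_CovSet` helper removed above existed only to normalize the three equivalent ways of specifying the innovation scale (covariance, precision, or Cholesky factor); after this change that bookkeeping is delegated to `multivariate.MvNormal.dist`. As a reminder of how the three parameterizations relate, a small NumPy sketch (illustrative only, not part of PyMC3; the values are made up):

import numpy as np

cov = np.array([[2.0, 0.5],
                [0.5, 1.0]])    # covariance of the innovations
tau = np.linalg.inv(cov)        # precision, i.e. the inverse covariance
chol = np.linalg.cholesky(cov)  # lower-triangular factor with chol @ chol.T == cov

# Any one of cov, tau, or chol pins down the same distribution, which is why
# the random-walk classes accept exactly one of the three arguments.
assert np.allclose(chol @ chol.T, cov)
assert np.allclose(tau @ cov, np.eye(2))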
@@ -346,19 +305,18 @@ class MvGaussianRandomWalk(distribution.Continuous, _CovSet):
     def __init__(self, mu=0., cov=None, tau=None, chol=None, lower=True, init=Flat.dist(),
                  *args, **kwargs):
         super(MvGaussianRandomWalk, self).__init__(*args, **kwargs)
-         super(MvGaussianRandomWalk, self).__initCov__(cov, tau, chol, lower)

         self.mu = mu = tt.as_tensor_variable(mu)
         self.init = init
         self.mean = tt.as_tensor_variable(0.)
+         self.innovArgs = (self.mu, cov, tau, chol, lower)
+         self.innov = multivariate.MvNormal.dist(*self.innovArgs)

     def logp(self, x):
         x_im1 = x[:-1]
         x_i = x[1:]

-         innov_like = multivariate.MvNormal.dist(mu=x_im1 + self.mu, cov=self.cov,
-                                                 tau=self.tau, chol=self.chol_cov).logp(x_i)
-         return self.init.logp(x[0]) + tt.sum(innov_like)
+         return self.init.logp(x[0]) + self.innov.logp_sum(x_i - x_im1)

     def _repr_latex_(self, name=None, dist=None):
         if dist is None:
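The rewritten `logp` uses the fact that the multivariate normal is a location family: evaluating an innovation distribution centred at `x_im1 + mu` at `x_i` equals evaluating a distribution centred at `mu` at the increments `x_i - x_im1`, and `logp_sum` adds the per-step terms. A short SciPy check of that equivalence (illustrative, outside PyMC3; drift and covariance values are made up):

import numpy as np
from scipy import stats

rng = np.random.default_rng(0)
drift = np.array([0.1, -0.2])
cov = np.array([[2.0, 0.5],
                [0.5, 1.0]])
x = rng.normal(size=(6, 2)).cumsum(axis=0)   # a toy 2-d walk over 6 time points
x_im1, x_i = x[:-1], x[1:]

# Old formulation: re-centre the innovation distribution at every step.
old = sum(stats.multivariate_normal(mean=xm + drift, cov=cov).logpdf(xi)
          for xm, xi in zip(x_im1, x_i))
# New formulation: one fixed innovation distribution evaluated on the increments.
new = stats.multivariate_normal(mean=drift, cov=cov).logpdf(x_i - x_im1).sum()
assert np.allclose(old, new)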
@@ -371,7 +329,7 @@ def _repr_latex_(self, name=None, dist=None):
                get_variable_name(cov))


- class MvStudentTRandomWalk(distribution.Continuous, _CovSet):
+ class MvStudentTRandomWalk(MvGaussianRandomWalk):
     R"""
     Multivariate Random Walk with StudentT innovations

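Changing the base class from `distribution.Continuous` to `MvGaussianRandomWalk` lets the Student-T walk reuse the parent's `__init__` and `logp` and swap only the innovation distribution, as the next hunk shows. A stripped-down sketch of that pattern with SciPy stand-ins (not the PyMC3 classes; assumes SciPy >= 1.6 for `multivariate_t`):

import numpy as np
from scipy import stats

class GaussianWalk:
    """Toy random walk: Gaussian innovations on the increments."""
    def __init__(self, mu, cov):
        self.innov_args = (mu, cov)
        self.innov = stats.multivariate_normal(mean=mu, cov=cov)

    def logp(self, x):
        # Score the step-to-step increments under the innovation distribution.
        return self.innov.logpdf(x[1:] - x[:-1]).sum()

class StudentTWalk(GaussianWalk):
    """Same walk with heavier-tailed innovations; logp is inherited unchanged."""
    def __init__(self, nu, *args, **kwargs):
        super().__init__(*args, **kwargs)
        mu, cov = self.innov_args
        self.innov = stats.multivariate_t(loc=mu, shape=cov, df=nu)

x = np.random.randn(10, 2).cumsum(axis=0)
print(GaussianWalk(np.zeros(2), np.eye(2)).logp(x))
print(StudentTWalk(4.0, np.zeros(2), np.eye(2)).logp(x))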
@@ -389,22 +347,10 @@ class MvStudentTRandomWalk(distribution.Continuous, _CovSet):
     init : distribution
         distribution for initial value (Defaults to Flat())
     """
-     def __init__(self, nu, mu=0., cov=None, tau=None, chol=None, lower=True, init=Flat.dist(),
-                  *args, **kwargs):
+     def __init__(self, nu, *args, **kwargs):
         super(MvStudentTRandomWalk, self).__init__(*args, **kwargs)
-         super(MvStudentTRandomWalk, self).__initCov__(cov, tau, chol, lower)
-         self.mu = mu = tt.as_tensor_variable(mu)
         self.nu = nu = tt.as_tensor_variable(nu)
-         self.init = init
-         self.mean = tt.as_tensor_variable(0.)
-
-     def logp(self, x):
-         x_im1 = x[:-1]
-         x_i = x[1:]
-         innov_like = multivariate.MvStudentT.dist(self.nu, mu=x_im1 + self.mu,
-                                                   cov=self.cov, tau=self.tau,
-                                                   chol=self.chol_cov).logp(x_i)
-         return self.init.logp(x[0]) + tt.sum(innov_like)
+         self.innov = multivariate.MvStudentT.dist(self.nu, *self.innovArgs)

     def _repr_latex_(self, name=None, dist=None):
         if dist is None:
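To close, a minimal usage sketch of the refactored distributions inside a PyMC3 3.x model. This is illustrative only: the data, covariance, and shapes are made up, and it assumes the `pm.MvGaussianRandomWalk` / `pm.MvStudentTRandomWalk` names exported by PyMC3 at the time of this change:

import numpy as np
import pymc3 as pm

T, k = 50, 2
cov = np.array([[1.0, 0.3],
                [0.3, 1.0]])
data = np.random.multivariate_normal(np.zeros(k), cov, size=T).cumsum(axis=0)

with pm.Model():
    # Latent walk with Gaussian innovations and a fixed innovation covariance.
    walk = pm.MvGaussianRandomWalk('walk', mu=np.zeros(k), cov=cov, shape=(T, k))
    # Heavier-tailed alternative with Student-T innovations:
    # walk = pm.MvStudentTRandomWalk('walk', nu=4, mu=np.zeros(k), cov=cov, shape=(T, k))
    pm.MvNormal('obs', mu=walk, cov=0.1 * np.eye(k), observed=data)
    trace = pm.sample(500, tune=500, cores=1)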