@@ -767,7 +767,7 @@ def test_raises3(self):
             B = pm.gp.cov.Coregion(1)


-@pytest.mark.xfail(reason="MvNormal was not yet refactored")
+# @pytest.mark.xfail(reason="MvNormal was not yet refactored")
 class TestMarginalVsLatent:
     R"""
     Compare the logp of models Marginal, noise=0 and Latent.
@@ -781,7 +781,7 @@ def setup_method(self):
         with pm.Model() as model:
             cov_func = pm.gp.cov.ExpQuad(3, [0.1, 0.2, 0.3])
             mean_func = pm.gp.mean.Constant(0.5)
-            gp = pm.gp.Marginal(mean_func, cov_func)
+            gp = pm.gp.Marginal(mean_func=mean_func, cov_func=cov_func)
             f = gp.marginal_likelihood("f", X, y, noise=0.0, is_observed=False, observed=y)
             p = gp.conditional("p", Xnew)
         self.logp = model.logp({"p": pnew})
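
Note: the recurring change in the hunks above and below is to pass the mean and covariance functions to the GP constructors by keyword rather than by position. A minimal sketch of the convention, assuming the `pm` alias for PyMC3 used throughout this test module:

    cov_func = pm.gp.cov.ExpQuad(3, [0.1, 0.2, 0.3])
    mean_func = pm.gp.mean.Constant(0.5)
    # Keyword arguments keep the call correct even if the positional
    # order of the constructor parameters ever changes.
    gp = pm.gp.Marginal(mean_func=mean_func, cov_func=cov_func)
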
@@ -794,7 +794,7 @@ def testLatent1(self):
         with pm.Model() as model:
             cov_func = pm.gp.cov.ExpQuad(3, [0.1, 0.2, 0.3])
             mean_func = pm.gp.mean.Constant(0.5)
-            gp = pm.gp.Latent(mean_func, cov_func)
+            gp = pm.gp.Latent(mean_func=mean_func, cov_func=cov_func)
             f = gp.prior("f", self.X, reparameterize=False)
             p = gp.conditional("p", self.Xnew)
         latent_logp = model.logp({"f": self.y, "p": self.pnew})
@@ -804,7 +804,7 @@ def testLatent2(self):
         with pm.Model() as model:
             cov_func = pm.gp.cov.ExpQuad(3, [0.1, 0.2, 0.3])
             mean_func = pm.gp.mean.Constant(0.5)
-            gp = pm.gp.Latent(mean_func, cov_func)
+            gp = pm.gp.Latent(mean_func=mean_func, cov_func=cov_func)
             f = gp.prior("f", self.X, reparameterize=True)
             p = gp.conditional("p", self.Xnew)
         chol = np.linalg.cholesky(cov_func(self.X).eval())
@@ -828,7 +828,7 @@ def setup_method(self):
         with pm.Model() as model:
             cov_func = pm.gp.cov.ExpQuad(3, [0.1, 0.2, 0.3])
             mean_func = pm.gp.mean.Constant(0.5)
-            gp = pm.gp.Marginal(mean_func, cov_func)
+            gp = pm.gp.Marginal(mean_func=mean_func, cov_func=cov_func)
             sigma = 0.1
             f = gp.marginal_likelihood("f", X, y, noise=sigma)
             p = gp.conditional("p", Xnew)
@@ -845,7 +845,7 @@ def testApproximations(self, approx):
         with pm.Model() as model:
             cov_func = pm.gp.cov.ExpQuad(3, [0.1, 0.2, 0.3])
             mean_func = pm.gp.mean.Constant(0.5)
-            gp = pm.gp.MarginalSparse(mean_func, cov_func, approx=approx)
+            gp = pm.gp.MarginalSparse(mean_func=mean_func, cov_func=cov_func, approx=approx)
             f = gp.marginal_likelihood("f", self.X, self.X, self.y, self.sigma)
             p = gp.conditional("p", self.Xnew)
         approx_logp = model.logp({"f": self.y, "p": self.pnew})
@@ -856,7 +856,7 @@ def testPredictVar(self, approx):
         with pm.Model() as model:
             cov_func = pm.gp.cov.ExpQuad(3, [0.1, 0.2, 0.3])
             mean_func = pm.gp.mean.Constant(0.5)
-            gp = pm.gp.MarginalSparse(mean_func, cov_func, approx=approx)
+            gp = pm.gp.MarginalSparse(mean_func=mean_func, cov_func=cov_func, approx=approx)
             f = gp.marginal_likelihood("f", self.X, self.X, self.y, self.sigma)
         mu1, var1 = self.gp.predict(self.Xnew, diag=True)
         mu2, var2 = gp.predict(self.Xnew, diag=True)
@@ -867,7 +867,7 @@ def testPredictCov(self):
         with pm.Model() as model:
             cov_func = pm.gp.cov.ExpQuad(3, [0.1, 0.2, 0.3])
             mean_func = pm.gp.mean.Constant(0.5)
-            gp = pm.gp.MarginalSparse(mean_func, cov_func, approx="DTC")
+            gp = pm.gp.MarginalSparse(mean_func=mean_func, cov_func=cov_func, approx="DTC")
             f = gp.marginal_likelihood("f", self.X, self.X, self.y, self.sigma, is_observed=False)
         mu1, cov1 = self.gp.predict(self.Xnew, pred_noise=True)
         mu2, cov2 = gp.predict(self.Xnew, pred_noise=True)
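
Note: the two predict tests above exercise both return shapes of `Marginal.predict`. A sketch of the distinction, with the array shapes stated as assumptions for illustration:

    # diag=True returns pointwise predictive variances, shape (n,),
    # instead of the full (n, n) predictive covariance matrix.
    mu, var = gp.predict(Xnew, diag=True)

    # pred_noise=True folds the observation noise into the returned
    # predictive covariance.
    mu, cov = gp.predict(Xnew, pred_noise=True)
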
@@ -888,7 +888,7 @@ def setup_method(self):
         )
         self.means = (pm.gp.mean.Constant(0.5), pm.gp.mean.Constant(0.5), pm.gp.mean.Constant(0.5))

-    @pytest.mark.xfail(reason="MvNormal was not yet refactored")
+    # @pytest.mark.xfail(reason="MvNormal was not yet refactored")
     def testAdditiveMarginal(self):
         with pm.Model() as model1:
             gp1 = pm.gp.Marginal(self.means[0], self.covs[0])
@@ -1007,17 +1007,17 @@ def testAdditiveTypeRaises2(self):
             gp1 + gp2


-@pytest.mark.xfail(reason="MvNormal was not yet refactored")
+# @pytest.mark.xfail(reason="MvNormal was not yet refactored")
 class TestTP:
     R"""
     Compare TP with high degrees of freedom to GP
     """

     def setup_method(self):
         X = np.random.randn(20, 3)
-        y = np.random.randn(20) * 0.01
-        Xnew = np.random.randn(50, 3)
-        pnew = np.random.randn(50) * 0.01
+        y = np.random.randn(20)
+        Xnew = np.random.randn(30, 3)
+        pnew = np.random.randn(30)
         with pm.Model() as model:
             cov_func = pm.gp.cov.ExpQuad(3, [0.1, 0.2, 0.3])
             gp = pm.gp.Latent(cov_func=cov_func)
@@ -1027,29 +1027,29 @@ def setup_method(self):
         self.y = y
         self.Xnew = Xnew
         self.pnew = pnew
-        self.latent_logp = model.logp({"f": y, "p": pnew})
-        self.plogp = p.logp({"f": y, "p": pnew})
+        self.nu = 1000
+        self.gp_latent_logp = model.logp({"f": y, "p": pnew})

     def testTPvsLatent(self):
         with pm.Model() as model:
             cov_func = pm.gp.cov.ExpQuad(3, [0.1, 0.2, 0.3])
-            tp = pm.gp.TP(cov_func=cov_func, nu=10000)
+            tp = pm.gp.TP(cov_func=cov_func, nu=self.nu)
             f = tp.prior("f", self.X, reparameterize=False)
             p = tp.conditional("p", self.Xnew)
         tp_logp = model.logp({"f": self.y, "p": self.pnew})
-        npt.assert_allclose(self.latent_logp, tp_logp, atol=0, rtol=1e-2)
+        npt.assert_allclose(self.gp_latent_logp, tp_logp, atol=0, rtol=1e-2)

     def testTPvsLatentReparameterized(self):
         with pm.Model() as model:
             cov_func = pm.gp.cov.ExpQuad(3, [0.1, 0.2, 0.3])
-            tp = pm.gp.TP(cov_func=cov_func, nu=10000)
+            tp = pm.gp.TP(cov_func=cov_func, nu=self.nu)
             f = tp.prior("f", self.X, reparameterize=True)
             p = tp.conditional("p", self.Xnew)
         chol = np.linalg.cholesky(cov_func(self.X).eval())
-        y_rotated = np.linalg.solve(chol, self.y)
-        # testing full model logp unreliable due to introduction of f_chi2__log__
-        plogp = p.logp({"f_rotated_": y_rotated, "p": self.pnew, "f_chi2__log__": np.log(1e20)})
-        npt.assert_allclose(self.plogp, plogp, atol=0, rtol=1e-2)
+        f_rotated = np.linalg.solve(chol, self.y)
+
+        tp_logp = model.logp({"f_rotated_": f_rotated, "p": self.pnew})
+        npt.assert_allclose(self.gp_latent_logp, tp_logp, atol=0, rtol=1e-2)

     def testAdditiveTPRaises(self):
         with pm.Model() as model:
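
Note: two details of the rewritten TP tests. A Student-t process approaches a GP as the degrees of freedom grow, so with `self.nu = 1000` the TP logp is expected to match the latent-GP logp computed in `setup_method` within the loose `rtol=1e-2`. And under `reparameterize=True` the prior is non-centered: the free variable is the whitened vector `f_rotated_`, tied to `f` through the Cholesky factor of the covariance. A sketch of the whitening these tests rely on (zero mean, since no mean function is passed to `pm.gp.Latent`):

    # Non-centered parameterization: f = chol @ f_rotated, so the
    # whitened value corresponding to the observed y is recovered by
    chol = np.linalg.cholesky(cov_func(X).eval())
    f_rotated = np.linalg.solve(chol, y)
    # model.logp({"f_rotated_": f_rotated, "p": pnew}) then scores the
    # same configuration as the non-reparameterized test.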