
Commit d1e09fe

bwengals authored and bwengals committed
fix TP tests, force mean_func, cov_func to be req kwarg
1 parent 77c4392 commit d1e09fe

File tree

1 file changed: +22 -22 lines changed


pymc/tests/test_gp.py

Lines changed: 22 additions & 22 deletions
@@ -767,7 +767,7 @@ def test_raises3(self):
             B = pm.gp.cov.Coregion(1)


-@pytest.mark.xfail(reason="MvNormal was not yet refactored")
+# @pytest.mark.xfail(reason="MvNormal was not yet refactored")
 class TestMarginalVsLatent:
     R"""
     Compare the logp of models Marginal, noise=0 and Latent.
@@ -781,7 +781,7 @@ def setup_method(self):
         with pm.Model() as model:
             cov_func = pm.gp.cov.ExpQuad(3, [0.1, 0.2, 0.3])
             mean_func = pm.gp.mean.Constant(0.5)
-            gp = pm.gp.Marginal(mean_func, cov_func)
+            gp = pm.gp.Marginal(mean_func=mean_func, cov_func=cov_func)
             f = gp.marginal_likelihood("f", X, y, noise=0.0, is_observed=False, observed=y)
             p = gp.conditional("p", Xnew)
         self.logp = model.logp({"p": pnew})
@@ -794,7 +794,7 @@ def testLatent1(self):
         with pm.Model() as model:
             cov_func = pm.gp.cov.ExpQuad(3, [0.1, 0.2, 0.3])
             mean_func = pm.gp.mean.Constant(0.5)
-            gp = pm.gp.Latent(mean_func, cov_func)
+            gp = pm.gp.Latent(mean_func=mean_func, cov_func=cov_func)
             f = gp.prior("f", self.X, reparameterize=False)
             p = gp.conditional("p", self.Xnew)
         latent_logp = model.logp({"f": self.y, "p": self.pnew})
@@ -804,7 +804,7 @@ def testLatent2(self):
         with pm.Model() as model:
             cov_func = pm.gp.cov.ExpQuad(3, [0.1, 0.2, 0.3])
             mean_func = pm.gp.mean.Constant(0.5)
-            gp = pm.gp.Latent(mean_func, cov_func)
+            gp = pm.gp.Latent(mean_func=mean_func, cov_func=cov_func)
             f = gp.prior("f", self.X, reparameterize=True)
             p = gp.conditional("p", self.Xnew)
         chol = np.linalg.cholesky(cov_func(self.X).eval())
@@ -828,7 +828,7 @@ def setup_method(self):
         with pm.Model() as model:
             cov_func = pm.gp.cov.ExpQuad(3, [0.1, 0.2, 0.3])
             mean_func = pm.gp.mean.Constant(0.5)
-            gp = pm.gp.Marginal(mean_func, cov_func)
+            gp = pm.gp.Marginal(mean_func=mean_func, cov_func=cov_func)
             sigma = 0.1
             f = gp.marginal_likelihood("f", X, y, noise=sigma)
             p = gp.conditional("p", Xnew)
@@ -845,7 +845,7 @@ def testApproximations(self, approx):
         with pm.Model() as model:
             cov_func = pm.gp.cov.ExpQuad(3, [0.1, 0.2, 0.3])
             mean_func = pm.gp.mean.Constant(0.5)
-            gp = pm.gp.MarginalSparse(mean_func, cov_func, approx=approx)
+            gp = pm.gp.MarginalSparse(mean_func=mean_func, cov_func=cov_func, approx=approx)
             f = gp.marginal_likelihood("f", self.X, self.X, self.y, self.sigma)
             p = gp.conditional("p", self.Xnew)
         approx_logp = model.logp({"f": self.y, "p": self.pnew})
@@ -856,7 +856,7 @@ def testPredictVar(self, approx):
         with pm.Model() as model:
             cov_func = pm.gp.cov.ExpQuad(3, [0.1, 0.2, 0.3])
             mean_func = pm.gp.mean.Constant(0.5)
-            gp = pm.gp.MarginalSparse(mean_func, cov_func, approx=approx)
+            gp = pm.gp.MarginalSparse(mean_func=mean_func, cov_func=cov_func, approx=approx)
             f = gp.marginal_likelihood("f", self.X, self.X, self.y, self.sigma)
             mu1, var1 = self.gp.predict(self.Xnew, diag=True)
             mu2, var2 = gp.predict(self.Xnew, diag=True)
@@ -867,7 +867,7 @@ def testPredictCov(self):
         with pm.Model() as model:
             cov_func = pm.gp.cov.ExpQuad(3, [0.1, 0.2, 0.3])
             mean_func = pm.gp.mean.Constant(0.5)
-            gp = pm.gp.MarginalSparse(mean_func, cov_func, approx="DTC")
+            gp = pm.gp.MarginalSparse(mean_func=mean_func, cov_func=cov_func, approx="DTC")
             f = gp.marginal_likelihood("f", self.X, self.X, self.y, self.sigma, is_observed=False)
             mu1, cov1 = self.gp.predict(self.Xnew, pred_noise=True)
             mu2, cov2 = gp.predict(self.Xnew, pred_noise=True)
@@ -888,7 +888,7 @@ def setup_method(self):
         )
         self.means = (pm.gp.mean.Constant(0.5), pm.gp.mean.Constant(0.5), pm.gp.mean.Constant(0.5))

-    @pytest.mark.xfail(reason="MvNormal was not yet refactored")
+    # @pytest.mark.xfail(reason="MvNormal was not yet refactored")
     def testAdditiveMarginal(self):
         with pm.Model() as model1:
             gp1 = pm.gp.Marginal(self.means[0], self.covs[0])
@@ -1007,17 +1007,17 @@ def testAdditiveTypeRaises2(self):
                 gp1 + gp2


-@pytest.mark.xfail(reason="MvNormal was not yet refactored")
+# @pytest.mark.xfail(reason="MvNormal was not yet refactored")
 class TestTP:
     R"""
     Compare TP with high degress of freedom to GP
     """

     def setup_method(self):
         X = np.random.randn(20, 3)
-        y = np.random.randn(20) * 0.01
-        Xnew = np.random.randn(50, 3)
-        pnew = np.random.randn(50) * 0.01
+        y = np.random.randn(20)
+        Xnew = np.random.randn(30, 3)
+        pnew = np.random.randn(30)
         with pm.Model() as model:
             cov_func = pm.gp.cov.ExpQuad(3, [0.1, 0.2, 0.3])
             gp = pm.gp.Latent(cov_func=cov_func)
@@ -1027,29 +1027,29 @@ def setup_method(self):
         self.y = y
         self.Xnew = Xnew
         self.pnew = pnew
-        self.latent_logp = model.logp({"f": y, "p": pnew})
-        self.plogp = p.logp({"f": y, "p": pnew})
+        self.nu = 1000
+        self.gp_latent_logp = model.logp({"f": y, "p": pnew})

     def testTPvsLatent(self):
         with pm.Model() as model:
             cov_func = pm.gp.cov.ExpQuad(3, [0.1, 0.2, 0.3])
-            tp = pm.gp.TP(cov_func=cov_func, nu=10000)
+            tp = pm.gp.TP(cov_func=cov_func, nu=self.nu)
             f = tp.prior("f", self.X, reparameterize=False)
             p = tp.conditional("p", self.Xnew)
         tp_logp = model.logp({"f": self.y, "p": self.pnew})
-        npt.assert_allclose(self.latent_logp, tp_logp, atol=0, rtol=1e-2)
+        npt.assert_allclose(self.gp_latent_logp, tp_logp, atol=0, rtol=1e-2)

     def testTPvsLatentReparameterized(self):
         with pm.Model() as model:
             cov_func = pm.gp.cov.ExpQuad(3, [0.1, 0.2, 0.3])
-            tp = pm.gp.TP(cov_func=cov_func, nu=10000)
+            tp = pm.gp.TP(cov_func=cov_func, nu=self.nu)
             f = tp.prior("f", self.X, reparameterize=True)
             p = tp.conditional("p", self.Xnew)
         chol = np.linalg.cholesky(cov_func(self.X).eval())
-        y_rotated = np.linalg.solve(chol, self.y)
-        # testing full model logp unreliable due to introduction of f_chi2__log__
-        plogp = p.logp({"f_rotated_": y_rotated, "p": self.pnew, "f_chi2__log__": np.log(1e20)})
-        npt.assert_allclose(self.plogp, plogp, atol=0, rtol=1e-2)
+        f_rotated = np.linalg.solve(chol, self.y)
+
+        tp_logp = model.logp({"f_rotated_": f_rotated, "p": self.pnew})
+        npt.assert_allclose(self.gp_latent_logp, tp_logp, atol=0, rtol=1e-2)

     def testAdditiveTPRaises(self):
         with pm.Model() as model:
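As background for the "required keyword argument" part of the commit message, the test changes above switch calls like pm.gp.Marginal(mean_func, cov_func) to the keyword form pm.gp.Marginal(mean_func=mean_func, cov_func=cov_func). The usual Python mechanism for enforcing this is a bare * in the constructor signature. The sketch below is a minimal, hypothetical illustration of that mechanism only; DemoGP and its placeholder arguments are not part of the PyMC codebase.

# Hypothetical sketch of keyword-only parameters (not the actual pm.gp implementation).
class DemoGP:
    def __init__(self, *, mean_func, cov_func):
        # Everything after the bare ``*`` is keyword-only and, having no
        # defaults, required: DemoGP(mean, cov) raises TypeError, while the
        # keyword form used in the updated tests is accepted.
        self.mean_func = mean_func
        self.cov_func = cov_func

# Keyword call succeeds; the commented positional call would raise TypeError.
gp_ok = DemoGP(mean_func="mean-placeholder", cov_func="cov-placeholder")
# DemoGP("mean-placeholder", "cov-placeholder")  # TypeError: positional args not allowed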

0 commit comments