@@ -824,6 +824,8 @@ def testLatent1(self):
gp = pm.gp.Latent(mean_func=mean_func, cov_func=cov_func)
f = gp.prior("f", self.X, reparameterize=False)
p = gp.conditional("p", self.Xnew)
+ assert tuple(f.shape.eval()) == (self.X.shape[0],)
+ assert tuple(p.shape.eval()) == (self.Xnew.shape[0],)
latent_logp = model.compile_logp()({"f": self.y, "p": self.pnew})
npt.assert_allclose(latent_logp, self.logp, atol=0, rtol=1e-2)
@@ -834,6 +836,8 @@ def testLatent2(self):
gp = pm.gp.Latent(mean_func=mean_func, cov_func=cov_func)
f = gp.prior("f", self.X, reparameterize=True)
p = gp.conditional("p", self.Xnew)
+ assert tuple(f.shape.eval()) == (self.X.shape[0],)
+ assert tuple(p.shape.eval()) == (self.Xnew.shape[0],)
chol = np.linalg.cholesky(cov_func(self.X).eval())
y_rotated = np.linalg.solve(chol, self.y - 0.5)
latent_logp = model.compile_logp()({"f_rotated_": y_rotated, "p": self.pnew})
@@ -1068,6 +1072,8 @@ def testTPvsLatent(self):
tp = pm.gp.TP(cov_func=cov_func, nu=self.nu)
f = tp.prior("f", self.X, reparameterize=False)
p = tp.conditional("p", self.Xnew)
+ assert tuple(f.shape.eval()) == (self.X.shape[0],)
+ assert tuple(p.shape.eval()) == (self.Xnew.shape[0],)
tp_logp = model.compile_logp()({"f": self.y, "p": self.pnew})
npt.assert_allclose(self.gp_latent_logp, tp_logp, atol=0, rtol=1e-2)
@@ -1077,6 +1083,8 @@ def testTPvsLatentReparameterized(self):
tp = pm.gp.TP(cov_func=cov_func, nu=self.nu)
f = tp.prior("f", self.X, reparameterize=True)
p = tp.conditional("p", self.Xnew)
+ assert tuple(f.shape.eval()) == (self.X.shape[0],)
+ assert tuple(p.shape.eval()) == (self.Xnew.shape[0],)
chol = np.linalg.cholesky(cov_func(self.X).eval())
f_rotated = np.linalg.solve(chol, self.y)
tp_logp = model.compile_logp()({"f_rotated_": f_rotated, "p": self.pnew})
@@ -1129,6 +1137,8 @@ def testLatentKronvsLatent(self):
kron_gp = pm.gp.LatentKron(mean_func=self.mean, cov_funcs=self.cov_funcs)
f = kron_gp.prior("f", self.Xs)
p = kron_gp.conditional("p", self.Xnew)
+ assert tuple(f.shape.eval()) == (self.X.shape[0],)
+ assert tuple(p.shape.eval()) == (self.Xnew.shape[0],)
kronlatent_logp = kron_model.compile_logp()({"f_rotated_": self.y_rotated, "p": self.pnew})
npt.assert_allclose(kronlatent_logp, self.logp, atol=0, rtol=1e-3)
@@ -1186,6 +1196,8 @@ def testMarginalKronvsMarginalpredict(self):
f = kron_gp.marginal_likelihood("f", self.Xs, self.y, sigma=self.sigma)
p = kron_gp.conditional("p", self.Xnew)
mu, cov = kron_gp.predict(self.Xnew)
+ assert tuple(f.shape.eval()) == (self.X.shape[0],)
+ assert tuple(p.shape.eval()) == (self.Xnew.shape[0],)
npt.assert_allclose(mu, self.mu, atol=1e-5, rtol=1e-2)
npt.assert_allclose(cov, self.cov, atol=1e-5, rtol=1e-2)
with kron_model: