Skip to content

Commit 9ca57bf

Browse files
Fix shape problem in GP priors
Shape assertions were added to existing tests that cover both reparameterize=True/False. Closes #5803
1 parent b67a76f commit 9ca57bf

File tree

3 files changed

+14
-26
lines changed

3 files changed

+14
-26
lines changed

pymc/gp/gp.py

Lines changed: 2 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -27,7 +27,6 @@
2727
JITTER_DEFAULT,
2828
cholesky,
2929
conditioned_vars,
30-
infer_size,
3130
replace_with_values,
3231
solve_lower,
3332
solve_upper,
@@ -131,7 +130,7 @@ def _build_prior(self, name, X, reparameterize=True, jitter=JITTER_DEFAULT, **kw
131130
mu = self.mean_func(X)
132131
cov = stabilize(self.cov_func(X), jitter)
133132
if reparameterize:
134-
size = infer_size(X, kwargs.pop("size", None))
133+
size = np.shape(X)[0]
135134
v = pm.Normal(name + "_rotated_", mu=0.0, sigma=1.0, size=size, **kwargs)
136135
f = pm.Deterministic(name, mu + cholesky(cov).dot(v))
137136
else:
@@ -278,7 +277,7 @@ def _build_prior(self, name, X, reparameterize=True, jitter=JITTER_DEFAULT, **kw
278277
mu = self.mean_func(X)
279278
cov = stabilize(self.cov_func(X), jitter)
280279
if reparameterize:
281-
size = infer_size(X, kwargs.pop("size", None))
280+
size = np.shape(X)[0]
282281
v = pm.StudentT(name + "_rotated_", mu=0.0, sigma=1.0, nu=self.nu, size=size, **kwargs)
283282
f = pm.Deterministic(name, mu + cholesky(cov).dot(v))
284283
else:

pymc/gp/util.py

Lines changed: 0 additions & 23 deletions
Original file line numberDiff line numberDiff line change
@@ -88,29 +88,6 @@ def replace_with_values(vars_needed, replacements=None, model=None):
8888
return fn(**replacements)
8989

9090

91-
def infer_size(X, n_points=None):
92-
R"""
93-
Maybe attempt to infer the size, or N, of a Gaussian process input matrix.
94-
95-
If a specific shape cannot be inferred, for instance if X is symbolic, then an
96-
error is raised.
97-
98-
Parameters
99-
----------
100-
X: array-like
101-
Gaussian process input matrix.
102-
n_points: None or int
103-
The number of rows of `X`. If `None`, the number of rows of `X` is
104-
calculated from `X` if possible.
105-
"""
106-
if n_points is None:
107-
try:
108-
n_points = int(X.shape[0])
109-
except TypeError:
110-
raise TypeError("Cannot infer 'shape', provide as an argument")
111-
return n_points
112-
113-
11491
def stabilize(K, jitter=JITTER_DEFAULT):
11592
R"""
11693
Adds small diagonal to a covariance matrix.

pymc/tests/test_gp.py

Lines changed: 12 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -824,6 +824,8 @@ def testLatent1(self):
824824
gp = pm.gp.Latent(mean_func=mean_func, cov_func=cov_func)
825825
f = gp.prior("f", self.X, reparameterize=False)
826826
p = gp.conditional("p", self.Xnew)
827+
assert tuple(f.shape.eval()) == (self.X.shape[0],)
828+
assert tuple(p.shape.eval()) == (self.Xnew.shape[0],)
827829
latent_logp = model.compile_logp()({"f": self.y, "p": self.pnew})
828830
npt.assert_allclose(latent_logp, self.logp, atol=0, rtol=1e-2)
829831

@@ -834,6 +836,8 @@ def testLatent2(self):
834836
gp = pm.gp.Latent(mean_func=mean_func, cov_func=cov_func)
835837
f = gp.prior("f", self.X, reparameterize=True)
836838
p = gp.conditional("p", self.Xnew)
839+
assert tuple(f.shape.eval()) == (self.X.shape[0],)
840+
assert tuple(p.shape.eval()) == (self.Xnew.shape[0],)
837841
chol = np.linalg.cholesky(cov_func(self.X).eval())
838842
y_rotated = np.linalg.solve(chol, self.y - 0.5)
839843
latent_logp = model.compile_logp()({"f_rotated_": y_rotated, "p": self.pnew})
@@ -1068,6 +1072,8 @@ def testTPvsLatent(self):
10681072
tp = pm.gp.TP(cov_func=cov_func, nu=self.nu)
10691073
f = tp.prior("f", self.X, reparameterize=False)
10701074
p = tp.conditional("p", self.Xnew)
1075+
assert tuple(f.shape.eval()) == (self.X.shape[0],)
1076+
assert tuple(p.shape.eval()) == (self.Xnew.shape[0],)
10711077
tp_logp = model.compile_logp()({"f": self.y, "p": self.pnew})
10721078
npt.assert_allclose(self.gp_latent_logp, tp_logp, atol=0, rtol=1e-2)
10731079

@@ -1077,6 +1083,8 @@ def testTPvsLatentReparameterized(self):
10771083
tp = pm.gp.TP(cov_func=cov_func, nu=self.nu)
10781084
f = tp.prior("f", self.X, reparameterize=True)
10791085
p = tp.conditional("p", self.Xnew)
1086+
assert tuple(f.shape.eval()) == (self.X.shape[0],)
1087+
assert tuple(p.shape.eval()) == (self.Xnew.shape[0],)
10801088
chol = np.linalg.cholesky(cov_func(self.X).eval())
10811089
f_rotated = np.linalg.solve(chol, self.y)
10821090
tp_logp = model.compile_logp()({"f_rotated_": f_rotated, "p": self.pnew})
@@ -1129,6 +1137,8 @@ def testLatentKronvsLatent(self):
11291137
kron_gp = pm.gp.LatentKron(mean_func=self.mean, cov_funcs=self.cov_funcs)
11301138
f = kron_gp.prior("f", self.Xs)
11311139
p = kron_gp.conditional("p", self.Xnew)
1140+
assert tuple(f.shape.eval()) == (self.X.shape[0],)
1141+
assert tuple(p.shape.eval()) == (self.Xnew.shape[0],)
11321142
kronlatent_logp = kron_model.compile_logp()({"f_rotated_": self.y_rotated, "p": self.pnew})
11331143
npt.assert_allclose(kronlatent_logp, self.logp, atol=0, rtol=1e-3)
11341144

@@ -1186,6 +1196,8 @@ def testMarginalKronvsMarginalpredict(self):
11861196
f = kron_gp.marginal_likelihood("f", self.Xs, self.y, sigma=self.sigma)
11871197
p = kron_gp.conditional("p", self.Xnew)
11881198
mu, cov = kron_gp.predict(self.Xnew)
1199+
assert tuple(f.shape.eval()) == (self.X.shape[0],)
1200+
assert tuple(p.shape.eval()) == (self.Xnew.shape[0],)
11891201
npt.assert_allclose(mu, self.mu, atol=1e-5, rtol=1e-2)
11901202
npt.assert_allclose(cov, self.cov, atol=1e-5, rtol=1e-2)
11911203
with kron_model:

0 commit comments

Comments
 (0)