From 60d97ce799ddaa0b34e03c76dc772d54f803ca96 Mon Sep 17 00:00:00 2001
From: Bill Engels
Date: Sat, 27 Aug 2022 13:42:22 -0700
Subject: [PATCH 1/6] switch from using DensityDist to using Potential

---
 pymc/gp/gp.py | 39 +++------------------------------------
 1 file changed, 3 insertions(+), 36 deletions(-)

diff --git a/pymc/gp/gp.py b/pymc/gp/gp.py
index 5431da3fd1..57976cabea 100644
--- a/pymc/gp/gp.py
+++ b/pymc/gp/gp.py
@@ -747,9 +747,6 @@ def marginal_likelihood(
             noise. Must have shape `(n, )`.
         noise: scalar, Variable
             Standard deviation of the Gaussian noise.
-        is_observed: bool
-            Whether to set `y` as an `observed` variable in the `model`.
-            Default is `True`.
         jitter: scalar
             A small correction added to the diagonal of positive semi-definite
             covariance matrices to ensure numerical stability.
@@ -766,39 +763,9 @@
             raise ValueError("noise argument must be specified")
         else:
             self.sigma = noise
-
-        if is_observed:
-            return pm.DensityDist(
-                name,
-                X,
-                Xu,
-                self.sigma,
-                jitter,
-                logp=self._build_marginal_likelihood_logp,
-                observed=y,
-                ndims_params=[2, 2, 0],
-                size=X.shape[0],
-                **kwargs,
-            )
-        else:
-            warnings.warn(
-                "The 'is_observed' argument has been deprecated. If the GP is "
-                "unobserved use gp.Latent instead.",
-                FutureWarning,
-            )
-            return pm.DensityDist(
-                name,
-                X,
-                Xu,
-                self.sigma,
-                jitter,
-                logp=self._build_marginal_likelihood_logp,
-                observed=y,
-                ndims_params=[2, 2, 0],
-                # ndim_supp=1,
-                size=X.shape[0],
-                **kwargs,
-            )
+
+        approx_logp = self._build_marginal_likelihood_logp(y, X, Xu, noise, JITTER_DEFAULT)
+        pm.Potential("marginalapprox_logp_" + name, approx_logp)
 
     def _build_conditional(
         self, Xnew, pred_noise, diag, X, Xu, y, sigma, cov_total, mean_total, jitter

From e2d313a68c91694bf44a38624b3e2fa4a0c1105e Mon Sep 17 00:00:00 2001
From: Bill Engels
Date: Sat, 27 Aug 2022 13:43:08 -0700
Subject: [PATCH 2/6] increase tolerance on flaky tests, add test using
 find_MAP

---
 pymc/tests/test_gp.py | 42 ++++++++++++++++++++++++++++++++++++------
 1 file changed, 36 insertions(+), 6 deletions(-)

diff --git a/pymc/tests/test_gp.py b/pymc/tests/test_gp.py
index 80d6a37803..2a83536d23 100644
--- a/pymc/tests/test_gp.py
+++ b/pymc/tests/test_gp.py
@@ -869,7 +869,7 @@ def setup_method(self):
         self.sigma = sigma
         self.pnew = pnew
         self.gp = gp
-
+
     @pytest.mark.parametrize("approx", ["FITC", "VFE", "DTC"])
     def testApproximations(self, approx):
         with pm.Model() as model:
@@ -879,7 +879,7 @@ def testApproximations(self, approx):
             f = gp.marginal_likelihood("f", self.X, self.X, self.y, self.sigma)
             p = gp.conditional("p", self.Xnew)
             approx_logp = model.compile_logp()({"p": self.pnew})
-        npt.assert_allclose(approx_logp, self.logp, atol=0, rtol=1e-2)
+        npt.assert_allclose(approx_logp, self.logp, atol=1e-2, rtol=1e-2)
 
     @pytest.mark.parametrize("approx", ["FITC", "VFE", "DTC"])
     def testPredictVar(self, approx):
@@ -890,8 +890,8 @@ def testPredictVar(self, approx):
             f = gp.marginal_likelihood("f", self.X, self.X, self.y, self.sigma)
             mu1, var1 = self.gp.predict(self.Xnew, diag=True)
             mu2, var2 = gp.predict(self.Xnew, diag=True)
-        npt.assert_allclose(mu1, mu2, atol=0, rtol=1e-3)
-        npt.assert_allclose(var1, var2, atol=0, rtol=1e-3)
+        npt.assert_allclose(mu1, mu2, atol=1e-2, rtol=1e-2)
+        npt.assert_allclose(var1, var2, atol=1e-2, rtol=1e-2)
 
     def testPredictCov(self):
         with pm.Model() as model:
@@ -901,9 +901,39 @@ def testPredictCov(self):
             f = gp.marginal_likelihood("f", self.X, self.X, self.y, self.sigma)
             mu1, cov1 = self.gp.predict(self.Xnew, pred_noise=True)
             mu2, cov2 = gp.predict(self.Xnew, pred_noise=True)
-        npt.assert_allclose(mu1, mu2, atol=0, rtol=1e-3)
-        npt.assert_allclose(cov1, cov2, atol=0, rtol=1e-3)
+        npt.assert_allclose(mu1, mu2, atol=1e-2, rtol=1e-2)
+        npt.assert_allclose(cov1, cov2, atol=1e-2, rtol=1e-2)
+
+class TestMarginalVsMarginalApproxFit:
+    R"""
+    Compare test fits of models Marginal and MarginalApprox.
+    Should be nearly equal when inducing points are same as inputs.
+    """
+    def setup_method(self):
+        self.sigma = 0.1
+        self.x = np.linspace(-5, 5, 30)
+        self.y = 0.25 * self.x + self.sigma*np.random.randn(len(self.x))
+        with pm.Model() as model:
+            cov_func = pm.gp.cov.Linear(1, c=0.0)
+            c = pm.Normal("c", mu=20.0, sigma=100.0) # far from true value
+            mean_func = pm.gp.mean.Constant(c)
+            gp = pm.gp.Marginal(mean_func=mean_func, cov_func=cov_func)
+            gp.marginal_likelihood("lik", self.x[:, None], self.y, self.sigma)
+            map_full = pm.find_MAP(method="bfgs")
+        self.c_full = map_full["c"]
+
+    @pytest.mark.parametrize("approx", ["FITC", "VFE", "DTC"])
+    def test_fits(self, approx):
+        with pm.Model() as model:
+            cov_func = pm.gp.cov.Linear(1, c=0.0)
+            c = pm.Normal("c", mu=20.0, sigma=100.0)
+            mean_func = pm.gp.mean.Constant(c)
+            gp = pm.gp.MarginalApprox(mean_func=mean_func, cov_func=cov_func, approx="VFE")
+            gp.marginal_likelihood("lik", self.x[:, None], self.x[:, None], self.y, self.sigma)
+            map_approx = pm.find_MAP(method="bfgs")
+        npt.assert_allclose(self.c_full, map_approx["c"], atol=0.1, rtol=0.01)
+
 
 class TestGPAdditive:
     def setup_method(self):

From 1a632820658f69b9ac6b811120211139f4ced602 Mon Sep 17 00:00:00 2001
From: Bill Engels
Date: Sat, 27 Aug 2022 14:19:32 -0700
Subject: [PATCH 3/6] refactor MarginalApprox tests

---
 pymc/tests/test_gp.py | 116 ++++++++++++++++--------------------------
 1 file changed, 44 insertions(+), 72 deletions(-)

diff --git a/pymc/tests/test_gp.py b/pymc/tests/test_gp.py
index 2a83536d23..fa859a2d13 100644
--- a/pymc/tests/test_gp.py
+++ b/pymc/tests/test_gp.py
@@ -843,72 +843,10 @@ def testLatent2(self):
         latent_logp = model.compile_logp()({"f_rotated_": y_rotated, "p": self.pnew})
         npt.assert_allclose(latent_logp, self.logp, atol=5)
 
-
-class TestMarginalVsMarginalApprox:
-    R"""
-    Compare logp of models Marginal and MarginalApprox.
-    Should be nearly equal when inducing points are same as inputs.
-    """
-
-    def setup_method(self):
-        X = np.random.randn(50, 3)
-        y = np.random.randn(50)
-        Xnew = np.random.randn(60, 3)
-        pnew = np.random.randn(60)
-        with pm.Model() as model:
-            cov_func = pm.gp.cov.ExpQuad(3, [0.1, 0.2, 0.3])
-            mean_func = pm.gp.mean.Constant(0.5)
-            gp = pm.gp.Marginal(mean_func=mean_func, cov_func=cov_func)
-            sigma = 0.1
-            f = gp.marginal_likelihood("f", X, y, noise=sigma)
-            p = gp.conditional("p", Xnew)
-        self.logp = model.compile_logp()({"p": pnew})
-        self.X = X
-        self.Xnew = Xnew
-        self.y = y
-        self.sigma = sigma
-        self.pnew = pnew
-        self.gp = gp
-
-    @pytest.mark.parametrize("approx", ["FITC", "VFE", "DTC"])
-    def testApproximations(self, approx):
-        with pm.Model() as model:
-            cov_func = pm.gp.cov.ExpQuad(3, [0.1, 0.2, 0.3])
-            mean_func = pm.gp.mean.Constant(0.5)
-            gp = pm.gp.MarginalApprox(mean_func=mean_func, cov_func=cov_func, approx=approx)
-            f = gp.marginal_likelihood("f", self.X, self.X, self.y, self.sigma)
-            p = gp.conditional("p", self.Xnew)
-            approx_logp = model.compile_logp()({"p": self.pnew})
-        npt.assert_allclose(approx_logp, self.logp, atol=1e-2, rtol=1e-2)
-
-    @pytest.mark.parametrize("approx", ["FITC", "VFE", "DTC"])
-    def testPredictVar(self, approx):
-        with pm.Model() as model:
-            cov_func = pm.gp.cov.ExpQuad(3, [0.1, 0.2, 0.3])
-            mean_func = pm.gp.mean.Constant(0.5)
-            gp = pm.gp.MarginalApprox(mean_func=mean_func, cov_func=cov_func, approx=approx)
-            f = gp.marginal_likelihood("f", self.X, self.X, self.y, self.sigma)
-            mu1, var1 = self.gp.predict(self.Xnew, diag=True)
-            mu2, var2 = gp.predict(self.Xnew, diag=True)
-        npt.assert_allclose(mu1, mu2, atol=1e-2, rtol=1e-2)
-        npt.assert_allclose(var1, var2, atol=1e-2, rtol=1e-2)
-
-    def testPredictCov(self):
-        with pm.Model() as model:
-            cov_func = pm.gp.cov.ExpQuad(3, [0.1, 0.2, 0.3])
-            mean_func = pm.gp.mean.Constant(0.5)
-            gp = pm.gp.MarginalApprox(mean_func=mean_func, cov_func=cov_func, approx="DTC")
-            f = gp.marginal_likelihood("f", self.X, self.X, self.y, self.sigma)
-            mu1, cov1 = self.gp.predict(self.Xnew, pred_noise=True)
-            mu2, cov2 = gp.predict(self.Xnew, pred_noise=True)
-        npt.assert_allclose(mu1, mu2, atol=1e-2, rtol=1e-2)
-        npt.assert_allclose(cov1, cov2, atol=1e-2, rtol=1e-2)
-
-class TestMarginalVsMarginalApproxFit:
+class TestMarginalVsMarginalApprox:
     R"""
     Compare test fits of models Marginal and MarginalApprox.
-    Should be nearly equal when inducing points are same as inputs.
     """
     def setup_method(self):
         self.sigma = 0.1
@@ -918,22 +856,56 @@
             cov_func = pm.gp.cov.Linear(1, c=0.0)
             c = pm.Normal("c", mu=20.0, sigma=100.0) # far from true value
             mean_func = pm.gp.mean.Constant(c)
-            gp = pm.gp.Marginal(mean_func=mean_func, cov_func=cov_func)
-            gp.marginal_likelihood("lik", self.x[:, None], self.y, self.sigma)
-            map_full = pm.find_MAP(method="bfgs")
-        self.c_full = map_full["c"]
+            self.gp = pm.gp.Marginal(mean_func=mean_func, cov_func=cov_func)
+            sigma = pm.HalfNormal("sigma", sigma=100)
+            self.gp.marginal_likelihood("lik", self.x[:, None], self.y, sigma)
+            self.map_full = pm.find_MAP(method="bfgs") # bfgs seems to work much better than lbfgsb
+
+        self.x_new = np.linspace(-6, 6, 20)
+        with model:
+            self.pred_mu, self.pred_var = self.gp.predict(
+                self.x_new[:, None], point=self.map_full, pred_noise=True, diag=True
+            )
+
+        with model:
+            self.pred_mu, self.pred_covar = self.gp.predict(
+                self.x_new[:, None], point=self.map_full, pred_noise=False, diag=False
+            )
 
     @pytest.mark.parametrize("approx", ["FITC", "VFE", "DTC"])
-    def test_fits(self, approx):
+    def test_fits_and_preds(self, approx):
+        # check logp & dlogp, optimization gets approximately correct result
         with pm.Model() as model:
             cov_func = pm.gp.cov.Linear(1, c=0.0)
-            c = pm.Normal("c", mu=20.0, sigma=100.0)
+            c = pm.Normal("c", mu=20.0, sigma=100.0, initval=-500.0)
             mean_func = pm.gp.mean.Constant(c)
             gp = pm.gp.MarginalApprox(mean_func=mean_func, cov_func=cov_func, approx="VFE")
-            gp.marginal_likelihood("lik", self.x[:, None], self.x[:, None], self.y, self.sigma)
+            sigma = pm.HalfNormal("sigma", sigma=100, initval=50.0)
+            gp.marginal_likelihood("lik", self.x[:, None], self.x[:, None], self.y, sigma)
             map_approx = pm.find_MAP(method="bfgs")
-        npt.assert_allclose(self.c_full, map_approx["c"], atol=0.1, rtol=0.01)
-
+
+        # use wide tolerances (but narrow relative to initial values of unknown parameters) because
+        # test is likely flakey
+        npt.assert_allclose(self.map_full["c"], map_approx["c"], atol=0.01, rtol=0.1)
+        npt.assert_allclose(self.map_full["sigma"], map_approx["sigma"], atol=0.01, rtol=0.1)
+
+        # check that predict (and conditional) work, include noise, with diagonal non-full pred var
+        with model:
+            pred_mu_approx, pred_var_approx = gp.predict(
+                self.x_new[:, None], point=map_approx, pred_noise=True, diag=True
+            )
+        npt.assert_allclose(self.pred_mu, pred_mu_approx, atol=0.0, rtol=0.1)
+        npt.assert_allclose(self.pred_var, pred_var_approx, atol=0.0, rtol=0.1)
+
+        # check that predict (and conditional) work, no noise, full pred covariance
+        with model:
+            pred_mu_approx, pred_var_approx = gp.predict(
+                self.x_new[:, None], point=map_approx, pred_noise=True, diag=True
+            )
+        npt.assert_allclose(self.pred_mu, pred_mu_approx, atol=0.0, rtol=0.1)
+        npt.assert_allclose(self.pred_var, pred_var_approx, atol=0.0, rtol=0.1)
+
+
 class TestGPAdditive:
     def setup_method(self):

From 766d3b16fb999ca7a9026199e1bb42dadb8a312a Mon Sep 17 00:00:00 2001
From: Bill Engels
Date: Sat, 27 Aug 2022 14:24:46 -0700
Subject: [PATCH 4/6] run precommit

---
 pymc/gp/gp.py         |  2 +-
 pymc/tests/test_gp.py | 22 +++++++++++-----------
 2 files changed, 12 insertions(+), 12 deletions(-)

diff --git a/pymc/gp/gp.py b/pymc/gp/gp.py
index 57976cabea..df387081b8 100644
--- a/pymc/gp/gp.py
+++ b/pymc/gp/gp.py
@@ -763,7 +763,7 @@
             raise ValueError("noise argument must be specified")
         else:
             self.sigma = noise
-
+
         approx_logp = self._build_marginal_likelihood_logp(y, X, Xu, noise, JITTER_DEFAULT)
         pm.Potential("marginalapprox_logp_" + name, approx_logp)
 
diff --git a/pymc/tests/test_gp.py b/pymc/tests/test_gp.py
index fa859a2d13..0d4414a45d 100644
--- a/pymc/tests/test_gp.py
+++ b/pymc/tests/test_gp.py
@@ -843,30 +843,31 @@ def testLatent2(self):
         latent_logp = model.compile_logp()({"f_rotated_": y_rotated, "p": self.pnew})
         npt.assert_allclose(latent_logp, self.logp, atol=5)
 
-
+
 class TestMarginalVsMarginalApprox:
     R"""
     Compare test fits of models Marginal and MarginalApprox.
     """
+
     def setup_method(self):
         self.sigma = 0.1
         self.x = np.linspace(-5, 5, 30)
-        self.y = 0.25 * self.x + self.sigma*np.random.randn(len(self.x))
+        self.y = 0.25 * self.x + self.sigma * np.random.randn(len(self.x))
         with pm.Model() as model:
             cov_func = pm.gp.cov.Linear(1, c=0.0)
-            c = pm.Normal("c", mu=20.0, sigma=100.0) # far from true value
+            c = pm.Normal("c", mu=20.0, sigma=100.0)  # far from true value
             mean_func = pm.gp.mean.Constant(c)
             self.gp = pm.gp.Marginal(mean_func=mean_func, cov_func=cov_func)
             sigma = pm.HalfNormal("sigma", sigma=100)
             self.gp.marginal_likelihood("lik", self.x[:, None], self.y, sigma)
-            self.map_full = pm.find_MAP(method="bfgs") # bfgs seems to work much better than lbfgsb
-
+            self.map_full = pm.find_MAP(method="bfgs")  # bfgs seems to work much better than lbfgsb
+
         self.x_new = np.linspace(-6, 6, 20)
         with model:
             self.pred_mu, self.pred_var = self.gp.predict(
                 self.x_new[:, None], point=self.map_full, pred_noise=True, diag=True
             )
-
+
         with model:
             self.pred_mu, self.pred_covar = self.gp.predict(
                 self.x_new[:, None], point=self.map_full, pred_noise=False, diag=False
             )
@@ -883,12 +884,12 @@ def test_fits_and_preds(self, approx):
             sigma = pm.HalfNormal("sigma", sigma=100, initval=50.0)
             gp.marginal_likelihood("lik", self.x[:, None], self.x[:, None], self.y, sigma)
             map_approx = pm.find_MAP(method="bfgs")
-
+
         # use wide tolerances (but narrow relative to initial values of unknown parameters) because
         # test is likely flakey
         npt.assert_allclose(self.map_full["c"], map_approx["c"], atol=0.01, rtol=0.1)
         npt.assert_allclose(self.map_full["sigma"], map_approx["sigma"], atol=0.01, rtol=0.1)
-
+
         # check that predict (and conditional) work, include noise, with diagonal non-full pred var
         with model:
             pred_mu_approx, pred_var_approx = gp.predict(
                 self.x_new[:, None], point=map_approx, pred_noise=True, diag=True
             )
         npt.assert_allclose(self.pred_mu, pred_mu_approx, atol=0.0, rtol=0.1)
         npt.assert_allclose(self.pred_var, pred_var_approx, atol=0.0, rtol=0.1)
-
+
         # check that predict (and conditional) work, no noise, full pred covariance
         with model:
             pred_mu_approx, pred_var_approx = gp.predict(
                 self.x_new[:, None], point=map_approx, pred_noise=True, diag=True
             )
         npt.assert_allclose(self.pred_mu, pred_mu_approx, atol=0.0, rtol=0.1)
         npt.assert_allclose(self.pred_var, pred_var_approx, atol=0.0, rtol=0.1)
-
-
+
 class TestGPAdditive:
     def setup_method(self):

From 5740f2e6f793c8229e5de4cdbd54b9c191138c7a Mon Sep 17 00:00:00 2001
From: Bill Engels
Date: Sat, 27 Aug 2022 15:23:53 -0700
Subject: [PATCH 5/6] address comments, pass approx arg correctly, improve
 docstrings

---
 pymc/gp/gp.py         |  7 +------
 pymc/tests/test_gp.py | 18 ++++++++++++------
 2 files changed, 13 insertions(+), 12 deletions(-)

diff --git a/pymc/gp/gp.py b/pymc/gp/gp.py
index df387081b8..a5c2f0dd1d 100644
--- a/pymc/gp/gp.py
+++ b/pymc/gp/gp.py
@@ -685,17 +685,12 @@ def __init__(self, approx="VFE", *, mean_func=Zero(), cov_func=Constant(0.0)):
         super().__init__(mean_func=mean_func, cov_func=cov_func)
 
     def __add__(self, other):
-        # new_gp will default to FITC approx
         new_gp = super().__add__(other)
-        # make sure new gp has correct approx
        if not self.approx == other.approx:
             raise TypeError("Cannot add GPs with different approximations")
         new_gp.approx = self.approx
         return new_gp
 
-    # Use y as first argument, so that we can use functools.partial
-    # in marginal_likelihood instead of lambda. This makes pickling
-    # possible.
     def _build_marginal_likelihood_logp(self, y, X, Xu, sigma, jitter):
         sigma2 = at.square(sigma)
         Kuu = self.cov_func(Xu)
@@ -765,7 +760,7 @@
             self.sigma = noise
 
         approx_logp = self._build_marginal_likelihood_logp(y, X, Xu, noise, JITTER_DEFAULT)
-        pm.Potential("marginalapprox_logp_" + name, approx_logp)
+        pm.Potential(f"marginalapprox_logp_{name}", approx_logp)
 
     def _build_conditional(
         self, Xnew, pred_noise, diag, X, Xu, y, sigma, cov_total, mean_total, jitter
diff --git a/pymc/tests/test_gp.py b/pymc/tests/test_gp.py
index 0d4414a45d..4b119e868f 100644
--- a/pymc/tests/test_gp.py
+++ b/pymc/tests/test_gp.py
@@ -863,11 +863,14 @@ def setup_method(self):
             self.map_full = pm.find_MAP(method="bfgs")  # bfgs seems to work much better than lbfgsb
 
         self.x_new = np.linspace(-6, 6, 20)
+
+        # Include additive Gaussian noise, return diagonal of predicted covariance matrix
         with model:
             self.pred_mu, self.pred_var = self.gp.predict(
                 self.x_new[:, None], point=self.map_full, pred_noise=True, diag=True
             )
+        # Dont include additive Gaussian noise, return full predicted covariance matrix
         with model:
             self.pred_mu, self.pred_covar = self.gp.predict(
                 self.x_new[:, None], point=self.map_full, pred_noise=False, diag=False
             )
@@ -875,22 +878,25 @@
 
     @pytest.mark.parametrize("approx", ["FITC", "VFE", "DTC"])
     def test_fits_and_preds(self, approx):
-        # check logp & dlogp, optimization gets approximately correct result
+        """Get MAP estimate for GP approximation, compare results and predictions to what's returned
+        by an unapproximated GP. The tolerances are fairly wide, but narrow relative to initial
+        values of the unknown parameters.
+        """
+
         with pm.Model() as model:
             cov_func = pm.gp.cov.Linear(1, c=0.0)
             c = pm.Normal("c", mu=20.0, sigma=100.0, initval=-500.0)
             mean_func = pm.gp.mean.Constant(c)
-            gp = pm.gp.MarginalApprox(mean_func=mean_func, cov_func=cov_func, approx="VFE")
+            gp = pm.gp.MarginalApprox(mean_func=mean_func, cov_func=cov_func, approx=approx)
             sigma = pm.HalfNormal("sigma", sigma=100, initval=50.0)
             gp.marginal_likelihood("lik", self.x[:, None], self.x[:, None], self.y, sigma)
             map_approx = pm.find_MAP(method="bfgs")
 
-        # use wide tolerances (but narrow relative to initial values of unknown parameters) because
-        # test is likely flakey
+        # Check MAP gets approximately correct result
         npt.assert_allclose(self.map_full["c"], map_approx["c"], atol=0.01, rtol=0.1)
         npt.assert_allclose(self.map_full["sigma"], map_approx["sigma"], atol=0.01, rtol=0.1)
 
-        # check that predict (and conditional) work, include noise, with diagonal non-full pred var
+        # Check that predict (and conditional) work, include noise, with diagonal non-full pred var.
         with model:
             pred_mu_approx, pred_var_approx = gp.predict(
                 self.x_new[:, None], point=map_approx, pred_noise=True, diag=True
             )
         npt.assert_allclose(self.pred_mu, pred_mu_approx, atol=0.0, rtol=0.1)
         npt.assert_allclose(self.pred_var, pred_var_approx, atol=0.0, rtol=0.1)
 
-        # check that predict (and conditional) work, no noise, full pred covariance
+        # Check that predict (and conditional) work, no noise, full pred covariance.
         with model:
             pred_mu_approx, pred_var_approx = gp.predict(
                 self.x_new[:, None], point=map_approx, pred_noise=True, diag=True
             )

From 426b5c6cd0d004e499105326c49ec5b13265b391 Mon Sep 17 00:00:00 2001
From: Bill Engels
Date: Sun, 28 Aug 2022 16:48:16 -0700
Subject: [PATCH 6/6] fix comments, make pass jitter through correctly, get
 rid of is_observed arg

---
 pymc/gp/gp.py         | 10 ++++------
 pymc/tests/test_gp.py |  2 +-
 2 files changed, 5 insertions(+), 7 deletions(-)

diff --git a/pymc/gp/gp.py b/pymc/gp/gp.py
index a5c2f0dd1d..b4ca06494d 100644
--- a/pymc/gp/gp.py
+++ b/pymc/gp/gp.py
@@ -691,7 +691,7 @@ def __add__(self, other):
         new_gp.approx = self.approx
         return new_gp
 
-    def _build_marginal_likelihood_logp(self, y, X, Xu, sigma, jitter):
+    def _build_marginal_likelihood_loglik(self, y, X, Xu, sigma, jitter):
         sigma2 = at.square(sigma)
         Kuu = self.cov_func(Xu)
         Kuf = self.cov_func(Xu, X)
@@ -720,9 +720,7 @@ def _build_marginal_likelihood_logp(self, y, X, Xu, sigma, jitter):
         quadratic = 0.5 * (at.dot(r, r_l) - at.dot(c, c))
         return -1.0 * (constant + logdet + quadratic + trace)
 
-    def marginal_likelihood(
-        self, name, X, Xu, y, noise=None, is_observed=True, jitter=JITTER_DEFAULT, **kwargs
-    ):
+    def marginal_likelihood(self, name, X, Xu, y, noise=None, jitter=JITTER_DEFAULT, **kwargs):
         R"""
         Returns the approximate marginal likelihood distribution, given the input
         locations `X`, inducing point locations `Xu`, data `y`, and white noise
@@ -759,8 +757,8 @@
         else:
             self.sigma = noise
 
-        approx_logp = self._build_marginal_likelihood_logp(y, X, Xu, noise, JITTER_DEFAULT)
-        pm.Potential(f"marginalapprox_logp_{name}", approx_logp)
+        approx_loglik = self._build_marginal_likelihood_loglik(y, X, Xu, noise, jitter)
+        pm.Potential(f"marginalapprox_loglik_{name}", approx_loglik, **kwargs)
 
     def _build_conditional(
         self, Xnew, pred_noise, diag, X, Xu, y, sigma, cov_total, mean_total, jitter
diff --git a/pymc/tests/test_gp.py b/pymc/tests/test_gp.py
index 4b119e868f..999578b17c 100644
--- a/pymc/tests/test_gp.py
+++ b/pymc/tests/test_gp.py
@@ -852,7 +852,7 @@ class TestMarginalVsMarginalApprox:
     def setup_method(self):
         self.sigma = 0.1
         self.x = np.linspace(-5, 5, 30)
-        self.y = 0.25 * self.x + self.sigma * np.random.randn(len(self.x))
+        self.y = np.random.normal(0.25 * self.x, self.sigma)
         with pm.Model() as model:
             cov_func = pm.gp.cov.Linear(1, c=0.0)
             c = pm.Normal("c", mu=20.0, sigma=100.0)  # far from true value
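
A usage sketch for reference, not part of the patch series: after these changes, `MarginalApprox.marginal_likelihood` registers the approximate log-likelihood via `pm.Potential` and no longer returns a `DensityDist`. The toy data, kernel choice, and inducing-point grid below are invented for illustration:

    import numpy as np
    import pymc as pm

    # Toy 1-D regression problem (illustrative only)
    X = np.linspace(0, 10, 100)[:, None]
    y = np.sin(X.ravel()) + 0.1 * np.random.randn(100)
    Xu = np.linspace(0, 10, 20)[:, None]  # inducing point locations

    with pm.Model() as model:
        ell = pm.Gamma("ell", alpha=2.0, beta=1.0)
        cov_func = pm.gp.cov.ExpQuad(1, ls=ell)
        gp = pm.gp.MarginalApprox(approx="VFE", cov_func=cov_func)
        sigma = pm.HalfNormal("sigma", sigma=1.0)
        # As of PATCH 6 this attaches a Potential named
        # "marginalapprox_loglik_lik" to the model; there is no return value.
        gp.marginal_likelihood("lik", X=X, Xu=Xu, y=y, noise=sigma)
        mp = pm.find_MAP()

Because the likelihood now enters through a Potential, `find_MAP` and MCMC see it in the joint log-probability, which is exactly what the `TestMarginalVsMarginalApprox` tests above exercise.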
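
The mechanism the first commit relies on, reduced to a minimal hypothetical example (not from the diff): `pm.Potential` adds an arbitrary tensor to the model's joint log-probability, so a custom likelihood term can be attached without creating a random variable:

    import pymc as pm

    with pm.Model() as toy:
        x = pm.Flat("x")  # improper flat prior; contributes 0 to the logp
        # Adding this Potential gives the model the unnormalized logp of a
        # standard Normal in x, with no observed variable involved.
        pm.Potential("x_loglik", -0.5 * x**2)

One consequence, and part of why the `is_observed` argument could be dropped, is that Potential terms participate in logp-based operations like MAP and MCMC but are ignored by forward sampling (prior and posterior predictive).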