Skip to content

MarginalSparse rename to MarginalApprox #5242

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 9 commits into from
Dec 14, 2021
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions RELEASE-NOTES.md
Original file line number Diff line number Diff line change
Expand Up @@ -53,6 +53,7 @@ All of the above apply to:
- The `is_observed` argument for `gp.Marginal*` implementations has been deprecated.
- In the gp.utils file, the `kmeans_inducing_points` function now passes through `kmeans_kwargs` to scipy's k-means function.
- The function `replace_with_values` has been added to `gp.utils`.
- `MarginalSparse` has been renamed `MarginalApprox`.
- ...

### Expected breaks
Expand Down
10 changes: 9 additions & 1 deletion pymc/gp/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,4 +13,12 @@
# limitations under the License.

from pymc.gp import cov, mean, util
from pymc.gp.gp import TP, Latent, LatentKron, Marginal, MarginalKron, MarginalSparse
from pymc.gp.gp import (
TP,
Latent,
LatentKron,
Marginal,
MarginalApprox,
MarginalKron,
MarginalSparse,
)
24 changes: 19 additions & 5 deletions pymc/gp/gp.py
Original file line number Diff line number Diff line change
Expand Up @@ -35,7 +35,7 @@
)
from pymc.math import cartesian, kron_diag, kron_dot, kron_solve_lower, kron_solve_upper

__all__ = ["Latent", "Marginal", "TP", "MarginalSparse", "LatentKron", "MarginalKron"]
__all__ = ["Latent", "Marginal", "TP", "MarginalApprox", "LatentKron", "MarginalKron"]


class Base:
Expand Down Expand Up @@ -597,11 +597,11 @@ def _predict_at(self, Xnew, diag=False, pred_noise=False, given=None, jitter=0.0


@conditioned_vars(["X", "Xu", "y", "sigma"])
class MarginalSparse(Marginal):
class MarginalApprox(Marginal):
R"""
Approximate marginal Gaussian process.

The `gp.MarginalSparse` class is an implementation of the sum of a GP
The `gp.MarginalApprox` class is an implementation of the sum of a GP
prior and additive noise. It has `marginal_likelihood`, `conditional`
and `predict` methods. This GP implementation can be used to
implement regression on data that is normally distributed. The
Expand All @@ -619,6 +619,7 @@ class MarginalSparse(Marginal):
The mean function. Defaults to zero.
approx: string
The approximation to use. Must be one of `VFE`, `FITC` or `DTC`.
Default is VFE.

Examples
--------
Expand All @@ -635,7 +636,7 @@ class MarginalSparse(Marginal):
cov_func = pm.gp.cov.ExpQuad(1, ls=0.1)

# Specify the GP. The default mean function is `Zero`.
gp = pm.gp.MarginalSparse(cov_func=cov_func, approx="FITC")
gp = pm.gp.MarginalApprox(cov_func=cov_func, approx="FITC")

# Place a GP prior over the function f.
sigma = pm.HalfCauchy("sigma", beta=3)
Expand All @@ -657,11 +658,14 @@ class MarginalSparse(Marginal):

- Titsias, M. (2009). Variational Learning of Inducing Variables in
Sparse Gaussian Processes.

- Bauer, M., van der Wilk, M., and Rasmussen, C. E. (2016). Understanding
Probabilistic Sparse Gaussian Process Approximations.
"""

_available_approx = ("FITC", "VFE", "DTC")

def __init__(self, mean_func=Zero(), cov_func=Constant(0.0), approx="FITC"):
def __init__(self, approx="VFE", *, mean_func=Zero(), cov_func=Constant(0.0)):
if approx not in self._available_approx:
raise NotImplementedError(approx)
self.approx = approx
Expand Down Expand Up @@ -866,6 +870,16 @@ def conditional(self, name, Xnew, pred_noise=False, given=None, jitter=0.0, **kw
return pm.MvNormal(name, mu=mu, cov=cov, **kwargs)


@conditioned_vars(["X", "Xu", "y", "sigma"])
class MarginalSparse(MarginalApprox):
    """Deprecated alias for :class:`MarginalApprox`.

    Kept for backward compatibility after the rename; instantiating it
    emits a ``FutureWarning`` and otherwise behaves exactly like
    ``MarginalApprox``.
    """

    def __init__(self, approx="VFE", *, mean_func=Zero(), cov_func=Constant(0.0)):
        # Warn on every construction so downstream code migrates to the new name.
        warnings.warn(
            "gp.MarginalSparse has been renamed to gp.MarginalApprox.",
            FutureWarning,
        )
        # Delegate entirely to MarginalApprox; no behavior of its own.
        super().__init__(mean_func=mean_func, cov_func=cov_func, approx=approx)


@conditioned_vars(["Xs", "f"])
class LatentKron(Base):
R"""
Expand Down
26 changes: 13 additions & 13 deletions pymc/tests/test_gp.py
Original file line number Diff line number Diff line change
Expand Up @@ -840,9 +840,9 @@ def testLatent2(self):
npt.assert_allclose(latent_logp, self.logp, atol=5)


class TestMarginalVsMarginalSparse:
class TestMarginalVsMarginalApprox:
R"""
Compare logp of models Marginal and MarginalSparse.
Compare logp of models Marginal and MarginalApprox.
Should be nearly equal when inducing points are same as inputs.
"""

Expand Down Expand Up @@ -871,7 +871,7 @@ def testApproximations(self, approx):
with pm.Model() as model:
cov_func = pm.gp.cov.ExpQuad(3, [0.1, 0.2, 0.3])
mean_func = pm.gp.mean.Constant(0.5)
gp = pm.gp.MarginalSparse(mean_func=mean_func, cov_func=cov_func, approx=approx)
gp = pm.gp.MarginalApprox(mean_func=mean_func, cov_func=cov_func, approx=approx)
f = gp.marginal_likelihood("f", self.X, self.X, self.y, self.sigma)
p = gp.conditional("p", self.Xnew)
approx_logp = model.logp({"f": self.y, "p": self.pnew})
Expand All @@ -882,7 +882,7 @@ def testPredictVar(self, approx):
with pm.Model() as model:
cov_func = pm.gp.cov.ExpQuad(3, [0.1, 0.2, 0.3])
mean_func = pm.gp.mean.Constant(0.5)
gp = pm.gp.MarginalSparse(mean_func=mean_func, cov_func=cov_func, approx=approx)
gp = pm.gp.MarginalApprox(mean_func=mean_func, cov_func=cov_func, approx=approx)
f = gp.marginal_likelihood("f", self.X, self.X, self.y, self.sigma)
mu1, var1 = self.gp.predict(self.Xnew, diag=True)
mu2, var2 = gp.predict(self.Xnew, diag=True)
Expand All @@ -893,7 +893,7 @@ def testPredictCov(self):
with pm.Model() as model:
cov_func = pm.gp.cov.ExpQuad(3, [0.1, 0.2, 0.3])
mean_func = pm.gp.mean.Constant(0.5)
gp = pm.gp.MarginalSparse(mean_func=mean_func, cov_func=cov_func, approx="DTC")
gp = pm.gp.MarginalApprox(mean_func=mean_func, cov_func=cov_func, approx="DTC")
f = gp.marginal_likelihood("f", self.X, self.X, self.y, self.sigma)
mu1, cov1 = self.gp.predict(self.Xnew, pred_noise=True)
mu2, cov2 = gp.predict(self.Xnew, pred_noise=True)
Expand Down Expand Up @@ -945,17 +945,17 @@ def testAdditiveMarginal(self):
npt.assert_allclose(logp1, logp2, atol=0, rtol=1e-2)

@pytest.mark.parametrize("approx", ["FITC", "VFE", "DTC"])
def testAdditiveMarginalSparse(self, approx):
def testAdditiveMarginalApprox(self, approx):
Xu = np.random.randn(10, 3)
sigma = 0.1
with pm.Model() as model1:
gp1 = pm.gp.MarginalSparse(
gp1 = pm.gp.MarginalApprox(
mean_func=self.means[0], cov_func=self.covs[0], approx=approx
)
gp2 = pm.gp.MarginalSparse(
gp2 = pm.gp.MarginalApprox(
mean_func=self.means[1], cov_func=self.covs[1], approx=approx
)
gp3 = pm.gp.MarginalSparse(
gp3 = pm.gp.MarginalApprox(
mean_func=self.means[2], cov_func=self.covs[2], approx=approx
)

Expand All @@ -964,7 +964,7 @@ def testAdditiveMarginalSparse(self, approx):
model1_logp = model1.logp({"fsum": self.y})

with pm.Model() as model2:
gptot = pm.gp.MarginalSparse(
gptot = pm.gp.MarginalApprox(
mean_func=reduce(add, self.means), cov_func=reduce(add, self.covs), approx=approx
)
fsum = gptot.marginal_likelihood("f", self.X, Xu, self.y, noise=sigma)
Expand Down Expand Up @@ -1017,15 +1017,15 @@ def testAdditiveSparseRaises(self):
# cant add different approximations
with pm.Model() as model:
cov_func = pm.gp.cov.ExpQuad(3, [0.1, 0.2, 0.3])
gp1 = pm.gp.MarginalSparse(cov_func=cov_func, approx="DTC")
gp2 = pm.gp.MarginalSparse(cov_func=cov_func, approx="FITC")
gp1 = pm.gp.MarginalApprox(cov_func=cov_func, approx="DTC")
gp2 = pm.gp.MarginalApprox(cov_func=cov_func, approx="FITC")
with pytest.raises(Exception) as e_info:
gp1 + gp2

def testAdditiveTypeRaises1(self):
    # Adding GP implementations of different types (an approximate
    # MarginalApprox and an exact Marginal) is invalid and must raise.
    with pm.Model() as model:
        cov_func = pm.gp.cov.ExpQuad(3, [0.1, 0.2, 0.3])
        gp1 = pm.gp.MarginalApprox(cov_func=cov_func, approx="DTC")
        gp2 = pm.gp.Marginal(cov_func=cov_func)
        with pytest.raises(Exception) as e_info:
            gp1 + gp2
gp1 + gp2
Expand Down