
Commit 1e323be

MarginalSparse rename to MarginalApprox (#5242)
* rename MarginalSparse to MarginalApprox; add subclass MarginalSparse with warning; change default approx from FITC to VFE; fix bug in whether cov_func and mean_func are forced kwargs in MarginalApprox
* rename MarginalSparse to MarginalApprox
* add citation
* add comma
* fix so it actually runs
* update release notes
* trigger tests
* fix init
* fix precommit
1 parent: beeb40f
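In practice, code that still constructs the GP through the old name keeps working but now emits a FutureWarning, while new code should use pm.gp.MarginalApprox, whose first argument is the approximation (now defaulting to "VFE") and whose mean_func and cov_func are keyword-only. A minimal sketch of the two call styles, assuming a pymc build that includes this commit:

import pymc as pm

cov_func = pm.gp.cov.ExpQuad(1, ls=0.1)

# Old name: still constructs the same object, but warns that it has been
# renamed to MarginalApprox.
gp_old = pm.gp.MarginalSparse(cov_func=cov_func, approx="FITC")

# New name: approx is the first positional argument and defaults to "VFE";
# mean_func and cov_func must now be passed as keywords.
gp_new = pm.gp.MarginalApprox("FITC", cov_func=cov_func)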

File tree: 4 files changed, +42 −19 lines

- RELEASE-NOTES.md
- pymc/gp/__init__.py
- pymc/gp/gp.py
- pymc/tests/test_gp.py


Diff for: RELEASE-NOTES.md (+1)

@@ -53,6 +53,7 @@ All of the above apply to:
 - The `is_observed` arguement for `gp.Marginal*` implementations has been deprecated.
 - In the gp.utils file, the `kmeans_inducing_points` function now passes through `kmeans_kwargs` to scipy's k-means function.
 - The function `replace_with_values` function has been added to `gp.utils`.
+- `MarginalSparse` has been renamed `MarginalApprox`.
 - ...

 ### Expected breaks

Diff for: pymc/gp/__init__.py (+9 −1)

@@ -13,4 +13,12 @@
 # limitations under the License.

 from pymc.gp import cov, mean, util
-from pymc.gp.gp import TP, Latent, LatentKron, Marginal, MarginalKron, MarginalSparse
+from pymc.gp.gp import (
+    TP,
+    Latent,
+    LatentKron,
+    Marginal,
+    MarginalApprox,
+    MarginalKron,
+    MarginalSparse,
+)

Diff for: pymc/gp/gp.py (+19 −5)

@@ -35,7 +35,7 @@
 )
 from pymc.math import cartesian, kron_diag, kron_dot, kron_solve_lower, kron_solve_upper

-__all__ = ["Latent", "Marginal", "TP", "MarginalSparse", "LatentKron", "MarginalKron"]
+__all__ = ["Latent", "Marginal", "TP", "MarginalApprox", "LatentKron", "MarginalKron"]


 class Base:

@@ -597,11 +597,11 @@ def _predict_at(self, Xnew, diag=False, pred_noise=False, given=None, jitter=0.0


 @conditioned_vars(["X", "Xu", "y", "sigma"])
-class MarginalSparse(Marginal):
+class MarginalApprox(Marginal):
     R"""
     Approximate marginal Gaussian process.

-    The `gp.MarginalSparse` class is an implementation of the sum of a GP
+    The `gp.MarginalApprox` class is an implementation of the sum of a GP
     prior and additive noise. It has `marginal_likelihood`, `conditional`
     and `predict` methods. This GP implementation can be used to
     implement regression on data that is normally distributed. The

@@ -619,6 +619,7 @@ class MarginalSparse(Marginal):
         The mean function. Defaults to zero.
     approx: string
         The approximation to use. Must be one of `VFE`, `FITC` or `DTC`.
+        Default is VFE.

     Examples
     --------

@@ -635,7 +636,7 @@ class MarginalSparse(Marginal):
         cov_func = pm.gp.cov.ExpQuad(1, ls=0.1)

         # Specify the GP. The default mean function is `Zero`.
-        gp = pm.gp.MarginalSparse(cov_func=cov_func, approx="FITC")
+        gp = pm.gp.MarginalApprox(cov_func=cov_func, approx="FITC")

         # Place a GP prior over the function f.
         sigma = pm.HalfCauchy("sigma", beta=3)

@@ -657,11 +658,14 @@ class MarginalSparse(Marginal):

     - Titsias, M. (2009). Variational Learning of Inducing Variables in
       Sparse Gaussian Processes.
+
+    - Bauer, M., van der Wilk, M., and Rasmussen, C. E. (2016). Understanding
+      Probabilistic Sparse Gaussian Process Approximations.
     """

     _available_approx = ("FITC", "VFE", "DTC")

-    def __init__(self, mean_func=Zero(), cov_func=Constant(0.0), approx="FITC"):
+    def __init__(self, approx="VFE", *, mean_func=Zero(), cov_func=Constant(0.0)):
         if approx not in self._available_approx:
             raise NotImplementedError(approx)
         self.approx = approx

@@ -866,6 +870,16 @@ def conditional(self, name, Xnew, pred_noise=False, given=None, jitter=0.0, **kw
         return pm.MvNormal(name, mu=mu, cov=cov, **kwargs)


+@conditioned_vars(["X", "Xu", "y", "sigma"])
+class MarginalSparse(MarginalApprox):
+    def __init__(self, approx="VFE", *, mean_func=Zero(), cov_func=Constant(0.0)):
+        warnings.warn(
+            "gp.MarginalSparse has been renamed to gp.MarginalApprox.",
+            FutureWarning,
+        )
+        super().__init__(mean_func=mean_func, cov_func=cov_func, approx=approx)
+
+
 @conditioned_vars(["Xs", "f"])
 class LatentKron(Base):
     R"""

Diff for: pymc/tests/test_gp.py (+13 −13)

@@ -840,9 +840,9 @@ def testLatent2(self):
         npt.assert_allclose(latent_logp, self.logp, atol=5)


-class TestMarginalVsMarginalSparse:
+class TestMarginalVsMarginalApprox:
     R"""
-    Compare logp of models Marginal and MarginalSparse.
+    Compare logp of models Marginal and MarginalApprox.
     Should be nearly equal when inducing points are same as inputs.
     """


@@ -871,7 +871,7 @@ def testApproximations(self, approx):
         with pm.Model() as model:
             cov_func = pm.gp.cov.ExpQuad(3, [0.1, 0.2, 0.3])
             mean_func = pm.gp.mean.Constant(0.5)
-            gp = pm.gp.MarginalSparse(mean_func=mean_func, cov_func=cov_func, approx=approx)
+            gp = pm.gp.MarginalApprox(mean_func=mean_func, cov_func=cov_func, approx=approx)
             f = gp.marginal_likelihood("f", self.X, self.X, self.y, self.sigma)
             p = gp.conditional("p", self.Xnew)
         approx_logp = model.logp({"f": self.y, "p": self.pnew})

@@ -882,7 +882,7 @@ def testPredictVar(self, approx):
         with pm.Model() as model:
             cov_func = pm.gp.cov.ExpQuad(3, [0.1, 0.2, 0.3])
             mean_func = pm.gp.mean.Constant(0.5)
-            gp = pm.gp.MarginalSparse(mean_func=mean_func, cov_func=cov_func, approx=approx)
+            gp = pm.gp.MarginalApprox(mean_func=mean_func, cov_func=cov_func, approx=approx)
             f = gp.marginal_likelihood("f", self.X, self.X, self.y, self.sigma)
         mu1, var1 = self.gp.predict(self.Xnew, diag=True)
         mu2, var2 = gp.predict(self.Xnew, diag=True)

@@ -893,7 +893,7 @@ def testPredictCov(self):
         with pm.Model() as model:
             cov_func = pm.gp.cov.ExpQuad(3, [0.1, 0.2, 0.3])
             mean_func = pm.gp.mean.Constant(0.5)
-            gp = pm.gp.MarginalSparse(mean_func=mean_func, cov_func=cov_func, approx="DTC")
+            gp = pm.gp.MarginalApprox(mean_func=mean_func, cov_func=cov_func, approx="DTC")
             f = gp.marginal_likelihood("f", self.X, self.X, self.y, self.sigma)
         mu1, cov1 = self.gp.predict(self.Xnew, pred_noise=True)
         mu2, cov2 = gp.predict(self.Xnew, pred_noise=True)

@@ -945,17 +945,17 @@ def testAdditiveMarginal(self):
         npt.assert_allclose(logp1, logp2, atol=0, rtol=1e-2)

     @pytest.mark.parametrize("approx", ["FITC", "VFE", "DTC"])
-    def testAdditiveMarginalSparse(self, approx):
+    def testAdditiveMarginalApprox(self, approx):
         Xu = np.random.randn(10, 3)
         sigma = 0.1
         with pm.Model() as model1:
-            gp1 = pm.gp.MarginalSparse(
+            gp1 = pm.gp.MarginalApprox(
                 mean_func=self.means[0], cov_func=self.covs[0], approx=approx
             )
-            gp2 = pm.gp.MarginalSparse(
+            gp2 = pm.gp.MarginalApprox(
                 mean_func=self.means[1], cov_func=self.covs[1], approx=approx
             )
-            gp3 = pm.gp.MarginalSparse(
+            gp3 = pm.gp.MarginalApprox(
                 mean_func=self.means[2], cov_func=self.covs[2], approx=approx
             )


@@ -964,7 +964,7 @@ def testAdditiveMarginalSparse(self, approx):
         model1_logp = model1.logp({"fsum": self.y})

         with pm.Model() as model2:
-            gptot = pm.gp.MarginalSparse(
+            gptot = pm.gp.MarginalApprox(
                 mean_func=reduce(add, self.means), cov_func=reduce(add, self.covs), approx=approx
             )
             fsum = gptot.marginal_likelihood("f", self.X, Xu, self.y, noise=sigma)

@@ -1017,15 +1017,15 @@ def testAdditiveSparseRaises(self):
         # cant add different approximations
         with pm.Model() as model:
             cov_func = pm.gp.cov.ExpQuad(3, [0.1, 0.2, 0.3])
-            gp1 = pm.gp.MarginalSparse(cov_func=cov_func, approx="DTC")
-            gp2 = pm.gp.MarginalSparse(cov_func=cov_func, approx="FITC")
+            gp1 = pm.gp.MarginalApprox(cov_func=cov_func, approx="DTC")
+            gp2 = pm.gp.MarginalApprox(cov_func=cov_func, approx="FITC")
             with pytest.raises(Exception) as e_info:
                 gp1 + gp2

     def testAdditiveTypeRaises1(self):
         with pm.Model() as model:
             cov_func = pm.gp.cov.ExpQuad(3, [0.1, 0.2, 0.3])
-            gp1 = pm.gp.MarginalSparse(cov_func=cov_func, approx="DTC")
+            gp1 = pm.gp.MarginalApprox(cov_func=cov_func, approx="DTC")
             gp2 = pm.gp.Marginal(cov_func=cov_func)
             with pytest.raises(Exception) as e_info:
                 gp1 + gp2
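The updated tests exercise MarginalApprox directly but do not appear to add a check that the MarginalSparse shim actually warns. A hypothetical test in the style of this file could cover it (the test name below is made up, not part of this commit):

import pytest
import pymc as pm


def test_marginal_sparse_rename_warns():
    cov_func = pm.gp.cov.ExpQuad(3, [0.1, 0.2, 0.3])
    # Constructing through the old name should emit the FutureWarning added in gp.py.
    with pytest.warns(FutureWarning):
        gp = pm.gp.MarginalSparse(cov_func=cov_func, approx="DTC")
    # The shim subclasses MarginalApprox, so the resulting object is one.
    assert isinstance(gp, pm.gp.MarginalApprox)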
