@@ -229,6 +229,82 @@ TEST(MixtureFactor, DifferentCovariances) {
   EXPECT(assert_equal(expected_values, actual_values));
 }
 
+/* ************************************************************************* */
+// Test components with differing means and covariances
+TEST(MixtureFactor, DifferentMeansAndCovariances) {
+  DiscreteKey m1(M(1), 2);
+
+  Values values;
+  double x1 = 0.0, x2 = 7.0;
+  values.insert(X(1), x1);
+
+  double between = 0.0;
+
+  auto model0 = noiseModel::Isotropic::Sigma(1, 1e2);
+  auto model1 = noiseModel::Isotropic::Sigma(1, 1e-2);
+  auto prior_noise = noiseModel::Isotropic::Sigma(1, 1e-3);
+
+  auto f0 =
+      std::make_shared<BetweenFactor<double>>(X(1), X(2), between, model0);
+  auto f1 =
+      std::make_shared<BetweenFactor<double>>(X(1), X(2), between, model1);
+  std::vector<NonlinearFactor::shared_ptr> factors{f0, f1};
+
+  // Create via toFactorGraph
+  using symbol_shorthand::Z;
+  Matrix H0_1, H0_2, H1_1, H1_2;
+  Vector d0 = f0->evaluateError(x1, x2, &H0_1, &H0_2);
+  std::vector<std::pair<Key, Matrix>> terms0 = {{Z(1), gtsam::I_1x1 /*Rx*/},
+                                                //
+                                                {X(1), H0_1 /*Sp1*/},
+                                                {X(2), H0_2 /*Tp2*/}};
+
+  Vector d1 = f1->evaluateError(x1, x2, &H1_1, &H1_2);
+  std::vector<std::pair<Key, Matrix>> terms1 = {{Z(1), gtsam::I_1x1 /*Rx*/},
+                                                //
+                                                {X(1), H1_1 /*Sp1*/},
+                                                {X(2), H1_2 /*Tp2*/}};
+  auto gm = new gtsam::GaussianMixture(
+      {Z(1)}, {X(1), X(2)}, {m1},
+      {std::make_shared<GaussianConditional>(terms0, 1, -d0, model0),
+       std::make_shared<GaussianConditional>(terms1, 1, -d1, model1)});
+  gtsam::HybridBayesNet bn;
+  bn.emplace_back(gm);
+
+  gtsam::VectorValues measurements;
+  measurements.insert(Z(1), gtsam::Z_1x1);
+  // Create FG with single GaussianMixtureFactor
+  HybridGaussianFactorGraph mixture_fg = bn.toFactorGraph(measurements);
+
+  // Linearized prior factor on X1
+  auto prior = PriorFactor<double>(X(1), x1, prior_noise).linearize(values);
+  mixture_fg.push_back(prior);
+
+  // bn.print("BayesNet:");
+  // mixture_fg.print("\n\n");
+
+  VectorValues vv{{X(1), x1 * I_1x1}, {X(2), x2 * I_1x1}};
+  // std::cout << "FG error for m1=0: "
+  //           << mixture_fg.error(HybridValues(vv, DiscreteValues{{m1.first, 0}}))
+  //           << std::endl;
+  // std::cout << "FG error for m1=1: "
+  //           << mixture_fg.error(HybridValues(vv, DiscreteValues{{m1.first, 1}}))
+  //           << std::endl;
+
+  auto hbn = mixture_fg.eliminateSequential();
+
+  HybridValues actual_values = hbn->optimize();
+
+  VectorValues cv;
+  cv.insert(X(1), Vector1(0.0));
+  cv.insert(X(2), Vector1(-7.0));
+  DiscreteValues dv;
+  dv.insert({M(1), 1});
+  HybridValues expected_values(cv, dv);
+
+  EXPECT(assert_equal(expected_values, actual_values));
+}
+
 /* ************************************************************************* */
 int main() {
   TestResult tr;