diff --git a/gtsam/hybrid/tests/TinyHybridExample.h b/gtsam/hybrid/tests/TinyHybridExample.h index a33a45179..899a353b6 100644 --- a/gtsam/hybrid/tests/TinyHybridExample.h +++ b/gtsam/hybrid/tests/TinyHybridExample.h @@ -39,7 +39,7 @@ HybridBayesNet createHybridBayesNet(int num_measurements = 1) { // Create hybrid Bayes net. HybridBayesNet bayesNet; - // Create Gaussian mixture Z(0) = X(0) + noise for each measurement. + // Create Gaussian mixture z_i = x0 + noise for each measurement. for (int i = 0; i < num_measurements; i++) { const auto conditional0 = boost::make_shared<GaussianConditional>( GaussianConditional::FromMeanAndStddev(Z(i), I_1x1, X(0), Z_1x1, 0.5)); @@ -51,7 +51,7 @@ HybridBayesNet createHybridBayesNet(int num_measurements = 1) { // Create prior on X(0). const auto prior_on_x0 = - GaussianConditional::FromMeanAndStddev(X(0), Vector1(5.0), 5.0); + GaussianConditional::FromMeanAndStddev(X(0), Vector1(5.0), 0.5); bayesNet.emplaceGaussian(prior_on_x0); // copy :-( // Add prior on mode. @@ -61,12 +61,12 @@ HybridBayesNet createHybridBayesNet(int num_measurements = 1) { } HybridGaussianFactorGraph convertBayesNet(const HybridBayesNet& bayesNet, - const HybridValues& sample) { + const HybridValues& values) { HybridGaussianFactorGraph fg; int num_measurements = bayesNet.size() - 2; for (int i = 0; i < num_measurements; i++) { auto conditional = bayesNet.atMixture(i); - auto factor = conditional->likelihood(sample.continuousSubset({Z(i)})); + auto factor = conditional->likelihood(values.continuousSubset({Z(i)})); fg.push_back(factor); } fg.push_back(bayesNet.atGaussian(num_measurements)); @@ -75,10 +75,19 @@ HybridGaussianFactorGraph convertBayesNet(const HybridBayesNet& bayesNet, } HybridGaussianFactorGraph createHybridGaussianFactorGraph( - int num_measurements = 1) { + int num_measurements = 1, bool deterministic = false) { auto bayesNet = createHybridBayesNet(num_measurements); - auto sample = bayesNet.sample(); - return convertBayesNet(bayesNet, sample); + if 
(deterministic) { + // Create a deterministic set of measurements: + HybridValues values{{}, {{M(0), 0}}}; + for (int i = 0; i < num_measurements; i++) { + values.insert(Z(i), Vector1(4.0 + 1.0 * i)); + } + return convertBayesNet(bayesNet, values); + } else { + // Create a random set of measurements: + return convertBayesNet(bayesNet, bayesNet.sample()); + } } } // namespace tiny diff --git a/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp b/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp index a104dac4b..8f50895fe 100644 --- a/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp @@ -617,7 +617,10 @@ TEST(HybridGaussianFactorGraph, ErrorAndProbPrimeTree) { /* ****************************************************************************/ // SumFrontals just assembles Gaussian factor graphs for each assignment. TEST(HybridGaussianFactorGraph, SumFrontals) { - auto fg = tiny::createHybridGaussianFactorGraph(); + const int num_measurements = 1; + const bool deterministic = true; + auto fg = + tiny::createHybridGaussianFactorGraph(num_measurements, deterministic); EXPECT_LONGS_EQUAL(3, fg.size()); auto sum = fg.SumFrontals(); @@ -635,15 +638,39 @@ TEST(HybridGaussianFactorGraph, SumFrontals) { // Expected decision tree with two factor graphs: // f(x0;mode=0)P(x0) and f(x0;mode=1)P(x0) - GaussianMixture::Sum expected{ + GaussianMixture::Sum expectedSum{ M(0), {GaussianFactorGraph(std::vector{mixture->factor(d0), prior}), mixture->constant(d0)}, {GaussianFactorGraph(std::vector{mixture->factor(d1), prior}), mixture->constant(d1)}}; - EXPECT(assert_equal(expected(d0), sum(d0), 1e-5)); - EXPECT(assert_equal(expected(d1), sum(d1), 1e-5)); + EXPECT(assert_equal(expectedSum(d0), sum(d0), 1e-5)); + EXPECT(assert_equal(expectedSum(d1), sum(d1), 1e-5)); + + // Create expected Bayes Net: + HybridBayesNet bayesNet; + + // Create Gaussian mixture on X(0). 
+ using tiny::mode; + const auto conditional0 = boost::make_shared<GaussianConditional>( + X(0), Vector1(12.7279), + I_1x1 * 2.82843); // regression, but mean checked to be 4.5 + const auto conditional1 = boost::make_shared<GaussianConditional>( + X(0), Vector1(10.0831), + I_1x1 * 2.02759); // regression, but mean 4.97297 is close to prior. + GaussianMixture gm({X(0)}, {}, {mode}, {conditional0, conditional1}); + bayesNet.emplaceMixture(gm); // copy :-( + + // Add prior on mode. + bayesNet.emplaceDiscrete(mode, "4/6"); + + // Test elimination + Ordering ordering; + ordering.push_back(X(0)); + ordering.push_back(M(0)); + const auto posterior = fg.eliminateSequential(ordering); + EXPECT(assert_equal(bayesNet, *posterior, 1e-4)); } /* ************************************************************************* */