/* ----------------------------------------------------------------------------
 * GTSAM Copyright 2010, Georgia Tech Research Corporation,
 * Atlanta, Georgia 30332-0415
 * All Rights Reserved
 * Authors: Frank Dellaert, et al. (see THANKS for the full author list)
 * See LICENSE for the license information
 * -------------------------------------------------------------------------- */

/*
 * @file testHybridGaussianFactorGraph.cpp
 * @date Mar 11, 2022
 * @author Fan Jiang
 */

#include <CppUnitLite/Test.h>
#include <CppUnitLite/TestHarness.h>
#include <gtsam/base/TestableAssertions.h>
#include <gtsam/discrete/DecisionTreeFactor.h>
#include <gtsam/discrete/DiscreteKey.h>
#include <gtsam/discrete/DiscreteValues.h>
#include <gtsam/hybrid/GaussianMixture.h>
#include <gtsam/hybrid/GaussianMixtureFactor.h>
#include <gtsam/hybrid/HybridBayesNet.h>
#include <gtsam/hybrid/HybridBayesTree.h>
#include <gtsam/hybrid/HybridConditional.h>
#include <gtsam/hybrid/HybridDiscreteFactor.h>
#include <gtsam/hybrid/HybridFactor.h>
#include <gtsam/hybrid/HybridGaussianFactor.h>
#include <gtsam/hybrid/HybridGaussianFactorGraph.h>
#include <gtsam/hybrid/HybridGaussianISAM.h>
#include <gtsam/hybrid/HybridValues.h>
#include <gtsam/inference/BayesNet.h>
#include <gtsam/inference/DotWriter.h>
#include <gtsam/inference/Key.h>
#include <gtsam/inference/Ordering.h>
#include <gtsam/inference/Symbol.h>
#include <gtsam/linear/JacobianFactor.h>

#include <algorithm>
#include <cstddef>
#include <functional>
#include <iostream>
#include <iterator>
#include <vector>

#include "Switching.h"
#include "TinyHybridExample.h"

using namespace std;
using namespace gtsam;

using gtsam::symbol_shorthand::C;  // used by the optimize test below
using gtsam::symbol_shorthand::D;
using gtsam::symbol_shorthand::M;
using gtsam::symbol_shorthand::X;
using gtsam::symbol_shorthand::Y;

/* ************************************************************************* */
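// Test creation of a hybrid factor graph holding both a wrapped Gaussian
// factor and a Gaussian mixture conditional.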
TEST(HybridGaussianFactorGraph, Creation) {
  HybridConditional conditional;

  HybridGaussianFactorGraph hfg;

  hfg.add(HybridGaussianFactor(JacobianFactor(X(0), I_3x3, Z_3x1)));

  // Define a Gaussian mixture conditional P(x0|x1, m0) and add it to the
  // factor graph.
  GaussianMixture gm({X(0)}, {X(1)}, DiscreteKeys(DiscreteKey{M(0), 2}),
                     GaussianMixture::Conditionals(
                         M(0),
                         boost::make_shared<GaussianConditional>(
                             X(0), Z_3x1, I_3x3, X(1), I_3x3),
                         boost::make_shared<GaussianConditional>(
                             X(0), Vector3::Ones(), I_3x3, X(1), I_3x3)));
  hfg.add(gm);

  EXPECT_LONGS_EQUAL(2, hfg.size());
}

/* ************************************************************************* */
TEST(HybridGaussianFactorGraph, EliminateSequential) {
  // Test elimination of a single variable.
  HybridGaussianFactorGraph hfg;

  hfg.add(HybridGaussianFactor(JacobianFactor(0, I_3x3, Z_3x1)));

  auto result = hfg.eliminatePartialSequential(KeyVector{0});

  EXPECT_LONGS_EQUAL(result.first->size(), 1);
}

/* ************************************************************************* */
TEST(HybridGaussianFactorGraph, EliminateMultifrontal) {
  // Test multifrontal elimination
  HybridGaussianFactorGraph hfg;

  DiscreteKey m(M(1), 2);

  // Add priors on x0 and m1
  hfg.add(JacobianFactor(X(0), I_3x3, Z_3x1));
  hfg.add(HybridDiscreteFactor(DecisionTreeFactor(m, {2, 8})));

  Ordering ordering;
  ordering.push_back(X(0));
  auto result = hfg.eliminatePartialMultifrontal(ordering);

  EXPECT_LONGS_EQUAL(result.first->size(), 1);
  EXPECT_LONGS_EQUAL(result.second->size(), 1);
}

/* ************************************************************************* */
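// Test full sequential elimination of a graph whose two mixture components on
// x1 are equally weighted a priori (no discrete prior factor is added).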
TEST(HybridGaussianFactorGraph, eliminateFullSequentialEqualChance) {
  HybridGaussianFactorGraph hfg;

  DiscreteKey m1(M(1), 2);

  // Add prior on x0
  hfg.add(JacobianFactor(X(0), I_3x3, Z_3x1));
  // Add factor between x0 and x1
  hfg.add(JacobianFactor(X(0), I_3x3, X(1), -I_3x3, Z_3x1));

  // Add a Gaussian mixture factor ϕ(x1, m1)
  DecisionTree<Key, GaussianFactor::shared_ptr> dt(
      M(1), boost::make_shared<JacobianFactor>(X(1), I_3x3, Z_3x1),
      boost::make_shared<JacobianFactor>(X(1), I_3x3, Vector3::Ones()));

  hfg.add(GaussianMixtureFactor({X(1)}, {m1}, dt));

  auto result =
      hfg.eliminateSequential(Ordering::ColamdConstrainedLast(hfg, {M(1)}));

  auto dc = result->at(2)->asDiscrete();
  DiscreteValues dv;
  dv[M(1)] = 0;
  // Regression test
  EXPECT_DOUBLES_EQUAL(0.62245933120185448, dc->operator()(dv), 1e-3);
}

/* ************************************************************************* */
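// Test full sequential elimination of a simple hybrid graph that also
// includes discrete-only factors.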
TEST(HybridGaussianFactorGraph, eliminateFullSequentialSimple) {
  HybridGaussianFactorGraph hfg;

  DiscreteKey m1(M(1), 2);

  // Add prior on x0
  hfg.add(JacobianFactor(X(0), I_3x3, Z_3x1));
  // Add factor between x0 and x1
  hfg.add(JacobianFactor(X(0), I_3x3, X(1), -I_3x3, Z_3x1));

  std::vector<GaussianFactor::shared_ptr> factors = {
      boost::make_shared<JacobianFactor>(X(1), I_3x3, Z_3x1),
      boost::make_shared<JacobianFactor>(X(1), I_3x3, Vector3::Ones())};
  hfg.add(GaussianMixtureFactor({X(1)}, {m1}, factors));

  // Discrete probability table for m1
  hfg.add(DecisionTreeFactor(m1, {2, 8}));
  // Joint discrete probability table for m1, m2
  hfg.add(DecisionTreeFactor({{M(1), 2}, {M(2), 2}}, "1 2 3 4"));

  HybridBayesNet::shared_ptr result = hfg.eliminateSequential(
      Ordering::ColamdConstrainedLast(hfg, {M(1), M(2)}));

  // There are 4 variables (2 continuous + 2 discrete) in the Bayes net.
  EXPECT_LONGS_EQUAL(4, result->size());
}

/* ************************************************************************* */
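// Test full multifrontal elimination of a simple hybrid factor graph.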
TEST(HybridGaussianFactorGraph, eliminateFullMultifrontalSimple) {
  HybridGaussianFactorGraph hfg;

  DiscreteKey m1(M(1), 2);

  hfg.add(JacobianFactor(X(0), I_3x3, Z_3x1));
  hfg.add(JacobianFactor(X(0), I_3x3, X(1), -I_3x3, Z_3x1));

  hfg.add(GaussianMixtureFactor(
      {X(1)}, {{M(1), 2}},
      {boost::make_shared<JacobianFactor>(X(1), I_3x3, Z_3x1),
       boost::make_shared<JacobianFactor>(X(1), I_3x3, Vector3::Ones())}));

  hfg.add(DecisionTreeFactor(m1, {2, 8}));
  // TODO(Varun) Adding an extra discrete variable that is not connected to a
  // continuous variable throws a segfault:
  // hfg.add(DecisionTreeFactor({{M(1), 2}, {M(2), 2}}, "1 2 3 4"));

  HybridBayesTree::shared_ptr result =
      hfg.eliminateMultifrontal(hfg.getHybridOrdering());

  // The Bayes tree should have 3 cliques
  EXPECT_LONGS_EQUAL(3, result->size());
  // GTSAM_PRINT(*result);
  // GTSAM_PRINT(*result->marginalFactor(M(2)));
}

/* ************************************************************************* */
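// Test multifrontal elimination of a conditional linear-Gaussian (CLG) style
// graph: a continuous prior and between factor, a Gaussian mixture on x1, and
// a discrete prior on the mode.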
TEST(HybridGaussianFactorGraph, eliminateFullMultifrontalCLG) {
  HybridGaussianFactorGraph hfg;

  DiscreteKey m(M(1), 2);

  // Prior on x0
  hfg.add(JacobianFactor(X(0), I_3x3, Z_3x1));
  // Factor between x0 and x1
  hfg.add(JacobianFactor(X(0), I_3x3, X(1), -I_3x3, Z_3x1));

  // Decision tree with different modes on x1
  DecisionTree<Key, GaussianFactor::shared_ptr> dt(
      M(1), boost::make_shared<JacobianFactor>(X(1), I_3x3, Z_3x1),
      boost::make_shared<JacobianFactor>(X(1), I_3x3, Vector3::Ones()));

  // Hybrid factor P(x1|m1)
  hfg.add(GaussianMixtureFactor({X(1)}, {m}, dt));
  // Prior factor on m1
  hfg.add(HybridDiscreteFactor(DecisionTreeFactor(m, {2, 8})));

  // Get a constrained ordering keeping m1 last
  auto ordering_full = hfg.getHybridOrdering();

  // Returns a hybrid Bayes tree with distribution P(x0|x1)P(x1|m1)P(m1)
  HybridBayesTree::shared_ptr hbt = hfg.eliminateMultifrontal(ordering_full);

  EXPECT_LONGS_EQUAL(3, hbt->size());
}

/* ************************************************************************* */
/*
 * This test is about how to assemble the Bayes tree roots after we do partial
 * elimination.
 */
TEST(HybridGaussianFactorGraph, eliminateFullMultifrontalTwoClique) {
  HybridGaussianFactorGraph hfg;

  hfg.add(JacobianFactor(X(0), I_3x3, X(1), -I_3x3, Z_3x1));
  hfg.add(JacobianFactor(X(1), I_3x3, X(2), -I_3x3, Z_3x1));

  {
    hfg.add(GaussianMixtureFactor(
        {X(0)}, {{M(0), 2}},
        {boost::make_shared<JacobianFactor>(X(0), I_3x3, Z_3x1),
         boost::make_shared<JacobianFactor>(X(0), I_3x3, Vector3::Ones())}));

    DecisionTree<Key, GaussianFactor::shared_ptr> dt1(
        M(1), boost::make_shared<JacobianFactor>(X(2), I_3x3, Z_3x1),
        boost::make_shared<JacobianFactor>(X(2), I_3x3, Vector3::Ones()));

    hfg.add(GaussianMixtureFactor({X(2)}, {{M(1), 2}}, dt1));
  }

  hfg.add(HybridDiscreteFactor(
      DecisionTreeFactor({{M(1), 2}, {M(2), 2}}, "1 2 3 4")));

  hfg.add(JacobianFactor(X(3), I_3x3, X(4), -I_3x3, Z_3x1));
  hfg.add(JacobianFactor(X(4), I_3x3, X(5), -I_3x3, Z_3x1));

  {
    DecisionTree<Key, GaussianFactor::shared_ptr> dt(
        M(3), boost::make_shared<JacobianFactor>(X(3), I_3x3, Z_3x1),
        boost::make_shared<JacobianFactor>(X(3), I_3x3, Vector3::Ones()));

    hfg.add(GaussianMixtureFactor({X(3)}, {{M(3), 2}}, dt));

    DecisionTree<Key, GaussianFactor::shared_ptr> dt1(
        M(2), boost::make_shared<JacobianFactor>(X(5), I_3x3, Z_3x1),
        boost::make_shared<JacobianFactor>(X(5), I_3x3, Vector3::Ones()));

    hfg.add(GaussianMixtureFactor({X(5)}, {{M(2), 2}}, dt1));
  }

  auto ordering_full =
      Ordering::ColamdConstrainedLast(hfg, {M(0), M(1), M(2), M(3)});

  HybridBayesTree::shared_ptr hbt;
  HybridGaussianFactorGraph::shared_ptr remaining;
  std::tie(hbt, remaining) = hfg.eliminatePartialMultifrontal(ordering_full);

  // 9 cliques in the Bayes tree and 0 remaining variables to eliminate.
  EXPECT_LONGS_EQUAL(9, hbt->size());
  EXPECT_LONGS_EQUAL(0, remaining->size());

  /*
  (Fan) Explanation: the junction tree will need to re-eliminate to get the
  marginal on X(1), which is not possible because it involves eliminating
  discrete before continuous. The solution to this, however, is in Murphy02.
  TL;DR: this is (1) expensive and (2) inexact, but nevertheless doable, and
  I believe that we should do it.
  */
}

// Helper that dumps the factor graph, the Bayes tree, and the Bayes net from
// sequential elimination in Graphviz DOT format; only used for manual
// debugging.
void dotPrint(const HybridGaussianFactorGraph::shared_ptr &hfg,
              const HybridBayesTree::shared_ptr &hbt,
              const Ordering &ordering) {
  DotWriter dw;
  dw.positionHints['c'] = 2;
  dw.positionHints['x'] = 1;
  std::cout << hfg->dot(DefaultKeyFormatter, dw);
  std::cout << "\n";
  hbt->dot(std::cout);

  std::cout << "\n";
  std::cout << hfg->eliminateSequential(ordering)->dot(DefaultKeyFormatter, dw);
}

/* ************************************************************************* */
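// Test partial multifrontal elimination of a length-12 switching chain, using
// a binary-tree constrained ordering over both continuous and discrete keys.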
// TODO(fan): make a graph like Varun's paper one
TEST(HybridGaussianFactorGraph, Switching) {
  auto N = 12;
  auto hfg = makeSwitchingChain(N);

  // X(5) will be the center, X(1-4), X(6-9)
  // X(3), X(7)
  // X(2), X(8)
  // X(1), X(4), X(6), X(9)
  // M(5) will be the center, M(1-4), M(6-8)
  // M(3), M(7)
  // M(1), M(4), M(2), M(6), M(8)
  // auto ordering_full =
  //     Ordering(KeyVector{X(1), X(4), X(2), X(6), X(9), X(8), X(3), X(7),
  //     X(5),
  //     M(1), M(4), M(2), M(6), M(8), M(3), M(7), M(5)});
  KeyVector ordering;

  {
    std::vector<int> naturalX(N);
    std::iota(naturalX.begin(), naturalX.end(), 1);
    std::vector<Key> ordX;
    std::transform(naturalX.begin(), naturalX.end(), std::back_inserter(ordX),
                   [](int x) { return X(x); });

    KeyVector ndX;
    std::vector<int> lvls;
    std::tie(ndX, lvls) = makeBinaryOrdering(ordX);
    std::copy(ndX.begin(), ndX.end(), std::back_inserter(ordering));
    for (auto &l : lvls) {
      l = -l;
    }
  }
  {
    std::vector<int> naturalC(N - 1);
    std::iota(naturalC.begin(), naturalC.end(), 1);
    std::vector<Key> ordC;
    std::transform(naturalC.begin(), naturalC.end(), std::back_inserter(ordC),
                   [](int x) { return M(x); });
    KeyVector ndC;
    std::vector<int> lvls;

    // std::copy(ordC.begin(), ordC.end(), std::back_inserter(ordering));
    std::tie(ndC, lvls) = makeBinaryOrdering(ordC);
    std::copy(ndC.begin(), ndC.end(), std::back_inserter(ordering));
  }
  auto ordering_full = Ordering(ordering);

  // GTSAM_PRINT(*hfg);
  // GTSAM_PRINT(ordering_full);

  HybridBayesTree::shared_ptr hbt;
  HybridGaussianFactorGraph::shared_ptr remaining;
  std::tie(hbt, remaining) = hfg->eliminatePartialMultifrontal(ordering_full);

  // 12 cliques in the Bayes tree and 0 remaining variables to eliminate.
  EXPECT_LONGS_EQUAL(12, hbt->size());
  EXPECT_LONGS_EQUAL(0, remaining->size());
}

/* ************************************************************************* */
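// Test incremental inference: eliminate a switching chain into a Bayes tree,
// wrap it in HybridGaussianISAM, and update it with two new factors.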
// TODO(fan): make a graph like Varun's paper one
TEST(HybridGaussianFactorGraph, SwitchingISAM) {
  auto N = 11;
  auto hfg = makeSwitchingChain(N);

  // X(5) will be the center, X(1-4), X(6-9)
  // X(3), X(7)
  // X(2), X(8)
  // X(1), X(4), X(6), X(9)
  // M(5) will be the center, M(1-4), M(6-8)
  // M(3), M(7)
  // M(1), M(4), M(2), M(6), M(8)
  // auto ordering_full =
  //     Ordering(KeyVector{X(1), X(4), X(2), X(6), X(9), X(8), X(3), X(7),
  //     X(5),
  //     M(1), M(4), M(2), M(6), M(8), M(3), M(7), M(5)});
  KeyVector ordering;

  {
    std::vector<int> naturalX(N);
    std::iota(naturalX.begin(), naturalX.end(), 1);
    std::vector<Key> ordX;
    std::transform(naturalX.begin(), naturalX.end(), std::back_inserter(ordX),
                   [](int x) { return X(x); });

    KeyVector ndX;
    std::vector<int> lvls;
    std::tie(ndX, lvls) = makeBinaryOrdering(ordX);
    std::copy(ndX.begin(), ndX.end(), std::back_inserter(ordering));
    for (auto &l : lvls) {
      l = -l;
    }
  }
  {
    std::vector<int> naturalC(N - 1);
    std::iota(naturalC.begin(), naturalC.end(), 1);
    std::vector<Key> ordC;
    std::transform(naturalC.begin(), naturalC.end(), std::back_inserter(ordC),
                   [](int x) { return M(x); });
    KeyVector ndC;
    std::vector<int> lvls;

    // std::copy(ordC.begin(), ordC.end(), std::back_inserter(ordering));
    std::tie(ndC, lvls) = makeBinaryOrdering(ordC);
    std::copy(ndC.begin(), ndC.end(), std::back_inserter(ordering));
  }
  auto ordering_full = Ordering(ordering);

  HybridBayesTree::shared_ptr hbt;
  HybridGaussianFactorGraph::shared_ptr remaining;
  std::tie(hbt, remaining) = hfg->eliminatePartialMultifrontal(ordering_full);

  auto new_fg = makeSwitchingChain(12);
  auto isam = HybridGaussianISAM(*hbt);

  // Run an ISAM update.
  HybridGaussianFactorGraph factorGraph;
  factorGraph.push_back(new_fg->at(new_fg->size() - 2));
  factorGraph.push_back(new_fg->at(new_fg->size() - 1));
  isam.update(factorGraph);

  // The ISAM Bayes tree should have 12 cliques after the last update.
  EXPECT_LONGS_EQUAL(12, isam.size());
}

/* ************************************************************************* */
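// Test a model with two parallel switching chains (on X and Y) tied together
// by between factors, followed by partial sequential elimination of the
// continuous variables only.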
TEST(HybridGaussianFactorGraph, SwitchingTwoVar) {
  const int N = 7;
  auto hfg = makeSwitchingChain(N, X);
  hfg->push_back(*makeSwitchingChain(N, Y, D));

  for (int t = 1; t <= N; t++) {
    hfg->add(JacobianFactor(X(t), I_3x3, Y(t), -I_3x3, Vector3(1.0, 0.0, 0.0)));
  }

  KeyVector ordering;

  KeyVector naturalX(N);
  std::iota(naturalX.begin(), naturalX.end(), 1);
  KeyVector ordX;
  for (size_t i = 1; i <= N; i++) {
    ordX.emplace_back(X(i));
    ordX.emplace_back(Y(i));
  }

  for (size_t i = 1; i <= N - 1; i++) {
    ordX.emplace_back(M(i));
  }
  for (size_t i = 1; i <= N - 1; i++) {
    ordX.emplace_back(D(i));
  }

  {
    DotWriter dw;
    dw.positionHints['x'] = 1;
    dw.positionHints['c'] = 0;
    dw.positionHints['d'] = 3;
    dw.positionHints['y'] = 2;
    // std::cout << hfg->dot(DefaultKeyFormatter, dw);
    // std::cout << "\n";
  }

  {
    DotWriter dw;
    dw.positionHints['y'] = 9;
    // dw.positionHints['c'] = 0;
    // dw.positionHints['d'] = 3;
    dw.positionHints['x'] = 1;
    // std::cout << "\n";
    // std::cout << hfg->eliminateSequential(Ordering(ordX))
    //                  ->dot(DefaultKeyFormatter, dw);
    // hfg->eliminateMultifrontal(Ordering(ordX))->dot(std::cout);
  }

  Ordering ordering_partial;
  for (size_t i = 1; i <= N; i++) {
    ordering_partial.emplace_back(X(i));
    ordering_partial.emplace_back(Y(i));
  }
  HybridBayesNet::shared_ptr hbn;
  HybridGaussianFactorGraph::shared_ptr remaining;
  std::tie(hbn, remaining) = hfg->eliminatePartialSequential(ordering_partial);

  EXPECT_LONGS_EQUAL(14, hbn->size());
  EXPECT_LONGS_EQUAL(11, remaining->size());

  {
    DotWriter dw;
    dw.positionHints['x'] = 1;
    dw.positionHints['c'] = 0;
    dw.positionHints['d'] = 3;
    dw.positionHints['y'] = 2;
    // std::cout << remaining->dot(DefaultKeyFormatter, dw);
    // std::cout << "\n";
  }
}

/* ************************************************************************* */
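// Test that optimize() on the eliminated hybrid Bayes net recovers the most
// probable mode assignment (here, mode 0).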
TEST(HybridGaussianFactorGraph, optimize) {
  HybridGaussianFactorGraph hfg;

  DiscreteKey c1(C(1), 2);

  hfg.add(JacobianFactor(X(0), I_3x3, Z_3x1));
  hfg.add(JacobianFactor(X(0), I_3x3, X(1), -I_3x3, Z_3x1));

  DecisionTree<Key, GaussianFactor::shared_ptr> dt(
      C(1), boost::make_shared<JacobianFactor>(X(1), I_3x3, Z_3x1),
      boost::make_shared<JacobianFactor>(X(1), I_3x3, Vector3::Ones()));

  hfg.add(GaussianMixtureFactor({X(1)}, {c1}, dt));

  auto result =
      hfg.eliminateSequential(Ordering::ColamdConstrainedLast(hfg, {C(1)}));

  HybridValues hv = result->optimize();

  EXPECT(assert_equal(hv.atDiscrete(C(1)), int(0)));
}

/* ************************************************************************* */
// Test adding of Gaussian conditional and re-elimination.
TEST(HybridGaussianFactorGraph, Conditionals) {
  Switching switching(4);
  HybridGaussianFactorGraph hfg;

  hfg.push_back(switching.linearizedFactorGraph.at(0));  // P(X1)
  Ordering ordering;
  ordering.push_back(X(0));
  HybridBayesNet::shared_ptr bayes_net = hfg.eliminateSequential(ordering);

  hfg.push_back(switching.linearizedFactorGraph.at(1));  // P(X1, X2 | M1)
  hfg.push_back(*bayes_net);
  hfg.push_back(switching.linearizedFactorGraph.at(2));  // P(X2, X3 | M2)
  hfg.push_back(switching.linearizedFactorGraph.at(5));  // P(M1)
  ordering.push_back(X(1));
  ordering.push_back(X(2));
  ordering.push_back(M(0));
  ordering.push_back(M(1));

  bayes_net = hfg.eliminateSequential(ordering);

  HybridValues result = bayes_net->optimize();

  Values expected_continuous;
  expected_continuous.insert<double>(X(0), 0);
  expected_continuous.insert<double>(X(1), 1);
  expected_continuous.insert<double>(X(2), 2);
  expected_continuous.insert<double>(X(3), 4);
  Values result_continuous =
      switching.linearizationPoint.retract(result.continuous());
  EXPECT(assert_equal(expected_continuous, result_continuous));

  DiscreteValues expected_discrete;
  expected_discrete[M(0)] = 1;
  expected_discrete[M(1)] = 1;
  EXPECT(assert_equal(expected_discrete, result.discrete()));
}

/* ****************************************************************************/
// Test hybrid Gaussian factor graph error and unnormalized probabilities.
TEST(HybridGaussianFactorGraph, ErrorAndProbPrime) {
  Switching s(3);

  HybridGaussianFactorGraph graph = s.linearizedFactorGraph;

  Ordering hybridOrdering = graph.getHybridOrdering();
  HybridBayesNet::shared_ptr hybridBayesNet =
      graph.eliminateSequential(hybridOrdering);

  const HybridValues delta = hybridBayesNet->optimize();
  const double error = graph.error(delta);

  // regression
  EXPECT(assert_equal(1.58886, error, 1e-5));

  // Real test:
  EXPECT(assert_equal(graph.probPrime(delta), exp(-error), 1e-7));
}

/* ****************************************************************************/
// Test hybrid Gaussian factor graph error and unnormalized probabilities
// evaluated over all discrete assignments (as decision trees).
TEST(HybridGaussianFactorGraph, ErrorAndProbPrimeTree) {
  Switching s(3);

  HybridGaussianFactorGraph graph = s.linearizedFactorGraph;

  Ordering hybridOrdering = graph.getHybridOrdering();
  HybridBayesNet::shared_ptr hybridBayesNet =
      graph.eliminateSequential(hybridOrdering);

  HybridValues delta = hybridBayesNet->optimize();
  auto error_tree = graph.error(delta.continuous());

  std::vector<DiscreteKey> discrete_keys = {{M(0), 2}, {M(1), 2}};
  std::vector<double> leaves = {0.9998558, 0.4902432, 0.5193694, 0.0097568};
  AlgebraicDecisionTree<Key> expected_error(discrete_keys, leaves);

  // regression
  EXPECT(assert_equal(expected_error, error_tree, 1e-7));

  auto probs = graph.probPrime(delta.continuous());
  std::vector<double> prob_leaves = {0.36793249, 0.61247742, 0.59489556,
                                     0.99029064};
  AlgebraicDecisionTree<Key> expected_probs(discrete_keys, prob_leaves);

  // regression
  EXPECT(assert_equal(expected_probs, probs, 1e-7));
}

/* ****************************************************************************/
// Check that SumFrontals assembles Gaussian factor graphs for each assignment.
TEST(HybridGaussianFactorGraph, SumFrontals) {
  const int num_measurements = 1;
  const bool deterministic = true;
  auto fg =
      tiny::createHybridGaussianFactorGraph(num_measurements, deterministic);
  EXPECT_LONGS_EQUAL(3, fg.size());

  auto sum = fg.SumFrontals();

  // Get mixture factor:
  auto mixture = boost::dynamic_pointer_cast<GaussianMixtureFactor>(fg.at(0));
  using GF = GaussianFactor::shared_ptr;

  // Get prior factor:
  const GF prior =
      boost::dynamic_pointer_cast<HybridGaussianFactor>(fg.at(1))->inner();

  // Create DiscreteValues for both 0 and 1:
  DiscreteValues d0{{M(0), 0}}, d1{{M(0), 1}};

  // Expected decision tree with two factor graphs:
  // f(x0;mode=0)P(x0) and f(x0;mode=1)P(x0)
  GaussianMixture::Sum expectedSum{
      M(0),
      {GaussianFactorGraph(std::vector<GF>{mixture->factor(d0), prior}),
       mixture->constant(d0)},
      {GaussianFactorGraph(std::vector<GF>{mixture->factor(d1), prior}),
       mixture->constant(d1)}};

  EXPECT(assert_equal(expectedSum(d0), sum(d0), 1e-5));
  EXPECT(assert_equal(expectedSum(d1), sum(d1), 1e-5));
}

/* ****************************************************************************/
// Check that eliminating tiny net with 1 measurement yields correct result.
TEST(HybridGaussianFactorGraph, EliminateTiny1) {
  const int num_measurements = 1;
  const bool deterministic = true;
  auto fg =
      tiny::createHybridGaussianFactorGraph(num_measurements, deterministic);

  // Create expected Bayes Net:
  HybridBayesNet bayesNet;

  // Create Gaussian mixture on X(0).
  using tiny::mode;
  // regression, but mean checked to be 5.0 in both cases:
  const auto conditional0 = boost::make_shared<GaussianConditional>(
                 X(0), Vector1(14.1421), I_1x1 * 2.82843),
             conditional1 = boost::make_shared<GaussianConditional>(
                 X(0), Vector1(10.1379), I_1x1 * 2.02759);
  GaussianMixture gm({X(0)}, {}, {mode}, {conditional0, conditional1});
  bayesNet.emplaceMixture(gm);  // copy :-(

  // Add prior on mode.
  bayesNet.emplaceDiscrete(mode, "4/6");

  // Test elimination
  Ordering ordering;
  ordering.push_back(X(0));
  ordering.push_back(M(0));
  const auto posterior = fg.eliminateSequential(ordering);
  EXPECT(assert_equal(bayesNet, *posterior, 1e-4));
}

/* ****************************************************************************/
// Check that eliminating tiny net with 2 measurements yields correct result.
TEST(HybridGaussianFactorGraph, EliminateTiny2) {
  const int num_measurements = 2;
  const bool deterministic = true;
  auto fg =
      tiny::createHybridGaussianFactorGraph(num_measurements, deterministic);

  // Create expected Bayes Net:
  HybridBayesNet bayesNet;

  // Create Gaussian mixture on X(0).
  using tiny::mode;
  // regression, but mean checked to be > 5.0 in both cases:
  const auto conditional0 = boost::make_shared<GaussianConditional>(
                 X(0), Vector1(18.4752), I_1x1 * 3.4641),
             conditional1 = boost::make_shared<GaussianConditional>(
                 X(0), Vector1(10.3281), I_1x1 * 2.0548);
  GaussianMixture gm({X(0)}, {}, {mode}, {conditional0, conditional1});
  bayesNet.emplaceMixture(gm);  // copy :-(

  // Add prior on mode.
  bayesNet.emplaceDiscrete(mode, "4/6");

  // Test elimination
  Ordering ordering;
  ordering.push_back(X(0));
  ordering.push_back(M(0));
  const auto posterior = fg.eliminateSequential(ordering);
  EXPECT(assert_equal(bayesNet, *posterior, 1e-4));
}

/* ************************************************************************* */
int main() {
  TestResult tr;
  return TestRegistry::runAllTests(tr);
}
/* ************************************************************************* */