Use DecisionTree for constructing HybridGaussianConditional

parent 091352806b
commit d4923dbfa9
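In short, the std::vector-based constructors of HybridGaussianConditional are removed; call sites now build the DecisionTree of conditionals (HybridGaussianConditional::Conditionals) explicitly and pass it to the remaining constructor. A minimal sketch of the new calling pattern, mirroring the test changes below (the keys, sigmas, and the names modes/conditionals are illustrative only):

// Sketch only: assumes the GTSAM hybrid headers and the Z/X/M symbol
// shorthands used in the tests below.
DiscreteKeys modes{DiscreteKey{M(0), 2}};
std::vector<GaussianConditional::shared_ptr> conditionals{
    GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), Z_1x1, 0.5),
    GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), Z_1x1, 3.0)};
HybridGaussianConditional hgc(
    KeyVector{Z(0)}, KeyVector{X(0)}, modes,
    HybridGaussianConditional::Conditionals(modes, conditionals));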
@@ -55,24 +55,6 @@ HybridGaussianConditional::conditionals() const {
   return conditionals_;
 }
 
-/* *******************************************************************************/
-HybridGaussianConditional::HybridGaussianConditional(
-    KeyVector &&continuousFrontals, KeyVector &&continuousParents,
-    DiscreteKeys &&discreteParents,
-    std::vector<GaussianConditional::shared_ptr> &&conditionals)
-    : HybridGaussianConditional(continuousFrontals, continuousParents,
-                                discreteParents,
-                                Conditionals(discreteParents, conditionals)) {}
-
-/* *******************************************************************************/
-HybridGaussianConditional::HybridGaussianConditional(
-    const KeyVector &continuousFrontals, const KeyVector &continuousParents,
-    const DiscreteKeys &discreteParents,
-    const std::vector<GaussianConditional::shared_ptr> &conditionals)
-    : HybridGaussianConditional(continuousFrontals, continuousParents,
-                                discreteParents,
-                                Conditionals(discreteParents, conditionals)) {}
-
 /* *******************************************************************************/
 // TODO(dellaert): This is copy/paste: HybridGaussianConditional should be
 // derived from HybridGaussianFactor, no?

@@ -106,32 +106,6 @@ class GTSAM_EXPORT HybridGaussianConditional
       const DiscreteKeys &discreteParents,
       const Conditionals &conditionals);
 
-  /**
-   * @brief Make a Gaussian Mixture from a list of Gaussian conditionals
-   *
-   * @param continuousFrontals The continuous frontal variables
-   * @param continuousParents The continuous parent variables
-   * @param discreteParents Discrete parents variables
-   * @param conditionals List of conditionals
-   */
-  HybridGaussianConditional(
-      KeyVector &&continuousFrontals, KeyVector &&continuousParents,
-      DiscreteKeys &&discreteParents,
-      std::vector<GaussianConditional::shared_ptr> &&conditionals);
-
-  /**
-   * @brief Make a Gaussian Mixture from a list of Gaussian conditionals
-   *
-   * @param continuousFrontals The continuous frontal variables
-   * @param continuousParents The continuous parent variables
-   * @param discreteParents Discrete parents variables
-   * @param conditionals List of conditionals
-   */
-  HybridGaussianConditional(
-      const KeyVector &continuousFrontals, const KeyVector &continuousParents,
-      const DiscreteKeys &discreteParents,
-      const std::vector<GaussianConditional::shared_ptr> &conditionals);
-
   /// @}
   /// @name Testable
   /// @{

@@ -273,7 +247,7 @@ class GTSAM_EXPORT HybridGaussianConditional
 #endif
 };
 
-/// Return the DiscreteKey vector as a set.
+/// Return the DiscreteKeys vector as a set.
 std::set<DiscreteKey> DiscreteKeysAsSet(const DiscreteKeys &discreteKeys);
 
 // traits

@@ -43,12 +43,13 @@ inline HybridBayesNet createHybridBayesNet(size_t num_measurements = 1,
   // Create Gaussian mixture z_i = x0 + noise for each measurement.
   for (size_t i = 0; i < num_measurements; i++) {
     const auto mode_i = manyModes ? DiscreteKey{M(i), 2} : mode;
+    DiscreteKeys modes{mode_i};
+    std::vector<GaussianConditional::shared_ptr> conditionals{
+        GaussianConditional::sharedMeanAndStddev(Z(i), I_1x1, X(0), Z_1x1, 0.5),
+        GaussianConditional::sharedMeanAndStddev(Z(i), I_1x1, X(0), Z_1x1, 3)};
     bayesNet.emplace_shared<HybridGaussianConditional>(
         KeyVector{Z(i)}, KeyVector{X(0)}, DiscreteKeys{mode_i},
-        std::vector{GaussianConditional::sharedMeanAndStddev(Z(i), I_1x1, X(0),
-                                                             Z_1x1, 0.5),
-                    GaussianConditional::sharedMeanAndStddev(Z(i), I_1x1, X(0),
-                                                             Z_1x1, 3)});
+        HybridGaussianConditional::Conditionals(modes, conditionals));
   }
 
   // Create prior on X(0).

@@ -107,9 +107,11 @@ TEST(HybridBayesNet, evaluateHybrid) {
   // Create hybrid Bayes net.
   HybridBayesNet bayesNet;
   bayesNet.push_back(continuousConditional);
+  DiscreteKeys discreteParents{Asia};
   bayesNet.emplace_shared<HybridGaussianConditional>(
-      KeyVector{X(1)}, KeyVector{}, DiscreteKeys{Asia},
-      std::vector{conditional0, conditional1});
+      KeyVector{X(1)}, KeyVector{}, discreteParents,
+      HybridGaussianConditional::Conditionals(
+          discreteParents, std::vector{conditional0, conditional1}));
   bayesNet.emplace_shared<DiscreteConditional>(Asia, "99/1");
 
   // Create values at which to evaluate.

@@ -168,9 +170,11 @@ TEST(HybridBayesNet, Error) {
   conditional1 = std::make_shared<GaussianConditional>(
       X(1), Vector1::Constant(2), I_1x1, model1);
 
+  DiscreteKeys discreteParents{Asia};
   auto gm = std::make_shared<HybridGaussianConditional>(
-      KeyVector{X(1)}, KeyVector{}, DiscreteKeys{Asia},
-      std::vector{conditional0, conditional1});
+      KeyVector{X(1)}, KeyVector{}, discreteParents,
+      HybridGaussianConditional::Conditionals(
+          discreteParents, std::vector{conditional0, conditional1}));
   // Create hybrid Bayes net.
   HybridBayesNet bayesNet;
   bayesNet.push_back(continuousConditional);

@@ -620,12 +620,16 @@ TEST(HybridEstimation, ModeSelection) {
       GaussianConditional::sharedMeanAndStddev(Z(0), -I_1x1, X(0), Z_1x1, 0.1));
   bn.push_back(
       GaussianConditional::sharedMeanAndStddev(Z(0), -I_1x1, X(1), Z_1x1, 0.1));
+
+  std::vector<GaussianConditional::shared_ptr> conditionals{
+      GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), -I_1x1, X(1),
+                                               Z_1x1, noise_loose),
+      GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), -I_1x1, X(1),
+                                               Z_1x1, noise_tight)};
   bn.emplace_shared<HybridGaussianConditional>(
       KeyVector{Z(0)}, KeyVector{X(0), X(1)}, DiscreteKeys{mode},
-      std::vector{GaussianConditional::sharedMeanAndStddev(
-                      Z(0), I_1x1, X(0), -I_1x1, X(1), Z_1x1, noise_loose),
-                  GaussianConditional::sharedMeanAndStddev(
-                      Z(0), I_1x1, X(0), -I_1x1, X(1), Z_1x1, noise_tight)});
+      HybridGaussianConditional::Conditionals(DiscreteKeys{mode},
+                                              conditionals));
 
   VectorValues vv;
   vv.insert(Z(0), Z_1x1);

@@ -651,12 +655,16 @@ TEST(HybridEstimation, ModeSelection2) {
       GaussianConditional::sharedMeanAndStddev(Z(0), -I_3x3, X(0), Z_3x1, 0.1));
   bn.push_back(
       GaussianConditional::sharedMeanAndStddev(Z(0), -I_3x3, X(1), Z_3x1, 0.1));
+
+  std::vector<GaussianConditional::shared_ptr> conditionals{
+      GaussianConditional::sharedMeanAndStddev(Z(0), I_3x3, X(0), -I_3x3, X(1),
+                                               Z_3x1, noise_loose),
+      GaussianConditional::sharedMeanAndStddev(Z(0), I_3x3, X(0), -I_3x3, X(1),
+                                               Z_3x1, noise_tight)};
   bn.emplace_shared<HybridGaussianConditional>(
       KeyVector{Z(0)}, KeyVector{X(0), X(1)}, DiscreteKeys{mode},
-      std::vector{GaussianConditional::sharedMeanAndStddev(
-                      Z(0), I_3x3, X(0), -I_3x3, X(1), Z_3x1, noise_loose),
-                  GaussianConditional::sharedMeanAndStddev(
-                      Z(0), I_3x3, X(0), -I_3x3, X(1), Z_3x1, noise_tight)});
+      HybridGaussianConditional::Conditionals(DiscreteKeys{mode},
+                                              conditionals));
 
   VectorValues vv;
   vv.insert(Z(0), Z_3x1);

@@ -52,7 +52,9 @@ const std::vector<GaussianConditional::shared_ptr> conditionals{
                                              commonSigma),
     GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), Vector1(0.0),
                                              commonSigma)};
-const HybridGaussianConditional mixture({Z(0)}, {X(0)}, {mode}, conditionals);
+const HybridGaussianConditional mixture(
+    {Z(0)}, {X(0)}, {mode},
+    HybridGaussianConditional::Conditionals({mode}, conditionals));
 }  // namespace equal_constants
 
 /* ************************************************************************* */

@@ -153,7 +155,9 @@ const std::vector<GaussianConditional::shared_ptr> conditionals{
                                              0.5),
     GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), Vector1(0.0),
                                              3.0)};
-const HybridGaussianConditional mixture({Z(0)}, {X(0)}, {mode}, conditionals);
+const HybridGaussianConditional mixture(
+    {Z(0)}, {X(0)}, {mode},
+    HybridGaussianConditional::Conditionals({mode}, conditionals));
 }  // namespace mode_dependent_constants
 
 /* ************************************************************************* */

@@ -233,8 +233,11 @@ static HybridBayesNet GetGaussianMixtureModel(double mu0, double mu1,
   c1 = make_shared<GaussianConditional>(z, Vector1(mu1), I_1x1, model1);
 
   HybridBayesNet hbn;
+  DiscreteKeys discreteParents{m};
   hbn.emplace_shared<HybridGaussianConditional>(
-      KeyVector{z}, KeyVector{}, DiscreteKeys{m}, std::vector{c0, c1});
+      KeyVector{z}, KeyVector{}, discreteParents,
+      HybridGaussianConditional::Conditionals(discreteParents,
+                                              std::vector{c0, c1}));
 
   auto mixing = make_shared<DiscreteConditional>(m, "50/50");
   hbn.push_back(mixing);

@@ -408,8 +411,11 @@ static HybridGaussianConditional::shared_ptr CreateHybridMotionModel(
                                             -I_1x1, model0),
       c1 = make_shared<GaussianConditional>(X(1), Vector1(mu1), I_1x1, X(0),
                                             -I_1x1, model1);
+  DiscreteKeys discreteParents{m1};
   return std::make_shared<HybridGaussianConditional>(
-      KeyVector{X(1)}, KeyVector{X(0)}, DiscreteKeys{m1}, std::vector{c0, c1});
+      KeyVector{X(1)}, KeyVector{X(0)}, discreteParents,
+      HybridGaussianConditional::Conditionals(discreteParents,
+                                              std::vector{c0, c1}));
 }
 
 /// Create two state Bayes network with 1 or two measurement models

@@ -682,8 +682,11 @@ TEST(HybridGaussianFactorGraph, ErrorTreeWithConditional) {
                                        x0, -I_1x1, model0),
       c1 = make_shared<GaussianConditional>(f01, Vector1(mu), I_1x1, x1, I_1x1,
                                             x0, -I_1x1, model1);
+  DiscreteKeys discreteParents{m1};
   hbn.emplace_shared<HybridGaussianConditional>(
-      KeyVector{f01}, KeyVector{x0, x1}, DiscreteKeys{m1}, std::vector{c0, c1});
+      KeyVector{f01}, KeyVector{x0, x1}, discreteParents,
+      HybridGaussianConditional::Conditionals(discreteParents,
+                                              std::vector{c0, c1}));
 
   // Discrete uniform prior.
   hbn.emplace_shared<DiscreteConditional>(m1, "0.5/0.5");

@@ -806,9 +809,11 @@ TEST(HybridGaussianFactorGraph, EliminateTiny1) {
                      X(0), Vector1(14.1421), I_1x1 * 2.82843),
       conditional1 = std::make_shared<GaussianConditional>(
                      X(0), Vector1(10.1379), I_1x1 * 2.02759);
+  DiscreteKeys discreteParents{mode};
   expectedBayesNet.emplace_shared<HybridGaussianConditional>(
-      KeyVector{X(0)}, KeyVector{}, DiscreteKeys{mode},
-      std::vector{conditional0, conditional1});
+      KeyVector{X(0)}, KeyVector{}, discreteParents,
+      HybridGaussianConditional::Conditionals(
+          discreteParents, std::vector{conditional0, conditional1}));
 
   // Add prior on mode.
   expectedBayesNet.emplace_shared<DiscreteConditional>(mode, "74/26");

@@ -831,12 +836,13 @@ TEST(HybridGaussianFactorGraph, EliminateTiny1Swapped) {
   HybridBayesNet bn;
 
   // Create Gaussian mixture z_0 = x0 + noise for each measurement.
+  std::vector<GaussianConditional::shared_ptr> conditionals{
+      GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), Z_1x1, 3),
+      GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), Z_1x1, 0.5)};
   auto gm = std::make_shared<HybridGaussianConditional>(
       KeyVector{Z(0)}, KeyVector{X(0)}, DiscreteKeys{mode},
-      std::vector{
-          GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), Z_1x1, 3),
-          GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), Z_1x1,
-                                                   0.5)});
+      HybridGaussianConditional::Conditionals(DiscreteKeys{mode},
+                                              conditionals));
   bn.push_back(gm);
 
   // Create prior on X(0).

@@ -865,7 +871,8 @@ TEST(HybridGaussianFactorGraph, EliminateTiny1Swapped) {
                      X(0), Vector1(14.1421), I_1x1 * 2.82843);
   expectedBayesNet.emplace_shared<HybridGaussianConditional>(
       KeyVector{X(0)}, KeyVector{}, DiscreteKeys{mode},
-      std::vector{conditional0, conditional1});
+      HybridGaussianConditional::Conditionals(
+          DiscreteKeys{mode}, std::vector{conditional0, conditional1}));
 
   // Add prior on mode.
   expectedBayesNet.emplace_shared<DiscreteConditional>(mode, "1/1");

@@ -902,7 +909,8 @@ TEST(HybridGaussianFactorGraph, EliminateTiny2) {
                      X(0), Vector1(10.274), I_1x1 * 2.0548);
   expectedBayesNet.emplace_shared<HybridGaussianConditional>(
       KeyVector{X(0)}, KeyVector{}, DiscreteKeys{mode},
-      std::vector{conditional0, conditional1});
+      HybridGaussianConditional::Conditionals(
+          DiscreteKeys{mode}, std::vector{conditional0, conditional1}));
 
   // Add prior on mode.
   expectedBayesNet.emplace_shared<DiscreteConditional>(mode, "23/77");

@@ -947,12 +955,14 @@ TEST(HybridGaussianFactorGraph, EliminateSwitchingNetwork) {
   for (size_t t : {0, 1, 2}) {
     // Create Gaussian mixture on Z(t) conditioned on X(t) and mode N(t):
    const auto noise_mode_t = DiscreteKey{N(t), 2};
+    std::vector<GaussianConditional::shared_ptr> conditionals{
+        GaussianConditional::sharedMeanAndStddev(Z(t), I_1x1, X(t), Z_1x1, 0.5),
+        GaussianConditional::sharedMeanAndStddev(Z(t), I_1x1, X(t), Z_1x1,
+                                                 3.0)};
     bn.emplace_shared<HybridGaussianConditional>(
         KeyVector{Z(t)}, KeyVector{X(t)}, DiscreteKeys{noise_mode_t},
-        std::vector{GaussianConditional::sharedMeanAndStddev(Z(t), I_1x1, X(t),
-                                                             Z_1x1, 0.5),
-                    GaussianConditional::sharedMeanAndStddev(Z(t), I_1x1, X(t),
-                                                             Z_1x1, 3.0)});
+        HybridGaussianConditional::Conditionals(DiscreteKeys{noise_mode_t},
+                                                conditionals));
 
     // Create prior on discrete mode N(t):
     bn.emplace_shared<DiscreteConditional>(noise_mode_t, "20/80");

@@ -962,12 +972,15 @@ TEST(HybridGaussianFactorGraph, EliminateSwitchingNetwork) {
   for (size_t t : {2, 1}) {
     // Create Gaussian mixture on X(t) conditioned on X(t-1) and mode M(t-1):
     const auto motion_model_t = DiscreteKey{M(t), 2};
+    std::vector<GaussianConditional::shared_ptr> conditionals{
+        GaussianConditional::sharedMeanAndStddev(X(t), I_1x1, X(t - 1), Z_1x1,
+                                                 0.2),
+        GaussianConditional::sharedMeanAndStddev(X(t), I_1x1, X(t - 1), I_1x1,
+                                                 0.2)};
     auto gm = std::make_shared<HybridGaussianConditional>(
         KeyVector{X(t)}, KeyVector{X(t - 1)}, DiscreteKeys{motion_model_t},
-        std::vector{GaussianConditional::sharedMeanAndStddev(
-                        X(t), I_1x1, X(t - 1), Z_1x1, 0.2),
-                    GaussianConditional::sharedMeanAndStddev(
-                        X(t), I_1x1, X(t - 1), I_1x1, 0.2)});
+        HybridGaussianConditional::Conditionals(DiscreteKeys{motion_model_t},
+                                                conditionals));
     bn.push_back(gm);
 
     // Create prior on motion model M(t):

@@ -116,7 +116,8 @@ TEST(HybridSerialization, HybridGaussianConditional) {
   const auto conditional1 = std::make_shared<GaussianConditional>(
       GaussianConditional::FromMeanAndStddev(Z(0), I, X(0), Vector1(0), 3));
   const HybridGaussianConditional gm({Z(0)}, {X(0)}, {mode},
-                                     {conditional0, conditional1});
+                                     HybridGaussianConditional::Conditionals(
+                                         {mode}, {conditional0, conditional1}));
 
   EXPECT(equalsObj<HybridGaussianConditional>(gm));
   EXPECT(equalsXML<HybridGaussianConditional>(gm));