improved MixtureFactor tests
parent 7269d80b1c
commit 7fab3f8cc3

@@ -18,6 +18,9 @@
#include <gtsam/base/TestableAssertions.h>
#include <gtsam/discrete/DiscreteValues.h>
#include <gtsam/hybrid/HybridBayesNet.h>
#include <gtsam/hybrid/HybridGaussianFactorGraph.h>
#include <gtsam/hybrid/HybridNonlinearFactorGraph.h>
#include <gtsam/hybrid/MixtureFactor.h>
#include <gtsam/inference/Symbol.h>
#include <gtsam/slam/BetweenFactor.h>

@@ -115,6 +118,156 @@ TEST(MixtureFactor, Dim) {
  EXPECT_LONGS_EQUAL(1, mixtureFactor.dim());
}

/* ************************************************************************* */
// Test components with differing means
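// Two MixtureFactors are added to a HybridNonlinearFactorGraph: each selects
// between two BetweenFactors with relative measurements 0.0 and 2.0 (both with
// unit-sigma noise), governed by the binary modes m1 and m2. Priors on X(1)
// and X(2) anchor the solution.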
TEST(MixtureFactor, DifferentMeans) {
  DiscreteKey m1(M(1), 2), m2(M(2), 2);

  Values values;
  double x1 = 0.0, x2 = 1.75, x3 = 2.60;
  values.insert(X(1), x1);
  values.insert(X(2), x2);
  values.insert(X(3), x3);

  auto model0 = noiseModel::Isotropic::Sigma(1, 1e-0);
  auto model1 = noiseModel::Isotropic::Sigma(1, 1e-0);
  auto prior_noise = noiseModel::Isotropic::Sigma(1, 1e-0);

  auto f0 = std::make_shared<BetweenFactor<double>>(X(1), X(2), 0.0, model0);
  auto f1 = std::make_shared<BetweenFactor<double>>(X(1), X(2), 2.0, model1);
  std::vector<NonlinearFactor::shared_ptr> factors{f0, f1};

  MixtureFactor mixtureFactor({X(1), X(2)}, {m1}, factors);
  HybridNonlinearFactorGraph hnfg;
  hnfg.push_back(mixtureFactor);

  f0 = std::make_shared<BetweenFactor<double>>(X(2), X(3), 0.0, model0);
  f1 = std::make_shared<BetweenFactor<double>>(X(2), X(3), 2.0, model1);
  std::vector<NonlinearFactor::shared_ptr> factors23{f0, f1};
  hnfg.push_back(MixtureFactor({X(2), X(3)}, {m2}, factors23));

  auto prior = PriorFactor<double>(X(1), x1, prior_noise);
  hnfg.push_back(prior);

  hnfg.emplace_shared<PriorFactor<double>>(X(2), 2.0, prior_noise);
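
  // Linearize at the given values, eliminate into a HybridBayesNet, and check
  // that optimize() recovers the expected mode assignment and continuous
  // solution.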
  auto hgfg = hnfg.linearize(values);
  auto bn = hgfg->eliminateSequential();
  HybridValues actual = bn->optimize();

  HybridValues expected(
      VectorValues{
          {X(1), Vector1(0.0)}, {X(2), Vector1(0.25)}, {X(3), Vector1(-0.6)}},
      DiscreteValues{{M(1), 1}, {M(2), 0}});

  EXPECT(assert_equal(expected, actual));
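
  // For each of the four assignments of (m1, m2), optimize the continuous
  // variables conditioned on that assignment and compare the resulting Bayes
  // net error against stored regression values.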
  {
    DiscreteValues dv{{M(1), 0}, {M(2), 0}};
    VectorValues cont = bn->optimize(dv);
    double error = bn->error(HybridValues(cont, dv));
    // regression
    EXPECT_DOUBLES_EQUAL(1.77418393408, error, 1e-9);
  }
  {
    DiscreteValues dv{{M(1), 0}, {M(2), 1}};
    VectorValues cont = bn->optimize(dv);
    double error = bn->error(HybridValues(cont, dv));
    // regression
    EXPECT_DOUBLES_EQUAL(1.77418393408, error, 1e-9);
  }
  {
    DiscreteValues dv{{M(1), 1}, {M(2), 0}};
    VectorValues cont = bn->optimize(dv);
    double error = bn->error(HybridValues(cont, dv));
    // regression
    EXPECT_DOUBLES_EQUAL(1.10751726741, error, 1e-9);
  }
  {
    DiscreteValues dv{{M(1), 1}, {M(2), 1}};
    VectorValues cont = bn->optimize(dv);
    double error = bn->error(HybridValues(cont, dv));
    // regression
    EXPECT_DOUBLES_EQUAL(1.10751726741, error, 1e-9);
  }
}

/* ************************************************************************* */
// Test components with differing covariances
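// Both components share the same relative measurement (0.0) but use very
// different sigmas (1e2 vs 1e-2), so this test exercises how the mixture
// handles differing covariances rather than differing means.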
TEST(MixtureFactor, DifferentCovariances) {
  DiscreteKey m1(M(1), 2);

  Values values;
  double x1 = 1.0, x2 = 1.0;
  values.insert(X(1), x1);
  values.insert(X(2), x2);

  double between = 0.0;

  auto model0 = noiseModel::Isotropic::Sigma(1, 1e2);
  auto model1 = noiseModel::Isotropic::Sigma(1, 1e-2);
  auto prior_noise = noiseModel::Isotropic::Sigma(1, 1e-3);

  auto f0 =
      std::make_shared<BetweenFactor<double>>(X(1), X(2), between, model0);
  auto f1 =
      std::make_shared<BetweenFactor<double>>(X(1), X(2), between, model1);
  std::vector<NonlinearFactor::shared_ptr> factors{f0, f1};

  // Create via toFactorGraph
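  // Each between factor is linearized by hand into a GaussianConditional on a
  // dummy measurement Z(1); the two conditionals form a GaussianMixture inside
  // a HybridBayesNet, which toFactorGraph() converts back into a hybrid factor
  // graph given the measurement Z(1) = 0.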
  using symbol_shorthand::Z;
  Matrix H0_1, H0_2, H1_1, H1_2;
  Vector d0 = f0->evaluateError(x1, x2, &H0_1, &H0_2);
  std::vector<std::pair<Key, Matrix>> terms0 = {{Z(1), gtsam::I_1x1 /*Rx*/},
                                                //
                                                {X(1), H0_1 /*Sp1*/},
                                                {X(2), H0_2 /*Tp2*/}};

  Vector d1 = f1->evaluateError(x1, x2, &H1_1, &H1_2);
  std::vector<std::pair<Key, Matrix>> terms1 = {{Z(1), gtsam::I_1x1 /*Rx*/},
                                                //
                                                {X(1), H1_1 /*Sp1*/},
                                                {X(2), H1_2 /*Tp2*/}};
  auto gm = new gtsam::GaussianMixture(
      {Z(1)}, {X(1), X(2)}, {m1},
      {std::make_shared<GaussianConditional>(terms0, 1, -d0, model0),
       std::make_shared<GaussianConditional>(terms1, 1, -d1, model1)});
  gtsam::HybridBayesNet bn;
  bn.emplace_back(gm);

  gtsam::VectorValues measurements;
  measurements.insert(Z(1), gtsam::Z_1x1);
  // Create FG with single GaussianMixtureFactor
  HybridGaussianFactorGraph mixture_fg = bn.toFactorGraph(measurements);

  // Linearized prior factor on X1
  auto prior = PriorFactor<double>(X(1), x1, prior_noise).linearize(values);
  mixture_fg.push_back(prior);

  auto hbn = mixture_fg.eliminateSequential();
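
  // Eliminating the hybrid factor graph yields a HybridBayesNet whose optimum
  // is compared against hand-constructed expected values below.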
  VectorValues cv;
  cv.insert(X(1), Vector1(0.0));
  cv.insert(X(2), Vector1(0.0));
  // P(m1) = [0.5, 0.5], so we should pick 0
  DiscreteValues dv;
  dv.insert({M(1), 0});
  HybridValues expected_values(cv, dv);

  HybridValues actual_values = hbn->optimize();
  EXPECT(assert_equal(expected_values, actual_values));

  // Check that we get different error values at the MLE point μ.
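  // errorTree(cv) evaluates the Bayes net error at the fixed continuous
  // values cv, once for each assignment of the mode m1.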
  AlgebraicDecisionTree<Key> errorTree = hbn->errorTree(cv);

  HybridValues hv0(cv, DiscreteValues{{M(1), 0}});
  HybridValues hv1(cv, DiscreteValues{{M(1), 1}});

  AlgebraicDecisionTree<Key> expectedErrorTree(m1, 9.90348755254,
                                               0.69314718056);
  EXPECT(assert_equal(expectedErrorTree, errorTree));
}

/* ************************************************************************* */
int main() {
  TestResult tr;