/* ----------------------------------------------------------------------------
* GTSAM Copyright 2010, Georgia Tech Research Corporation,
* Atlanta, Georgia 30332-0415
* All Rights Reserved
* Authors: Frank Dellaert, et al. (see THANKS for the full author list)
* See LICENSE for the license information
* -------------------------------------------------------------------------- */
/**
* @file testGncOptimizer.cpp
* @brief Unit tests for GncOptimizer class
* @author Jingnan Shi
* @author Luca Carlone
* @author Frank Dellaert
*
* Implementation of the paper: Yang, Antonante, Tzoumas, Carlone, "Graduated
* Non-Convexity for Robust Spatial Perception: From Non-Minimal Solvers to
* Global Outlier Rejection", ICRA/RAL, 2020. (arxiv version:
* https://arxiv.org/pdf/1909.08605.pdf)
*
* See also:
* Antonante, Tzoumas, Yang, Carlone, "Outlier-Robust Estimation: Hardness,
* Minimally-Tuned Algorithms, and Applications", arxiv:
* https://arxiv.org/pdf/2007.15109.pdf, 2020.
*/
#include <CppUnitLite/TestHarness.h>
#include <gtsam/nonlinear/GncOptimizer.h>
#include <gtsam/slam/dataset.h>
#include <tests/smallExample.h>
using namespace std;
using namespace gtsam;
using symbol_shorthand::L;
using symbol_shorthand::X;
static double tol = 1e-7;
/* ************************************************************************* */
TEST(GncOptimizer, gncParamsConstructor) {
// check params are correctly parsed
LevenbergMarquardtParams lmParams;
GncParams<LevenbergMarquardtParams> gncParams1(lmParams);
CHECK(lmParams.equals(gncParams1.baseOptimizerParams));
// check also default constructor
GncParams<LevenbergMarquardtParams> gncParams1b;
CHECK(lmParams.equals(gncParams1b.baseOptimizerParams));
// and check params become different if we change lmParams
lmParams.setVerbosity("DELTA");
CHECK(!lmParams.equals(gncParams1.baseOptimizerParams));
// and same for GN
GaussNewtonParams gnParams;
GncParams<GaussNewtonParams> gncParams2(gnParams);
CHECK(gnParams.equals(gncParams2.baseOptimizerParams));
// check default constructor
GncParams<GaussNewtonParams> gncParams2b;
CHECK(gnParams.equals(gncParams2b.baseOptimizerParams));
// change something at the gncParams level
GncParams<GaussNewtonParams> gncParams2c(gncParams2b);
gncParams2c.setLossType(GncParams<GaussNewtonParams>::RobustLossType::TLS);
CHECK(!gncParams2c.equals(gncParams2b.baseOptimizerParams));
}
/* ************************************************************************* */
TEST(GncOptimizer, gncConstructor) {
// has to have Gaussian noise models !
auto fg = example::createReallyNonlinearFactorGraph(); // just a unary factor
// on a 2D point
Point2 p0(3, 3);
Values initial;
initial.insert(X(1), p0);
LevenbergMarquardtParams lmParams;
GncParams<LevenbergMarquardtParams> gncParams(lmParams);
auto gnc =
GncOptimizer<GncParams<LevenbergMarquardtParams>>(fg, initial, gncParams);
CHECK(gnc.getFactors().equals(fg));
CHECK(gnc.getState().equals(initial));
CHECK(gnc.getParams().equals(gncParams));
}
/* ************************************************************************* */
TEST(GncOptimizer, gncConstructorWithRobustGraphAsInput) {
auto fg = example::sharedNonRobustFactorGraphWithOutliers();
// same graph with robust noise model
auto fg_robust = example::sharedRobustFactorGraphWithOutliers();
Point2 p0(3, 3);
Values initial;
initial.insert(X(1), p0);
LevenbergMarquardtParams lmParams;
GncParams<LevenbergMarquardtParams> gncParams(lmParams);
auto gnc = GncOptimizer<GncParams<LevenbergMarquardtParams>>(
fg_robust, initial, gncParams);
// make sure that, upon parsing, the robust graph is converted into an
// equivalent graph without robust loss
CHECK(fg.equals(gnc.getFactors()));
}
/* ************************************************************************* */
TEST(GncOptimizer, initializeMu) {
auto fg = example::createReallyNonlinearFactorGraph();
Point2 p0(3, 3);
Values initial;
initial.insert(X(1), p0);
// testing GM mu initialization
LevenbergMarquardtParams lmParams;
GncParams<LevenbergMarquardtParams> gncParams(lmParams);
gncParams.setLossType(
GncParams<LevenbergMarquardtParams>::RobustLossType::GM);
auto gnc_gm =
GncOptimizer<GncParams<LevenbergMarquardtParams>>(fg, initial, gncParams);
// according to rmk 5 in the gnc paper: m0 = 2 rmax^2 / barcSq
// (barcSq=1 in this example)
EXPECT_DOUBLES_EQUAL(gnc_gm.initializeMu(), 2 * 198.999, 1e-3);
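// (here 198.999 is rmax^2, the largest residual error of the graph at the
// initial estimate p0 = (3, 3), so with barcSq = 1 we get mu0 = 2 * 198.999)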
// testing TLS mu initialization
gncParams.setLossType(
GncParams<LevenbergMarquardtParams>::RobustLossType::TLS);
auto gnc_tls =
GncOptimizer<GncParams<LevenbergMarquardtParams>>(fg, initial, gncParams);
// according to rmk 5 in the gnc paper: m0 = barcSq / (2 * rmax^2 - barcSq)
// (barcSq=1 in this example)
EXPECT_DOUBLES_EQUAL(gnc_tls.initializeMu(), 1 / (2 * 198.999 - 1), 1e-3);
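// note the opposite conventions: GNC-GM starts from a large mu (~398) and
// anneals it down towards 1, while GNC-TLS starts from a small mu (~0.0025)
// and increases it (see the updateMu tests below)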
}
/* ************************************************************************* */
TEST(GncOptimizer, updateMuGM) {
// has to have Gaussian noise models !
auto fg = example::createReallyNonlinearFactorGraph();
Point2 p0(3, 3);
Values initial;
initial.insert(X(1), p0);
LevenbergMarquardtParams lmParams;
GncParams<LevenbergMarquardtParams> gncParams(lmParams);
gncParams.setLossType(
GncParams<LevenbergMarquardtParams>::RobustLossType::GM);
gncParams.setMuStep(1.4);
auto gnc =
GncOptimizer<GncParams<LevenbergMarquardtParams>>(fg, initial, gncParams);
double mu = 5.0;
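// the GM update divides mu by muStep and saturates at 1: mu <- max(1, mu / muStep)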
EXPECT_DOUBLES_EQUAL(gnc.updateMu(mu), mu / 1.4, tol);
// check it correctly saturates to 1 for GM
mu = 1.2;
EXPECT_DOUBLES_EQUAL(gnc.updateMu(mu), 1.0, tol);
}
/* ************************************************************************* */
TEST(GncOptimizer, updateMuTLS) {
// has to have Gaussian noise models !
auto fg = example::createReallyNonlinearFactorGraph();
Point2 p0(3, 3);
Values initial;
initial.insert(X(1), p0);
LevenbergMarquardtParams lmParams;
GncParams<LevenbergMarquardtParams> gncParams(lmParams);
gncParams.setMuStep(1.4);
gncParams.setLossType(
GncParams<LevenbergMarquardtParams>::RobustLossType::TLS);
auto gnc =
GncOptimizer<GncParams<LevenbergMarquardtParams>>(fg, initial, gncParams);
double mu = 5.0;
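// the TLS update multiplies mu by muStep, so mu increases at each GNC iteration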
EXPECT_DOUBLES_EQUAL(gnc.updateMu(mu), mu * 1.4, tol);
}
/* ************************************************************************* */
TEST(GncOptimizer, checkMuConvergenceGM) {
// has to have Gaussian noise models !
auto fg = example::createReallyNonlinearFactorGraph();
Point2 p0(3, 3);
Values initial;
initial.insert(X(1), p0);
LevenbergMarquardtParams lmParams;
GncParams<LevenbergMarquardtParams> gncParams(lmParams);
gncParams.setLossType(
GncParams<LevenbergMarquardtParams>::RobustLossType::GM);
auto gnc =
GncOptimizer<GncParams<LevenbergMarquardtParams>>(fg, initial, gncParams);
double mu = 1.0;
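// for GM, convergence is declared once mu has been annealed down to 1, where
// the surrogate cost coincides with the original Geman-McClure loss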
CHECK(gnc.checkMuConvergence(mu, 0));
}
/* ************************************************************************* */
TEST(GncOptimizer, checkMuConvergenceTLS) {
// has to have Gaussian noise models !
auto fg = example::createReallyNonlinearFactorGraph();
Point2 p0(3, 3);
Values initial;
initial.insert(X(1), p0);
LevenbergMarquardtParams lmParams;
GncParams<LevenbergMarquardtParams> gncParams(lmParams);
gncParams.setLossType(
GncParams<LevenbergMarquardtParams>::RobustLossType::TLS);
auto gnc =
GncOptimizer<GncParams<LevenbergMarquardtParams>>(fg, initial, gncParams);
double mu = 1.0;
CHECK(gnc.checkMuConvergence(mu, mu));
}
/* ************************************************************************* */
TEST(GncOptimizer, calculateWeightsGM) {
auto fg = example::sharedNonRobustFactorGraphWithOutliers();
Point2 p0(0, 0);
Values initial;
initial.insert(X(1), p0);
// we have 4 factors, 3 with zero errors (inliers), 1 with error 50 = 0.5 *
// 1/sigma^2 || [1;0] - [0;0] ||^2 (outlier)
Vector weights_expected = Vector::Zero(4);
weights_expected[0] = 1.0; // zero error
weights_expected[1] = 1.0; // zero error
weights_expected[2] = 1.0; // zero error
weights_expected[3] = std::pow(1.0 / (50.0 + 1.0), 2); // outlier, error = 50
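// (the GM weight from the GNC paper is w_i = (mu * barcSq / (r_i^2 + mu * barcSq))^2;
// with mu = 1, barcSq = 1 and r^2 = 50 this gives (1/51)^2)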
GaussNewtonParams gnParams;
GncParams<GaussNewtonParams> gncParams(gnParams);
gncParams.setLossType(
GncParams<GaussNewtonParams>::RobustLossType::GM);
auto gnc = GncOptimizer<GncParams<GaussNewtonParams>>(fg, initial, gncParams);
double mu = 1.0;
Vector weights_actual = gnc.calculateWeights(initial, mu);
CHECK(assert_equal(weights_expected, weights_actual, tol));
mu = 2.0;
double barcSq = 5.0;
weights_expected[3] =
std::pow(mu * barcSq / (50.0 + mu * barcSq), 2); // outlier, error = 50
gncParams.setInlierThreshold(barcSq);
auto gnc2 =
GncOptimizer<GncParams<GaussNewtonParams>>(fg, initial, gncParams);
weights_actual = gnc2.calculateWeights(initial, mu);
CHECK(assert_equal(weights_expected, weights_actual, tol));
}
/* ************************************************************************* */
TEST(GncOptimizer, calculateWeightsTLS) {
auto fg = example::sharedNonRobustFactorGraphWithOutliers();
Point2 p0(0, 0);
Values initial;
initial.insert(X(1), p0);
// we have 4 factors, 3 with zero errors (inliers), 1 with error 50 (outlier)
Vector weights_expected = Vector::Zero(4);
weights_expected[0] = 1.0; // zero error
weights_expected[1] = 1.0; // zero error
weights_expected[2] = 1.0; // zero error
weights_expected[3] = 0; // outlier
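// for TLS (per the GNC paper), weights are 1 for residuals with
// r^2 <= mu/(mu+1) * barcSq and 0 for r^2 >= (mu+1)/mu * barcSq; with mu = 1
// and barcSq = 1, the outlier residual of 50 is far above the threshold of 2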
GaussNewtonParams gnParams;
GncParams<GaussNewtonParams> gncParams(gnParams);
gncParams.setLossType(
GncParams<GaussNewtonParams>::RobustLossType::TLS);
auto gnc = GncOptimizer<GncParams<GaussNewtonParams>>(fg, initial, gncParams);
double mu = 1.0;
Vector weights_actual = gnc.calculateWeights(initial, mu);
CHECK(assert_equal(weights_expected, weights_actual, tol));
}
/* ************************************************************************* */
TEST(GncOptimizer, makeWeightedGraph) {
// create original factor
double sigma1 = 0.1;
NonlinearFactorGraph nfg =
example::nonlinearFactorGraphWithGivenSigma(sigma1);
// create expected
double sigma2 = 10;
NonlinearFactorGraph expected =
example::nonlinearFactorGraphWithGivenSigma(sigma2);
// create weights
Vector weights = Vector::Ones(1);
// original info: 1/0.1^2 = 100. New info: 1/10^2 = 0.01. Ratio is 1e-4.
weights[0] = 1e-4;
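// (weighting a factor by w scales its information by w, i.e. its sigma by
// 1/sqrt(w): 0.1 / sqrt(1e-4) = 10, matching the expected graph above)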
// create actual
Point2 p0(3, 3);
Values initial;
initial.insert(X(1), p0);
LevenbergMarquardtParams lmParams;
GncParams<LevenbergMarquardtParams> gncParams(lmParams);
auto gnc = GncOptimizer<GncParams<LevenbergMarquardtParams>>(nfg, initial,
gncParams);
NonlinearFactorGraph actual = gnc.makeWeightedGraph(weights);
// check it's all good
CHECK(assert_equal(expected, actual));
}
/* ************************************************************************* */
TEST(GncOptimizer, optimizeSimple) {
auto fg = example::createReallyNonlinearFactorGraph();
Point2 p0(3, 3);
Values initial;
initial.insert(X(1), p0);
LevenbergMarquardtParams lmParams;
GncParams<LevenbergMarquardtParams> gncParams(lmParams);
auto gnc =
GncOptimizer<GncParams<LevenbergMarquardtParams>>(fg, initial, gncParams);
Values actual = gnc.optimize();
DOUBLES_EQUAL(0, fg.error(actual), tol);
}
/* ************************************************************************* */
TEST(GncOptimizer, optimize) {
auto fg = example::sharedNonRobustFactorGraphWithOutliers();
Point2 p0(1, 0);
Values initial;
initial.insert(X(1), p0);
// try with nonrobust cost function and standard GN
GaussNewtonParams gnParams;
GaussNewtonOptimizer gn(fg, initial, gnParams);
Values gn_results = gn.optimize();
// converges to incorrect point due to lack of robustness to an outlier, ideal
// solution is Point2(0,0)
CHECK(assert_equal(Point2(0.25, 0.0), gn_results.at<Point2>(X(1)), 1e-3));
// try with robust loss function and standard GN
auto fg_robust =
example::sharedRobustFactorGraphWithOutliers(); // same as fg, but with
// factors wrapped in
// Geman McClure losses
GaussNewtonOptimizer gn2(fg_robust, initial, gnParams);
Values gn2_results = gn2.optimize();
// converges to incorrect point, this time due to the nonconvexity of the loss
CHECK(
assert_equal(Point2(0.999706, 0.0), gn2_results.at<Point2>(X(1)), 1e-3));
// .. but graduated nonconvexity ensures both robustness and convergence in
// the face of nonconvexity
GncParams<GaussNewtonParams> gncParams(gnParams);
// gncParams.setVerbosityGNC(GncParams<GaussNewtonParams>::VerbosityGNC::SUMMARY);
auto gnc = GncOptimizer<GncParams<GaussNewtonParams>>(fg, initial, gncParams);
Values gnc_result = gnc.optimize();
CHECK(assert_equal(Point2(0.0, 0.0), gnc_result.at<Point2>(X(1)), 1e-3));
}
/* ************************************************************************* */
TEST(GncOptimizer, optimizeWithKnownInliers) {
auto fg = example::sharedNonRobustFactorGraphWithOutliers();
Point2 p0(1, 0);
Values initial;
initial.insert(X(1), p0);
std::vector<size_t> knownInliers;
knownInliers.push_back(0);
knownInliers.push_back(1);
knownInliers.push_back(2);
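// the indices refer to factor slots in the graph: factors 0-2 are the inliers
// and factor 3 is the outlier; their weights stay fixed at 1 throughout GNC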
// nonconvexity with known inliers
GncParams<GaussNewtonParams> gncParams;
gncParams.setKnownInliers(knownInliers);
// gncParams.setVerbosityGNC(GncParams<GaussNewtonParams>::VerbosityGNC::VALUES);
auto gnc = GncOptimizer<GncParams<GaussNewtonParams>>(fg, initial, gncParams);
Values gnc_result = gnc.optimize();
CHECK(assert_equal(Point2(0.0, 0.0), gnc_result.at<Point2>(X(1)), 1e-3));
// check weights were actually fixed:
Vector finalWeights = gnc.getWeights();
DOUBLES_EQUAL(1.0, finalWeights[0], tol);
DOUBLES_EQUAL(1.0, finalWeights[1], tol);
DOUBLES_EQUAL(1.0, finalWeights[2], tol);
}
/* ************************************************************************* */
TEST(GncOptimizer, optimizeSmallPoseGraph) {
/// load small pose graph
const string filename = findExampleDataFile("w100.graph");
NonlinearFactorGraph::shared_ptr graph;
Values::shared_ptr initial;
boost::tie(graph, initial) = load2D(filename);
// Add a Gaussian prior on the first pose
Pose2 priorMean(0.0, 0.0, 0.0); // prior at origin
SharedDiagonal priorNoise =
noiseModel::Diagonal::Sigmas(Vector3(0.01, 0.01, 0.01));
graph->addPrior(0, priorMean, priorNoise);
/// get expected values by optimizing outlier-free graph
Values expected = LevenbergMarquardtOptimizer(*graph, *initial).optimize();
// add a few outliers
SharedDiagonal betweenNoise =
noiseModel::Diagonal::Sigmas(Vector3(0.1, 0.1, 0.01));
graph->push_back(BetweenFactor<Pose2>(
90, 50, Pose2(),
betweenNoise)); // some arbitrary and incorrect between factor
/// get expected values by optimizing the graph with the outlier included
Values expectedWithOutliers =
LevenbergMarquardtOptimizer(*graph, *initial).optimize();
// as expected, the following test fails due to the presence of an outlier!
// CHECK(assert_equal(expected, expectedWithOutliers, 1e-3));
// GNC
// Note: in difficult instances, we set the odometry measurements to be
// inliers, but this problem is simple enough to succeed even without that
// assumption.
// std::vector<size_t> knownInliers;
GncParams<GaussNewtonParams> gncParams;
auto gnc =
GncOptimizer<GncParams<GaussNewtonParams>>(*graph, *initial, gncParams);
Values actual = gnc.optimize();
// compare
CHECK(
assert_equal(expected, actual, 1e-3)); // yay! we are robust to outliers!
}
/* ************************************************************************* */
int main() {
TestResult tr;
return TestRegistry::runAllTests(tr);
}
/* ************************************************************************* */