| 
									
										
										
										
											2011-11-06 05:29:02 +08:00
										 |  |  | /* ----------------------------------------------------------------------------
 | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  |  * GTSAM Copyright 2010, Georgia Tech Research Corporation, | 
					
						
							|  |  |  |  * Atlanta, Georgia 30332-0415 | 
					
						
							|  |  |  |  * All Rights Reserved | 
					
						
							|  |  |  |  * Authors: Frank Dellaert, et al. (see THANKS for the full author list) | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  |  * See LICENSE for the license information | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  |  * -------------------------------------------------------------------------- */ | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  | /**
 | 
					
						
							| 
									
										
										
										
											2019-06-12 01:40:29 +08:00
										 |  |  |  * @file    testDoglegOptimizer.cpp | 
					
						
							| 
									
										
										
										
											2011-11-06 05:29:02 +08:00
										 |  |  |  * @brief   Unit tests for DoglegOptimizer | 
					
						
							|  |  |  |  * @author  Richard Roberts | 
					
						
							| 
									
										
										
										
											2019-06-12 01:40:29 +08:00
 * @author  Frank Dellaert
					
						
							| 
									
										
										
										
											2011-11-06 05:29:02 +08:00
										 |  |  |  */ | 
					
						
							|  |  |  | 
 | 
					
						
							| 
									
										
										
										
											2013-08-06 21:44:22 +08:00
										 |  |  | #include <CppUnitLite/TestHarness.h>
 | 
					
						
							|  |  |  | 
 | 
					
						
							| 
									
										
										
										
											2012-06-10 04:15:44 +08:00
										 |  |  | #include <tests/smallExample.h>
 | 
					
						
							| 
									
										
										
										
											2019-05-29 05:42:30 +08:00
										 |  |  | #include <gtsam/geometry/Pose2.h>
 | 
					
						
							|  |  |  | #include <gtsam/nonlinear/DoglegOptimizer.h>
 | 
					
						
							| 
									
										
										
										
											2012-06-03 03:05:38 +08:00
										 |  |  | #include <gtsam/nonlinear/DoglegOptimizerImpl.h>
 | 
					
						
							| 
									
										
										
										
											2019-05-29 05:42:30 +08:00
										 |  |  | #include <gtsam/nonlinear/NonlinearEquality.h>
 | 
					
						
							|  |  |  | #include <gtsam/slam/BetweenFactor.h>
 | 
					
						
							| 
									
										
										
										
											2013-08-19 23:32:16 +08:00
										 |  |  | #include <gtsam/inference/Symbol.h>
 | 
					
						
							| 
									
										
										
										
											2013-08-06 06:31:33 +08:00
										 |  |  | #include <gtsam/linear/JacobianFactor.h>
 | 
					
						
							|  |  |  | #include <gtsam/linear/GaussianBayesTree.h>
 | 
					
						
							| 
									
										
										
										
											2012-06-03 03:05:38 +08:00
										 |  |  | #include <gtsam/base/numericalDerivative.h>
 | 
					
						
							|  |  |  | 
 | 
					
						
							| 
									
										
										
										
											2011-11-06 05:29:02 +08:00
										 |  |  | #include <functional>
 | 
					
						
							| 
									
										
										
										
											2012-12-18 22:21:02 +08:00
										 |  |  | #include <boost/iterator/counting_iterator.hpp>
 | 
					
						
							| 
									
										
										
										
											2011-11-06 05:29:02 +08:00
										 |  |  | 
 | 
					
						
							|  |  |  | using namespace std; | 
					
						
							|  |  |  | using namespace gtsam; | 
					
						
							|  |  |  | 
 | 
					
						
							| 
									
										
										
										
											2012-06-03 03:28:21 +08:00
										 |  |  | // Convenience for named keys
 | 
					
						
							|  |  |  | using symbol_shorthand::X; | 
					
						
							|  |  |  | using symbol_shorthand::L; | 
					
						
							|  |  |  | 
 | 
					
						
							| 
									
										
										
										
											2011-11-06 05:29:02 +08:00
										 |  |  | /* ************************************************************************* */ | 
					
						
							|  |  |  | TEST(DoglegOptimizer, ComputeBlend) { | 
					
						
							|  |  |  |   // Create an arbitrary Bayes Net
 | 
					
						
							| 
									
										
										
										
											2013-08-06 06:31:44 +08:00
										 |  |  |   GaussianBayesNet gbn; | 
					
						
							|  |  |  |   gbn += GaussianConditional::shared_ptr(new GaussianConditional( | 
					
						
							| 
									
										
										
										
											2014-11-24 02:22:25 +08:00
										 |  |  |       0, Vector2(1.0,2.0), (Matrix(2, 2) << 3.0,4.0,0.0,6.0).finished(), | 
					
						
							| 
									
										
										
										
											2014-11-23 08:35:27 +08:00
										 |  |  |       3, (Matrix(2, 2) << 7.0,8.0,9.0,10.0).finished(), | 
					
						
							|  |  |  |       4, (Matrix(2, 2) << 11.0,12.0,13.0,14.0).finished())); | 
					
						
							| 
									
										
										
										
											2013-08-06 06:31:44 +08:00
										 |  |  |   gbn += GaussianConditional::shared_ptr(new GaussianConditional( | 
					
						
							| 
									
										
										
										
											2014-11-24 02:22:25 +08:00
										 |  |  |       1, Vector2(15.0,16.0), (Matrix(2, 2) << 17.0,18.0,0.0,20.0).finished(), | 
					
						
							| 
									
										
										
										
											2014-11-23 08:35:27 +08:00
										 |  |  |       2, (Matrix(2, 2) << 21.0,22.0,23.0,24.0).finished(), | 
					
						
							|  |  |  |       4, (Matrix(2, 2) << 25.0,26.0,27.0,28.0).finished())); | 
					
						
							| 
									
										
										
										
											2013-08-06 06:31:44 +08:00
										 |  |  |   gbn += GaussianConditional::shared_ptr(new GaussianConditional( | 
					
						
							| 
									
										
										
										
											2014-11-24 02:22:25 +08:00
										 |  |  |       2, Vector2(29.0,30.0), (Matrix(2, 2) << 31.0,32.0,0.0,34.0).finished(), | 
					
						
							| 
									
										
										
										
											2014-11-23 08:35:27 +08:00
										 |  |  |       3, (Matrix(2, 2) << 35.0,36.0,37.0,38.0).finished())); | 
					
						
							| 
									
										
										
										
											2013-08-06 06:31:44 +08:00
										 |  |  |   gbn += GaussianConditional::shared_ptr(new GaussianConditional( | 
					
						
							| 
									
										
										
										
											2014-11-24 02:22:25 +08:00
										 |  |  |       3, Vector2(39.0,40.0), (Matrix(2, 2) << 41.0,42.0,0.0,44.0).finished(), | 
					
						
							| 
									
										
										
										
											2014-11-23 08:35:27 +08:00
										 |  |  |       4, (Matrix(2, 2) << 45.0,46.0,47.0,48.0).finished())); | 
					
						
							| 
									
										
										
										
											2013-08-06 06:31:44 +08:00
										 |  |  |   gbn += GaussianConditional::shared_ptr(new GaussianConditional( | 
					
						
							| 
									
										
										
										
											2014-11-24 02:22:25 +08:00
										 |  |  |       4, Vector2(49.0,50.0), (Matrix(2, 2) << 51.0,52.0,0.0,54.0).finished())); | 
					
						
							| 
									
										
										
										
											2011-11-06 05:29:02 +08:00
										 |  |  | 
 | 
					
						
							|  |  |  |   // Compute steepest descent point
 | 
					
						
							| 
									
										
										
										
											2013-08-12 02:17:32 +08:00
										 |  |  |   VectorValues xu = gbn.optimizeGradientSearch(); | 
					
						
							| 
									
										
										
										
											2011-11-06 05:29:02 +08:00
										 |  |  | 
 | 
					
						
							|  |  |  |   // Compute Newton's method point
 | 
					
						
							| 
									
										
										
										
											2013-08-12 02:17:32 +08:00
										 |  |  |   VectorValues xn = gbn.optimize(); | 
					
						
							| 
									
										
										
										
											2011-11-06 05:29:02 +08:00
										 |  |  | 
 | 
					
						
							|  |  |  |   // The Newton's method point should be more "adventurous", i.e. larger, than the steepest descent point
 | 
					
						
							| 
									
										
										
										
											2013-08-12 02:17:32 +08:00
										 |  |  |   EXPECT(xu.vector().norm() < xn.vector().norm()); | 
					
						
							| 
									
										
										
										
											2011-11-06 05:29:02 +08:00
										 |  |  | 
 | 
					
						
							|  |  |  |   // Compute blend
 | 
					
						
							|  |  |  |   double Delta = 1.5; | 
					
						
							| 
									
										
										
										
											2013-08-06 06:31:44 +08:00
										 |  |  |   VectorValues xb = DoglegOptimizerImpl::ComputeBlend(Delta, xu, xn); | 
					
						
							| 
									
										
										
										
											2013-08-12 02:17:32 +08:00
										 |  |  |   DOUBLES_EQUAL(Delta, xb.vector().norm(), 1e-10); | 
					
						
							| 
									
										
										
										
											2011-11-06 05:29:02 +08:00
										 |  |  | } | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  | /* ************************************************************************* */ | 
					
						
							|  |  |  | TEST(DoglegOptimizer, ComputeDoglegPoint) { | 
					
						
							|  |  |  |   // Create an arbitrary Bayes Net
 | 
					
						
							| 
									
										
										
										
											2013-08-06 06:31:44 +08:00
										 |  |  |   GaussianBayesNet gbn; | 
					
						
							|  |  |  |   gbn += GaussianConditional::shared_ptr(new GaussianConditional( | 
					
						
							| 
									
										
										
										
											2014-11-24 02:22:25 +08:00
										 |  |  |       0, Vector2(1.0,2.0), (Matrix(2, 2) << 3.0,4.0,0.0,6.0).finished(), | 
					
						
							| 
									
										
										
										
											2014-11-23 08:35:27 +08:00
										 |  |  |       3, (Matrix(2, 2) << 7.0,8.0,9.0,10.0).finished(), | 
					
						
							|  |  |  |       4, (Matrix(2, 2) << 11.0,12.0,13.0,14.0).finished())); | 
					
						
							| 
									
										
										
										
											2013-08-06 06:31:44 +08:00
										 |  |  |   gbn += GaussianConditional::shared_ptr(new GaussianConditional( | 
					
						
							| 
									
										
										
										
											2014-11-24 02:22:25 +08:00
										 |  |  |       1, Vector2(15.0,16.0), (Matrix(2, 2) << 17.0,18.0,0.0,20.0).finished(), | 
					
						
							| 
									
										
										
										
											2014-11-23 08:35:27 +08:00
										 |  |  |       2, (Matrix(2, 2) << 21.0,22.0,23.0,24.0).finished(), | 
					
						
							|  |  |  |       4, (Matrix(2, 2) << 25.0,26.0,27.0,28.0).finished())); | 
					
						
							| 
									
										
										
										
											2013-08-06 06:31:44 +08:00
										 |  |  |   gbn += GaussianConditional::shared_ptr(new GaussianConditional( | 
					
						
							| 
									
										
										
										
											2014-11-24 02:22:25 +08:00
										 |  |  |       2, Vector2(29.0,30.0), (Matrix(2, 2) << 31.0,32.0,0.0,34.0).finished(), | 
					
						
							| 
									
										
										
										
											2014-11-23 08:35:27 +08:00
										 |  |  |       3, (Matrix(2, 2) << 35.0,36.0,37.0,38.0).finished())); | 
					
						
							| 
									
										
										
										
											2013-08-06 06:31:44 +08:00
										 |  |  |   gbn += GaussianConditional::shared_ptr(new GaussianConditional( | 
					
						
							| 
									
										
										
										
											2014-11-24 02:22:25 +08:00
										 |  |  |       3, Vector2(39.0,40.0), (Matrix(2, 2) << 41.0,42.0,0.0,44.0).finished(), | 
					
						
							| 
									
										
										
										
											2014-11-23 08:35:27 +08:00
										 |  |  |       4, (Matrix(2, 2) << 45.0,46.0,47.0,48.0).finished())); | 
					
						
							| 
									
										
										
										
											2013-08-06 06:31:44 +08:00
										 |  |  |   gbn += GaussianConditional::shared_ptr(new GaussianConditional( | 
					
						
							| 
									
										
										
										
											2014-11-24 02:22:25 +08:00
										 |  |  |       4, Vector2(49.0,50.0), (Matrix(2, 2) << 51.0,52.0,0.0,54.0).finished())); | 
					
						
							| 
									
										
										
										
											2011-11-06 05:29:02 +08:00
										 |  |  | 
 | 
					
						
							|  |  |  |   // Compute dogleg point for different deltas
 | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  |   double Delta1 = 0.5;  // Less than steepest descent
 | 
					
						
							| 
									
										
										
										
											2013-08-12 02:17:32 +08:00
										 |  |  |   VectorValues actual1 = DoglegOptimizerImpl::ComputeDoglegPoint(Delta1, gbn.optimizeGradientSearch(), gbn.optimize()); | 
					
						
							|  |  |  |   DOUBLES_EQUAL(Delta1, actual1.vector().norm(), 1e-5); | 
					
						
							| 
									
										
										
										
											2011-11-06 05:29:02 +08:00
										 |  |  | 
 | 
					
						
							|  |  |  |   double Delta2 = 1.5;  // Between steepest descent and Newton's method
 | 
					
						
							| 
									
										
										
										
											2013-08-12 02:17:32 +08:00
										 |  |  |   VectorValues expected2 = DoglegOptimizerImpl::ComputeBlend(Delta2, gbn.optimizeGradientSearch(), gbn.optimize()); | 
					
						
							|  |  |  |   VectorValues actual2 = DoglegOptimizerImpl::ComputeDoglegPoint(Delta2, gbn.optimizeGradientSearch(), gbn.optimize()); | 
					
						
							|  |  |  |   DOUBLES_EQUAL(Delta2, actual2.vector().norm(), 1e-5); | 
					
						
							| 
									
										
										
										
											2011-11-06 05:29:02 +08:00
										 |  |  |   EXPECT(assert_equal(expected2, actual2)); | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  |   double Delta3 = 5.0;  // Larger than Newton's method point
 | 
					
						
							| 
									
										
										
										
											2013-08-12 02:17:32 +08:00
										 |  |  |   VectorValues expected3 = gbn.optimize(); | 
					
						
							|  |  |  |   VectorValues actual3 = DoglegOptimizerImpl::ComputeDoglegPoint(Delta3, gbn.optimizeGradientSearch(), gbn.optimize()); | 
					
						
							| 
									
										
										
										
											2011-11-06 05:29:02 +08:00
										 |  |  |   EXPECT(assert_equal(expected3, actual3)); | 
					
						
							|  |  |  | } | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  | /* ************************************************************************* */ | 
					
						
							|  |  |  | TEST(DoglegOptimizer, Iterate) { | 
					
						
							|  |  |  |   // really non-linear factor graph
 | 
					
						
							| 
									
										
										
										
											2013-08-12 02:17:32 +08:00
										 |  |  |   NonlinearFactorGraph fg = example::createReallyNonlinearFactorGraph(); | 
					
						
							| 
									
										
										
										
											2011-11-06 05:29:02 +08:00
										 |  |  | 
 | 
					
						
							|  |  |  |   // config far from minimum
 | 
					
						
							|  |  |  |   Point2 x0(3,0); | 
					
						
							| 
									
										
										
										
											2013-08-12 02:17:32 +08:00
										 |  |  |   Values config; | 
					
						
							|  |  |  |   config.insert(X(1), x0); | 
					
						
							| 
									
										
										
										
											2011-11-06 05:29:02 +08:00
										 |  |  | 
 | 
					
						
							|  |  |  |   double Delta = 1.0; | 
					
						
							|  |  |  |   for(size_t it=0; it<10; ++it) { | 
					
						
							| 
									
										
										
										
											2023-01-09 13:53:26 +08:00
										 |  |  |     auto linearized = fg.linearize(config); | 
					
						
							|  |  |  |      | 
					
						
							| 
									
										
										
										
											2011-11-06 05:29:02 +08:00
										 |  |  |     // Iterate assumes that linear error = nonlinear error at the linearization point, and this should be true
 | 
					
						
							| 
									
										
										
										
											2013-08-12 02:17:32 +08:00
										 |  |  |     double nonlinearError = fg.error(config); | 
					
						
							| 
									
										
										
										
											2023-01-09 13:53:26 +08:00
										 |  |  |     double linearError = linearized->error(config.zeroVectors()); | 
					
						
							| 
									
										
										
										
											2011-11-06 05:29:02 +08:00
										 |  |  |     DOUBLES_EQUAL(nonlinearError, linearError, 1e-5); | 
					
						
							| 
									
										
										
										
											2023-01-09 13:53:26 +08:00
										 |  |  |      | 
					
						
							|  |  |  |     auto gbn = linearized->eliminateSequential(); | 
					
						
							|  |  |  |     VectorValues dx_u = gbn->optimizeGradientSearch(); | 
					
						
							|  |  |  |     VectorValues dx_n = gbn->optimize(); | 
					
						
							|  |  |  |     DoglegOptimizerImpl::IterationResult result = DoglegOptimizerImpl::Iterate( | 
					
						
							|  |  |  |         Delta, DoglegOptimizerImpl::SEARCH_EACH_ITERATION, dx_u, dx_n, *gbn, fg, | 
					
						
							|  |  |  |         config, fg.error(config)); | 
					
						
							| 
									
										
										
										
											2016-06-19 14:13:59 +08:00
										 |  |  |     Delta = result.delta; | 
					
						
							| 
									
										
										
										
											2013-08-12 02:17:32 +08:00
										 |  |  |     EXPECT(result.f_error < fg.error(config)); // Check that error decreases
 | 
					
						
							| 
									
										
										
										
											2023-01-09 13:53:26 +08:00
										 |  |  |      | 
					
						
							| 
									
										
										
										
											2013-08-12 02:17:32 +08:00
										 |  |  |     Values newConfig(config.retract(result.dx_d)); | 
					
						
							|  |  |  |     config = newConfig; | 
					
						
							|  |  |  |     DOUBLES_EQUAL(fg.error(config), result.f_error, 1e-5); // Check that error is correctly filled in
 | 
					
						
							| 
									
										
										
										
											2011-11-06 05:29:02 +08:00
										 |  |  |   } | 
					
						
							|  |  |  | } | 
					
						
							|  |  |  | 
 | 
					
						
							| 
									
										
										
										
											2019-05-29 05:42:30 +08:00
										 |  |  | /* ************************************************************************* */ | 
					
						
							|  |  |  | TEST(DoglegOptimizer, Constraint) { | 
					
						
							|  |  |  |   // Create a pose-graph graph with a constraint on the first pose
 | 
					
						
							|  |  |  |   NonlinearFactorGraph graph; | 
					
						
							|  |  |  |   const Pose2 origin(0, 0, 0), pose2(2, 0, 0); | 
					
						
							|  |  |  |   graph.emplace_shared<NonlinearEquality<Pose2> >(1, origin); | 
					
						
							|  |  |  |   auto model = noiseModel::Diagonal::Sigmas(Vector3(0.2, 0.2, 0.1)); | 
					
						
							|  |  |  |   graph.emplace_shared<BetweenFactor<Pose2> >(1, 2, pose2, model); | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  |   // Create feasible initial estimate
 | 
					
						
							|  |  |  |   Values initial; | 
					
						
							|  |  |  |   initial.insert(1, origin); // feasible !
 | 
					
						
							|  |  |  |   initial.insert(2, Pose2(2.3, 0.1, -0.2)); | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  |   // Optimize the initial values using DoglegOptimizer
 | 
					
						
							|  |  |  |   DoglegParams params; | 
					
						
							|  |  |  |   params.setVerbosityDL("VERBOSITY"); | 
					
						
							|  |  |  |   DoglegOptimizer optimizer(graph, initial, params); | 
					
						
							|  |  |  |   Values result = optimizer.optimize(); | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  |   // Check result
 | 
					
						
							|  |  |  |   EXPECT(assert_equal(pose2, result.at<Pose2>(2))); | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  |   // Create infeasible initial estimate
 | 
					
						
							|  |  |  |   Values infeasible; | 
					
						
							|  |  |  |   infeasible.insert(1, Pose2(0.1, 0, 0)); // infeasible !
 | 
					
						
							|  |  |  |   infeasible.insert(2, Pose2(2.3, 0.1, -0.2)); | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  |   // Try optimizing with infeasible initial estimate
 | 
					
						
							|  |  |  |   DoglegOptimizer optimizer2(graph, infeasible, params); | 
					
						
							| 
									
										
										
										
											2019-06-12 01:40:29 +08:00
										 |  |  | 
 | 
					
						
							|  |  |  | #ifdef GTSAM_USE_TBB
 | 
					
						
							|  |  |  |   CHECK_EXCEPTION(optimizer2.optimize(), std::exception); | 
					
						
							|  |  |  | #else
 | 
					
						
							| 
									
										
										
										
											2019-05-31 04:43:34 +08:00
										 |  |  |   CHECK_EXCEPTION(optimizer2.optimize(), std::invalid_argument); | 
					
						
							| 
									
										
										
										
											2019-06-12 01:40:29 +08:00
										 |  |  | #endif
 | 
					
						
							| 
									
										
										
										
											2019-05-29 05:42:30 +08:00
										 |  |  | } | 
					
						
							|  |  |  | 
 | 
					
						
							| 
									
										
										
										
											2011-11-06 05:29:02 +08:00
										 |  |  | /* ************************************************************************* */ | 
					
						
							|  |  |  | int main() { TestResult tr; return TestRegistry::runAllTests(tr); } | 
					
						
							|  |  |  | /* ************************************************************************* */ |