diff --git a/gtsam/nonlinear/DoglegOptimizer.cpp b/gtsam/nonlinear/DoglegOptimizer.cpp index 4eeac0296..374ef0c6b 100644 --- a/gtsam/nonlinear/DoglegOptimizer.cpp +++ b/gtsam/nonlinear/DoglegOptimizer.cpp @@ -18,8 +18,8 @@ #include -#include -#include +#include +#include #include using namespace std; @@ -33,14 +33,8 @@ void DoglegOptimizer::iterate(void) { const Ordering& ordering = *params_.ordering; GaussianFactorGraph::shared_ptr linear = graph_.linearize(state_.values, ordering); - // Check whether to use QR - bool useQR; - if(params_.factorization == DoglegParams::LDL) - useQR = false; - else if(params_.factorization == DoglegParams::QR) - useQR = true; - else - throw runtime_error("Optimization parameter is invalid: DoglegParams::factorization"); + // Get elimination method + GaussianFactorGraph::Eliminate eliminationMethod = params_.getEliminationFunction(); // Pull out parameters we'll use const bool dlVerbose = (params_.dlVerbosity > DoglegParams::SILENT); @@ -49,11 +43,12 @@ void DoglegOptimizer::iterate(void) { DoglegOptimizerImpl::IterationResult result; if(params_.elimination == DoglegParams::MULTIFRONTAL) { - GaussianBayesTree::shared_ptr bt = GaussianMultifrontalSolver(*linear, useQR).eliminate(); - result = DoglegOptimizerImpl::Iterate(state_.Delta, DoglegOptimizerImpl::ONE_STEP_PER_ITERATION, *bt, graph_, state_.values, ordering, state_.error, dlVerbose); + GaussianBayesTree bt; + bt.insert(GaussianJunctionTree(*linear).eliminate(eliminationMethod)); + result = DoglegOptimizerImpl::Iterate(state_.Delta, DoglegOptimizerImpl::ONE_STEP_PER_ITERATION, bt, graph_, state_.values, ordering, state_.error, dlVerbose); } else if(params_.elimination == DoglegParams::SEQUENTIAL) { - GaussianBayesNet::shared_ptr bn = GaussianSequentialSolver(*linear, useQR).eliminate(); + GaussianBayesNet::shared_ptr bn = EliminationTree::Create(*linear)->eliminate(eliminationMethod); result = DoglegOptimizerImpl::Iterate(state_.Delta, 
DoglegOptimizerImpl::ONE_STEP_PER_ITERATION, *bn, graph_, state_.values, ordering, state_.error, dlVerbose); } else { diff --git a/gtsam/nonlinear/GaussNewtonOptimizer.cpp b/gtsam/nonlinear/GaussNewtonOptimizer.cpp index 39a4d32d5..8e2b20859 100644 --- a/gtsam/nonlinear/GaussNewtonOptimizer.cpp +++ b/gtsam/nonlinear/GaussNewtonOptimizer.cpp @@ -16,9 +16,9 @@ * @created Feb 26, 2012 */ +#include +#include #include -#include -#include using namespace std; @@ -32,29 +32,23 @@ void GaussNewtonOptimizer::iterate() { // Linearize graph GaussianFactorGraph::shared_ptr linear = graph_.linearize(current.values, *params_.ordering); - // Check whether to use QR - bool useQR; - if(params_.factorization == GaussNewtonParams::LDL) - useQR = false; - else if(params_.factorization == GaussNewtonParams::QR) - useQR = true; - else - throw runtime_error("Optimization parameter is invalid: GaussNewtonParams::factorization"); - // Optimize - VectorValues::shared_ptr delta; - if(params_.elimination == GaussNewtonParams::MULTIFRONTAL) - delta = GaussianMultifrontalSolver(*linear, useQR).optimize(); - else if(params_.elimination == GaussNewtonParams::SEQUENTIAL) - delta = GaussianSequentialSolver(*linear, useQR).optimize(); - else - throw runtime_error("Optimization parameter is invalid: GaussNewtonParams::elimination"); + VectorValues delta; + { + GaussianFactorGraph::Eliminate eliminationMethod = params_.getEliminationFunction(); + if(params_.elimination == GaussNewtonParams::MULTIFRONTAL) + delta = GaussianJunctionTree(*linear).optimize(eliminationMethod); + else if(params_.elimination == GaussNewtonParams::SEQUENTIAL) + delta = gtsam::optimize(*EliminationTree::Create(*linear)->eliminate(eliminationMethod)); + else + throw runtime_error("Optimization parameter is invalid: GaussNewtonParams::elimination"); + } // Maybe show output - if(params_.verbosity >= NonlinearOptimizerParams::DELTA) delta->print("delta"); + if(params_.verbosity >= NonlinearOptimizerParams::DELTA) 
delta.print("delta"); // Create new state with new values and new error - state_.values = current.values.retract(*delta, *params_.ordering); + state_.values = current.values.retract(delta, *params_.ordering); state_.error = graph_.error(state_.values); ++ state_.iterations; } diff --git a/gtsam/nonlinear/LevenbergMarquardtOptimizer.cpp b/gtsam/nonlinear/LevenbergMarquardtOptimizer.cpp index 50d87a0d3..2b9865ada 100644 --- a/gtsam/nonlinear/LevenbergMarquardtOptimizer.cpp +++ b/gtsam/nonlinear/LevenbergMarquardtOptimizer.cpp @@ -19,8 +19,8 @@ #include #include // For NegativeMatrixException -#include -#include +#include +#include using namespace std; @@ -32,14 +32,8 @@ void LevenbergMarquardtOptimizer::iterate() { // Linearize graph GaussianFactorGraph::shared_ptr linear = graph_.linearize(state_.values, *params_.ordering); - // Check whether to use QR - bool useQR; - if(params_.factorization == LevenbergMarquardtParams::LDL) - useQR = false; - else if(params_.factorization == LevenbergMarquardtParams::QR) - useQR = true; - else - throw runtime_error("Optimization parameter is invalid: LevenbergMarquardtParams::factorization"); + // Get elimination method + GaussianFactorGraph::Eliminate eliminationMethod = params_.getEliminationFunction(); // Pull out parameters we'll use const NonlinearOptimizerParams::Verbosity nloVerbosity = params_.verbosity; @@ -72,19 +66,19 @@ void LevenbergMarquardtOptimizer::iterate() { try { // Optimize - VectorValues::shared_ptr delta; - if(params_.elimination == LevenbergMarquardtParams::MULTIFRONTAL) - delta = GaussianMultifrontalSolver(dampedSystem, useQR).optimize(); - else if(params_.elimination == LevenbergMarquardtParams::SEQUENTIAL) - delta = GaussianSequentialSolver(dampedSystem, useQR).optimize(); + VectorValues delta; + if(params_.elimination == SuccessiveLinearizationParams::MULTIFRONTAL) + delta = GaussianJunctionTree(dampedSystem).optimize(eliminationMethod); + else if(params_.elimination == 
SuccessiveLinearizationParams::SEQUENTIAL) + delta = gtsam::optimize(*EliminationTree::Create(dampedSystem)->eliminate(eliminationMethod)); else throw runtime_error("Optimization parameter is invalid: LevenbergMarquardtParams::elimination"); - if (lmVerbosity >= LevenbergMarquardtParams::TRYLAMBDA) cout << "linear delta norm = " << delta->vector().norm() << endl; - if (lmVerbosity >= LevenbergMarquardtParams::TRYDELTA) delta->print("delta"); + if (lmVerbosity >= LevenbergMarquardtParams::TRYLAMBDA) cout << "linear delta norm = " << delta.vector().norm() << endl; + if (lmVerbosity >= LevenbergMarquardtParams::TRYDELTA) delta.print("delta"); // update values - Values newValues = state_.values.retract(*delta, *params_.ordering); + Values newValues = state_.values.retract(delta, *params_.ordering); // create new optimization state with more adventurous lambda double error = graph_.error(newValues); diff --git a/gtsam/nonlinear/SuccessiveLinearizationOptimizer.h b/gtsam/nonlinear/SuccessiveLinearizationOptimizer.h index e95502b28..8f02059fc 100644 --- a/gtsam/nonlinear/SuccessiveLinearizationOptimizer.h +++ b/gtsam/nonlinear/SuccessiveLinearizationOptimizer.h @@ -32,6 +32,7 @@ public: /** See SuccessiveLinearizationParams::factorization */ enum Factorization { + CHOLESKY, LDL, QR, }; @@ -54,7 +55,9 @@ public: else std::cout << " elimination method: (invalid)\n"; - if(factorization == LDL) + if(factorization == CHOLESKY) + std::cout << " factorization method: CHOLESKY\n"; + else if(factorization == LDL) std::cout << " factorization method: LDL\n"; else if(factorization == QR) std::cout << " factorization method: QR\n"; @@ -68,6 +71,17 @@ public: std::cout.flush(); } + + GaussianFactorGraph::Eliminate getEliminationFunction() const { + if(factorization == SuccessiveLinearizationParams::CHOLESKY) + return EliminatePreferCholesky; + else if(factorization == SuccessiveLinearizationParams::LDL) + return EliminatePreferLDL; + else if(factorization == 
SuccessiveLinearizationParams::QR) + return EliminateQR; + else + throw std::runtime_error("Nonlinear optimization parameter \"factorization\" is invalid"); + } }; } /* namespace gtsam */ diff --git a/tests/testNonlinearOptimizer.cpp b/tests/testNonlinearOptimizer.cpp index b8d80d645..7173253b8 100644 --- a/tests/testNonlinearOptimizer.cpp +++ b/tests/testNonlinearOptimizer.cpp @@ -153,6 +153,8 @@ TEST( NonlinearOptimizer, optimization_method ) paramsQR.factorization = LevenbergMarquardtParams::QR; LevenbergMarquardtParams paramsLDL; paramsLDL.factorization = LevenbergMarquardtParams::LDL; + LevenbergMarquardtParams paramsChol; + paramsChol.factorization = LevenbergMarquardtParams::CHOLESKY; example::Graph fg = example::createReallyNonlinearFactorGraph(); @@ -165,6 +167,9 @@ TEST( NonlinearOptimizer, optimization_method ) Values actualMFLDL = LevenbergMarquardtOptimizer(fg, c0, paramsLDL).optimize(); DOUBLES_EQUAL(0,fg.error(actualMFLDL),tol); + + Values actualMFChol = LevenbergMarquardtOptimizer(fg, c0, paramsChol).optimize(); + DOUBLES_EQUAL(0,fg.error(actualMFChol),tol); } /* ************************************************************************* */