diff --git a/examples/PlanarSLAMSelfContained_advanced.cpp b/examples/PlanarSLAMSelfContained_advanced.cpp index bd5c5295a..36f9ad8ca 100644 --- a/examples/PlanarSLAMSelfContained_advanced.cpp +++ b/examples/PlanarSLAMSelfContained_advanced.cpp @@ -113,7 +113,7 @@ int main(int argc, char** argv) { // first using sequential elimination LevenbergMarquardtParams lmParams; - lmParams.elimination = LevenbergMarquardtParams::SEQUENTIAL; + lmParams.linearSolverType = LevenbergMarquardtParams::SEQUENTIAL_CHOLESKY; Values resultSequential = LevenbergMarquardtOptimizer(graph, initial, lmParams).optimize(); resultSequential.print("final result (solved with a sequential solver)"); diff --git a/gtsam/nonlinear/DoglegOptimizer.cpp b/gtsam/nonlinear/DoglegOptimizer.cpp index 84090d666..91b3c92fd 100644 --- a/gtsam/nonlinear/DoglegOptimizer.cpp +++ b/gtsam/nonlinear/DoglegOptimizer.cpp @@ -33,25 +33,25 @@ void DoglegOptimizer::iterate(void) { const Ordering& ordering = *params_.ordering; GaussianFactorGraph::shared_ptr linear = graph_.linearize(state_.values, ordering); - // Get elimination method - GaussianFactorGraph::Eliminate eliminationMethod = params_.getEliminationFunction(); - // Pull out parameters we'll use const bool dlVerbose = (params_.verbosityDL > DoglegParams::SILENT); // Do Dogleg iteration with either Multifrontal or Sequential elimination DoglegOptimizerImpl::IterationResult result; - if(params_.elimination == DoglegParams::MULTIFRONTAL) { + if ( params_.isMultifrontal() ) { GaussianBayesTree bt; - bt.insert(GaussianJunctionTree(*linear).eliminate(eliminationMethod)); + bt.insert(GaussianJunctionTree(*linear).eliminate(params_.getEliminationFunction())); result = DoglegOptimizerImpl::Iterate(state_.Delta, DoglegOptimizerImpl::ONE_STEP_PER_ITERATION, bt, graph_, state_.values, ordering, state_.error, dlVerbose); - - } else if(params_.elimination == DoglegParams::SEQUENTIAL) { - GaussianBayesNet::shared_ptr bn = 
EliminationTree::Create(*linear)->eliminate(eliminationMethod); + } + else if ( params_.isSequential() ) { + GaussianBayesNet::shared_ptr bn = EliminationTree::Create(*linear)->eliminate(params_.getEliminationFunction()); result = DoglegOptimizerImpl::Iterate(state_.Delta, DoglegOptimizerImpl::ONE_STEP_PER_ITERATION, *bn, graph_, state_.values, ordering, state_.error, dlVerbose); - - } else { + } + else if ( params_.isSPCG() ) { + throw runtime_error("todo: "); + } + else { throw runtime_error("Optimization parameter is invalid: DoglegParams::elimination"); } diff --git a/gtsam/nonlinear/GaussNewtonOptimizer.cpp b/gtsam/nonlinear/GaussNewtonOptimizer.cpp index 8e2b20859..8d09b3edc 100644 --- a/gtsam/nonlinear/GaussNewtonOptimizer.cpp +++ b/gtsam/nonlinear/GaussNewtonOptimizer.cpp @@ -35,13 +35,18 @@ void GaussNewtonOptimizer::iterate() { // Optimize VectorValues delta; { - GaussianFactorGraph::Eliminate eliminationMethod = params_.getEliminationFunction(); - if(params_.elimination == GaussNewtonParams::MULTIFRONTAL) - delta = GaussianJunctionTree(*linear).optimize(eliminationMethod); - else if(params_.elimination == GaussNewtonParams::SEQUENTIAL) - delta = gtsam::optimize(*EliminationTree::Create(*linear)->eliminate(eliminationMethod)); - else + if ( params_.isMultifrontal() ) { + delta = GaussianJunctionTree(*linear).optimize(params_.getEliminationFunction()); + } + else if ( params_.isSequential() ) { + delta = gtsam::optimize(*EliminationTree::Create(*linear)->eliminate(params_.getEliminationFunction())); + } + else if ( params_.isSPCG() ) { + throw runtime_error("todo: "); + } + else { throw runtime_error("Optimization parameter is invalid: GaussNewtonParams::elimination"); + } } // Maybe show output diff --git a/gtsam/nonlinear/LevenbergMarquardtOptimizer.cpp b/gtsam/nonlinear/LevenbergMarquardtOptimizer.cpp index 1b19cfe75..3ad26cb3a 100644 --- a/gtsam/nonlinear/LevenbergMarquardtOptimizer.cpp +++ b/gtsam/nonlinear/LevenbergMarquardtOptimizer.cpp @@ -32,9 
+32,6 @@ void LevenbergMarquardtOptimizer::iterate() { // Linearize graph GaussianFactorGraph::shared_ptr linear = graph_.linearize(state_.values, *params_.ordering); - // Get elimination method - GaussianFactorGraph::Eliminate eliminationMethod = params_.getEliminationFunction(); - // Pull out parameters we'll use const NonlinearOptimizerParams::Verbosity nloVerbosity = params_.verbosity; const LevenbergMarquardtParams::VerbosityLM lmVerbosity = params_.verbosityLM; @@ -67,12 +64,18 @@ void LevenbergMarquardtOptimizer::iterate() { // Optimize VectorValues delta; - if(params_.elimination == SuccessiveLinearizationParams::MULTIFRONTAL) - delta = GaussianJunctionTree(dampedSystem).optimize(eliminationMethod); - else if(params_.elimination == SuccessiveLinearizationParams::SEQUENTIAL) - delta = gtsam::optimize(*EliminationTree::Create(dampedSystem)->eliminate(eliminationMethod)); - else + if ( params_.isMultifrontal() ) { + delta = GaussianJunctionTree(dampedSystem).optimize(params_.getEliminationFunction()); + } + else if ( params_.isSequential() ) { + delta = gtsam::optimize(*EliminationTree::Create(dampedSystem)->eliminate(params_.getEliminationFunction())); + } + else if ( params_.isSPCG() ) { + throw runtime_error("todo: "); + } + else { throw runtime_error("Optimization parameter is invalid: LevenbergMarquardtParams::elimination"); + } if (lmVerbosity >= LevenbergMarquardtParams::TRYLAMBDA) cout << "linear delta norm = " << delta.vector().norm() << endl; if (lmVerbosity >= LevenbergMarquardtParams::TRYDELTA) delta.print("delta"); diff --git a/gtsam/nonlinear/SuccessiveLinearizationOptimizer.h b/gtsam/nonlinear/SuccessiveLinearizationOptimizer.h index f17e01d49..2b3bfd3b6 100644 --- a/gtsam/nonlinear/SuccessiveLinearizationOptimizer.h +++ b/gtsam/nonlinear/SuccessiveLinearizationOptimizer.h @@ -24,42 +24,44 @@ namespace gtsam { class SuccessiveLinearizationParams : public NonlinearOptimizerParams { public: - /** See SuccessiveLinearizationParams::elimination */ - 
enum Elimination { - MULTIFRONTAL, - SEQUENTIAL + /** See SuccessiveLinearizationParams::linearSolverType */ + enum LinearSolverType { + MULTIFRONTAL_CHOLESKY, + MULTIFRONTAL_QR, + SEQUENTIAL_CHOLESKY, + SEQUENTIAL_QR, + SPCG }; - /** See SuccessiveLinearizationParams::factorization */ - enum Factorization { - CHOLESKY, - QR, - }; - - Elimination elimination; ///< The elimination algorithm to use (default: MULTIFRONTAL) - Factorization factorization; ///< The numerical factorization (default: Cholesky) + LinearSolverType linearSolverType; ///< The type of linear solver to use in the nonlinear optimizer boost::optional ordering; ///< The variable elimination ordering, or empty to use COLAMD (default: empty) - SuccessiveLinearizationParams() : - elimination(MULTIFRONTAL), factorization(CHOLESKY) {} + SuccessiveLinearizationParams() : linearSolverType(MULTIFRONTAL_CHOLESKY) {} virtual ~SuccessiveLinearizationParams() {} virtual void print(const std::string& str = "") const { NonlinearOptimizerParams::print(str); - if(elimination == MULTIFRONTAL) - std::cout << " elimination method: MULTIFRONTAL\n"; - else if(elimination == SEQUENTIAL) - std::cout << " elimination method: SEQUENTIAL\n"; - else - std::cout << " elimination method: (invalid)\n"; - - if(factorization == CHOLESKY) - std::cout << " factorization method: CHOLESKY\n"; - else if(factorization == QR) - std::cout << " factorization method: QR\n"; - else - std::cout << " factorization method: (invalid)\n"; + switch ( linearSolverType ) { + case MULTIFRONTAL_CHOLESKY: + std::cout << " linear solver type: MULTIFRONTAL CHOLESKY\n"; + break; + case MULTIFRONTAL_QR: + std::cout << " linear solver type: MULTIFRONTAL QR\n"; + break; + case SEQUENTIAL_CHOLESKY: + std::cout << " linear solver type: SEQUENTIAL CHOLESKY\n"; + break; + case SEQUENTIAL_QR: + std::cout << " linear solver type: SEQUENTIAL QR\n"; + break; + case SPCG: + std::cout << " linear solver type: SPCG\n"; + break; + default: + std::cout << " linear 
solver type: (invalid)\n"; + break; + } if(ordering) std::cout << " ordering: custom\n"; @@ -69,13 +71,32 @@ public: std::cout.flush(); } - GaussianFactorGraph::Eliminate getEliminationFunction() const { - if(factorization == SuccessiveLinearizationParams::CHOLESKY) + inline bool isMultifrontal() const { + return (linearSolverType == MULTIFRONTAL_CHOLESKY) || (linearSolverType == MULTIFRONTAL_QR); + } + + inline bool isSequential() const { + return (linearSolverType == SEQUENTIAL_CHOLESKY) || (linearSolverType == SEQUENTIAL_QR); + } + + inline bool isSPCG() const { + return (linearSolverType == SPCG); + } + + GaussianFactorGraph::Eliminate getEliminationFunction() const { + switch (linearSolverType) { + case MULTIFRONTAL_CHOLESKY: + case SEQUENTIAL_CHOLESKY: return EliminatePreferCholesky; - else if(factorization == SuccessiveLinearizationParams::QR) + + case MULTIFRONTAL_QR: + case SEQUENTIAL_QR: return EliminateQR; - else + + default: throw runtime_error("Nonlinear optimization parameter \"factorization\" is invalid"); + break; + } } }; diff --git a/tests/testNonlinearOptimizer.cpp b/tests/testNonlinearOptimizer.cpp index a926b9e1b..eb6357fbd 100644 --- a/tests/testNonlinearOptimizer.cpp +++ b/tests/testNonlinearOptimizer.cpp @@ -149,9 +149,9 @@ TEST( NonlinearOptimizer, SimpleDLOptimizer ) TEST( NonlinearOptimizer, optimization_method ) { LevenbergMarquardtParams paramsQR; - paramsQR.factorization = LevenbergMarquardtParams::QR; + paramsQR.linearSolverType = LevenbergMarquardtParams::MULTIFRONTAL_QR; LevenbergMarquardtParams paramsChol; - paramsChol.factorization = LevenbergMarquardtParams::CHOLESKY; + paramsChol.linearSolverType = LevenbergMarquardtParams::MULTIFRONTAL_CHOLESKY; example::Graph fg = example::createReallyNonlinearFactorGraph();