Added Cholesky/LDL switch in NonlinearOptimizer, preparing to remove LDL; removed dependency of NonlinearOptimizer on linear solvers.
release/4.3a0
Richard Roberts 2012-05-15 05:08:57 +00:00
parent ff3edc6823
commit 9e0996296a
5 changed files with 54 additions and 52 deletions
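The user-visible effect is a new CHOLESKY entry in the factorization enum of SuccessiveLinearizationParams (see the header diff below). A minimal usage sketch, assuming the LevenbergMarquardtOptimizer(graph, values, params) constructor exercised by the test at the bottom of this commit; `graph` and `initialEstimate` are placeholder names:

    // Select the factorization used for the linear solves inside the optimizer.
    LevenbergMarquardtParams params;
    params.factorization = LevenbergMarquardtParams::CHOLESKY;   // new in this commit; LDL and QR remain
    params.elimination   = LevenbergMarquardtParams::MULTIFRONTAL;
    Values result = LevenbergMarquardtOptimizer(graph, initialEstimate, params).optimize();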

View File

@@ -18,8 +18,8 @@
 #include <gtsam/nonlinear/DoglegOptimizer.h>
-#include <gtsam/linear/GaussianMultifrontalSolver.h>
-#include <gtsam/linear/GaussianSequentialSolver.h>
+#include <gtsam/inference/EliminationTree.h>
+#include <gtsam/linear/GaussianJunctionTree.h>
 #include <gtsam/nonlinear/DoglegOptimizerImpl.h>
 using namespace std;
@@ -33,14 +33,8 @@ void DoglegOptimizer::iterate(void) {
   const Ordering& ordering = *params_.ordering;
   GaussianFactorGraph::shared_ptr linear = graph_.linearize(state_.values, ordering);
-  // Check whether to use QR
-  bool useQR;
-  if(params_.factorization == DoglegParams::LDL)
-    useQR = false;
-  else if(params_.factorization == DoglegParams::QR)
-    useQR = true;
-  else
-    throw runtime_error("Optimization parameter is invalid: DoglegParams::factorization");
+  // Get elimination method
+  GaussianFactorGraph::Eliminate eliminationMethod = params_.getEliminationFunction();
   // Pull out parameters we'll use
   const bool dlVerbose = (params_.dlVerbosity > DoglegParams::SILENT);
@@ -49,11 +43,12 @@ void DoglegOptimizer::iterate(void) {
   DoglegOptimizerImpl::IterationResult result;
   if(params_.elimination == DoglegParams::MULTIFRONTAL) {
-    GaussianBayesTree::shared_ptr bt = GaussianMultifrontalSolver(*linear, useQR).eliminate();
-    result = DoglegOptimizerImpl::Iterate(state_.Delta, DoglegOptimizerImpl::ONE_STEP_PER_ITERATION, *bt, graph_, state_.values, ordering, state_.error, dlVerbose);
+    GaussianBayesTree bt;
+    bt.insert(GaussianJunctionTree(*linear).eliminate(eliminationMethod));
+    result = DoglegOptimizerImpl::Iterate(state_.Delta, DoglegOptimizerImpl::ONE_STEP_PER_ITERATION, bt, graph_, state_.values, ordering, state_.error, dlVerbose);
   } else if(params_.elimination == DoglegParams::SEQUENTIAL) {
-    GaussianBayesNet::shared_ptr bn = GaussianSequentialSolver(*linear, useQR).eliminate();
+    GaussianBayesNet::shared_ptr bn = EliminationTree<GaussianFactor>::Create(*linear)->eliminate(eliminationMethod);
     result = DoglegOptimizerImpl::Iterate(state_.Delta, DoglegOptimizerImpl::ONE_STEP_PER_ITERATION, *bn, graph_, state_.values, ordering, state_.error, dlVerbose);
   } else {
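The eliminationMethod functor used above is resolved from the factorization setting by the new getEliminationFunction() added to SuccessiveLinearizationParams further down in this commit; roughly:

    // Sketch of the mapping performed by getEliminationFunction()
    // (see the SuccessiveLinearizationParams header diff below):
    //   CHOLESKY -> EliminatePreferCholesky
    //   LDL      -> EliminatePreferLDL
    //   QR       -> EliminateQR
    GaussianFactorGraph::Eliminate eliminationMethod = params_.getEliminationFunction();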

View File

@@ -16,9 +16,9 @@
  * @created Feb 26, 2012
  */
+#include <gtsam/inference/EliminationTree.h>
+#include <gtsam/linear/GaussianJunctionTree.h>
 #include <gtsam/nonlinear/GaussNewtonOptimizer.h>
-#include <gtsam/linear/GaussianMultifrontalSolver.h>
-#include <gtsam/linear/GaussianSequentialSolver.h>
 using namespace std;
@@ -32,29 +32,23 @@ void GaussNewtonOptimizer::iterate() {
   // Linearize graph
   GaussianFactorGraph::shared_ptr linear = graph_.linearize(current.values, *params_.ordering);
-  // Check whether to use QR
-  bool useQR;
-  if(params_.factorization == GaussNewtonParams::LDL)
-    useQR = false;
-  else if(params_.factorization == GaussNewtonParams::QR)
-    useQR = true;
-  else
-    throw runtime_error("Optimization parameter is invalid: GaussNewtonParams::factorization");
   // Optimize
-  VectorValues::shared_ptr delta;
-  if(params_.elimination == GaussNewtonParams::MULTIFRONTAL)
-    delta = GaussianMultifrontalSolver(*linear, useQR).optimize();
-  else if(params_.elimination == GaussNewtonParams::SEQUENTIAL)
-    delta = GaussianSequentialSolver(*linear, useQR).optimize();
-  else
-    throw runtime_error("Optimization parameter is invalid: GaussNewtonParams::elimination");
+  VectorValues delta;
+  {
+    GaussianFactorGraph::Eliminate eliminationMethod = params_.getEliminationFunction();
+    if(params_.elimination == GaussNewtonParams::MULTIFRONTAL)
+      delta = GaussianJunctionTree(*linear).optimize(eliminationMethod);
+    else if(params_.elimination == GaussNewtonParams::SEQUENTIAL)
+      delta = gtsam::optimize(*EliminationTree<GaussianFactor>::Create(*linear)->eliminate(eliminationMethod));
+    else
+      throw runtime_error("Optimization parameter is invalid: GaussNewtonParams::elimination");
+  }
   // Maybe show output
-  if(params_.verbosity >= NonlinearOptimizerParams::DELTA) delta->print("delta");
+  if(params_.verbosity >= NonlinearOptimizerParams::DELTA) delta.print("delta");
   // Create new state with new values and new error
-  state_.values = current.values.retract(*delta, *params_.ordering);
+  state_.values = current.values.retract(delta, *params_.ordering);
   state_.error = graph_.error(state_.values);
   ++ state_.iterations;
 }
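Stripped of the optimizer bookkeeping, the new solve path reduces to the following sketch, which uses only calls that appear in this diff; `linear` is the linearized GaussianFactorGraph and `eliminationMethod` is obtained from the params as above:

    // Multifrontal: solve the linear system through a junction tree.
    VectorValues deltaMultifrontal = GaussianJunctionTree(*linear).optimize(eliminationMethod);
    // Sequential: eliminate to a Gaussian Bayes net, then back-substitute.
    VectorValues deltaSequential =
        gtsam::optimize(*EliminationTree<GaussianFactor>::Create(*linear)->eliminate(eliminationMethod));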

View File

@@ -19,8 +19,8 @@
 #include <gtsam/nonlinear/LevenbergMarquardtOptimizer.h>
 #include <gtsam/base/cholesky.h> // For NegativeMatrixException
-#include <gtsam/linear/GaussianMultifrontalSolver.h>
-#include <gtsam/linear/GaussianSequentialSolver.h>
+#include <gtsam/inference/EliminationTree.h>
+#include <gtsam/linear/GaussianJunctionTree.h>
 using namespace std;
@@ -32,14 +32,8 @@ void LevenbergMarquardtOptimizer::iterate() {
   // Linearize graph
   GaussianFactorGraph::shared_ptr linear = graph_.linearize(state_.values, *params_.ordering);
-  // Check whether to use QR
-  bool useQR;
-  if(params_.factorization == LevenbergMarquardtParams::LDL)
-    useQR = false;
-  else if(params_.factorization == LevenbergMarquardtParams::QR)
-    useQR = true;
-  else
-    throw runtime_error("Optimization parameter is invalid: LevenbergMarquardtParams::factorization");
+  // Get elimination method
+  GaussianFactorGraph::Eliminate eliminationMethod = params_.getEliminationFunction();
   // Pull out parameters we'll use
   const NonlinearOptimizerParams::Verbosity nloVerbosity = params_.verbosity;
@@ -72,19 +66,19 @@ void LevenbergMarquardtOptimizer::iterate() {
     try {
       // Optimize
-      VectorValues::shared_ptr delta;
-      if(params_.elimination == LevenbergMarquardtParams::MULTIFRONTAL)
-        delta = GaussianMultifrontalSolver(dampedSystem, useQR).optimize();
-      else if(params_.elimination == LevenbergMarquardtParams::SEQUENTIAL)
-        delta = GaussianSequentialSolver(dampedSystem, useQR).optimize();
+      VectorValues delta;
+      if(params_.elimination == SuccessiveLinearizationParams::MULTIFRONTAL)
+        delta = GaussianJunctionTree(dampedSystem).optimize(eliminationMethod);
+      else if(params_.elimination == SuccessiveLinearizationParams::SEQUENTIAL)
+        delta = gtsam::optimize(*EliminationTree<GaussianFactor>::Create(dampedSystem)->eliminate(eliminationMethod));
       else
         throw runtime_error("Optimization parameter is invalid: LevenbergMarquardtParams::elimination");
-      if (lmVerbosity >= LevenbergMarquardtParams::TRYLAMBDA) cout << "linear delta norm = " << delta->vector().norm() << endl;
-      if (lmVerbosity >= LevenbergMarquardtParams::TRYDELTA) delta->print("delta");
+      if (lmVerbosity >= LevenbergMarquardtParams::TRYLAMBDA) cout << "linear delta norm = " << delta.vector().norm() << endl;
+      if (lmVerbosity >= LevenbergMarquardtParams::TRYDELTA) delta.print("delta");
       // update values
-      Values newValues = state_.values.retract(*delta, *params_.ordering);
+      Values newValues = state_.values.retract(delta, *params_.ordering);
       // create new optimization state with more adventurous lambda
       double error = graph_.error(newValues);

View File

@@ -32,6 +32,7 @@ public:
   /** See SuccessiveLinearizationParams::factorization */
   enum Factorization {
+    CHOLESKY,
     LDL,
     QR,
   };
@@ -54,7 +55,9 @@ public:
     else
       std::cout << " elimination method: (invalid)\n";
-    if(factorization == LDL)
+    if(factorization == CHOLESKY)
+      std::cout << " factorization method: CHOLESKY\n";
+    else if(factorization == LDL)
       std::cout << " factorization method: LDL\n";
     else if(factorization == QR)
       std::cout << " factorization method: QR\n";
@@ -68,6 +71,17 @@ public:
     std::cout.flush();
   }
+  GaussianFactorGraph::Eliminate getEliminationFunction() const {
+    if(factorization == SuccessiveLinearizationParams::CHOLESKY)
+      return EliminatePreferCholesky;
+    else if(factorization == SuccessiveLinearizationParams::LDL)
+      return EliminatePreferLDL;
+    else if(factorization == SuccessiveLinearizationParams::QR)
+      return EliminateQR;
+    else
+      throw std::runtime_error("Nonlinear optimization parameter \"factorization\" is invalid");
+  }
 };
 } /* namespace gtsam */
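With getEliminationFunction() on the params class, each optimizer obtains its eliminate functor directly and no longer needs GaussianMultifrontalSolver or GaussianSequentialSolver, which is the dependency this commit removes. A condensed sketch of the consuming side, using the calls from the .cpp diffs above:

    // Inside an optimizer's iterate(): resolve the functor once, then eliminate/solve with it.
    GaussianFactorGraph::Eliminate eliminationMethod = params_.getEliminationFunction();
    VectorValues delta = GaussianJunctionTree(*linear).optimize(eliminationMethod);  // multifrontal path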

View File

@@ -153,6 +153,8 @@ TEST( NonlinearOptimizer, optimization_method )
   paramsQR.factorization = LevenbergMarquardtParams::QR;
   LevenbergMarquardtParams paramsLDL;
   paramsLDL.factorization = LevenbergMarquardtParams::LDL;
+  LevenbergMarquardtParams paramsChol;
+  paramsChol.factorization = LevenbergMarquardtParams::CHOLESKY;
   example::Graph fg = example::createReallyNonlinearFactorGraph();
@@ -165,6 +167,9 @@ TEST( NonlinearOptimizer, optimization_method )
   Values actualMFLDL = LevenbergMarquardtOptimizer(fg, c0, paramsLDL).optimize();
   DOUBLES_EQUAL(0,fg.error(actualMFLDL),tol);
+  Values actualMFChol = LevenbergMarquardtOptimizer(fg, c0, paramsChol).optimize();
+  DOUBLES_EQUAL(0,fg.error(actualMFChol),tol);
 }
/* ************************************************************************* */