Continuing code simplification / restructuring

release/4.3a0
Richard Roberts 2012-04-05 02:45:47 +00:00
parent 48e277a095
commit c83a399bba
10 changed files with 98 additions and 114 deletions

DoglegOptimizer.cpp (View File)

@@ -27,11 +27,13 @@ using namespace std;
namespace gtsam {
/* ************************************************************************* */
NonlinearOptimizer::SharedState DoglegOptimizer::iterate(const NonlinearOptimizer::SharedState& current) const {
NonlinearOptimizer::SharedState DoglegOptimizer::iterate(const NonlinearOptimizer::SharedState& _current) const {
const DoglegState& current = dynamic_cast<const DoglegState&>(*_current);
// Linearize graph
const Ordering& ordering = *ordering(current->values);
GaussianFactorGraph::shared_ptr linear = graph_->linearize(current->values, ordering);
const Ordering& ordering = this->ordering(current.values);
GaussianFactorGraph::shared_ptr linear = graph_->linearize(current.values, ordering);
// Check whether to use QR
bool useQR;
@@ -50,24 +52,24 @@ NonlinearOptimizer::SharedState DoglegOptimizer::iterate(const NonlinearOptimize
if(params_->elimination == DoglegParams::MULTIFRONTAL) {
GaussianBayesTree::shared_ptr bt = GaussianMultifrontalSolver(*linear, useQR).eliminate();
result = DoglegOptimizerImpl::Iterate(current->Delta, DoglegOptimizerImpl::ONE_STEP_PER_ITERATION, *bt, *graph_, *values(), ordering, error(), dlVerbose);
result = DoglegOptimizerImpl::Iterate(current.Delta, DoglegOptimizerImpl::ONE_STEP_PER_ITERATION, *bt, *graph_, current.values, ordering, current.error, dlVerbose);
} else if(params_->elimination == DoglegParams::SEQUENTIAL) {
GaussianBayesNet::shared_ptr bn = GaussianSequentialSolver(*linear, useQR).eliminate();
result = DoglegOptimizerImpl::Iterate(current->Delta, DoglegOptimizerImpl::ONE_STEP_PER_ITERATION, *bn, *graph_, *values(), ordering, error(), dlVerbose);
result = DoglegOptimizerImpl::Iterate(current.Delta, DoglegOptimizerImpl::ONE_STEP_PER_ITERATION, *bn, *graph_, current.values, ordering, current.error, dlVerbose);
} else {
throw runtime_error("Optimization parameter is invalid: DoglegParams::elimination");
}
// Maybe show output
if(params_->verbosity >= NonlinearOptimizerParams::DELTA) result.dx_d.print("delta");
// Create new state with new values and new error
DoglegOptimizer::SharedState newState = boost::make_shared<DoglegState>();
// Update values
newState->values = current->values.retract(result.dx_d, ordering);
SharedState newState = boost::make_shared<DoglegState>();
newState->values = current.values.retract(result.dx_d, ordering);
newState->error = result.f_error;
newState->iterations = current->iterations + 1;
newState->iterations = current.iterations + 1;
newState->Delta = result.Delta;
return newState;
@@ -76,7 +78,7 @@ NonlinearOptimizer::SharedState DoglegOptimizer::iterate(const NonlinearOptimize
/* ************************************************************************* */
NonlinearOptimizer::SharedState DoglegOptimizer::initialState(const Values& initialValues) const {
SharedState initial = boost::make_shared<DoglegState>();
defaultInitialState(initialValues);
defaultInitialState(initialValues, *initial);
initial->Delta = params_->deltaInitial;
return initial;
}
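
With iterate() now taking and returning a shared state instead of reading optimizer members, a caller drives the Dogleg optimizer roughly as below. This is a minimal sketch only, assuming a NonlinearFactorGraph graph and Values initialValues already exist; the actual convergence loop lives in NonlinearOptimizer::defaultOptimize, changed later in this commit.

DoglegOptimizer optimizer(graph, DoglegParams());
NonlinearOptimizer::SharedState state = optimizer.initialState(initialValues);
for (int i = 0; i < 10; ++i)         // fixed iteration count only for illustration
  state = optimizer.iterate(state);  // each call returns a fresh DoglegState
// state->values, state->error and state->iterations hold the current result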

DoglegOptimizer.h (View File)

@@ -67,7 +67,7 @@ class DoglegOptimizer : public SuccessiveLinearizationOptimizer {
public:
typedef boost::shared_ptr<const DoglegParams> SharedParams;
typedef boost::shared_ptr<DoglegParams> SharedParams;
typedef boost::shared_ptr<DoglegState> SharedState;
typedef boost::shared_ptr<DoglegOptimizer> shared_ptr;
@@ -83,8 +83,7 @@ public:
* @param params The optimization parameters
*/
DoglegOptimizer(const NonlinearFactorGraph& graph,
const DoglegParams& params = DoglegParams(),
const Ordering& ordering = Ordering()) :
const DoglegParams& params = DoglegParams()) :
SuccessiveLinearizationOptimizer(SharedGraph(new NonlinearFactorGraph(graph))),
params_(new DoglegParams(params)) {}
@@ -98,7 +97,8 @@ public:
*/
DoglegOptimizer(const NonlinearFactorGraph& graph, const Ordering& ordering) :
SuccessiveLinearizationOptimizer(SharedGraph(new NonlinearFactorGraph(graph))),
params_(new DoglegParams()) {}
params_(new DoglegParams()) {
params_->ordering = ordering; }
/** Standard constructor, requires a nonlinear factor graph, initial
* variable assignments, and optimization parameters.
@@ -107,8 +107,7 @@ public:
* @param params The optimization parameters
*/
DoglegOptimizer(const SharedGraph& graph,
const DoglegParams& params = DoglegParams(),
const SharedOrdering& ordering = SharedOrdering()) :
const DoglegParams& params = DoglegParams()) :
SuccessiveLinearizationOptimizer(graph),
params_(new DoglegParams(params)) {}
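
With the Ordering argument folded into the parameters, code that previously used the three-argument constructor now either sets the ordering on the params or uses the graph-plus-ordering convenience constructor. A sketch, with graph and myOrdering as placeholders; the same pattern applies to the GaussNewton and LevenbergMarquardt constructors below.

DoglegParams params;
params.ordering = myOrdering;               // boost::optional<Ordering>; leave unset to use COLAMD
DoglegOptimizer viaParams(graph, params);
DoglegOptimizer viaCtor(graph, myOrdering); // equivalent convenience constructor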

GaussNewtonOptimizer.cpp (View File)

@@ -25,25 +25,28 @@ using namespace std;
namespace gtsam {
/* ************************************************************************* */
NonlinearOptimizer::auto_ptr GaussNewtonOptimizer::iterate() const {
NonlinearOptimizer::SharedState GaussNewtonOptimizer::iterate(const NonlinearOptimizer::SharedState& _current) const {
const GaussNewtonState& current = dynamic_cast<const GaussNewtonState&>(*_current);
// Linearize graph
GaussianFactorGraph::shared_ptr linear = graph_->linearize(*values_, *ordering_);
const Ordering& ordering = this->ordering(current.values);
GaussianFactorGraph::shared_ptr linear = graph_->linearize(current.values, ordering);
// Check whether to use QR
bool useQR;
if(gnParams_->factorization == GaussNewtonParams::LDL)
if(params_->factorization == GaussNewtonParams::LDL)
useQR = false;
else if(gnParams_->factorization == GaussNewtonParams::QR)
else if(params_->factorization == GaussNewtonParams::QR)
useQR = true;
else
throw runtime_error("Optimization parameter is invalid: GaussNewtonParams::factorization");
// Optimize
VectorValues::shared_ptr delta;
if(gnParams_->elimination == GaussNewtonParams::MULTIFRONTAL)
if(params_->elimination == GaussNewtonParams::MULTIFRONTAL)
delta = GaussianMultifrontalSolver(*linear, useQR).optimize();
else if(gnParams_->elimination == GaussNewtonParams::SEQUENTIAL)
else if(params_->elimination == GaussNewtonParams::SEQUENTIAL)
delta = GaussianSequentialSolver(*linear, useQR).optimize();
else
throw runtime_error("Optimization parameter is invalid: GaussNewtonParams::elimination");
@@ -51,21 +54,19 @@ NonlinearOptimizer::auto_ptr GaussNewtonOptimizer::iterate() const {
// Maybe show output
if(params_->verbosity >= NonlinearOptimizerParams::DELTA) delta->print("delta");
// Update values
SharedValues newValues(new Values(values_->retract(*delta, *ordering_)));
double newError = graph_->error(*newValues);
// Create new state with new values and new error
SharedState newState = boost::make_shared<GaussNewtonState>();
newState->values = current.values.retract(*delta, ordering);
newState->error = graph_->error(newState->values);
newState->iterations = current.iterations + 1;
// Create new optimizer with new values and new error
NonlinearOptimizer::auto_ptr newOptimizer(new GaussNewtonOptimizer(
*this, newValues, newError));
return newOptimizer;
return newState;
}
/* ************************************************************************* */
NonlinearOptimizer::SharedState GaussNewtonOptimizer::initialState(const Values& initialValues) const {
SharedState initial = boost::make_shared<GaussNewtonState>();
defaultInitialState(*initial);
defaultInitialState(initialValues, *initial);
return initial;
}
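
The factorization and elimination choices are now read from the shared params_ pointer rather than a solver-specific gnParams_; configuring them looks roughly like this sketch, using the enum values referenced above (graph is a placeholder).

GaussNewtonParams params;
params.factorization = GaussNewtonParams::QR;            // default is LDL
params.elimination   = GaussNewtonParams::MULTIFRONTAL;  // or SEQUENTIAL
GaussNewtonOptimizer optimizer(graph, params);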

GaussNewtonOptimizer.h (View File)

@@ -39,7 +39,7 @@ class GaussNewtonOptimizer : public SuccessiveLinearizationOptimizer {
public:
typedef boost::shared_ptr<const GaussNewtonParams> SharedParams;
typedef boost::shared_ptr<GaussNewtonParams> SharedParams;
/// @name Standard interface
/// @{
@@ -53,8 +53,7 @@ public:
* @param params The optimization parameters
*/
GaussNewtonOptimizer(const NonlinearFactorGraph& graph,
const GaussNewtonParams& params = GaussNewtonParams(),
const Ordering& ordering = Ordering()) :
const GaussNewtonParams& params = GaussNewtonParams()) :
SuccessiveLinearizationOptimizer(SharedGraph(new NonlinearFactorGraph(graph))),
params_(new GaussNewtonParams(params)) {}
@@ -68,7 +67,8 @@ public:
*/
GaussNewtonOptimizer(const NonlinearFactorGraph& graph, const Ordering& ordering) :
SuccessiveLinearizationOptimizer(SharedGraph(new NonlinearFactorGraph(graph))),
params_(new GaussNewtonParams()) {}
params_(new GaussNewtonParams()) {
params_->ordering = ordering; }
/** Standard constructor, requires a nonlinear factor graph, initial
* variable assignments, and optimization parameters.
@@ -77,8 +77,7 @@ public:
* @param params The optimization parameters
*/
GaussNewtonOptimizer(const SharedGraph& graph,
const GaussNewtonParams& params = GaussNewtonParams(),
const SharedOrdering& ordering = SharedOrdering()) :
const GaussNewtonParams& params = GaussNewtonParams()) :
SuccessiveLinearizationOptimizer(graph),
params_(new GaussNewtonParams(params)) {}

LevenbergMarquardtOptimizer.cpp (View File)

@@ -27,29 +27,36 @@ using namespace std;
namespace gtsam {
/* ************************************************************************* */
NonlinearOptimizer::SharedState LevenbergMarquardtOptimizer::iterate(const NonlinearOptimizer::SharedState& current) const {
NonlinearOptimizer::SharedState LevenbergMarquardtOptimizer::iterate(const NonlinearOptimizer::SharedState& _current) const {
const LevenbergMarquardtState& current = dynamic_cast<const LevenbergMarquardtState&>(*_current);
// Linearize graph
GaussianFactorGraph::shared_ptr linear = graph_->linearize(*values_, *ordering_);
const Ordering& ordering = this->ordering(current.values);
GaussianFactorGraph::shared_ptr linear = graph_->linearize(current.values, ordering);
// Check whether to use QR
bool useQR;
if(lmParams_->factorization == LevenbergMarquardtParams::LDL)
if(params_->factorization == LevenbergMarquardtParams::LDL)
useQR = false;
else if(lmParams_->factorization == LevenbergMarquardtParams::QR)
else if(params_->factorization == LevenbergMarquardtParams::QR)
useQR = true;
else
throw runtime_error("Optimization parameter is invalid: LevenbergMarquardtParams::factorization");
// Pull out parameters we'll use
const NonlinearOptimizerParams::Verbosity nloVerbosity = params_->verbosity;
const LevenbergMarquardtParams::LMVerbosity lmVerbosity = lmParams_->lmVerbosity;
const double lambdaFactor = lmParams_->lambdaFactor;
const LevenbergMarquardtParams::LMVerbosity lmVerbosity = params_->lmVerbosity;
const double lambdaFactor = params_->lambdaFactor;
// Variables to update during try_lambda loop
double lambda = lambda_;
double next_error = error();
SharedValues next_values = values();
double lambda = current.lambda;
double next_error = current.error;
Values next_values = current.values;
// Compute dimensions if we haven't done it yet
if(!dimensions_)
dimensions_.reset(new vector<size_t>(current.values.dims(ordering)));
// Keep increasing lambda until we make progress
while(true) {
@@ -79,9 +86,9 @@ NonlinearOptimizer::SharedState LevenbergMarquardtOptimizer::iterate(const Nonli
// Optimize
VectorValues::shared_ptr delta;
if(lmParams_->elimination == LevenbergMarquardtParams::MULTIFRONTAL)
if(params_->elimination == LevenbergMarquardtParams::MULTIFRONTAL)
delta = GaussianMultifrontalSolver(dampedSystem, useQR).optimize();
else if(lmParams_->elimination == LevenbergMarquardtParams::SEQUENTIAL)
else if(params_->elimination == LevenbergMarquardtParams::SEQUENTIAL)
delta = GaussianSequentialSolver(dampedSystem, useQR).optimize();
else
throw runtime_error("Optimization parameter is invalid: LevenbergMarquardtParams::elimination");
@@ -90,15 +97,15 @@ NonlinearOptimizer::SharedState LevenbergMarquardtOptimizer::iterate(const Nonli
if (lmVerbosity >= LevenbergMarquardtParams::TRYDELTA) delta->print("delta");
// update values
SharedValues newValues(new Values(values_->retract(*delta, *ordering_)));
Values newValues = current.values.retract(*delta, ordering);
// create new optimization state with more adventurous lambda
double error = graph_->error(*newValues);
double error = graph_->error(newValues);
if (lmVerbosity >= LevenbergMarquardtParams::TRYLAMBDA) cout << "next error = " << error << endl;
if( error <= error_ ) {
next_values = newValues;
if(error <= current.error) {
next_values.swap(newValues);
next_error = error;
lambda /= lambdaFactor;
break;
@@ -106,7 +113,7 @@ NonlinearOptimizer::SharedState LevenbergMarquardtOptimizer::iterate(const Nonli
// Either we're not cautious, or the same lambda was worse than the current error.
// The more adventurous lambda was worse too, so make lambda more conservative
// and keep the same values.
if(lambda >= lmParams_->lambdaUpperBound) {
if(lambda >= params_->lambdaUpperBound) {
if(nloVerbosity >= NonlinearOptimizerParams::ERROR)
cout << "Warning: Levenberg-Marquardt giving up because cannot decrease error with maximum lambda" << endl;
break;
@@ -120,7 +127,7 @@ NonlinearOptimizer::SharedState LevenbergMarquardtOptimizer::iterate(const Nonli
// Either we're not cautious, or the same lambda was worse than the current error.
// The more adventurous lambda was worse too, so make lambda more conservative
// and keep the same values.
if(lambda >= lmParams_->lambdaUpperBound) {
if(lambda >= params_->lambdaUpperBound) {
if(nloVerbosity >= NonlinearOptimizerParams::ERROR)
cout << "Warning: Levenberg-Marquardt giving up because cannot decrease error with maximum lambda" << endl;
break;
@@ -136,8 +143,8 @@ NonlinearOptimizer::SharedState LevenbergMarquardtOptimizer::iterate(const Nonli
LevenbergMarquardtOptimizer::SharedState newState = boost::make_shared<LevenbergMarquardtState>();
newState->values = next_values;
newState->error = next_error;
newState->iterations = current->iterations + 1;
newState->Delta = lambda;
newState->iterations = current.iterations + 1;
newState->lambda = lambda;
return newState;
}
@@ -145,7 +152,7 @@ NonlinearOptimizer::SharedState LevenbergMarquardtOptimizer::iterate(const Nonli
/* ************************************************************************* */
NonlinearOptimizer::SharedState LevenbergMarquardtOptimizer::initialState(const Values& initialValues) const {
SharedState initial = boost::make_shared<LevenbergMarquardtState>();
defaultInitialState(*initial);
defaultInitialState(initialValues, *initial);
initial->lambda = params_->lambdaInitial;
return initial;
}
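
The try_lambda loop kept above follows the usual Levenberg-Marquardt damping schedule: accept the step and relax lambda when the damped solve reduces the error, otherwise tighten lambda and retry until lambdaUpperBound is reached. Below is a self-contained sketch of just that control flow, not GTSAM API; trial stands in for the damped linearize-solve-retract-error sequence.

#include <functional>

// Returns the lambda to carry into the next outer iteration.
double adaptLambda(double lambda, double currentError,
                   double lambdaFactor, double lambdaUpperBound,
                   const std::function<double(double)>& trial) {
  while (true) {
    const double newError = trial(lambda);                       // error after the damped step
    if (newError <= currentError) return lambda / lambdaFactor;  // accept and be bolder
    if (lambda >= lambdaUpperBound) return lambda;               // give up at the upper bound
    lambda *= lambdaFactor;                                      // dampen more and retry
  }
}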

LevenbergMarquardtOptimizer.h (View File)

@@ -76,7 +76,7 @@ class LevenbergMarquardtOptimizer : public SuccessiveLinearizationOptimizer {
public:
typedef boost::shared_ptr<const LevenbergMarquardtParams> SharedParams;
typedef boost::shared_ptr<LevenbergMarquardtParams> SharedParams;
typedef boost::shared_ptr<LevenbergMarquardtState> SharedState;
typedef boost::shared_ptr<LevenbergMarquardtOptimizer> shared_ptr;
@@ -92,8 +92,7 @@ public:
* @param params The optimization parameters
*/
LevenbergMarquardtOptimizer(const NonlinearFactorGraph& graph,
const LevenbergMarquardtParams& params = LevenbergMarquardtParams(),
const Ordering& ordering = Ordering()) :
const LevenbergMarquardtParams& params = LevenbergMarquardtParams()) :
SuccessiveLinearizationOptimizer(SharedGraph(new NonlinearFactorGraph(graph))),
params_(new LevenbergMarquardtParams(params)) {}
@@ -105,10 +104,10 @@ public:
* @param values The initial variable assignments
* @param params The optimization parameters
*/
LevenbergMarquardtOptimizer(const NonlinearFactorGraph& graph,
const Ordering& ordering) :
LevenbergMarquardtOptimizer(const NonlinearFactorGraph& graph, const Ordering& ordering) :
SuccessiveLinearizationOptimizer(SharedGraph(new NonlinearFactorGraph(graph))),
params_(new LevenbergMarquardtParams()) {}
params_(new LevenbergMarquardtParams()) {
params_->ordering = ordering; }
/** Standard constructor, requires a nonlinear factor graph, initial
* variable assignments, and optimization parameters.
@@ -117,8 +116,7 @@ public:
* @param params The optimization parameters
*/
LevenbergMarquardtOptimizer(const SharedGraph& graph,
const LevenbergMarquardtParams& params = LevenbergMarquardtParams(),
const SharedOrdering& ordering = SharedOrdering()) :
const LevenbergMarquardtParams& params = LevenbergMarquardtParams()) :
SuccessiveLinearizationOptimizer(graph),
params_(new LevenbergMarquardtParams(params)) {}
@@ -151,7 +149,7 @@ protected:
typedef boost::shared_ptr<const std::vector<size_t> > SharedDimensions;
SharedParams params_;
const SharedDimensions dimensions_;
mutable SharedDimensions dimensions_; // Mutable because we compute it when needed and cache it
};
}

NonlinearOptimizer.cpp (View File)

@@ -24,25 +24,11 @@ using namespace std;
namespace gtsam {
/* ************************************************************************* */
NonlinearOptimizer::shared_ptr NonlinearOptimizer::update(const NonlinearOptimizer::SharedGraph& newGraph) const {
shared_ptr result(this->clone());
result->graph_ = newGraph;
return result;
}
/* ************************************************************************* */
NonlinearOptimizer::shared_ptr NonlinearOptimizer::update(const NonlinearOptimizer::SharedParams& newParams) const {
shared_ptr result(this->clone());
result->params_ = newParams;
return result;
}
/* ************************************************************************* */
NonlinearOptimizer::SharedState NonlinearOptimizer::defaultOptimize(const SharedState& initial) const {
const SharedParams& params = this->params();
double currentError = initial->error();
double currentError = initial->error;
// check if we're already close enough
if(currentError <= params->errorTol) {
@@ -52,11 +38,11 @@ NonlinearOptimizer::SharedState NonlinearOptimizer::defaultOptimize(const Shared
}
// Maybe show output
if (params->verbosity >= NonlinearOptimizerParams::VALUES) this->values()->print("Initial values");
if (params->verbosity >= NonlinearOptimizerParams::ERROR) cout << "Initial error: " << this->error() << endl;
if (params->verbosity >= NonlinearOptimizerParams::VALUES) initial->values.print("Initial values");
if (params->verbosity >= NonlinearOptimizerParams::ERROR) cout << "Initial error: " << initial->error << endl;
// Return if we already have too many iterations
if(this->iterations() >= params->maxIterations)
if(initial->iterations >= params->maxIterations)
return initial;
// Iterative loop
@@ -67,8 +53,8 @@ NonlinearOptimizer::SharedState NonlinearOptimizer::defaultOptimize(const Shared
next = this->iterate(next);
// Maybe show output
if (params->verbosity >= NonlinearOptimizerParams::VALUES) next->values->print("newValues");
if (params->verbosity >= NonlinearOptimizerParams::ERROR) cout << "newError: " << next->error << endl;
if(params->verbosity >= NonlinearOptimizerParams::VALUES) next->values.print("newValues");
if(params->verbosity >= NonlinearOptimizerParams::ERROR) cout << "newError: " << next->error << endl;
} while(next->iterations < params->maxIterations &&
!checkConvergence(params->relativeErrorTol, params->absoluteErrorTol,
params->errorTol, currentError, next->error, params->verbosity));
@@ -83,10 +69,10 @@ NonlinearOptimizer::SharedState NonlinearOptimizer::defaultOptimize(const Shared
}
/* ************************************************************************* */
void NonlinearOptimizer::defaultInitialState(NonlinearOptimizerState& initial) const {
state.values = initialValues;
state.error = graph_->error(initialValues);
state.iterations = 0;
void NonlinearOptimizer::defaultInitialState(const Values& initialValues, NonlinearOptimizerState& initialState) const {
initialState.values = initialValues;
initialState.error = graph_->error(initialValues);
initialState.iterations = 0;
}
/* ************************************************************************* */
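
defaultInitialState now fills a caller-supplied state from the initial Values, and defaultOptimize runs the shared convergence loop over explicit states. The intended chaining in a concrete optimizer would read roughly as follows; this optimize() wrapper is hypothetical and only for illustration, it is not part of this diff.

// Hypothetical entry point on a concrete optimizer (illustration only):
// build the initial state, then run the shared convergence loop.
NonlinearOptimizer::SharedState DoglegOptimizer::optimize(const Values& initialValues) const {
  return defaultOptimize(initialState(initialValues));
}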

NonlinearOptimizer.h (View File)

@@ -36,7 +36,7 @@ public:
LINEAR
};
int maxIterations; ///< The maximum iterations to stop iterating (default 100)
size_t maxIterations; ///< The maximum iterations to stop iterating (default 100)
double relativeErrorTol; ///< The maximum relative error decrease to stop iterating (default 1e-5)
double absoluteErrorTol; ///< The maximum absolute error decrease to stop iterating (default 1e-5)
double errorTol; ///< The maximum total error to stop iterating (default 0.0)
@@ -80,9 +80,6 @@ public:
/** Virtual destructor */
virtual ~NonlinearOptimizerState() {}
/** Clone the state (i.e. make a copy of the derived class) */
virtual boost::shared_ptr<NonlinearOptimizerState> clone() = 0;
};
@@ -233,7 +230,7 @@ protected:
SharedState defaultOptimize(const SharedState& initial) const;
/** Initialize a state, using the current error and 0 iterations */
void defaultInitialState(SharedState& initial) const;
void defaultInitialState(const Values& initialValues, NonlinearOptimizerState& initialState) const;
/** Constructor for initial construction of base classes.
*/
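
The tolerance fields above are what checkConvergence consumes inside defaultOptimize; a caller tightening them would do roughly the following (a sketch; GaussNewtonParams inherits these fields through SuccessiveLinearizationParams).

GaussNewtonParams params;
params.maxIterations    = 50;     // size_t after this change
params.relativeErrorTol = 1e-6;
params.absoluteErrorTol = 1e-6;
params.errorTol         = 0.0;    // disabled, the default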

SuccessiveLinearizationOptimizer.cpp (View File)

@@ -22,20 +22,20 @@
namespace gtsam {
/* ************************************************************************* */
const SuccessiveLinearizationOptimizer::SharedOrdering& SuccessiveLinearizationOptimizer::ordering(const Values& values) const {
const Ordering& SuccessiveLinearizationOptimizer::ordering(const Values& values) const {
if(!ordering_) {
SharedParams params =
boost::dynamic_pointer_cast<const SuccessiveLinearizationParams>(params());
boost::dynamic_pointer_cast<const SuccessiveLinearizationParams>(this->params());
// If we're using a COLAMD ordering, compute it
if(!params.ordering)
ordering_ = graph_->orderingCOLAMD(values);
if(params->ordering)
ordering_ = params->ordering;
else
ordering_ = params.ordering;
ordering_ = *graph_->orderingCOLAMD(values);
}
return ordering_;
return *ordering_;
}
} /* namespace gtsam */
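
ordering_ is now a mutable boost::optional computed on first use, taken from params->ordering when supplied and otherwise from COLAMD. The caching idiom in isolation, as a standalone sketch rather than GTSAM code:

#include <boost/optional.hpp>

class Example {
public:
  // Compute-on-first-use accessor, mirroring SuccessiveLinearizationOptimizer::ordering()
  const int& ordering() const {
    if (!ordering_)
      ordering_ = expensiveComputation();  // stands in for graph_->orderingCOLAMD(values)
    return *ordering_;
  }
private:
  static int expensiveComputation() { return 42; }
  mutable boost::optional<int> ordering_;  // mutable: cached inside a const method
};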

SuccessiveLinearizationOptimizer.h (View File)

@@ -24,6 +24,9 @@
namespace gtsam {
class SuccessiveLinearizationParams;
class SuccessiveLinearizationState;
/**
* This is an intermediate base class for NonlinearOptimizers that use direct
* solving, i.e. factorization. This class is here to reduce code duplication
@@ -33,23 +36,18 @@ namespace gtsam {
class SuccessiveLinearizationOptimizer : public NonlinearOptimizer {
public:
class SuccessiveLinearizationParams;
class SuccessiveLinearizationState;
typedef boost::shared_ptr<const SuccessiveLinearizationParams> SharedParams;
typedef boost::shared_ptr<const SuccessiveLinearizationOptimizer> shared_ptr;
protected:
typedef boost::shared_ptr<const Ordering> SharedOrdering;
SuccessiveLinearizationOptimizer(const SharedGraph& graph) : NonlinearOptimizer(graph) {}
const SharedOrdering& ordering(const Values& values) const;
const Ordering& ordering(const Values& values) const;
private:
SharedOrdering ordering_;
mutable boost::optional<Ordering> ordering_; // Mutable because we set/compute it when needed and cache it
};
class SuccessiveLinearizationParams : public NonlinearOptimizerParams {
@@ -66,12 +64,9 @@ public:
QR,
};
/** See SuccessiveLinearizationParams::ordering */
typedef boost::shared_ptr<const Ordering> SharedOrdering;
Elimination elimination; ///< The elimination algorithm to use (default: MULTIFRONTAL)
Factorization factorization; ///< The numerical factorization (default: LDL)
SharedOrdering ordering; ///< The variable elimination ordering, or empty to use COLAMD (default: empty)
boost::optional<Ordering> ordering; ///< The variable elimination ordering, or empty to use COLAMD (default: empty)
SuccessiveLinearizationParams() :
elimination(MULTIFRONTAL), factorization(LDL) {}