In progress: making the NonlinearOptimizer (NLO) interface less confusing and error-prone by moving per-iteration data into a separate "state" class. Also refactoring NLO to reduce the amount of code and remove duplication.

release/4.3a0
Richard Roberts 2012-04-02 00:26:42 +00:00
parent fdb4138d10
commit 9366136c78
8 changed files with 314 additions and 407 deletions
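
For orientation, a minimal sketch of the interface this commit is working toward: the optimizer object owns the graph, parameters, and ordering, while per-iteration quantities (values, error, iteration count, trust-region radius) travel in a separate state object. How the initial state gets seeded is not shown in this commit, so the explicit field assignments below are an assumption:

DoglegOptimizer optimizer(graph);  // graph: an existing NonlinearFactorGraph
DoglegOptimizer::SharedState initial = boost::make_shared<DoglegState>();
initial->values = boost::make_shared<Values>(initialValues);  // initialValues: a Values estimate
initial->error = graph.error(initialValues);
initial->iterations = 0;
initial->Delta = optimizer.params()->deltaInitial;  // Dogleg-specific piece of state
NonlinearOptimizer::SharedState result = optimizer.optimize(initial);
result->values->print("optimized values");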

gtsam/nonlinear/DirectOptimizer.cpp View File

@@ -0,0 +1,58 @@
/* ----------------------------------------------------------------------------
* GTSAM Copyright 2010, Georgia Tech Research Corporation,
* Atlanta, Georgia 30332-0415
* All Rights Reserved
* Authors: Frank Dellaert, et al. (see THANKS for the full author list)
* See LICENSE for the license information
* -------------------------------------------------------------------------- */
/**
* @file DirectOptimizer.cpp
* @brief Implementation of the DirectOptimizer intermediate base class
* @author Richard Roberts
* @date Apr 1, 2012
*/
#include <gtsam/nonlinear/NonlinearOptimizer.h>
namespace gtsam {
/* ************************************************************************* */
NonlinearOptimizer::shared_ptr DirectOptimizer::update(const SharedGraph& newGraph) const {
// Call update on the base class
boost::shared_ptr<DirectOptimizer> result =
boost::static_pointer_cast<DirectOptimizer>(NonlinearOptimizer::update(newGraph));
// Need to recompute the ordering if we're using an automatic COLAMD ordering
if(result->colamdOrdering_) {
result->ordering_ = result->graph_->orderingCOLAMD(*result->values_);
}
return result;
}
/* ************************************************************************* */
DirectOptimizer::shared_ptr DirectOptimizer::update(const SharedOrdering& newOrdering) const {
return update(graph_, newOrdering);
}
/* ************************************************************************* */
DirectOptimizer::shared_ptr DirectOptimizer::update(const SharedGraph& newGraph, const SharedOrdering& newOrdering) const {
// Call update on base class
boost::shared_ptr<DirectOptimizer> result =
boost::static_pointer_cast<DirectOptimizer>(NonlinearOptimizer::update(newGraph));
if(newOrdering && newOrdering->size() > 0) {
result->colamdOrdering_ = false;
result->ordering_ = newOrdering;
} else {
result->colamdOrdering_ = true;
result->ordering_ = result->graph_->orderingCOLAMD(*result->values_);
}
return result;
}
} /* namespace gtsam */
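
The ordering overloads above define the fallback behavior: passing an empty or null ordering re-enables automatic COLAMD ordering, while a non-empty ordering pins the variable order. A hypothetical call site for both cases:

// optimizer: a boost::shared_ptr to any DirectOptimizer subclass;
// myOrdering: a DirectOptimizer::SharedOrdering with size() > 0
NonlinearOptimizer::shared_ptr pinned = optimizer->update(myOrdering);
NonlinearOptimizer::shared_ptr automatic =
    optimizer->update(DirectOptimizer::SharedOrdering());  // empty: COLAMD is recomputed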

gtsam/nonlinear/DirectOptimizer.h View File

@@ -0,0 +1,99 @@
/* ----------------------------------------------------------------------------
* GTSAM Copyright 2010, Georgia Tech Research Corporation,
* Atlanta, Georgia 30332-0415
* All Rights Reserved
* Authors: Frank Dellaert, et al. (see THANKS for the full author list)
* See LICENSE for the license information
* -------------------------------------------------------------------------- */
/**
* @file DirectOptimizer.h
* @brief Intermediate base class and parameters for direct (factorization-based) nonlinear optimizers
* @author Richard Roberts
* @date Apr 1, 2012
*/
#pragma once
#include <gtsam/nonlinear/NonlinearOptimizer.h>
namespace gtsam {
class DirectOptimizerParams : public NonlinearOptimizerParams {
public:
/** See DoglegParams::elimination */
enum Elimination {
MULTIFRONTAL,
SEQUENTIAL
};
/** See DoglegParams::factorization */
enum Factorization {
LDL,
QR
};
Elimination elimination; ///< The elimination algorithm to use (default: MULTIFRONTAL)
Factorization factorization; ///< The numerical factorization (default: LDL)
DirectOptimizerParams() :
elimination(MULTIFRONTAL), factorization(LDL) {}
virtual ~DirectOptimizerParams() {}
virtual void print(const std::string& str = "") const {
NonlinearOptimizerParams::print(str);
if(elimination == MULTIFRONTAL)
std::cout << " elimination method: MULTIFRONTAL\n";
else if(elimination == SEQUENTIAL)
std::cout << " elimination method: SEQUENTIAL\n";
else
std::cout << " elimination method: (invalid)\n";
if(factorization == LDL)
std::cout << " factorization method: LDL\n";
else if(factorization == QR)
std::cout << " factorization method: QR\n";
else
std::cout << " factorization method: (invalid)\n";
std::cout.flush();
}
};
/**
* This is an intermediate base class for NonlinearOptimizers that use direct
* solving, i.e. factorization. This class is here to reduce code duplication
* for storing the ordering, having factorization and elimination parameters,
* etc.
*/
class DirectOptimizer : public NonlinearOptimizer {
public:
typedef boost::shared_ptr<const Ordering> SharedOrdering;
typedef boost::shared_ptr<NonlinearOptimizer> shared_ptr;
virtual NonlinearOptimizer::shared_ptr update(const SharedGraph& newGraph) const;
virtual shared_ptr update(const SharedOrdering& newOrdering) const;
virtual shared_ptr update(const SharedGraph& newGraph, const SharedOrdering& newOrdering) const;
/** Access the variable ordering used by this optimizer */
const SharedOrdering& ordering() const { return ordering_; }
protected:
bool colamdOrdering_;
SharedOrdering ordering_;
DirectOptimizer(const SharedGraph& graph,
const SharedOrdering ordering = SharedOrdering()) :
NonlinearOptimizer(graph),
colamdOrdering_(!ordering || ordering->size() == 0),
ordering_(colamdOrdering_ ? graph_->orderingCOLAMD(*values_) : ordering) {}
};
} /* namespace gtsam */
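
For reference, a quick sketch of configuring the parameters that now live in this shared base class:

DirectOptimizerParams params;
params.elimination = DirectOptimizerParams::SEQUENTIAL;
params.factorization = DirectOptimizerParams::QR;
params.print("direct solver settings");
// prints the base NonlinearOptimizerParams fields, then:
//   elimination method: SEQUENTIAL
//   factorization method: QR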

gtsam/nonlinear/DoglegOptimizer.cpp View File

@@ -26,7 +26,7 @@ using namespace std;
namespace gtsam {
NonlinearOptimizer::auto_ptr DoglegOptimizer::iterate() const {
NonlinearOptimizer::SharedState DoglegOptimizer::iterate(const SharedState& current) const {
// Linearize graph
GaussianFactorGraph::shared_ptr linear = graph_->linearize(*values_, *ordering_);
@ -59,13 +59,16 @@ NonlinearOptimizer::auto_ptr DoglegOptimizer::iterate() const {
}
// Update values
SharedValues newValues(new Values(values_->retract(result.dx_d, *ordering_)));
SharedValues newValues = boost::make_shared<Values>(current->values->retract(result.dx_d, *ordering_));
// Create new optimizer with new values and new error
NonlinearOptimizer::auto_ptr newOptimizer(new DoglegOptimizer(
*this, newValues, result.f_error, result.Delta));
// Create new state with new values and new error
DoglegOptimizer::SharedState newState = boost::make_shared<DoglegState>();
newState->values = newValues;
newState->error = result.f_error;
newState->iterations = current->iterations + 1;
newState->Delta = result.Delta;
return newOptimizer;
return newState;
}
} /* namespace gtsam */

gtsam/nonlinear/DoglegOptimizer.h View File

@@ -18,7 +18,7 @@
#pragma once
#include <gtsam/nonlinear/NonlinearOptimizer.h>
#include <gtsam/nonlinear/DirectOptimizer.h>
namespace gtsam {
@@ -27,68 +27,49 @@ namespace gtsam {
* common to all nonlinear optimization algorithms. This class also contains
* all of those parameters.
*/
class DoglegParams : public NonlinearOptimizerParams {
class DoglegParams : public DirectOptimizerParams {
public:
/** See DoglegParams::elimination */
enum Elimination {
MULTIFRONTAL,
SEQUENTIAL
};
/** See DoglegParams::factorization */
enum Factorization {
LDL,
QR,
};
/** See DoglegParams::dlVerbosity */
enum DLVerbosity {
SILENT,
VERBOSE
};
Elimination elimination; ///< The elimination algorithm to use (default: MULTIFRONTAL)
Factorization factorization; ///< The numerical factorization (default: LDL)
double deltaInitial; ///< The initial trust region radius (default: 1.0)
DLVerbosity dlVerbosity; ///< The verbosity level for Dogleg (default: SILENT), see also NonlinearOptimizerParams::verbosity
DoglegParams() :
elimination(MULTIFRONTAL), factorization(LDL), deltaInitial(1.0), dlVerbosity(SILENT) {}
deltaInitial(1.0), dlVerbosity(SILENT) {}
virtual ~DoglegParams() {}
virtual void print(const std::string& str = "") const {
NonlinearOptimizerParams::print(str);
if(elimination == MULTIFRONTAL)
std::cout << " elimination method: MULTIFRONTAL\n";
else if(elimination == SEQUENTIAL)
std::cout << " elimination method: SEQUENTIAL\n";
else
std::cout << " elimination method: (invalid)\n";
if(factorization == LDL)
std::cout << " factorization method: LDL\n";
else if(factorization == QR)
std::cout << " factorization method: QR\n";
else
std::cout << " factorization method: (invalid)\n";
DirectOptimizerParams::print(str);
std::cout << " deltaInitial: " << deltaInitial << "\n";
std::cout.flush();
}
};
/**
* This class performs Dogleg nonlinear optimization
* TODO: use make_shared
* State for DoglegOptimizer
*/
class DoglegOptimizer : public NonlinearOptimizer {
class DoglegState : public NonlinearOptimizerState {
public:
double Delta;
};
/**
* This class performs Dogleg nonlinear optimization
*/
class DoglegOptimizer : public DirectOptimizer {
public:
typedef boost::shared_ptr<const DoglegParams> SharedDLParams;
typedef boost::shared_ptr<const Ordering> SharedOrdering;
typedef boost::shared_ptr<DoglegParams> SharedParams;
typedef boost::shared_ptr<DoglegState> SharedState;
typedef boost::shared_ptr<DoglegOptimizer> shared_ptr;
/// @name Standard interface
/// @{
@@ -101,19 +82,11 @@ public:
* @param values The initial variable assignments
* @param params The optimization parameters
*/
DoglegOptimizer(const NonlinearFactorGraph& graph, const Values& values,
DoglegOptimizer(const NonlinearFactorGraph& graph,
const DoglegParams& params = DoglegParams(),
const Ordering& ordering = Ordering()) :
NonlinearOptimizer(
SharedGraph(new NonlinearFactorGraph(graph)),
SharedValues(new Values(values)),
SharedDLParams(new DoglegParams(params))),
dlParams_(boost::static_pointer_cast<const DoglegParams>(params_)),
colamdOrdering_(ordering.size() == 0),
ordering_(colamdOrdering_ ?
graph_->orderingCOLAMD(*values_) : Ordering::shared_ptr(new Ordering(ordering))),
dimensions_(new vector<size_t>(values_->dims(*ordering_))),
delta_(dlParams_->deltaInitial) {}
DirectOptimizer(SharedGraph(new NonlinearFactorGraph(graph)), SharedOrdering(new Ordering(ordering))),
params_(new DoglegParams(params)) {}
/** Standard constructor, requires a nonlinear factor graph, initial
* variable assignments, and optimization parameters. For convenience this
@@ -123,18 +96,10 @@ public:
* @param values The initial variable assignments
* @param params The optimization parameters
*/
DoglegOptimizer(const NonlinearFactorGraph& graph, const Values& values,
DoglegOptimizer(const NonlinearFactorGraph& graph,
const Ordering& ordering) :
NonlinearOptimizer(
SharedGraph(new NonlinearFactorGraph(graph)),
SharedValues(new Values(values)),
SharedDLParams(new DoglegParams())),
dlParams_(boost::static_pointer_cast<const DoglegParams>(params_)),
colamdOrdering_(ordering.size() == 0),
ordering_(colamdOrdering_ ?
graph_->orderingCOLAMD(*values_) : Ordering::shared_ptr(new Ordering(ordering))),
dimensions_(new vector<size_t>(values_->dims(*ordering_))),
delta_(dlParams_->deltaInitial) {}
DirectOptimizer(SharedGraph(new NonlinearFactorGraph(graph)), SharedOrdering(new Ordering(ordering))),
params_(new DoglegParams()) {}
/** Standard constructor, requires a nonlinear factor graph, initial
* variable assignments, and optimization parameters.
@@ -142,18 +107,17 @@ public:
* @param values The initial variable assignments
* @param params The optimization parameters
*/
DoglegOptimizer(const SharedGraph& graph, const SharedValues& values,
DoglegOptimizer(const SharedGraph& graph,
const DoglegParams& params = DoglegParams(),
const SharedOrdering& ordering = SharedOrdering()) :
NonlinearOptimizer(graph, values, SharedDLParams(new DoglegParams(params))),
dlParams_(boost::static_pointer_cast<const DoglegParams>(params_)),
colamdOrdering_(!ordering || ordering->size() == 0),
ordering_(colamdOrdering_ ? graph_->orderingCOLAMD(*values_) : ordering),
dimensions_(new vector<size_t>(values_->dims(*ordering_))),
delta_(dlParams_->deltaInitial) {}
DirectOptimizer(graph, ordering),
params_(new DoglegParams(params)) {}
/** Access the variable ordering used by this optimizer */
const SharedOrdering& ordering() const { return ordering_; }
/** Access the parameters */
virtual const NonlinearOptimizer::SharedParams& params() const { return params_; }
/** Access the parameters */
const DoglegOptimizer::SharedParams& params() const { return params_; }
/// @}
@@ -167,102 +131,19 @@ public:
* containing the updated variable assignments, which may be retrieved with
* values().
*/
virtual auto_ptr iterate() const;
/** Update the graph, values, and/or parameters, leaving all other state
* the same. Any of these that are empty shared pointers are left unchanged
* in the returned optimizer object. Returns a new updated NonlinearOptimizer
* object, the original is not modified.
*/
virtual auto_ptr update(
const SharedGraph& newGraph,
const SharedValues& newValues = SharedValues(),
const SharedParams& newParams = SharedParams()) const {
return auto_ptr(new DoglegOptimizer(*this, newGraph, newValues,
boost::dynamic_pointer_cast<const DoglegParams>(newParams)));
}
/** Update the ordering, leaving all other state the same. If newOrdering
* is an empty pointer <emph>or</emph> contains an empty Ordering object
* (with zero size), a COLAMD ordering will be computed. Returns a new
* NonlinearOptimizer object, the original is not modified.
*/
virtual auto_ptr update(const SharedOrdering& newOrdering) const {
return auto_ptr(new DoglegOptimizer(*this, newOrdering)); }
/** Update the damping factor lambda, leaving all other state the same.
* Returns a new NonlinearOptimizer object, the original is not modified.
*/
virtual auto_ptr update(double newDelta) const {
return auto_ptr(new DoglegOptimizer(*this, newDelta)); }
virtual NonlinearOptimizer::SharedState iterate(const SharedState& current) const;
/** Create a copy of the NonlinearOptimizer */
virtual auto_ptr clone() const {
return auto_ptr(new DoglegOptimizer(*this));
}
virtual NonlinearOptimizer::shared_ptr clone() const {
return boost::make_shared<DoglegOptimizer>(*this); }
/// @}
protected:
typedef boost::shared_ptr<const std::vector<size_t> > SharedDimensions;
SharedParams params_;
const SharedDLParams dlParams_;
const bool colamdOrdering_;
const SharedOrdering ordering_;
const SharedDimensions dimensions_;
const double delta_;
/** Protected constructor called by update() to modify the graph, values, or
* parameters. Computes a COLAMD ordering if the optimizer was originally
* constructed with an empty ordering, and if the graph is changing.
* Recomputes the dimensions if the values are changing.
*/
DoglegOptimizer(const DoglegOptimizer& original, const SharedGraph& newGraph,
const SharedValues& newValues, const SharedDLParams& newParams) :
NonlinearOptimizer(original, newGraph, newValues, newParams),
dlParams_(newParams ? newParams : original.dlParams_),
colamdOrdering_(original.colamdOrdering_),
ordering_(newGraph && colamdOrdering_ ? graph_->orderingCOLAMD(*values_) : original.ordering_),
dimensions_(newValues ?
SharedDimensions(new std::vector<size_t>(values_->dims(*ordering_))) : original.dimensions_),
delta_(original.delta_) {}
/** Protected constructor called by update() to modify the ordering, computing
* a COLAMD ordering if the new ordering is empty, and also recomputing the
* dimensions.
*/
DoglegOptimizer(
const DoglegOptimizer& original, const SharedOrdering& newOrdering) :
NonlinearOptimizer(original),
dlParams_(original.dlParams_),
colamdOrdering_(!newOrdering || newOrdering->size() == 0),
ordering_(colamdOrdering_ ? graph_->orderingCOLAMD(*values_) : newOrdering),
dimensions_(new std::vector<size_t>(values_->dims(*ordering_))),
delta_(original.delta_) {}
/** Protected constructor called by update() to modify lambda. */
DoglegOptimizer(
const DoglegOptimizer& original, double newDelta) :
NonlinearOptimizer(original),
dlParams_(original.dlParams_),
colamdOrdering_(original.colamdOrdering_),
ordering_(original.ordering_),
dimensions_(original.dimensions_),
delta_(newDelta) {}
private:
// Special constructor for completing an iteration, updates the values and
// error, and increments the iteration count.
DoglegOptimizer(const DoglegOptimizer& original,
const SharedValues& newValues, double newError, double newDelta) :
NonlinearOptimizer(original.graph_, newValues, original.params_, newError, original.iterations_+1),
dlParams_(original.dlParams_),
colamdOrdering_(original.colamdOrdering_),
ordering_(original.ordering_),
dimensions_(original.dimensions_),
delta_(newDelta) {}
virtual void setParams(const NonlinearOptimizer::SharedParams& newParams) {
params_ = boost::dynamic_pointer_cast<DoglegParams>(
boost::const_pointer_cast<NonlinearOptimizerParams>(newParams)); }
};
}
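
A small sketch of constructing the slimmed-down optimizer with non-default Dogleg parameters (graph is an assumed, pre-built NonlinearFactorGraph):

DoglegParams params;
params.deltaInitial = 10.0;                // start with a larger trust region
params.dlVerbosity = DoglegParams::VERBOSE;
params.factorization = DoglegParams::QR;   // inherited from DirectOptimizerParams
DoglegOptimizer optimizer(graph, params);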

gtsam/nonlinear/LevenbergMarquardtOptimizer.cpp View File

@@ -26,7 +26,7 @@ using namespace std;
namespace gtsam {
NonlinearOptimizer::auto_ptr LevenbergMarquardtOptimizer::iterate() const {
NonlinearOptimizer::SharedState LevenbergMarquardtOptimizer::iterate(const SharedState& current) const {
// Linearize graph
GaussianFactorGraph::shared_ptr linear = graph_->linearize(*values_, *ordering_);
@@ -131,11 +131,14 @@ NonlinearOptimizer::auto_ptr LevenbergMarquardtOptimizer::iterate() const {
}
} // end while
// Create new optimizer with new values and new error
NonlinearOptimizer::auto_ptr newOptimizer(new LevenbergMarquardtOptimizer(
*this, next_values, next_error, lambda));
// Create new state with new values and new error
LevenbergMarquardtOptimizer::SharedState newState = boost::make_shared<LevenbergMarquardtState>();
newState->values = next_values;
newState->error = next_error;
newState->iterations = current->iterations + 1;
newState->lambda = lambda;
return newOptimizer;
return newState;
}
} /* namespace gtsam */

gtsam/nonlinear/LevenbergMarquardtOptimizer.h View File

@@ -18,7 +18,7 @@
#pragma once
#include <gtsam/nonlinear/NonlinearOptimizer.h>
#include <gtsam/nonlinear/DirectOptimizer.h>
namespace gtsam {
@@ -27,20 +27,8 @@ namespace gtsam {
* common to all nonlinear optimization algorithms. This class also contains
* all of those parameters.
*/
class LevenbergMarquardtParams : public NonlinearOptimizerParams {
class LevenbergMarquardtParams : public DirectOptimizerParams {
public:
/** See LevenbergMarquardtParams::elimination */
enum Elimination {
MULTIFRONTAL,
SEQUENTIAL
};
/** See LevenbergMarquardtParams::factorization */
enum Factorization {
LDL,
QR,
};
/** See LevenbergMarquardtParams::lmVerbosity */
enum LMVerbosity {
SILENT,
@@ -51,53 +39,46 @@ public:
DAMPED
};
Elimination elimination; ///< The elimination algorithm to use (default: MULTIFRONTAL)
Factorization factorization; ///< The numerical factorization (default: LDL)
double lambdaInitial; ///< The initial Levenberg-Marquardt damping term (default: 1e-5)
double lambdaFactor; ///< The amount by which to multiply or divide lambda when adjusting lambda (default: 10.0)
double lambdaUpperBound; ///< The maximum lambda to try before assuming the optimization has failed (default: 1e5)
LMVerbosity lmVerbosity; ///< The verbosity level for Levenberg-Marquardt (default: SILENT), see also NonlinearOptimizerParams::verbosity
LevenbergMarquardtParams() :
elimination(MULTIFRONTAL), factorization(LDL), lambdaInitial(1e-5),
lambdaFactor(10.0), lambdaUpperBound(1e5), lmVerbosity(SILENT) {}
lambdaInitial(1e-5), lambdaFactor(10.0), lambdaUpperBound(1e5), lmVerbosity(SILENT) {}
virtual ~LevenbergMarquardtParams() {}
virtual void print(const std::string& str = "") const {
NonlinearOptimizerParams::print(str);
if(elimination == MULTIFRONTAL)
std::cout << " elimination method: MULTIFRONTAL\n";
else if(elimination == SEQUENTIAL)
std::cout << " elimination method: SEQUENTIAL\n";
else
std::cout << " elimination method: (invalid)\n";
if(factorization == LDL)
std::cout << " factorization method: LDL\n";
else if(factorization == QR)
std::cout << " factorization method: QR\n";
else
std::cout << " factorization method: (invalid)\n";
DirectOptimizerParams::print(str);
std::cout << " lambdaInitial: " << lambdaInitial << "\n";
std::cout << " lambdaFactor: " << lambdaFactor << "\n";
std::cout << " lambdaUpperBound: " << lambdaUpperBound << "\n";
std::cout.flush();
}
};
/**
* State for LevenbergMarquardtOptimizer
*/
class LevenbergMarquardtState : public NonlinearOptimizerState {
public:
double lambda;
};
/**
* This class performs Levenberg-Marquardt nonlinear optimization
* TODO: use make_shared
*/
class LevenbergMarquardtOptimizer : public NonlinearOptimizer {
class LevenbergMarquardtOptimizer : public DirectOptimizer {
public:
typedef boost::shared_ptr<const LevenbergMarquardtParams> SharedLMParams;
typedef boost::shared_ptr<const Ordering> SharedOrdering;
typedef boost::shared_ptr<LevenbergMarquardtParams> SharedParams;
typedef boost::shared_ptr<LevenbergMarquardtState> SharedState;
typedef boost::shared_ptr<LevenbergMarquardtOptimizer> shared_ptr;
/// @name Standard interface
/// @{
@@ -110,19 +91,11 @@ public:
* @param values The initial variable assignments
* @param params The optimization parameters
*/
LevenbergMarquardtOptimizer(const NonlinearFactorGraph& graph, const Values& values,
LevenbergMarquardtOptimizer(const NonlinearFactorGraph& graph,
const LevenbergMarquardtParams& params = LevenbergMarquardtParams(),
const Ordering& ordering = Ordering()) :
NonlinearOptimizer(
SharedGraph(new NonlinearFactorGraph(graph)),
SharedValues(new Values(values)),
SharedLMParams(new LevenbergMarquardtParams(params))),
lmParams_(boost::static_pointer_cast<const LevenbergMarquardtParams>(params_)),
colamdOrdering_(ordering.size() == 0),
ordering_(colamdOrdering_ ?
graph_->orderingCOLAMD(*values_) : Ordering::shared_ptr(new Ordering(ordering))),
dimensions_(new vector<size_t>(values_->dims(*ordering_))),
lambda_(lmParams_->lambdaInitial) {}
DirectOptimizer(SharedGraph(new NonlinearFactorGraph(graph)), SharedOrdering(new Ordering(ordering))),
params_(new LevenbergMarquardtParams(params)) {}
/** Standard constructor, requires a nonlinear factor graph, initial
* variable assignments, and optimization parameters. For convenience this
@@ -132,18 +105,10 @@ public:
* @param values The initial variable assignments
* @param params The optimization parameters
*/
LevenbergMarquardtOptimizer(const NonlinearFactorGraph& graph, const Values& values,
LevenbergMarquardtOptimizer(const NonlinearFactorGraph& graph,
const Ordering& ordering) :
NonlinearOptimizer(
SharedGraph(new NonlinearFactorGraph(graph)),
SharedValues(new Values(values)),
SharedLMParams(new LevenbergMarquardtParams())),
lmParams_(boost::static_pointer_cast<const LevenbergMarquardtParams>(params_)),
colamdOrdering_(ordering.size() == 0),
ordering_(colamdOrdering_ ?
graph_->orderingCOLAMD(*values_) : Ordering::shared_ptr(new Ordering(ordering))),
dimensions_(new vector<size_t>(values_->dims(*ordering_))),
lambda_(lmParams_->lambdaInitial) {}
DirectOptimizer(SharedGraph(new NonlinearFactorGraph(graph)), SharedOrdering(new Ordering(ordering))),
params_(new LevenbergMarquardtParams()) {}
/** Standard constructor, requires a nonlinear factor graph, initial
* variable assignments, and optimization parameters.
@@ -151,18 +116,17 @@ public:
* @param values The initial variable assignments
* @param params The optimization parameters
*/
LevenbergMarquardtOptimizer(const SharedGraph& graph, const SharedValues& values,
LevenbergMarquardtOptimizer(const SharedGraph& graph,
const LevenbergMarquardtParams& params = LevenbergMarquardtParams(),
const SharedOrdering& ordering = SharedOrdering()) :
NonlinearOptimizer(graph, values, SharedLMParams(new LevenbergMarquardtParams(params))),
lmParams_(boost::static_pointer_cast<const LevenbergMarquardtParams>(params_)),
colamdOrdering_(!ordering || ordering->size() == 0),
ordering_(colamdOrdering_ ? graph_->orderingCOLAMD(*values_) : ordering),
dimensions_(new vector<size_t>(values_->dims(*ordering_))),
lambda_(lmParams_->lambdaInitial) {}
DirectOptimizer(graph, ordering),
params_(new LevenbergMarquardtParams(params)) {}
/** Access the variable ordering used by this optimizer */
const SharedOrdering& ordering() const { return ordering_; }
/** Access the parameters */
virtual const NonlinearOptimizer::SharedParams& params() const { return params_; }
/** Access the parameters */
const LevenbergMarquardtOptimizer::SharedParams& params() const { return params_; }
/// @}
@@ -176,39 +140,11 @@ public:
* containing the updated variable assignments, which may be retrieved with
* values().
*/
virtual auto_ptr iterate() const;
/** Update the graph, values, and/or parameters, leaving all other state
* the same. Any of these that are empty shared pointers are left unchanged
* in the returned optimizer object. Returns a new updated NonlinearOptimizer
* object, the original is not modified.
*/
virtual auto_ptr update(
const SharedGraph& newGraph,
const SharedValues& newValues = SharedValues(),
const SharedParams& newParams = SharedParams()) const {
return auto_ptr(new LevenbergMarquardtOptimizer(*this, newGraph, newValues,
boost::dynamic_pointer_cast<const LevenbergMarquardtParams>(newParams)));
}
/** Update the ordering, leaving all other state the same. If newOrdering
* is an empty pointer <emph>or</emph> contains an empty Ordering object
* (with zero size), a COLAMD ordering will be computed. Returns a new
* NonlinearOptimizer object, the original is not modified.
*/
virtual auto_ptr update(const SharedOrdering& newOrdering) const {
return auto_ptr(new LevenbergMarquardtOptimizer(*this, newOrdering)); }
/** Update the damping factor lambda, leaving all other state the same.
* Returns a new NonlinearOptimizer object, the original is not modified.
*/
virtual auto_ptr update(double newLambda) const {
return auto_ptr(new LevenbergMarquardtOptimizer(*this, newLambda)); }
virtual NonlinearOptimizer::SharedState iterate(const SharedState& current) const;
/** Create a copy of the NonlinearOptimizer */
virtual auto_ptr clone() const {
return auto_ptr(new LevenbergMarquardtOptimizer(*this));
}
virtual NonlinearOptimizer::shared_ptr clone() const {
return boost::make_shared<LevenbergMarquardtOptimizer>(*this); }
/// @}
@@ -216,62 +152,8 @@ protected:
typedef boost::shared_ptr<const std::vector<size_t> > SharedDimensions;
const SharedLMParams lmParams_;
const bool colamdOrdering_;
const SharedOrdering ordering_;
SharedParams params_;
const SharedDimensions dimensions_;
const double lambda_;
/** Protected constructor called by update() to modify the graph, values, or
* parameters. Computes a COLAMD ordering if the optimizer was originally
* constructed with an empty ordering, and if the graph is changing.
* Recomputes the dimensions if the values are changing.
*/
LevenbergMarquardtOptimizer(const LevenbergMarquardtOptimizer& original, const SharedGraph& newGraph,
const SharedValues& newValues, const SharedLMParams& newParams) :
NonlinearOptimizer(original, newGraph, newValues, newParams),
lmParams_(newParams ? newParams : original.lmParams_),
colamdOrdering_(original.colamdOrdering_),
ordering_(newGraph && colamdOrdering_ ? graph_->orderingCOLAMD(*values_) : original.ordering_),
dimensions_(newValues ?
SharedDimensions(new std::vector<size_t>(values_->dims(*ordering_))) : original.dimensions_),
lambda_(original.lambda_) {}
/** Protected constructor called by update() to modify the ordering, computing
* a COLAMD ordering if the new ordering is empty, and also recomputing the
* dimensions.
*/
LevenbergMarquardtOptimizer(
const LevenbergMarquardtOptimizer& original, const SharedOrdering& newOrdering) :
NonlinearOptimizer(original),
lmParams_(original.lmParams_),
colamdOrdering_(!newOrdering || newOrdering->size() == 0),
ordering_(colamdOrdering_ ? graph_->orderingCOLAMD(*values_) : newOrdering),
dimensions_(new std::vector<size_t>(values_->dims(*ordering_))),
lambda_(original.lambda_) {}
/** Protected constructor called by update() to modify lambda. */
LevenbergMarquardtOptimizer(
const LevenbergMarquardtOptimizer& original, double newLambda) :
NonlinearOptimizer(original),
lmParams_(original.lmParams_),
colamdOrdering_(original.colamdOrdering_),
ordering_(original.ordering_),
dimensions_(original.dimensions_),
lambda_(newLambda) {}
private:
// Special constructor for completing an iteration, updates the values and
// error, and increments the iteration count.
LevenbergMarquardtOptimizer(const LevenbergMarquardtOptimizer& original,
const SharedValues& newValues, double newError, double newLambda) :
NonlinearOptimizer(original.graph_, newValues, original.params_, newError, original.iterations_+1),
lmParams_(original.lmParams_),
colamdOrdering_(original.colamdOrdering_),
ordering_(original.ordering_),
dimensions_(original.dimensions_),
lambda_(newLambda) {}
};
}
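
Likewise for Levenberg-Marquardt: the damping schedule is configured on the params object, while the current lambda now travels in the state. A hypothetical call site (graph is assumed):

LevenbergMarquardtParams params;
params.lambdaInitial = 1e-3;     // heavier initial damping than the 1e-5 default
params.lambdaFactor = 2.0;       // adjust lambda more gently than the 10.0 default
params.lambdaUpperBound = 1e7;
LevenbergMarquardtOptimizer optimizer(graph, params);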

gtsam/nonlinear/NonlinearOptimizer.cpp View File

@@ -25,49 +25,58 @@ using namespace std;
namespace gtsam {
/* ************************************************************************* */
NonlinearOptimizer::auto_ptr NonlinearOptimizer::defaultOptimize() const {
NonlinearOptimizer::shared_ptr NonlinearOptimizer::update(const NonlinearOptimizer::SharedGraph& newGraph) const {
shared_ptr result(this->clone());
result->graph_ = newGraph;
return result;
}
double currentError = this->error();
/* ************************************************************************* */
NonlinearOptimizer::shared_ptr NonlinearOptimizer::update(const NonlinearOptimizer::SharedParams& newParams) const {
shared_ptr result(this->clone());
result->setParams(newParams);
return result;
}
/* ************************************************************************* */
NonlinearOptimizer::SharedState NonlinearOptimizer::defaultOptimize(const SharedState& initial) const {
const SharedParams& params = this->params();
double currentError = initial->error;
// check if we're already close enough
if(currentError <= params_->errorTol) {
if (params_->verbosity >= NonlinearOptimizerParams::ERROR)
cout << "Exiting, as error = " << currentError << " < " << params_->errorTol << endl;
return this->clone();
if(currentError <= params->errorTol) {
if (params->verbosity >= NonlinearOptimizerParams::ERROR)
cout << "Exiting, as error = " << currentError << " < " << params->errorTol << endl;
return initial;
}
// Maybe show output
if (params_->verbosity >= NonlinearOptimizerParams::VALUES) this->values()->print("Initial values");
if (params_->verbosity >= NonlinearOptimizerParams::ERROR) cout << "Initial error: " << this->error() << endl;
if (params->verbosity >= NonlinearOptimizerParams::VALUES) initial->values->print("Initial values");
if (params->verbosity >= NonlinearOptimizerParams::ERROR) cout << "Initial error: " << initial->error << endl;
// Return if we already have too many iterations
if(this->iterations() >= params_->maxIterations)
return this->clone();
if(initial->iterations >= params->maxIterations)
return initial;
// Iterative loop
auto_ptr next = this->iterate(); // First iteration happens here
while(next->iterations() < params_->maxIterations &&
!checkConvergence(params_->relativeErrorTol, params_->absoluteErrorTol,
params_->errorTol, currentError, next->error(), params_->verbosity)) {
SharedState next = initial;
do {
// Do next iteration
currentError = next->error;
next = this->iterate(next);
// Maybe show output
if (params_->verbosity >= NonlinearOptimizerParams::VALUES) next->values()->print("newValues");
if (params_->verbosity >= NonlinearOptimizerParams::ERROR) cout << "newError: " << next->error() << endl;
// Do next iteration
currentError = next->error();
next = next->iterate();
}
if (params->verbosity >= NonlinearOptimizerParams::VALUES) next->values->print("newValues");
if (params->verbosity >= NonlinearOptimizerParams::ERROR) cout << "newError: " << next->error << endl;
} while(next->iterations < params->maxIterations &&
!checkConvergence(params->relativeErrorTol, params->absoluteErrorTol,
params->errorTol, currentError, next->error, params->verbosity));
// Printing if verbose
if (params_->verbosity >= NonlinearOptimizerParams::VALUES)
next->values()->print("final values");
if (params_->verbosity >= NonlinearOptimizerParams::ERROR &&
next->iterations() >= params_->maxIterations)
if (params->verbosity >= NonlinearOptimizerParams::ERROR &&
next->iterations >= params->maxIterations)
cout << "Terminating because reached maximum iterations" << endl;
if (params_->verbosity >= NonlinearOptimizerParams::ERROR)
cout << "final error: " << next->error() << endl;
// Return the state from the final iteration
return next;

gtsam/nonlinear/NonlinearOptimizer.h View File

@@ -77,6 +77,9 @@ public:
/** The number of optimization iterations performed. */
unsigned int iterations;
/** Virtual destructor to enable RTTI */
virtual ~NonlinearOptimizerState() {}
};
@@ -156,21 +159,18 @@ class NonlinearOptimizer {
public:
/** An auto pointer to this class */
typedef std::auto_ptr<const NonlinearOptimizer> auto_ptr;
/** A shared pointer to this class */
typedef boost::shared_ptr<NonlinearOptimizer> shared_ptr;
/** A const shared_ptr to a NonlinearFactorGraph */
typedef boost::shared_ptr<const NonlinearFactorGraph> SharedGraph;
/** A const shared_ptr to a Values */
typedef boost::shared_ptr<const Values> SharedValues;
/** A const shared_ptr to the parameters */
typedef boost::shared_ptr<const NonlinearOptimizerParams> SharedParams;
/** A shared_ptr to an optimizer state */
typedef boost::shared_ptr<NonlinearOptimizerState> SharedState;
/// @name Standard interface
/// @{
@@ -183,26 +183,17 @@ public:
* process, you may call iterate() and check_convergence() yourself, and if
* needed modify the optimization state between iterations.
*/
virtual auto_ptr optimize() const { return defaultOptimize(); }
virtual SharedState optimize(const SharedState& initial) const { return defaultOptimize(initial); }
/** Shortcut to optimize and return the resulting Values of the maximum-
* likelihood estimate. To access statistics and information such as the
* final error and number of iterations, use optimize() instead.
* @return The maximum-likelihood estimate.
*/
virtual SharedValues optimized() const { return this->optimize()->values(); }
/** Retrieve the current variable assignment estimate. */
virtual const SharedValues& values() const { return values_; }
virtual SharedValues optimized(const SharedState& initial) const { return this->optimize(initial)->values; }
/** Retrieve the parameters. */
virtual const SharedParams& params() const { return params_; }
/** Return the current factor graph error */
virtual double error() const { return error_; }
/** Return the number of iterations that have been performed */
virtual int iterations() const { return iterations_; }
virtual const SharedParams& params() const = 0;
/// @}
@@ -216,60 +207,41 @@ public:
* containing the updated variable assignments, which may be retrieved with
* values().
*/
virtual auto_ptr iterate() const = 0;
virtual SharedState iterate(const SharedState& current) const = 0;
/** Update the graph, values, and/or parameters, leaving all other state
* the same. Any of these that are empty shared pointers are left unchanged
* in the returned optimizer object. Returns a new updated
* NonlinearOptimizer object, the original is not modified.
/** Update the graph, leaving all other parts of the optimizer unchanged,
* returns a new updated NonlinearOptimizer object, the original is not
* modified.
*/
virtual auto_ptr update(
const SharedGraph& newGraph = SharedGraph(),
const SharedValues& newValues = SharedValues(),
const SharedParams& newParams = SharedParams()) const = 0;
virtual shared_ptr update(const SharedGraph& newGraph) const;
/** Update the parameters, leaving all other parts of the optimizer unchanged,
* returns a new updated NonlinearOptimizer object, the original is not
* modified.
*/
virtual shared_ptr update(const SharedParams& newParams) const;
/** Create a copy of the NonlinearOptimizer */
virtual auto_ptr clone() const = 0;
virtual shared_ptr clone() const = 0;
/// @}
protected:
SharedGraph graph_;
/** A default implementation of the optimization loop, which calls iterate()
* until checkConvergence returns true.
*/
auto_ptr defaultOptimize() const;
SharedState defaultOptimize(const SharedState& initial) const;
protected:
/** Modify the parameters in-place (not for external use) */
virtual void setParams(const NonlinearOptimizer::SharedParams& newParams);
const SharedGraph graph_;
const SharedValues values_;
const SharedParams params_;
const double error_;
const int iterations_;
/** Constructor for initial construction of base classes, computes error and
* sets iterations to zero.
/** Constructor for initial construction of base classes.
*/
NonlinearOptimizer(const SharedGraph& graph, const SharedValues& values,
const SharedParams& params) :
graph_(graph), values_(values), params_(params),
error_(graph_->error(*values_)), iterations_(0) {}
/** Constructor that specifies all parts of the state, used for updates */
NonlinearOptimizer(const SharedGraph& graph, const SharedValues& values,
const SharedParams& params, double error, int iterations) :
graph_(graph), values_(values), params_(params),
error_(error), iterations_(iterations) {}
/** Convenience constructor for modifying only some of the state. */
NonlinearOptimizer(const NonlinearOptimizer& original, const SharedGraph& newGraph,
const SharedValues& newValues, const SharedParams& newParams) :
graph_(newGraph ? newGraph : original.graph_),
values_(newValues ? newValues : original.values_),
params_(newParams ? newParams : original.params_),
error_(newGraph || newValues ? graph_->error(*values_) : original.error_),
iterations_(original.iterations_) {}
NonlinearOptimizer(const SharedGraph& graph) :
graph_(graph) {}
};
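
Per the optimize() documentation above, callers wanting finer control can drive the loop themselves. A sketch mirroring defaultOptimize(), assuming optimizer, params, and initial are set up as in the earlier sketches:

NonlinearOptimizer::SharedState state = initial;
double previousError;
do {
  previousError = state->error;
  state = optimizer.iterate(state);   // one linearize/solve/retract step
} while(state->iterations < params->maxIterations &&
    !checkConvergence(params->relativeErrorTol, params->absoluteErrorTol,
        params->errorTol, previousError, state->error, params->verbosity));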