Updated the Doxygen style Comments on NonlinearOptimizer.h

release/4.3a0
Natesh Srinivasan 2011-09-09 02:34:29 +00:00
parent fd4c4fda5d
commit 3014daa140
1 changed file with 46 additions and 46 deletions

View File

@ -26,10 +26,10 @@ namespace gtsam {
class NullOptimizerWriter { class NullOptimizerWriter {
public: public:
NullOptimizerWriter(double error) {} NullOptimizerWriter(double error) {} ///Constructor
virtual ~NullOptimizerWriter() {} virtual ~NullOptimizerWriter() {}
virtual void write(double error) {} virtual void write(double error) {} ///Capturing the values of the parameters after the optimization
}; }; ///
/** /**
* The class NonlinearOptimizer encapsulates an optimization state. * The class NonlinearOptimizer encapsulates an optimization state.
@ -62,13 +62,13 @@ class NonlinearOptimizer {
public: public:
// For performance reasons in recursion, we store values in a shared_ptr // For performance reasons in recursion, we store values in a shared_ptr
typedef boost::shared_ptr<const T> shared_values; typedef boost::shared_ptr<const T> shared_values; ///Prevent memory leaks in Values
typedef boost::shared_ptr<const G> shared_graph; typedef boost::shared_ptr<const G> shared_graph; /// Prevent memory leaks in Graph
typedef boost::shared_ptr<L> shared_linear; typedef boost::shared_ptr<L> shared_linear; /// Shared pointer to the linearized system (template parameter L) — TODO confirm intended role
typedef boost::shared_ptr<const Ordering> shared_ordering; typedef boost::shared_ptr<const Ordering> shared_ordering; ///ordering parameters
typedef boost::shared_ptr<GS> shared_solver; typedef boost::shared_ptr<GS> shared_solver; /// Solver
typedef NonlinearOptimizationParameters Parameters; typedef NonlinearOptimizationParameters Parameters; ///These take the parameters defined in NonLinearOptimizationParameters.h
typedef boost::shared_ptr<const Parameters> shared_parameters ; typedef boost::shared_ptr<const Parameters> shared_parameters ; ///
typedef boost::shared_ptr<VariableIndex> shared_structure; // TODO: make this const typedef boost::shared_ptr<VariableIndex> shared_structure; // TODO: make this const
private: private:
@ -76,8 +76,8 @@ private:
typedef NonlinearOptimizer<G, T, L, GS> This; typedef NonlinearOptimizer<G, T, L, GS> This;
typedef boost::shared_ptr<const std::vector<size_t> > shared_dimensions; typedef boost::shared_ptr<const std::vector<size_t> > shared_dimensions;
// keep a reference to const version of the graph /// keep a reference to const version of the graph
// These normally do not change /// These normally do not change
const shared_graph graph_; const shared_graph graph_;
// keep a values structure and its error // keep a values structure and its error
@ -234,12 +234,12 @@ public:
*/ */
NonlinearOptimizer iterate() const; NonlinearOptimizer iterate() const;
/** ///
* Optimize a solution for a non linear factor graph ///Optimize a solution for a non linear factor graph
* @param relativeThreshold ///@param relativeThreshold
* @param absoluteThreshold ///@param absoluteThreshold
* @param verbosity Integer specifying how much output to provide ///@param verbosity Integer specifying how much output to provide
*/ ///
// suggested interface // suggested interface
NonlinearOptimizer gaussNewton() const; NonlinearOptimizer gaussNewton() const;
@ -252,31 +252,31 @@ public:
*/ */
NonlinearOptimizer iterateLM(); NonlinearOptimizer iterateLM();
/** ///
* Optimize using Levenberg-Marquardt. Really Levenberg's ///Optimize using Levenberg-Marquardt. Really Levenberg's
* algorithm at this moment, as we just add I*\lambda to Hessian ///algorithm at this moment, as we just add I*\lambda to Hessian
* H'H. The probabilistic explanation is very simple: every ///H'H. The probabilistic explanation is very simple: every
* variable gets an extra Gaussian prior that biases staying at ///variable gets an extra Gaussian prior that biases staying at
* current value, with variance 1/lambda. This is done very easily ///current value, with variance 1/lambda. This is done very easily
* (but perhaps wastefully) by adding a prior factor for each of ///(but perhaps wastefully) by adding a prior factor for each of
* the variables, after linearization. ///the variables, after linearization.
* ///
* @param relativeThreshold ///@param relativeThreshold
* @param absoluteThreshold ///@param absoluteThreshold
* @param verbosity Integer specifying how much output to provide ///@param verbosity Integer specifying how much output to provide
* @param lambdaFactor Factor by which to decrease/increase lambda ///@param lambdaFactor Factor by which to decrease/increase lambda
*/ ///
NonlinearOptimizer levenbergMarquardt(); NonlinearOptimizer levenbergMarquardt();
// static interfaces to LM and GN optimization techniques // static interfaces to LM and GN optimization techniques
/** ///
* Static interface to LM optimization using default ordering and thresholds ///Static interface to LM optimization using default ordering and thresholds
* @param graph Nonlinear factor graph to optimize ///@param graph Nonlinear factor graph to optimize
* @param values Initial values ///@param values Initial values
* @param verbosity Integer specifying how much output to provide ///@param verbosity Integer specifying how much output to provide
* @return an optimized values structure ///@return an optimized values structure
*/ ///
static shared_values optimizeLM(shared_graph graph, static shared_values optimizeLM(shared_graph graph,
shared_values values, shared_values values,
shared_parameters parameters = boost::make_shared<Parameters>()) { shared_parameters parameters = boost::make_shared<Parameters>()) {
@ -317,13 +317,13 @@ public:
verbosity); verbosity);
} }
/** ///
* Static interface to GN optimization using default ordering and thresholds ///Static interface to GN optimization using default ordering and thresholds
* @param graph Nonlinear factor graph to optimize ///@param graph Nonlinear factor graph to optimize
* @param values Initial values ///@param values Initial values
* @param verbosity Integer specifying how much output to provide ///@param verbosity Integer specifying how much output to provide
* @return an optimized values structure ///@return an optimized values structure
*/ ///
static shared_values optimizeGN(shared_graph graph, static shared_values optimizeGN(shared_graph graph,
shared_values values, shared_values values,
shared_parameters parameters = boost::make_shared<Parameters>()) { shared_parameters parameters = boost::make_shared<Parameters>()) {