diff --git a/gtsam/nonlinear/DoglegOptimizer.cpp b/gtsam/nonlinear/DoglegOptimizer.cpp
index 9a7822c6f..f910c9661 100644
--- a/gtsam/nonlinear/DoglegOptimizer.cpp
+++ b/gtsam/nonlinear/DoglegOptimizer.cpp
@@ -27,11 +27,13 @@ using namespace std;
 namespace gtsam {
 
 /* ************************************************************************* */
-NonlinearOptimizer::SharedState DoglegOptimizer::iterate(const NonlinearOptimizer::SharedState& current) const {
+NonlinearOptimizer::SharedState DoglegOptimizer::iterate(const NonlinearOptimizer::SharedState& _current) const {
+
+  const DoglegState& current = dynamic_cast<const DoglegState&>(*_current);
 
   // Linearize graph
-  const Ordering& ordering = *ordering(current->values);
-  GaussianFactorGraph::shared_ptr linear = graph_->linearize(current->values, ordering);
+  const Ordering& ordering = this->ordering(current.values);
+  GaussianFactorGraph::shared_ptr linear = graph_->linearize(current.values, ordering);
 
   // Check whether to use QR
   bool useQR;
@@ -50,24 +52,24 @@ NonlinearOptimizer::SharedState DoglegOptimizer::iterate(const NonlinearOptimize
   if(params_->elimination == DoglegParams::MULTIFRONTAL) {
     GaussianBayesTree::shared_ptr bt = GaussianMultifrontalSolver(*linear, useQR).eliminate();
-    result = DoglegOptimizerImpl::Iterate(current->Delta, DoglegOptimizerImpl::ONE_STEP_PER_ITERATION, *bt, *graph_, *values(), ordering, error(), dlVerbose);
+    result = DoglegOptimizerImpl::Iterate(current.Delta, DoglegOptimizerImpl::ONE_STEP_PER_ITERATION, *bt, *graph_, current.values, ordering, current.error, dlVerbose);
 
   } else if(params_->elimination == DoglegParams::SEQUENTIAL) {
     GaussianBayesNet::shared_ptr bn = GaussianSequentialSolver(*linear, useQR).eliminate();
-    result = DoglegOptimizerImpl::Iterate(current->Delta, DoglegOptimizerImpl::ONE_STEP_PER_ITERATION, *bn, *graph_, *values(), ordering, error(), dlVerbose);
+    result = DoglegOptimizerImpl::Iterate(current.Delta, DoglegOptimizerImpl::ONE_STEP_PER_ITERATION, *bn, *graph_, current.values, ordering, current.error, dlVerbose);
 
   } else {
     throw runtime_error("Optimization parameter is invalid: DoglegParams::elimination");
   }
 
+  // Maybe show output
+  if(params_->verbosity >= NonlinearOptimizerParams::DELTA) result.dx_d.print("delta");
+
   // Create new state with new values and new error
-  DoglegOptimizer::SharedState newState = boost::make_shared<DoglegState>();
-
-  // Update values
-  newState->values = current->values.retract(result.dx_d, ordering);
-
+  SharedState newState = boost::make_shared<DoglegState>();
+  newState->values = current.values.retract(result.dx_d, ordering);
   newState->error = result.f_error;
-  newState->iterations = current->iterations + 1;
+  newState->iterations = current.iterations + 1;
   newState->Delta = result.Delta;
 
   return newState;
@@ -76,7 +78,7 @@ NonlinearOptimizer::SharedState DoglegOptimizer::iterate(const NonlinearOptimize
 /* ************************************************************************* */
 NonlinearOptimizer::SharedState DoglegOptimizer::initialState(const Values& initialValues) const {
   SharedState initial = boost::make_shared<DoglegState>();
-  defaultInitialState(initialValues);
+  defaultInitialState(initialValues, *initial);
   initial->Delta = params_->deltaInitial;
   return initial;
 }
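For illustration, a minimal sketch of driving the reworked state-passing interface above; `graph` and `initial` are assumed to be populated elsewhere, and iterate()/initialState() are assumed to be publicly callable, as the defaultOptimize() driver further below suggests.

    #include <gtsam/nonlinear/DoglegOptimizer.h>
    #include <iostream>

    void runDoglegSketch(const gtsam::NonlinearFactorGraph& graph, const gtsam::Values& initial) {
      gtsam::DoglegOptimizer optimizer(graph);  // ordering computed lazily via COLAMD
      gtsam::NonlinearOptimizer::SharedState state = optimizer.initialState(initial);
      for (size_t i = 0; i < 10; ++i)           // fixed iteration budget for the sketch
        state = optimizer.iterate(state);       // each call returns a fresh state
      std::cout << "error after " << state->iterations
                << " iterations: " << state->error << std::endl;
    }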
diff --git a/gtsam/nonlinear/DoglegOptimizer.h b/gtsam/nonlinear/DoglegOptimizer.h
index 0ef9da536..badd021e1 100644
--- a/gtsam/nonlinear/DoglegOptimizer.h
+++ b/gtsam/nonlinear/DoglegOptimizer.h
@@ -67,7 +67,7 @@ class DoglegOptimizer : public SuccessiveLinearizationOptimizer {
 
 public:
 
-  typedef boost::shared_ptr<const DoglegParams> SharedParams;
+  typedef boost::shared_ptr<DoglegParams> SharedParams;
   typedef boost::shared_ptr<DoglegState> SharedState;
   typedef boost::shared_ptr<DoglegOptimizer> shared_ptr;
 
@@ -83,8 +83,7 @@ public:
    * @param params The optimization parameters
    */
   DoglegOptimizer(const NonlinearFactorGraph& graph,
-      const DoglegParams& params = DoglegParams(),
-      const Ordering& ordering = Ordering()) :
+      const DoglegParams& params = DoglegParams()) :
     SuccessiveLinearizationOptimizer(SharedGraph(new NonlinearFactorGraph(graph))),
     params_(new DoglegParams(params)) {}
 
@@ -98,7 +97,8 @@ public:
    */
   DoglegOptimizer(const NonlinearFactorGraph& graph, const Ordering& ordering) :
     SuccessiveLinearizationOptimizer(SharedGraph(new NonlinearFactorGraph(graph))),
-    params_(new DoglegParams()) {}
+    params_(new DoglegParams()) {
+    params_->ordering = ordering; }
 
   /** Standard constructor, requires a nonlinear factor graph, initial
    * variable assignments, and optimization parameters.
@@ -107,8 +107,7 @@ public:
    * @param graph  The nonlinear factor graph to optimize
    * @param params The optimization parameters
    */
   DoglegOptimizer(const SharedGraph& graph,
-      const DoglegParams& params = DoglegParams(),
-      const SharedOrdering& ordering = SharedOrdering()) :
+      const DoglegParams& params = DoglegParams()) :
     SuccessiveLinearizationOptimizer(graph),
     params_(new DoglegParams(params)) {}
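For illustration, with the ordering-taking constructor now just stashing the ordering in the params struct, the two construction styles below are equivalent; `ordering` is assumed to be a valid gtsam::Ordering built elsewhere.

    #include <gtsam/nonlinear/DoglegOptimizer.h>

    void configureDogleg(const gtsam::NonlinearFactorGraph& graph, const gtsam::Ordering& ordering) {
      // Style 1: pass the ordering directly; the constructor copies it into params_->ordering.
      gtsam::DoglegOptimizer a(graph, ordering);

      // Style 2: set it on the params explicitly; leaving params.ordering unset falls back to COLAMD.
      gtsam::DoglegParams params;
      params.ordering = ordering;   // boost::optional<Ordering>
      gtsam::DoglegOptimizer b(graph, params);
    }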
diff --git a/gtsam/nonlinear/GaussNewtonOptimizer.cpp b/gtsam/nonlinear/GaussNewtonOptimizer.cpp
index 3a5a7aee0..8efc6aead 100644
--- a/gtsam/nonlinear/GaussNewtonOptimizer.cpp
+++ b/gtsam/nonlinear/GaussNewtonOptimizer.cpp
@@ -25,25 +25,28 @@ using namespace std;
 namespace gtsam {
 
 /* ************************************************************************* */
-NonlinearOptimizer::auto_ptr GaussNewtonOptimizer::iterate() const {
+NonlinearOptimizer::SharedState GaussNewtonOptimizer::iterate(const NonlinearOptimizer::SharedState& _current) const {
+
+  const GaussNewtonState& current = dynamic_cast<const GaussNewtonState&>(*_current);
 
   // Linearize graph
-  GaussianFactorGraph::shared_ptr linear = graph_->linearize(*values_, *ordering_);
+  const Ordering& ordering = this->ordering(current.values);
+  GaussianFactorGraph::shared_ptr linear = graph_->linearize(current.values, ordering);
 
   // Check whether to use QR
   bool useQR;
-  if(gnParams_->factorization == GaussNewtonParams::LDL)
+  if(params_->factorization == GaussNewtonParams::LDL)
    useQR = false;
-  else if(gnParams_->factorization == GaussNewtonParams::QR)
+  else if(params_->factorization == GaussNewtonParams::QR)
    useQR = true;
   else
    throw runtime_error("Optimization parameter is invalid: GaussNewtonParams::factorization");
 
   // Optimize
   VectorValues::shared_ptr delta;
-  if(gnParams_->elimination == GaussNewtonParams::MULTIFRONTAL)
+  if(params_->elimination == GaussNewtonParams::MULTIFRONTAL)
    delta = GaussianMultifrontalSolver(*linear, useQR).optimize();
-  else if(gnParams_->elimination == GaussNewtonParams::SEQUENTIAL)
+  else if(params_->elimination == GaussNewtonParams::SEQUENTIAL)
    delta = GaussianSequentialSolver(*linear, useQR).optimize();
   else
    throw runtime_error("Optimization parameter is invalid: GaussNewtonParams::elimination");
@@ -51,21 +54,19 @@ NonlinearOptimizer::auto_ptr GaussNewtonOptimizer::iterate() const {
   // Maybe show output
   if(params_->verbosity >= NonlinearOptimizerParams::DELTA) delta->print("delta");
 
-  // Update values
-  SharedValues newValues(new Values(values_->retract(*delta, *ordering_)));
-  double newError = graph_->error(*newValues);
+  // Create new state with new values and new error
+  SharedState newState = boost::make_shared<GaussNewtonState>();
+  newState->values = current.values.retract(*delta, ordering);
+  newState->error = graph_->error(newState->values);
+  newState->iterations = current.iterations + 1;
 
-  // Create new optimizer with new values and new error
-  NonlinearOptimizer::auto_ptr newOptimizer(new GaussNewtonOptimizer(
-      *this, newValues, newError));
-
-  return newOptimizer;
+  return newState;
 }
 
 /* ************************************************************************* */
 NonlinearOptimizer::SharedState GaussNewtonOptimizer::initialState(const Values& initialValues) const {
   SharedState initial = boost::make_shared<GaussNewtonState>();
-  defaultInitialState(*initial);
+  defaultInitialState(initialValues, *initial);
   return initial;
 }
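For illustration, the parameter fields that iterate() above dispatches on; the enum values come from SuccessiveLinearizationParams and are reachable through GaussNewtonParams, as used in the code.

    #include <gtsam/nonlinear/GaussNewtonOptimizer.h>

    void configureGaussNewton(const gtsam::NonlinearFactorGraph& graph) {
      gtsam::GaussNewtonParams params;
      params.factorization = gtsam::GaussNewtonParams::QR;           // or LDL (the default)
      params.elimination   = gtsam::GaussNewtonParams::MULTIFRONTAL; // or SEQUENTIAL
      params.verbosity     = gtsam::NonlinearOptimizerParams::DELTA; // print each update vector
      gtsam::GaussNewtonOptimizer optimizer(graph, params);
      // ... run the optimizer as usual ...
    }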
diff --git a/gtsam/nonlinear/GaussNewtonOptimizer.h b/gtsam/nonlinear/GaussNewtonOptimizer.h
index 8b1d4fbe6..5e6266feb 100644
--- a/gtsam/nonlinear/GaussNewtonOptimizer.h
+++ b/gtsam/nonlinear/GaussNewtonOptimizer.h
@@ -39,7 +39,7 @@ class GaussNewtonOptimizer : public SuccessiveLinearizationOptimizer {
 
 public:
 
-  typedef boost::shared_ptr<const GaussNewtonParams> SharedParams;
+  typedef boost::shared_ptr<GaussNewtonParams> SharedParams;
 
   /// @name Standard interface
   /// @{
@@ -53,8 +53,7 @@ public:
   * @param params The optimization parameters
   */
   GaussNewtonOptimizer(const NonlinearFactorGraph& graph,
-      const GaussNewtonParams& params = GaussNewtonParams(),
-      const Ordering& ordering = Ordering()) :
+      const GaussNewtonParams& params = GaussNewtonParams()) :
    SuccessiveLinearizationOptimizer(SharedGraph(new NonlinearFactorGraph(graph))),
    params_(new GaussNewtonParams(params)) {}
 
@@ -68,7 +67,8 @@ public:
   */
   GaussNewtonOptimizer(const NonlinearFactorGraph& graph, const Ordering& ordering) :
    SuccessiveLinearizationOptimizer(SharedGraph(new NonlinearFactorGraph(graph))),
-    params_(new GaussNewtonParams()) {}
+    params_(new GaussNewtonParams()) {
+    params_->ordering = ordering; }
 
   /** Standard constructor, requires a nonlinear factor graph, initial
   * variable assignments, and optimization parameters.
@@ -77,8 +77,7 @@ public:
   * @param params The optimization parameters
   */
   GaussNewtonOptimizer(const SharedGraph& graph,
-      const GaussNewtonParams& params = GaussNewtonParams(),
-      const SharedOrdering& ordering = SharedOrdering()) :
+      const GaussNewtonParams& params = GaussNewtonParams()) :
    SuccessiveLinearizationOptimizer(graph),
    params_(new GaussNewtonParams(params)) {}
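For illustration, a single Gauss-Newton step through the new state interface, inspecting the bookkeeping fields the state carries; `graph` and `initial` are assumed to come from elsewhere.

    #include <gtsam/nonlinear/GaussNewtonOptimizer.h>
    #include <iostream>

    void oneGaussNewtonStep(const gtsam::NonlinearFactorGraph& graph, const gtsam::Values& initial) {
      gtsam::GaussNewtonOptimizer optimizer(graph);
      gtsam::NonlinearOptimizer::SharedState state = optimizer.initialState(initial);
      std::cout << "initial error: " << state->error << std::endl;   // filled in by defaultInitialState()
      state = optimizer.iterate(state);
      std::cout << "after " << state->iterations << " iteration, error: " << state->error << std::endl;
    }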
diff --git a/gtsam/nonlinear/LevenbergMarquardtOptimizer.cpp b/gtsam/nonlinear/LevenbergMarquardtOptimizer.cpp
index 5657da572..0e641db29 100644
--- a/gtsam/nonlinear/LevenbergMarquardtOptimizer.cpp
+++ b/gtsam/nonlinear/LevenbergMarquardtOptimizer.cpp
@@ -27,29 +27,36 @@ using namespace std;
 namespace gtsam {
 
 /* ************************************************************************* */
-NonlinearOptimizer::SharedState LevenbergMarquardtOptimizer::iterate(const NonlinearOptimizer::SharedState& current) const {
+NonlinearOptimizer::SharedState LevenbergMarquardtOptimizer::iterate(const NonlinearOptimizer::SharedState& _current) const {
+
+  const LevenbergMarquardtState& current = dynamic_cast<const LevenbergMarquardtState&>(*_current);
 
   // Linearize graph
-  GaussianFactorGraph::shared_ptr linear = graph_->linearize(*values_, *ordering_);
+  const Ordering& ordering = this->ordering(current.values);
+  GaussianFactorGraph::shared_ptr linear = graph_->linearize(current.values, ordering);
 
   // Check whether to use QR
   bool useQR;
-  if(lmParams_->factorization == LevenbergMarquardtParams::LDL)
+  if(params_->factorization == LevenbergMarquardtParams::LDL)
    useQR = false;
-  else if(lmParams_->factorization == LevenbergMarquardtParams::QR)
+  else if(params_->factorization == LevenbergMarquardtParams::QR)
    useQR = true;
   else
    throw runtime_error("Optimization parameter is invalid: LevenbergMarquardtParams::factorization");
 
   // Pull out parameters we'll use
   const NonlinearOptimizerParams::Verbosity nloVerbosity = params_->verbosity;
-  const LevenbergMarquardtParams::LMVerbosity lmVerbosity = lmParams_->lmVerbosity;
-  const double lambdaFactor = lmParams_->lambdaFactor;
+  const LevenbergMarquardtParams::LMVerbosity lmVerbosity = params_->lmVerbosity;
+  const double lambdaFactor = params_->lambdaFactor;
 
   // Variables to update during try_lambda loop
-  double lambda = lambda_;
-  double next_error = error();
-  SharedValues next_values = values();
+  double lambda = current.lambda;
+  double next_error = current.error;
+  Values next_values = current.values;
+
+  // Compute dimensions if we haven't done it yet
+  if(!dimensions_)
+    dimensions_.reset(new vector<size_t>(current.values.dims(ordering)));
 
   // Keep increasing lambda until we make make progress
   while(true) {
@@ -79,9 +86,9 @@ NonlinearOptimizer::SharedState LevenbergMarquardtOptimizer::iterate(const Nonli
 
    // Optimize
    VectorValues::shared_ptr delta;
-    if(lmParams_->elimination == LevenbergMarquardtParams::MULTIFRONTAL)
+    if(params_->elimination == LevenbergMarquardtParams::MULTIFRONTAL)
      delta = GaussianMultifrontalSolver(dampedSystem, useQR).optimize();
-    else if(lmParams_->elimination == LevenbergMarquardtParams::SEQUENTIAL)
+    else if(params_->elimination == LevenbergMarquardtParams::SEQUENTIAL)
      delta = GaussianSequentialSolver(dampedSystem, useQR).optimize();
    else
      throw runtime_error("Optimization parameter is invalid: LevenbergMarquardtParams::elimination");
@@ -90,15 +97,15 @@ NonlinearOptimizer::SharedState LevenbergMarquardtOptimizer::iterate(const Nonli
    if (lmVerbosity >= LevenbergMarquardtParams::TRYDELTA) delta->print("delta");
 
    // update values
-    SharedValues newValues(new Values(values_->retract(*delta, *ordering_)));
+    Values newValues = current.values.retract(*delta, ordering);
 
    // create new optimization state with more adventurous lambda
-    double error = graph_->error(*newValues);
+    double error = graph_->error(newValues);
 
    if (lmVerbosity >= LevenbergMarquardtParams::TRYLAMBDA) cout << "next error = " << error << endl;
 
-    if( error <= error_ ) {
-      next_values = newValues;
+    if(error <= current.error) {
+      next_values.swap(newValues);
      next_error = error;
      lambda /= lambdaFactor;
      break;
@@ -106,7 +113,7 @@ NonlinearOptimizer::SharedState LevenbergMarquardtOptimizer::iterate(const Nonli
      // Either we're not cautious, or the same lambda was worse than the current error.
      // The more adventurous lambda was worse too, so make lambda more conservative
      // and keep the same values.
-      if(lambda >= lmParams_->lambdaUpperBound) {
+      if(lambda >= params_->lambdaUpperBound) {
        if(nloVerbosity >= NonlinearOptimizerParams::ERROR)
          cout << "Warning: Levenberg-Marquardt giving up because cannot decrease error with maximum lambda" << endl;
        break;
@@ -120,7 +127,7 @@ NonlinearOptimizer::SharedState LevenbergMarquardtOptimizer::iterate(const Nonli
      // Either we're not cautious, or the same lambda was worse than the current error.
      // The more adventurous lambda was worse too, so make lambda more conservative
      // and keep the same values.
-      if(lambda >= lmParams_->lambdaUpperBound) {
+      if(lambda >= params_->lambdaUpperBound) {
        if(nloVerbosity >= NonlinearOptimizerParams::ERROR)
          cout << "Warning: Levenberg-Marquardt giving up because cannot decrease error with maximum lambda" << endl;
        break;
@@ -136,8 +143,8 @@ NonlinearOptimizer::SharedState LevenbergMarquardtOptimizer::iterate(const Nonli
   LevenbergMarquardtOptimizer::SharedState newState = boost::make_shared<LevenbergMarquardtState>();
   newState->values = next_values;
   newState->error = next_error;
-  newState->iterations = current->iterations + 1;
-  newState->Delta = lambda;
+  newState->iterations = current.iterations + 1;
+  newState->lambda = lambda;
 
   return newState;
 }
 
@@ -145,7 +152,7 @@ NonlinearOptimizer::SharedState LevenbergMarquardtOptimizer::iterate(const Nonli
 /* ************************************************************************* */
 NonlinearOptimizer::SharedState LevenbergMarquardtOptimizer::initialState(const Values& initialValues) const {
   SharedState initial = boost::make_shared<LevenbergMarquardtState>();
-  defaultInitialState(*initial);
+  defaultInitialState(initialValues, *initial);
   initial->lambda = params_->lambdaInitial;
   return initial;
 }
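For illustration, the three parameters that control the lambda search loop in iterate() above; the field names come from the code, while the numeric values are arbitrary.

    #include <gtsam/nonlinear/LevenbergMarquardtOptimizer.h>

    void configureLevenbergMarquardt(const gtsam::NonlinearFactorGraph& graph) {
      gtsam::LevenbergMarquardtParams params;
      params.lambdaInitial    = 1e-5;  // copied into the initial state by initialState()
      params.lambdaFactor     = 10.0;  // lambda is divided by this on success, raised otherwise
      params.lambdaUpperBound = 1e5;   // iterate() gives up once lambda reaches this bound
      gtsam::LevenbergMarquardtOptimizer optimizer(graph, params);
      // ... run the optimizer as usual ...
    }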
diff --git a/gtsam/nonlinear/LevenbergMarquardtOptimizer.h b/gtsam/nonlinear/LevenbergMarquardtOptimizer.h
index 058bf347b..7787825a9 100644
--- a/gtsam/nonlinear/LevenbergMarquardtOptimizer.h
+++ b/gtsam/nonlinear/LevenbergMarquardtOptimizer.h
@@ -76,7 +76,7 @@ class LevenbergMarquardtOptimizer : public SuccessiveLinearizationOptimizer {
 
 public:
 
-  typedef boost::shared_ptr<const LevenbergMarquardtParams> SharedParams;
+  typedef boost::shared_ptr<LevenbergMarquardtParams> SharedParams;
   typedef boost::shared_ptr<LevenbergMarquardtState> SharedState;
   typedef boost::shared_ptr<LevenbergMarquardtOptimizer> shared_ptr;
 
@@ -92,8 +92,7 @@ public:
   * @param params The optimization parameters
   */
   LevenbergMarquardtOptimizer(const NonlinearFactorGraph& graph,
-      const LevenbergMarquardtParams& params = LevenbergMarquardtParams(),
-      const Ordering& ordering = Ordering()) :
+      const LevenbergMarquardtParams& params = LevenbergMarquardtParams()) :
    SuccessiveLinearizationOptimizer(SharedGraph(new NonlinearFactorGraph(graph))),
    params_(new LevenbergMarquardtParams(params)) {}
 
@@ -105,10 +104,10 @@ public:
   * @param values The initial variable assignments
   * @param params The optimization parameters
   */
-  LevenbergMarquardtOptimizer(const NonlinearFactorGraph& graph,
-      const Ordering& ordering) :
+  LevenbergMarquardtOptimizer(const NonlinearFactorGraph& graph, const Ordering& ordering) :
    SuccessiveLinearizationOptimizer(SharedGraph(new NonlinearFactorGraph(graph))),
-    params_(new LevenbergMarquardtParams()) {}
+    params_(new LevenbergMarquardtParams()) {
+    params_->ordering = ordering; }
 
   /** Standard constructor, requires a nonlinear factor graph, initial
   * variable assignments, and optimization parameters.
@@ -117,8 +116,7 @@ public:
   * @param params The optimization parameters
   */
   LevenbergMarquardtOptimizer(const SharedGraph& graph,
-      const LevenbergMarquardtParams& params = LevenbergMarquardtParams(),
-      const SharedOrdering& ordering = SharedOrdering()) :
+      const LevenbergMarquardtParams& params = LevenbergMarquardtParams()) :
    SuccessiveLinearizationOptimizer(graph),
    params_(new LevenbergMarquardtParams(params)) {}
 
@@ -151,7 +149,7 @@ protected:
 
   typedef boost::shared_ptr<std::vector<size_t> > SharedDimensions;
 
   SharedParams params_;
-  const SharedDimensions dimensions_;
+  mutable SharedDimensions dimensions_; // Mutable because we compute it when needed and cache it
 };
 
 }
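For illustration, turning on the LM-specific trace output consumed in iterate(), and reading the adapted damping factor back out of a returned state; the cast assumes LevenbergMarquardtState is the publicly visible concrete state type, as the iterate() implementation above indicates.

    #include <gtsam/nonlinear/LevenbergMarquardtOptimizer.h>
    #include <boost/shared_ptr.hpp>
    #include <iostream>

    void traceLambda(const gtsam::NonlinearFactorGraph& graph, const gtsam::Values& initial) {
      gtsam::LevenbergMarquardtParams params;
      params.lmVerbosity = gtsam::LevenbergMarquardtParams::TRYLAMBDA;  // print each lambda attempt
      gtsam::LevenbergMarquardtOptimizer optimizer(graph, params);

      gtsam::NonlinearOptimizer::SharedState state = optimizer.iterate(optimizer.initialState(initial));
      boost::shared_ptr<gtsam::LevenbergMarquardtState> lmState =
          boost::dynamic_pointer_cast<gtsam::LevenbergMarquardtState>(state);
      if (lmState)
        std::cout << "lambda after one iteration: " << lmState->lambda << std::endl;
    }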
next->values.print("newValues"); + if(params->verbosity >= NonlinearOptimizerParams::ERROR) cout << "newError: " << next->error << endl; } while(next->iterations < params->maxIterations && !checkConvergence(params->relativeErrorTol, params->absoluteErrorTol, params->errorTol, currentError, next->error, params->verbosity)); @@ -83,10 +69,10 @@ NonlinearOptimizer::SharedState NonlinearOptimizer::defaultOptimize(const Shared } /* ************************************************************************* */ -void NonlinearOptimizer::defaultInitialState(NonlinearOptimizerState& initial) const { - state.values = initialValues; - state.error = graph_->error(initialValues); - state.iterations = 0; +void NonlinearOptimizer::defaultInitialState(const Values& initialValues, NonlinearOptimizerState& initialState) const { + initialState.values = initialValues; + initialState.error = graph_->error(initialValues); + initialState.iterations = 0; } /* ************************************************************************* */ diff --git a/gtsam/nonlinear/NonlinearOptimizer.h b/gtsam/nonlinear/NonlinearOptimizer.h index c4f8be2fc..3b1e1dc58 100644 --- a/gtsam/nonlinear/NonlinearOptimizer.h +++ b/gtsam/nonlinear/NonlinearOptimizer.h @@ -36,7 +36,7 @@ public: LINEAR }; - int maxIterations; ///< The maximum iterations to stop iterating (default 100) + size_t maxIterations; ///< The maximum iterations to stop iterating (default 100) double relativeErrorTol; ///< The maximum relative error decrease to stop iterating (default 1e-5) double absoluteErrorTol; ///< The maximum absolute error decrease to stop iterating (default 1e-5) double errorTol; ///< The maximum total error to stop iterating (default 0.0) @@ -80,9 +80,6 @@ public: /** Virtual destructor */ virtual ~NonlinearOptimizerState() {} - - /** Clone the state (i.e. make a copy of the derived class) */ - virtual boost::shared_ptr clone() = 0; }; @@ -233,7 +230,7 @@ protected: SharedState defaultOptimize(const SharedState& initial) const; /** Initialize a state, using the current error and 0 iterations */ - void defaultInitialState(SharedState& initial) const; + void defaultInitialState(const Values& initialValues, NonlinearOptimizerState& initialState) const; /** Constructor for initial construction of base classes. 
diff --git a/gtsam/nonlinear/SuccessiveLinearizationOptimizer.cpp b/gtsam/nonlinear/SuccessiveLinearizationOptimizer.cpp
index 2303c203a..120db6d62 100644
--- a/gtsam/nonlinear/SuccessiveLinearizationOptimizer.cpp
+++ b/gtsam/nonlinear/SuccessiveLinearizationOptimizer.cpp
@@ -22,20 +22,20 @@
 namespace gtsam {
 
 /* ************************************************************************* */
-const SuccessiveLinearizationOptimizer::SharedOrdering& SuccessiveLinearizationOptimizer::ordering(const Values& values) const {
+const Ordering& SuccessiveLinearizationOptimizer::ordering(const Values& values) const {
 
   if(!ordering_) {
    SharedParams params =
-        boost::dynamic_pointer_cast<const SuccessiveLinearizationParams>(params());
+        boost::dynamic_pointer_cast<const SuccessiveLinearizationParams>(this->params());
 
    // If we're using a COLAMD ordering, compute it
-    if(!params.ordering)
-      ordering_ = graph_->orderingCOLAMD(values);
+    if(params->ordering)
+      ordering_ = params->ordering;
    else
-      ordering_ = params.ordering;
+      ordering_ = *graph_->orderingCOLAMD(values);
   }
 
-  return ordering_;
+  return *ordering_;
 }
 
 } /* namespace gtsam */
diff --git a/gtsam/nonlinear/SuccessiveLinearizationOptimizer.h b/gtsam/nonlinear/SuccessiveLinearizationOptimizer.h
index e8c74d1a8..ad5128b07 100644
--- a/gtsam/nonlinear/SuccessiveLinearizationOptimizer.h
+++ b/gtsam/nonlinear/SuccessiveLinearizationOptimizer.h
@@ -24,6 +24,9 @@
 
 namespace gtsam {
 
+class SuccessiveLinearizationParams;
+class SuccessiveLinearizationState;
+
 /**
 * This is an intermediate base class for NonlinearOptimizers that use direct
 * solving, i.e. factorization.  This class is here to reduce code duplication
@@ -33,23 +36,18 @@ namespace gtsam {
 class SuccessiveLinearizationOptimizer : public NonlinearOptimizer {
 
 public:
 
-  class SuccessiveLinearizationParams;
-  class SuccessiveLinearizationState;
-
   typedef boost::shared_ptr<const SuccessiveLinearizationParams> SharedParams;
   typedef boost::shared_ptr<SuccessiveLinearizationOptimizer> shared_ptr;
 
 protected:
 
-  typedef boost::shared_ptr<const Ordering> SharedOrdering;
-
   SuccessiveLinearizationOptimizer(const SharedGraph& graph) : NonlinearOptimizer(graph) {}
 
-  const SharedOrdering& ordering(const Values& values) const;
+  const Ordering& ordering(const Values& values) const;
 
 private:
 
-  SharedOrdering ordering_;
+  mutable boost::optional<Ordering> ordering_; // Mutable because we set/compute it when needed and cache it
 };
 
 class SuccessiveLinearizationParams : public NonlinearOptimizerParams {
 public:
 
@@ -66,12 +64,9 @@ public:
    QR,
   };
 
-  /** See SuccessiveLinearizationParams::ordering */
-  typedef boost::shared_ptr<const Ordering> SharedOrdering;
-
   Elimination elimination; ///< The elimination algorithm to use (default: MULTIFRONTAL)
   Factorization factorization; ///< The numerical factorization (default: LDL)
-  SharedOrdering ordering; ///< The variable elimination ordering, or empty to use COLAMD (default: empty)
+  boost::optional<Ordering> ordering; ///< The variable elimination ordering, or empty to use COLAMD (default: empty)
 
   SuccessiveLinearizationParams() :
    elimination(MULTIFRONTAL), factorization(LDL) {}
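For illustration, the caching idiom adopted for ordering_ above, shown on a standalone class unrelated to GTSAM: a mutable boost::optional lets a const accessor compute an expensive value on first use and reuse it afterwards without affecting the object's logical constness.

    #include <boost/optional.hpp>
    #include <string>

    class CachedName {
    public:
      const std::string& name() const {
        if (!name_)
          name_ = computeName();   // expensive work happens once, on first request
        return *name_;
      }
    private:
      std::string computeName() const { return "computed"; }
      mutable boost::optional<std::string> name_;  // mutable: a cache, not logical state
    };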