(in branch) first pass at Dogleg in ISAM2, bug(s) remaining
parent dace9e213c
commit f3de9e425f
@@ -36,7 +36,7 @@
 namespace gtsam {

   // Forward declaration of BayesTreeClique which is defined below BayesTree in this file
-  template<class CONDITIONAL> class BayesTreeClique;
+  template<class CONDITIONAL> struct BayesTreeClique;

   /**
    * Bayes tree

@@ -39,7 +39,8 @@ namespace gtsam {
   * possible because all cliques in a BayesTree are the same type - if they
   * were not then we'd need a virtual class.
   *
-  * @tparam The derived clique type.
+  * @tparam DERIVED The derived clique type.
+  * @tparam CONDITIONAL The conditional type.
   */
  template<class DERIVED, class CONDITIONAL>
  struct BayesTreeCliqueBase {

@@ -20,8 +20,6 @@
 #include <boost/foreach.hpp>
 #include <boost/format.hpp>
 #include <boost/make_shared.hpp>
-#include <boost/lambda/bind.hpp>
-#include <boost/lambda/lambda.hpp>
 #include <boost/tuple/tuple.hpp>
 #include <boost/bind.hpp>

@@ -38,8 +36,6 @@
 using namespace std;

-using namespace boost::lambda;
-
 namespace gtsam {

 /* ************************************************************************* */

@@ -25,7 +25,7 @@ VectorValues DoglegOptimizerImpl::ComputeDoglegPoint(
     return x_d;
   } else if(DeltaSq < x_n_norm_sq) {
     // Trust region boundary is between steepest descent point and Newton's method point
-    return ComputeBlend(Delta, dx_u, dx_n);
+    return ComputeBlend(Delta, dx_u, dx_n, verbose);
   } else {
     assert(DeltaSq >= x_n_norm_sq);
     if(verbose) cout << "In pure Newton's method region" << endl;

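For reference, the region logic that ComputeDoglegPoint and ComputeBlend implement together can be sketched standalone. Below is a minimal illustration using plain Eigen vectors rather than GTSAM's VectorValues; the function name and signature are assumptions for this sketch, not the GTSAM API.

// Sketch: classic Powell dogleg point selection (illustrative, not GTSAM's code).
#include <cmath>
#include <Eigen/Dense>

Eigen::VectorXd doglegPoint(double Delta,
                            const Eigen::VectorXd& dx_u,    // steepest-descent point
                            const Eigen::VectorXd& dx_n) {  // Newton's method point
  const double DeltaSq = Delta * Delta;
  if(dx_u.squaredNorm() >= DeltaSq) {
    // Steepest-descent point lies outside the trust region: truncate it.
    return (Delta / dx_u.norm()) * dx_u;
  } else if(dx_n.squaredNorm() > DeltaSq) {
    // Boundary lies between the two points: blend along dx_u + tau*(dx_n - dx_u),
    // choosing tau in [0,1] so the blended step has norm exactly Delta.
    const Eigen::VectorXd d = dx_n - dx_u;
    const double a = d.squaredNorm();
    const double b = 2.0 * dx_u.dot(d);
    const double c = dx_u.squaredNorm() - DeltaSq;
    const double tau = (-b + std::sqrt(b*b - 4.0*a*c)) / (2.0*a);
    return dx_u + tau * d;
  } else {
    // Newton's method point is inside the trust region: take the full step.
    return dx_n;
  }
}
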
@@ -155,7 +155,7 @@ typename DoglegOptimizerImpl::IterationResult DoglegOptimizerImpl::Iterate(
   bool stay = true;
   while(stay) {
     // Compute dog leg point
-    result.dx_d = ComputeDoglegPoint(Delta, dx_u, dx_n);
+    result.dx_d = ComputeDoglegPoint(Delta, dx_u, dx_n, verbose);

     if(verbose) cout << "Delta = " << Delta << ", dx_d_norm = " << result.dx_d.vector().norm() << endl;

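The rest of the Iterate loop (not shown in this hunk) accepts or rejects the dogleg step and adapts Delta based on the gain ratio. A hedged sketch of that standard trust-region update, with illustrative thresholds (the actual constants in DoglegOptimizerImpl may differ):

// Sketch of gain-ratio-driven trust-region adaptation:
//   rho = (error(x) - error(x + dx_d)) / (M(0) - M(dx_d)),
// where M is the local quadratic model. Constants are illustrative.
#include <algorithm>

double adaptDelta(double Delta, double rho, double dx_d_norm) {
  if(rho > 0.75)
    Delta = std::max(Delta, 3.0 * dx_d_norm);  // model is trustworthy: grow
  else if(rho < 0.25)
    Delta = 0.5 * dx_d_norm;                   // model is poor: shrink and retry
  return Delta;
}
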
@@ -17,6 +17,7 @@

 #include <gtsam/inference/FactorGraph.h>
+#include <gtsam/linear/JacobianFactor.h>
 #include <gtsam/nonlinear/GaussianISAM2.h>

 using namespace std;
 using namespace gtsam;

@@ -37,7 +38,24 @@ namespace gtsam {

 /* ************************************************************************* */
 VectorValues gradient(const BayesTree<GaussianConditional, ISAM2Clique<GaussianConditional> >& bayesTree, const VectorValues& x0) {
-  return gradient(FactorGraph<JacobianFactor>(bayesTree));
+  return gradient(FactorGraph<JacobianFactor>(bayesTree), x0);
 }

+/* ************************************************************************* */
+static void gradientAtZeroTreeAdder(const boost::shared_ptr<ISAM2Clique<GaussianConditional> >& root, VectorValues& g) {
+  // Loop through variables in each clique, adding contributions
+  int variablePosition = 0;
+  for(GaussianConditional::const_iterator jit = root->conditional()->begin(); jit != root->conditional()->end(); ++jit) {
+    const int dim = root->conditional()->dim(jit);
+    g[*jit] += root->gradientContribution().segment(variablePosition, dim);
+    variablePosition += dim;
+  }
+
+  // Recursively add contributions from children
+  typedef boost::shared_ptr<ISAM2Clique<GaussianConditional> > sharedClique;
+  BOOST_FOREACH(const sharedClique& child, root->children()) {
+    gradientAtZeroTreeAdder(child, g);
+  }
+}
+
 /* ************************************************************************* */

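The gradientContribution scattered by this traversal is, per clique, the gradient at zero of that clique's conditional density 0.5*||R x - d||^2, which is -R^T d. A minimal Eigen illustration of the quantity (function name and flattened types are assumptions for this sketch):

#include <Eigen/Dense>

// Gradient of 0.5*||R*x - d||^2 evaluated at x = 0: R^T (R*0 - d) = -R^T d.
// Each clique stores this vector, and the tree traversal above scatters its
// per-variable segments into the full gradient g.
Eigen::VectorXd gradientContributionAtZero(const Eigen::MatrixXd& R,
                                           const Eigen::VectorXd& d) {
  return -R.transpose() * d;
}
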
@@ -46,16 +64,7 @@ namespace gtsam {
   g.setZero();

-  // Sum up contributions for each clique
-  typedef boost::shared_ptr<ISAM2Clique<GaussianConditional> > sharedClique;
-  BOOST_FOREACH(const sharedClique& clique, bayesTree.nodes()) {
-    // Loop through variables in each clique, adding contributions
-    int variablePosition = 0;
-    for(GaussianConditional::const_iterator jit = clique->conditional()->beginFrontals(); jit != clique->conditional()->endFrontals(); ++jit) {
-      const int dim = clique->conditional()->dim(jit);
-      x0[*jit] += clique->gradientContribution().segment(variablePosition, dim);
-      variablePosition += dim;
-    }
-  }
+  gradientAtZeroTreeAdder(bayesTree.root(), g);
 }

 }

@@ -92,7 +92,7 @@ VectorValues gradient(const BayesTree<GaussianConditional>& bayesTree, const Vec
  * @param [output] g A VectorValues to store the gradient, which must be preallocated, see allocateVectorValues
  * @return The gradient as a VectorValues
  */
-VectorValues gradientAtZero(const BayesTree<GaussianConditional>& bayesTree, VectorValues& g);
+void gradientAtZero(const BayesTree<GaussianConditional>& bayesTree, VectorValues& g);

 /**
  * Compute the gradient of the energy function,

@@ -118,7 +118,7 @@ VectorValues gradient(const BayesTree<GaussianConditional, ISAM2Clique<GaussianC
  * @param [output] g A VectorValues to store the gradient, which must be preallocated, see allocateVectorValues
  * @return The gradient as a VectorValues
  */
-VectorValues gradientAtZero(const BayesTree<GaussianConditional, ISAM2Clique<GaussianConditional> >& bayesTree, VectorValues& g);
+void gradientAtZero(const BayesTree<GaussianConditional, ISAM2Clique<GaussianConditional> >& bayesTree, VectorValues& g);

 }/// namespace gtsam

@@ -28,7 +28,7 @@ using namespace boost::assign;
 #include <gtsam/linear/HessianFactor.h>

 #include <gtsam/nonlinear/ISAM2-impl-inl.h>
-
+#include <gtsam/nonlinear/DoglegOptimizerImpl.h>

 namespace gtsam {

@@ -42,12 +42,18 @@ static const bool structuralLast = false;
 /* ************************************************************************* */
 template<class CONDITIONAL, class VALUES, class GRAPH>
 ISAM2<CONDITIONAL, VALUES, GRAPH>::ISAM2(const ISAM2Params& params):
-    delta_(Permutation(), deltaUnpermuted_), params_(params) {}
+    delta_(Permutation(), deltaUnpermuted_), params_(params) {
+  if(params_.optimizationParams.type() == typeid(ISAM2DoglegParams))
+    doglegDelta_ = boost::get<ISAM2DoglegParams>(params_.optimizationParams).initialDelta;
+}

 /* ************************************************************************* */
 template<class CONDITIONAL, class VALUES, class GRAPH>
 ISAM2<CONDITIONAL, VALUES, GRAPH>::ISAM2():
-    delta_(Permutation(), deltaUnpermuted_) {}
+    delta_(Permutation(), deltaUnpermuted_) {
+  if(params_.optimizationParams.type() == typeid(ISAM2DoglegParams))
+    doglegDelta_ = boost::get<ISAM2DoglegParams>(params_.optimizationParams).initialDelta;
+}

 /* ************************************************************************* */
 template<class CONDITIONAL, class VALUES, class GRAPH>

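The constructors above inspect the boost::variant with type()/typeid before extracting the active alternative with boost::get. That pattern in isolation, with stand-in structs (hypothetical names, for illustration only):

#include <typeinfo>
#include <boost/variant.hpp>

struct GNParams { double wildfireThreshold; };  // stand-in for ISAM2GaussNewtonParams
struct DLParams { double initialDelta; };       // stand-in for ISAM2DoglegParams
typedef boost::variant<GNParams, DLParams> OptParams;

// Only initialize a trust-region radius when the dogleg alternative is active,
// mirroring the constructor bodies above.
bool maybeInitialDelta(const OptParams& p, double& delta) {
  if(p.type() == typeid(DLParams)) {
    delta = boost::get<DLParams>(p).initialDelta;
    return true;
  }
  return false;
}
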
@@ -518,25 +524,37 @@ ISAM2Result ISAM2<CONDITIONAL, VALUES, GRAPH>::update(

   tic(9,"solve");
   // 9. Solve
-  if (params_.wildfireThreshold <= 0.0 || disableReordering) {
-    VectorValues newDelta(theta_.dims(ordering_));
-    optimize2(this->root(), newDelta);
-    if(debug) newDelta.print("newDelta: ");
-    assert(newDelta.size() == delta_.size());
-    delta_.permutation() = Permutation::Identity(delta_.size());
-    delta_.container() = newDelta;
-    lastBacksubVariableCount = theta_.size();
-  } else {
-    vector<bool> replacedKeysMask(variableIndex_.size(), false);
-    if(replacedKeys) {
-      BOOST_FOREACH(const Index var, *replacedKeys) {
-        replacedKeysMask[var] = true; } }
-    lastBacksubVariableCount = optimize2(this->root(), params_.wildfireThreshold, replacedKeysMask, delta_); // modifies delta_
+  if(params_.optimizationParams.type() == typeid(ISAM2GaussNewtonParams)) {
+    const ISAM2GaussNewtonParams& gaussNewtonParams = boost::get<ISAM2GaussNewtonParams>(params_.optimizationParams);
+    if (gaussNewtonParams.wildfireThreshold <= 0.0 || disableReordering) {
+      VectorValues newDelta(theta_.dims(ordering_));
+      optimize2(this->root(), newDelta);
+      if(debug) newDelta.print("newDelta: ");
+      assert(newDelta.size() == delta_.size());
+      delta_.permutation() = Permutation::Identity(delta_.size());
+      delta_.container() = newDelta;
+      lastBacksubVariableCount = theta_.size();
+    } else {
+      vector<bool> replacedKeysMask(variableIndex_.size(), false);
+      if(replacedKeys) {
+        BOOST_FOREACH(const Index var, *replacedKeys) {
+          replacedKeysMask[var] = true; } }
+      lastBacksubVariableCount = optimize2(this->root(), gaussNewtonParams.wildfireThreshold, replacedKeysMask, delta_); // modifies delta_

 #ifndef NDEBUG
-    for(size_t j=0; j<delta_.container().size(); ++j)
-      assert(delta_.container()[j].unaryExpr(&isfinite<double>).all());
+      for(size_t j=0; j<delta_.container().size(); ++j)
+        assert(delta_.container()[j].unaryExpr(&isfinite<double>).all());
 #endif
+    }
+  } else if(params_.optimizationParams.type() == typeid(ISAM2DoglegParams)) {
+    const ISAM2DoglegParams& doglegParams = boost::get<ISAM2DoglegParams>(params_.optimizationParams);
+    // Do one Dogleg iteration
+    DoglegOptimizerImpl::IterationResult doglegResult = DoglegOptimizerImpl::Iterate(
+        *doglegDelta_, DoglegOptimizerImpl::ONE_STEP_PER_ITERATION, *this, nonlinearFactors_, theta_, ordering_, nonlinearFactors_.error(theta_), doglegParams.verbose);
+    // Update Delta and linear step
+    doglegDelta_ = doglegResult.Delta;
+    delta_.permutation() = Permutation::Identity(delta_.size()); // Dogleg solves for the full delta so there is no permutation
+    delta_.container() = doglegResult.dx_d; // Copy the VectorValues containing the linear solution
   }
   toc(9,"solve");

@@ -578,5 +596,16 @@ VALUES ISAM2<CONDITIONAL, VALUES, GRAPH>::calculateBestEstimate() const {
   return theta_.retract(delta, ordering_);
 }

+/* ************************************************************************* */
+template<class CONDITIONAL, class VALUES, class GRAPH>
+VectorValues optimize(const ISAM2<CONDITIONAL,VALUES,GRAPH>& isam) {
+  VectorValues delta = *allocateVectorValues(isam);
+  for(Index j=0; j<isam.getDelta().size(); ++j)
+    delta[j] = isam.getDelta()[j];
+  delta.print("delta: ");
+  assert_equal(isam.getDelta().container(), delta);
+  return delta;
+}
+
 } /// namespace gtsam

@@ -22,18 +22,59 @@
 #include <gtsam/nonlinear/NonlinearFactorGraph.h>
 #include <gtsam/inference/BayesTree.h>

+#include <boost/variant.hpp>
+
 namespace gtsam {

+/**
+ * @defgroup ISAM2
+ */
+
+/**
+ * @ingroup ISAM2
+ * Parameters for ISAM2 using Gauss-Newton optimization. Either this class or
+ * ISAM2DoglegParams should be specified as the optimizationParams in
+ * ISAM2Params, which should in turn be passed to ISAM2(const ISAM2Params&).
+ */
+struct ISAM2GaussNewtonParams {
+  double wildfireThreshold; ///< Continue updating the linear delta only when changes are above this threshold (default: 0.001)
+
+  /** Specify parameters as constructor arguments */
+  ISAM2GaussNewtonParams(
+      double _wildfireThreshold = 0.001 ///< see ISAM2GaussNewtonParams public variables, ISAM2GaussNewtonParams::wildfireThreshold
+  ) : wildfireThreshold(_wildfireThreshold) {}
+};
+
+/**
+ * @ingroup ISAM2
+ * Parameters for ISAM2 using Dogleg optimization. Either this class or
+ * ISAM2GaussNewtonParams should be specified as the optimizationParams in
+ * ISAM2Params, which should in turn be passed to ISAM2(const ISAM2Params&).
+ */
+struct ISAM2DoglegParams {
+  double initialDelta; ///< The initial trust region radius for Dogleg
+  bool verbose; ///< Whether Dogleg prints iteration and convergence information
+
+  /** Specify parameters as constructor arguments */
+  ISAM2DoglegParams(
+      double _initialDelta = 1.0, ///< see ISAM2DoglegParams public variables, ISAM2DoglegParams::initialDelta
+      bool _verbose = false ///< see ISAM2DoglegParams public variables, ISAM2DoglegParams::verbose
+  ) : initialDelta(_initialDelta), verbose(_verbose) {}
+};
+
 /**
+ * @ingroup ISAM2
  * Parameters for the ISAM2 algorithm. Default parameter values are listed below.
  */
 struct ISAM2Params {
-  double wildfireThreshold; ///< Continue updating the linear delta only when changes are above this threshold (default: 0.001)
+  typedef boost::variant<ISAM2GaussNewtonParams, ISAM2DoglegParams> OptimizationParams; ///< Either ISAM2GaussNewtonParams or ISAM2DoglegParams
+  /** Optimization parameters; this both selects the nonlinear optimization
+   * method and specifies its parameters, either ISAM2GaussNewtonParams or
+   * ISAM2DoglegParams. In the former case, Gauss-Newton optimization will be
+   * used with the specified parameters, and in the latter Powell's dog-leg
+   * algorithm will be used with the specified parameters.
+   */
+  OptimizationParams optimizationParams;
   double relinearizeThreshold; ///< Only relinearize variables whose linear delta magnitude is greater than this threshold (default: 0.1)
   int relinearizeSkip; ///< Only relinearize any variables every relinearizeSkip calls to ISAM2::update (default: 10)
   bool enableRelinearization; ///< Controls whether ISAM2 will ever relinearize any variables (default: true)

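With this layout, selecting the optimizer amounts to constructing ISAM2Params with the desired alternative as its first argument. A short usage sketch, grounded in the constructor shown in the next hunk:

// Gauss-Newton with a custom wildfire threshold:
ISAM2Params gnParams(ISAM2GaussNewtonParams(0.001));

// Dogleg with initial trust region radius 1.0 and verbose output:
ISAM2Params dlParams(ISAM2DoglegParams(1.0, true));
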
@@ -41,12 +82,12 @@ struct ISAM2Params {

   /** Specify parameters as constructor arguments */
   ISAM2Params(
-      double _wildfireThreshold = 0.001, ///< ISAM2Params::wildfireThreshold
-      double _relinearizeThreshold = 0.1, ///< ISAM2Params::relinearizeThreshold
-      int _relinearizeSkip = 10, ///< ISAM2Params::relinearizeSkip
-      bool _enableRelinearization = true, ///< ISAM2Params::enableRelinearization
-      bool _evaluateNonlinearError = false ///< ISAM2Params::evaluateNonlinearError
-  ) : wildfireThreshold(_wildfireThreshold), relinearizeThreshold(_relinearizeThreshold),
+      OptimizationParams _optimizationParams = ISAM2GaussNewtonParams(), ///< see ISAM2Params public variables, ISAM2Params::optimizationParams
+      double _relinearizeThreshold = 0.1, ///< see ISAM2Params public variables, ISAM2Params::relinearizeThreshold
+      int _relinearizeSkip = 10, ///< see ISAM2Params public variables, ISAM2Params::relinearizeSkip
+      bool _enableRelinearization = true, ///< see ISAM2Params public variables, ISAM2Params::enableRelinearization
+      bool _evaluateNonlinearError = false ///< see ISAM2Params public variables, ISAM2Params::evaluateNonlinearError
+  ) : optimizationParams(_optimizationParams), relinearizeThreshold(_relinearizeThreshold),
       relinearizeSkip(_relinearizeSkip), enableRelinearization(_enableRelinearization),
       evaluateNonlinearError(_evaluateNonlinearError) {}
 };

@@ -153,8 +194,12 @@ struct ISAM2Clique : public BayesTreeCliqueBase<ISAM2Clique<CONDITIONAL>, CONDIT
   /** print this node */
   void print(const std::string& s = "") const {
     Base::print(s);
-    if(cachedFactor_) cachedFactor_->print(s + "Cached: ");
-    else cout << s << "Cached empty" << endl;
+    if(cachedFactor_)
+      cachedFactor_->print(s + "Cached: ");
+    else
+      cout << s << "Cached empty" << endl;
+    if(gradientContribution_.rows() != 0)
+      gtsam::print(gradientContribution_, "Gradient contribution: ");
   }

   void permuteWithInverse(const Permutation& inversePermutation) {

@@ -224,6 +269,9 @@ protected:
   /** The current parameters */
   ISAM2Params params_;

+  /** The current Dogleg Delta (trust region radius) */
+  boost::optional<double> doglegDelta_;
+
 private:
 #ifndef NDEBUG
   std::vector<bool> lastRelinVariables_;

@@ -258,6 +306,7 @@ public:
     newISAM2->nonlinearFactors_ = nonlinearFactors_;
     newISAM2->ordering_ = ordering_;
     newISAM2->params_ = params_;
+    newISAM2->doglegDelta_ = doglegDelta_;
 #ifndef NDEBUG
     newISAM2->lastRelinVariables_ = lastRelinVariables_;
 #endif

@@ -350,6 +399,10 @@ private:

 }; // ISAM2

+/** Get the linear delta for the ISAM2 object, i.e., the delta returned by ISAM2::getDelta() but unpermuted */
+template<class CONDITIONAL, class VALUES, class GRAPH>
+VectorValues optimize(const ISAM2<CONDITIONAL,VALUES,GRAPH>& isam);
+
 } /// namespace gtsam

 #include <gtsam/nonlinear/ISAM2-inl.h>

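A possible usage of the free optimize() declared above, assuming an ISAM2 instance isam plus access to its linearization point theta and ordering (hypothetical variable names; calculateBestEstimate() performs the equivalent internally):

// Hypothetical usage sketch: fetch the unpermuted linear update and retract.
VectorValues delta = optimize(isam);               // unpermuted copy of isam.getDelta()
Values estimate = theta.retract(delta, ordering);  // same result as calculateBestEstimate()
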
@@ -210,7 +210,7 @@ TEST_UNSAFE(ISAM2, slamlike_solution)
   SharedDiagonal brNoise = sharedSigmas(Vector_(2, M_PI/100.0, 0.1));

   // These variables will be reused and accumulate factors and values
-  GaussianISAM2<planarSLAM::Values> isam(ISAM2Params(0.001, 0.0, 0, false));
+  GaussianISAM2<planarSLAM::Values> isam(ISAM2Params(ISAM2GaussNewtonParams(0.001), 0.0, 0, false));
   planarSLAM::Values fullinit;
   planarSLAM::Graph fullgraph;

@@ -297,9 +297,33 @@ TEST_UNSAFE(ISAM2, slamlike_solution)
   // Compare solutions
   EXPECT(isam_check(fullgraph, fullinit, isam));

+  // Check gradient at each node
+  typedef GaussianISAM2<planarSLAM::Values>::sharedClique sharedClique;
+  BOOST_FOREACH(const sharedClique& clique, isam.nodes()) {
+    // Compute expected gradient
+    FactorGraph<JacobianFactor> jfg;
+    jfg.push_back(JacobianFactor::shared_ptr(new JacobianFactor(*clique->conditional())));
+    VectorValues expectedGradient(*allocateVectorValues(isam));
+    gradientAtZero(jfg, expectedGradient);
+    // Compare with actual gradients
+    int variablePosition = 0;
+    for(GaussianConditional::const_iterator jit = clique->conditional()->begin(); jit != clique->conditional()->end(); ++jit) {
+      const int dim = clique->conditional()->dim(jit);
+      Vector actual = clique->gradientContribution().segment(variablePosition, dim);
+      EXPECT(assert_equal(expectedGradient[*jit], actual));
+      variablePosition += dim;
+    }
+    LONGS_EQUAL(clique->gradientContribution().rows(), variablePosition);
+  }
+
   // Check gradient
   VectorValues expectedGradient(*allocateVectorValues(isam));
-  gradient
+  gradientAtZero(FactorGraph<JacobianFactor>(isam), expectedGradient);
+  VectorValues expectedGradient2(gradient(FactorGraph<JacobianFactor>(isam), VectorValues::Zero(expectedGradient)));
   VectorValues actualGradient(*allocateVectorValues(isam));
   gradientAtZero(isam, actualGradient);
+  EXPECT(assert_equal(expectedGradient2, expectedGradient));
   EXPECT(assert_equal(expectedGradient, actualGradient));
 }

 /* ************************************************************************* */

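The consistency check added above rests on a simple identity: for a Gaussian factor graph with factors 0.5*||A_i x - b_i||^2, the gradient at x0 is sum_i A_i^T (A_i x0 - b_i), so gradient(., 0) must agree with gradientAtZero(.). A simplified Eigen sketch of that identity (flattened types and names are assumptions, not the test's API):

#include <vector>
#include <Eigen/Dense>

// Gradient of sum_i 0.5*||A_i*x - b_i||^2 at x0; at x0 = 0 this reduces to
// -sum_i A_i^T b_i, which is what gradientAtZero computes directly.
Eigen::VectorXd graphGradient(const std::vector<Eigen::MatrixXd>& A,
                              const std::vector<Eigen::VectorXd>& b,
                              const Eigen::VectorXd& x0) {
  Eigen::VectorXd g = Eigen::VectorXd::Zero(x0.size());
  for(std::size_t i = 0; i < A.size(); ++i)
    g += A[i].transpose() * (A[i] * x0 - b[i]);
  return g;
}
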
@@ -314,7 +338,7 @@ TEST_UNSAFE(ISAM2, clone) {
   SharedDiagonal brNoise = sharedSigmas(Vector_(2, M_PI/100.0, 0.1));

   // These variables will be reused and accumulate factors and values
-  GaussianISAM2<planarSLAM::Values> isam(ISAM2Params(0.001, 0.0, 0, false, true));
+  GaussianISAM2<planarSLAM::Values> isam(ISAM2Params(ISAM2GaussNewtonParams(0.001), 0.0, 0, false, true));
   planarSLAM::Values fullinit;
   planarSLAM::Graph fullgraph;