linearize now always returns a shared pointer

release/4.3a0
Frank Dellaert 2010-02-21 21:17:47 +00:00
parent 517c82f62f
commit 694f6e4219
12 changed files with 46 additions and 50 deletions
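
For orientation, this is what the change means at a call site. A minimal sketch, assuming the GTSAM headers of this era are available (the wrapper function and placeholder template parameters are illustrative; the linearize call, the shared-pointer return, and the dereference pattern are taken from the diff below):

    #include <boost/shared_ptr.hpp>
    // gtsam headers omitted here; exact paths depend on the 2010 source layout

    template<class Graph, class Config>
    void afterThisCommit(const Graph& graph, const Config& config) {
        // before: GaussianFactorGraph lfg = graph.linearize(config);               // by value
        //         boost::shared_ptr<GaussianFactorGraph> p = graph.linearize_(config);
        // after:  a single linearize() that always returns a shared pointer
        boost::shared_ptr<gtsam::GaussianFactorGraph> lfg = graph.linearize(config);

        size_t n = lfg->size();                        // members through ->
        gtsam::GaussianFactorGraph copy = *lfg;        // explicit dereference copies the graph
        (void) n; (void) copy;
    }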

View File

@ -278,7 +278,7 @@ namespace gtsam {
* linearize the non-linear graph around the current config
*/
boost::shared_ptr<GaussianFactorGraph> linearize(const NonlinearGraph& g, const Config& config) const {
- return g.linearize_(config);
+ return g.linearize(config);
}
};
}

View File

@ -67,9 +67,10 @@ namespace gtsam {
/** Create a Bayes Tree from a nonlinear factor graph */
template<class Conditional, class Config>
ISAM2<Conditional, Config>::ISAM2(const NonlinearFactorGraph<Config>& nlfg, const Ordering& ordering, const Config& config)
- : BayesTree<Conditional>(nlfg.linearize(config).eliminate(ordering)), theta_(config), thetaFuture_(config), nonlinearFactors_(nlfg) {
+ : BayesTree<Conditional>(nlfg.linearize(config)->eliminate(ordering)), theta_(config), thetaFuture_(config), nonlinearFactors_(nlfg) {
// todo: repeats calculation above, just to set "cached"
- _eliminate_const(nlfg.linearize(config), cached_, ordering);
+ // De-referencing shared pointer can be quite expensive because creates temporary
+ _eliminate_const(*nlfg.linearize(config), cached_, ordering);
}
/* ************************************************************************* */
@ -90,7 +91,8 @@ namespace gtsam {
// retrieve all factors that ONLY contain the affected variables
// (note that the remaining stuff is summarized in the cached factors)
template<class Conditional, class Config>
- FactorGraph<GaussianFactor> ISAM2<Conditional, Config>::relinearizeAffectedFactors(const set<Symbol>& affectedKeys) const {
+ boost::shared_ptr<GaussianFactorGraph> ISAM2<Conditional, Config>::relinearizeAffectedFactors
+ (const set<Symbol>& affectedKeys) const {
list<Symbol> affectedKeysList; // todo: shouldn't have to convert back to list...
affectedKeysList.insert(affectedKeysList.begin(), affectedKeys.begin(), affectedKeys.end());
@ -110,6 +112,7 @@ namespace gtsam {
nonlinearAffectedFactors.push_back(nonlinearFactors_[idx]);
}
+ // TODO: temporary might be expensive, return shared pointer ?
return nonlinearAffectedFactors.linearize(theta_);
}
@ -190,7 +193,7 @@ namespace gtsam {
//// 6 - find factors connected to affected variables
//// 7 - linearize
- FactorGraph<GaussianFactor> factors;
+ boost::shared_ptr<GaussianFactorGraph> factors;
if (relinFromLast) {
// ordering provides all keys in conditionals, there cannot be others because path to root included
@ -207,25 +210,26 @@ namespace gtsam {
factors = relinearizeAffectedFactors(affectedKeys);
// Save number of affected factors
- lastAffectedFactorCount = factors.size();
+ lastAffectedFactorCount = factors->size();
// add the cached intermediate results from the boundary of the orphans ...
FactorGraph<GaussianFactor> cachedBoundary = getCachedBoundaryFactors(orphans);
- factors.push_back(cachedBoundary);
+ factors->push_back(cachedBoundary);
} else {
// reuse the old factors
FactorGraph<GaussianFactor> tmp(affectedBayesNet);
- factors.push_back(tmp);
- factors.push_back(newFactors.linearize(theta_));
+ factors.reset(new GaussianFactorGraph);
+ factors->push_back(tmp);
+ factors->push_back(*newFactors.linearize(theta_)); // avoid temporary ?
}
//// 8 - eliminate and add orphans back in
// create an ordering for the new and contaminated factors
- Ordering ordering = factors.getOrdering();
+ Ordering ordering = factors->getOrdering();
// eliminate into a Bayes net
- BayesNet<Conditional> bayesNet = _eliminate(factors, cached_, ordering);
+ BayesNet<Conditional> bayesNet = _eliminate(*factors, cached_, ordering);
// Create Index from ordering
IndexTable<Symbol> index(ordering);
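
The new comment in the constructor ("De-referencing shared pointer can be quite expensive because creates temporary") is about what happens at the call site when the dereferenced graph is passed by value or converted, not about operator* itself, which is cheap. A self-contained toy illustration of that difference (the types below are made up and only stand in for the factor-graph classes):

    #include <boost/shared_ptr.hpp>
    #include <iostream>
    #include <vector>

    struct ToyGraph {                                  // stand-in for GaussianFactorGraph
        std::vector<int> factors;
        ToyGraph() {}
        ToyGraph(const ToyGraph& other) : factors(other.factors) {
            std::cout << "copy made" << std::endl;     // the temporary the comment warns about
        }
    };

    void consumeByValue(ToyGraph g) {}                 // *ptr materializes a copy here
    void consumeByConstRef(const ToyGraph& g) {}       // *ptr binds directly, no copy

    int main() {
        boost::shared_ptr<ToyGraph> ptr(new ToyGraph());
        consumeByValue(*ptr);                          // prints "copy made"
        consumeByConstRef(*ptr);                       // prints nothing
        return 0;
    }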

View File

@ -96,7 +96,7 @@ namespace gtsam {
private:
std::list<int> getAffectedFactors(const std::list<Symbol>& keys) const;
- FactorGraph<GaussianFactor> relinearizeAffectedFactors(const std::set<Symbol>& affectedKeys) const;
+ boost::shared_ptr<GaussianFactorGraph> relinearizeAffectedFactors(const std::set<Symbol>& affectedKeys) const;
FactorGraph<GaussianFactor> getCachedBoundaryFactors(Cliques& orphans);
}; // ISAM2

View File

@ -42,24 +42,22 @@ namespace gtsam {
/* ************************************************************************* */
template<class Config>
- boost::shared_ptr<GaussianFactorGraph> NonlinearFactorGraph<Config>::linearize_(
+ boost::shared_ptr<GaussianFactorGraph> NonlinearFactorGraph<Config>::linearize(
const Config& config) const{
// create an empty linear FG
- boost::shared_ptr<GaussianFactorGraph> linearFG(new GaussianFactorGraph);
+ boost::shared_ptr<GaussianFactorGraph> linearFG(new GaussianFactorGraph);
// linearize all factors
- BOOST_FOREACH(typename NonlinearFactorGraph<Config>::sharedFactor factor, this->factors_) {
+ typedef typename NonlinearFactorGraph<Config>::sharedFactor Factor;
+ BOOST_FOREACH(const Factor& factor, this->factors_) {
boost::shared_ptr<GaussianFactor> lf = factor->linearize(config);
linearFG->push_back(lf);
}
return linearFG;
}
/* ************************************************************************* */
- template<class Config>
- GaussianFactorGraph NonlinearFactorGraph<Config>::linearize(
- const Config& config) const {
- return *linearize_(config);
- }
} // namespace gtsam
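
One detail in the loop above: the iteration variable changed from a by-value sharedFactor to a const reference. With boost::shared_ptr elements, a by-value loop variable copies the pointer and bumps the reference count on every iteration, while a const reference does neither. A small self-contained check of that behaviour (toy element type, not GTSAM's):

    #include <boost/foreach.hpp>
    #include <boost/shared_ptr.hpp>
    #include <iostream>
    #include <vector>

    int main() {
        typedef boost::shared_ptr<int> SharedInt;      // stands in for sharedFactor
        std::vector<SharedInt> factors;
        factors.push_back(SharedInt(new int(1)));

        BOOST_FOREACH(SharedInt f, factors)            // by value: copy + refcount bump
            std::cout << f.use_count() << std::endl;   // prints 2

        BOOST_FOREACH(const SharedInt& f, factors)     // by const reference: no copy
            std::cout << f.use_count() << std::endl;   // prints 1

        return 0;
    }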

View File

@ -49,13 +49,8 @@ namespace gtsam {
/**
* linearize a nonlinear factor graph
*/
- GaussianFactorGraph linearize(const Config& config) const;
- /**
- * shared pointer versions for MATLAB
- */
boost::shared_ptr<GaussianFactorGraph>
- linearize_(const Config& config) const;
+ linearize(const Config& config) const;
};

View File

@ -52,16 +52,18 @@ namespace gtsam {
/* ************************************************************************* */
template<class G, class T>
boost::shared_ptr<SubgraphPreconditioner> SubgraphPCG<G, T>::linearize(const G& g, const T& theta_bar) const {
- SubgraphPreconditioner::sharedFG Ab1 = T_.linearize_(theta_bar);
- SubgraphPreconditioner::sharedFG Ab2 = C_.linearize_(theta_bar);
+ SubgraphPreconditioner::sharedFG Ab1 = T_.linearize(theta_bar);
+ SubgraphPreconditioner::sharedFG Ab2 = C_.linearize(theta_bar);
#ifdef TIMING
SubgraphPreconditioner::sharedBayesNet Rc1;
SubgraphPreconditioner::sharedConfig xbar;
#else
- GaussianFactorGraph sacrificialAb1 = T_.linearize(theta_bar); // duplicate !!!!!
+ GaussianFactorGraph sacrificialAb1 = *Ab1; // duplicate !!!!!
SubgraphPreconditioner::sharedBayesNet Rc1 = sacrificialAb1.eliminate_(*ordering_);
SubgraphPreconditioner::sharedConfig xbar = gtsam::optimize_(*Rc1);
#endif
+ // TODO: there does not seem to be a good reason to have Ab1_
+ // It seems only be used to provide an ordering for creating sparse matrices
return boost::shared_ptr<SubgraphPreconditioner>(new SubgraphPreconditioner(Ab1, Ab2, Rc1, xbar));
}
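
In the non-TIMING branch above, sacrificialAb1 = *Ab1 still copies the factor graph (the "duplicate !!!!!" comment keeps flagging the cost); the copy only seems necessary because eliminate_ is then called on it, which presumably should not touch the graph kept alive in Ab1. Roughly, the two options look like this (whether eliminate_ really needs a mutable graph is outside this diff, so the first form is the conservative one):

    // conservative: sacrificial copy, safe if eliminate_ modifies the graph it runs on
    GaussianFactorGraph sacrificialAb1 = *Ab1;          // copies the container of factors
    SubgraphPreconditioner::sharedBayesNet Rc1 = sacrificialAb1.eliminate_(*ordering_);

    // cheaper, read-only: alias the pointee without copying (only if nothing modifies it)
    const GaussianFactorGraph& Ab1view = *Ab1;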

View File

@ -266,8 +266,7 @@ namespace example {
Config poses;
boost::tie(nlfg, poses) = createNonlinearSmoother(T);
- GaussianFactorGraph lfg = nlfg.linearize(poses);
- return lfg;
+ return *nlfg.linearize(poses);
}
/* ************************************************************************* */
@ -556,9 +555,7 @@ namespace example {
}
// linearize around zero
- GaussianFactorGraph A = nlfg.linearize(zeros);
- return make_pair(A, xtrue);
+ return make_pair(*nlfg.linearize(zeros), xtrue);
}
/* ************************************************************************* */

View File

@ -122,8 +122,8 @@ TEST( Iterative, conjugateGradientDescent_soft_constraint )
zeros.insert("x1",zero(3));
zeros.insert("x2",zero(3));
- GaussianFactorGraph fg = graph.linearize(config);
- VectorConfig actual = conjugateGradientDescent(fg, zeros, verbose, 1e-3, 1e-5, 100);
+ boost::shared_ptr<GaussianFactorGraph> fg = graph.linearize(config);
+ VectorConfig actual = conjugateGradientDescent(*fg, zeros, verbose, 1e-3, 1e-5, 100);
VectorConfig expected;
expected.insert("x1", zero(3));
@ -157,10 +157,10 @@ TEST( Iterative, subgraphPCG )
graph.split<Key, Pose2Factor>(tree, T, C);
// build the subgraph PCG system
- GaussianFactorGraph Ab1_ = T.linearize(theta_bar);
- SubgraphPreconditioner::sharedFG Ab1 = T.linearize_(theta_bar);
- SubgraphPreconditioner::sharedFG Ab2 = C.linearize_(theta_bar);
- SubgraphPreconditioner::sharedBayesNet Rc1 = Ab1_.eliminate_(ordering);
+ boost::shared_ptr<GaussianFactorGraph> Ab1_ = T.linearize(theta_bar);
+ SubgraphPreconditioner::sharedFG Ab1 = T.linearize(theta_bar);
+ SubgraphPreconditioner::sharedFG Ab2 = C.linearize(theta_bar);
+ SubgraphPreconditioner::sharedBayesNet Rc1 = Ab1_->eliminate_(ordering);
SubgraphPreconditioner::sharedConfig xbar = optimize_(*Rc1);
SubgraphPreconditioner system(Ab1, Ab2, Rc1, xbar);

View File

@ -73,9 +73,9 @@ TEST( Graph, linearize )
{
Graph fg = createNonlinearFactorGraph();
Config initial = createNoisyConfig();
- GaussianFactorGraph linearized = fg.linearize(initial);
+ boost::shared_ptr<GaussianFactorGraph> linearized = fg.linearize(initial);
GaussianFactorGraph expected = createGaussianFactorGraph();
- CHECK(assert_equal(expected,linearized)); // Needs correct linearizations
+ CHECK(assert_equal(expected,*linearized)); // Needs correct linearizations
}
/* ************************************************************************* */

View File

@ -63,8 +63,8 @@ TEST( Pose2Graph, linearization )
config.insert(1,p1);
config.insert(2,p2);
// Linearize
- GaussianFactorGraph lfg_linearized = graph.linearize(config);
- //lfg_linearized.print("lfg_actual");
+ boost::shared_ptr<GaussianFactorGraph> lfg_linearized = graph.linearize(config);
+ //lfg_linearized->print("lfg_actual");
// the expected linear factor
GaussianFactorGraph lfg_expected;
@ -83,7 +83,7 @@ TEST( Pose2Graph, linearization )
SharedDiagonal probModel1 = noiseModel::Unit::Create(3);
lfg_expected.add("x1", A1, "x2", A2, b, probModel1);
- CHECK(assert_equal(lfg_expected, lfg_linearized));
+ CHECK(assert_equal(lfg_expected, *lfg_linearized));
}
/* ************************************************************************* */

View File

@ -430,16 +430,16 @@ TEST (SQP, two_pose ) {
Config2D state(*initialEstimate);
// linearize the graph
- GaussianFactorGraph fg = graph->linearize(state);
+ boost::shared_ptr<GaussianFactorGraph> fg = graph->linearize(state);
- if (verbose) fg.print("Linearized graph");
+ if (verbose) fg->print("Linearized graph");
// create an ordering
Ordering ordering;
ordering += "x1", "x2", "l1", "l2", "L1";
// optimize linear graph to get full delta config
- GaussianBayesNet cbn = fg.eliminate(ordering);
+ GaussianBayesNet cbn = fg->eliminate(ordering);
if (verbose) cbn.print("ChordalBayesNet");
VectorConfig delta = optimize(cbn); //fg.optimize(ordering);

View File

@ -62,8 +62,8 @@ TEST( ProjectionFactor, error )
graph.push_back(factor);
GaussianFactorGraph expected_lfg;
expected_lfg.push_back(actual);
- GaussianFactorGraph actual_lfg = graph.linearize(config);
- CHECK(assert_equal(expected_lfg,actual_lfg));
+ boost::shared_ptr<GaussianFactorGraph> actual_lfg = graph.linearize(config);
+ CHECK(assert_equal(expected_lfg,*actual_lfg));
// expmap on a config
VectorConfig delta;