move ordering into the solver, and the nonlinear optimizer is now exact <G, T, L>

release/4.3a0
Kai Ni 2010-01-20 02:28:23 +00:00
parent 03ec3e3e62
commit 3bf15333af
11 changed files with 193 additions and 185 deletions

View File

@ -228,17 +228,18 @@ namespace gtsam {
*/ */
template <class NonlinearGraph, class Config> template <class NonlinearGraph, class Config>
class Factorization { class Factorization {
public: private:
Factorization() {} boost::shared_ptr<const Ordering> ordering_;
Factorization(const NonlinearGraph& g, const Config& config) {} public:
Factorization(boost::shared_ptr<const Ordering> ordering) : ordering_(ordering) {}
/** /**
* solve for the optimal displacement in the tangent space, and then solve * solve for the optimal displacement in the tangent space, and then solve
* the resulted linear system * the resulted linear system
*/ */
VectorConfig optimize(GaussianFactorGraph& fg, const Ordering& ordering) const { VectorConfig optimize(GaussianFactorGraph& fg) const {
return fg.optimize(ordering); return fg.optimize(*ordering_);
} }
/** /**

View File

@ -48,8 +48,8 @@ namespace gtsam {
/* ************************************************************************* */ /* ************************************************************************* */
template<class G, class C, class L, class S> template<class G, class C, class L, class S>
NonlinearOptimizer<G, C, L, S>::NonlinearOptimizer(shared_graph graph, NonlinearOptimizer<G, C, L, S>::NonlinearOptimizer(shared_graph graph,
shared_ordering ordering, shared_config config, shared_solver solver, double lambda) : shared_config config, shared_solver solver, double lambda) :
graph_(graph), ordering_(ordering), config_(config), error_(graph->error( graph_(graph), config_(config), error_(graph->error(
*config)), lambda_(lambda), solver_(solver) { *config)), lambda_(lambda), solver_(solver) {
} }
@ -59,7 +59,7 @@ namespace gtsam {
template<class G, class C, class L, class S> template<class G, class C, class L, class S>
VectorConfig NonlinearOptimizer<G, C, L, S>::linearizeAndOptimizeForDelta() const { VectorConfig NonlinearOptimizer<G, C, L, S>::linearizeAndOptimizeForDelta() const {
L linearized = solver_->linearize(*graph_, *config_); L linearized = solver_->linearize(*graph_, *config_);
return solver_->optimize(linearized, *ordering_); return solver_->optimize(linearized);
} }
/* ************************************************************************* */ /* ************************************************************************* */
@ -82,7 +82,7 @@ namespace gtsam {
if (verbosity >= CONFIG) if (verbosity >= CONFIG)
newConfig->print("newConfig"); newConfig->print("newConfig");
NonlinearOptimizer newOptimizer = NonlinearOptimizer(graph_, ordering_, newConfig, solver_); NonlinearOptimizer newOptimizer = NonlinearOptimizer(graph_, newConfig, solver_);
if (verbosity >= ERROR) if (verbosity >= ERROR)
cout << "error: " << newOptimizer.error_ << endl; cout << "error: " << newOptimizer.error_ << endl;
@ -128,7 +128,7 @@ namespace gtsam {
damped.print("damped"); damped.print("damped");
// solve // solve
VectorConfig delta = solver_->optimize(damped, *ordering_); VectorConfig delta = solver_->optimize(damped);
if (verbosity >= TRYDELTA) if (verbosity >= TRYDELTA)
delta.print("delta"); delta.print("delta");
@ -138,14 +138,14 @@ namespace gtsam {
newConfig->print("config"); newConfig->print("config");
// create new optimization state with more adventurous lambda // create new optimization state with more adventurous lambda
NonlinearOptimizer next(graph_, ordering_, newConfig, solver_, lambda_ / factor); NonlinearOptimizer next(graph_, newConfig, solver_, lambda_ / factor);
// if error decreased, return the new state // if error decreased, return the new state
if (next.error_ <= error_) if (next.error_ <= error_)
return next; return next;
else { else {
// TODO: can we avoid copying the config ? // TODO: can we avoid copying the config ?
NonlinearOptimizer cautious(graph_, ordering_, config_, solver_, lambda_ * factor); NonlinearOptimizer cautious(graph_, config_, solver_, lambda_ * factor);
return cautious.try_lambda(linear, verbosity, factor); return cautious.try_lambda(linear, verbosity, factor);
} }
} }

View File

@ -33,8 +33,8 @@ namespace gtsam {
// For performance reasons in recursion, we store configs in a shared_ptr // For performance reasons in recursion, we store configs in a shared_ptr
typedef boost::shared_ptr<const T> shared_config; typedef boost::shared_ptr<const T> shared_config;
typedef boost::shared_ptr<const G> shared_graph; typedef boost::shared_ptr<const G> shared_graph;
typedef boost::shared_ptr<const Ordering> shared_ordering;
typedef boost::shared_ptr<const S> shared_solver; typedef boost::shared_ptr<const S> shared_solver;
typedef const S solver;
enum verbosityLevel { enum verbosityLevel {
SILENT, SILENT,
@ -51,10 +51,9 @@ namespace gtsam {
private: private:
// keep a reference to const versions of the graph and ordering // keep a reference to const version of the graph
// These normally do not change // These normally do not change
const shared_graph graph_; const shared_graph graph_;
const shared_ordering ordering_;
// keep a configuration and its error // keep a configuration and its error
// These typically change once per iteration (in a functional way) // These typically change once per iteration (in a functional way)
@ -77,15 +76,14 @@ namespace gtsam {
/** /**
* Constructor * Constructor
*/ */
NonlinearOptimizer(shared_graph graph, shared_ordering ordering, NonlinearOptimizer(shared_graph graph, shared_config config, shared_solver solver,
shared_config config, shared_solver solver = shared_solver(new S),
double lambda = 1e-5); double lambda = 1e-5);
/** /**
* Copy constructor * Copy constructor
*/ */
NonlinearOptimizer(const NonlinearOptimizer<G, T, L, S> &optimizer) : NonlinearOptimizer(const NonlinearOptimizer<G, T, L, S> &optimizer) :
graph_(optimizer.graph_), ordering_(optimizer.ordering_), config_(optimizer.config_), graph_(optimizer.graph_), config_(optimizer.config_),
error_(optimizer.error_), lambda_(optimizer.lambda_), solver_(optimizer.solver_) {} error_(optimizer.error_), lambda_(optimizer.lambda_), solver_(optimizer.solver_) {}
/** /**

View File

@ -59,6 +59,8 @@ namespace gtsam {
Point2 t() const { return t_; } Point2 t() const { return t_; }
Rot2 r() const { return r_; } Rot2 r() const { return r_; }
static inline size_t dim() { return 3; };
}; // Pose2 }; // Pose2

View File

@ -24,7 +24,7 @@ namespace gtsam {
SubgraphPCG<G, T>::SubgraphPCG(const G& g, const T& theta0) : SubgraphPCG<G, T>::SubgraphPCG(const G& g, const T& theta0) :
maxIterations_(100), verbose_(false), epsilon_(1e-4), epsilon_abs_(1e-5) { maxIterations_(100), verbose_(false), epsilon_(1e-4), epsilon_abs_(1e-5) {
// generate spanning tree and create ordering // generate spanning tree
PredecessorMap<Key> tree = g.template findMinimumSpanningTree<Key, Constraint>(); PredecessorMap<Key> tree = g.template findMinimumSpanningTree<Key, Constraint>();
list<Key> keys = predecessorMap2Keys(tree); list<Key> keys = predecessorMap2Keys(tree);
@ -57,10 +57,10 @@ namespace gtsam {
/* ************************************************************************* */ /* ************************************************************************* */
template<class G, class T> template<class G, class T>
VectorConfig SubgraphPCG<G, T>::optimize(SubgraphPreconditioner& system, const Ordering& ordering) const { VectorConfig SubgraphPCG<G, T>::optimize(SubgraphPreconditioner& system) const {
//TODO: 3 is hard coded here //TODO: 3 is hard coded here
VectorConfig zeros; VectorConfig zeros;
BOOST_FOREACH(const Symbol& j, ordering) zeros.insert(j,zero(3)); BOOST_FOREACH(const Symbol& j, *ordering_) zeros.insert(j,zero(Pose::dim()));
// Solve the subgraph PCG // Solve the subgraph PCG
VectorConfig ybar = conjugateGradients<SubgraphPreconditioner, VectorConfig, VectorConfig ybar = conjugateGradients<SubgraphPreconditioner, VectorConfig,

View File

@ -90,9 +90,6 @@ namespace gtsam {
G T_, C_; G T_, C_;
public: public:
// kai: this constructor is for compatibility with Factorization
SubgraphPCG() { throw std::runtime_error("SubgraphPCG: this constructor is only for compatibility!");}
SubgraphPCG(const G& g, const T& config); SubgraphPCG(const G& g, const T& config);
boost::shared_ptr<Ordering> ordering() const { return ordering_; } boost::shared_ptr<Ordering> ordering() const { return ordering_; }
@ -108,7 +105,7 @@ namespace gtsam {
* solve for the optimal displacement in the tangent space, and then solve * solve for the optimal displacement in the tangent space, and then solve
* the resulted linear system * the resulted linear system
*/ */
VectorConfig optimize(SubgraphPreconditioner& system, const Ordering& ordering) const; VectorConfig optimize(SubgraphPreconditioner& system) const;
}; };
} // namespace gtsam } // namespace gtsam

View File

@ -35,152 +35,157 @@ using namespace example;
typedef NonlinearOptimizer<Graph,Config> Optimizer; typedef NonlinearOptimizer<Graph,Config> Optimizer;
/* ************************************************************************* */ /* ************************************************************************* */
//TEST( NonlinearOptimizer, delta ) TEST( NonlinearOptimizer, delta )
//{ {
// shared_ptr<Graph> fg(new Graph( shared_ptr<Graph> fg(new Graph(
// createNonlinearFactorGraph())); createNonlinearFactorGraph()));
// Optimizer::shared_config initial = sharedNoisyConfig(); Optimizer::shared_config initial = sharedNoisyConfig();
//
// // Expected configuration is the difference between the noisy config // Expected configuration is the difference between the noisy config
// // and the ground-truth config. One step only because it's linear ! // and the ground-truth config. One step only because it's linear !
// VectorConfig expected; VectorConfig expected;
// Vector dl1(2); Vector dl1(2);
// dl1(0) = -0.1; dl1(0) = -0.1;
// dl1(1) = 0.1; dl1(1) = 0.1;
// expected.insert("l1", dl1); expected.insert("l1", dl1);
// Vector dx1(2); Vector dx1(2);
// dx1(0) = -0.1; dx1(0) = -0.1;
// dx1(1) = -0.1; dx1(1) = -0.1;
// expected.insert("x1", dx1); expected.insert("x1", dx1);
// Vector dx2(2); Vector dx2(2);
// dx2(0) = 0.1; dx2(0) = 0.1;
// dx2(1) = -0.2; dx2(1) = -0.2;
// expected.insert("x2", dx2); expected.insert("x2", dx2);
//
// // Check one ordering // Check one ordering
// shared_ptr<Ordering> ord1(new Ordering()); shared_ptr<Ordering> ord1(new Ordering());
// *ord1 += "x2","l1","x1"; *ord1 += "x2","l1","x1";
// Optimizer optimizer1(fg, ord1, initial); Optimizer::shared_solver solver;
// VectorConfig actual1 = optimizer1.linearizeAndOptimizeForDelta(); solver = Optimizer::shared_solver(new Optimizer::solver(ord1));
// CHECK(assert_equal(actual1,expected)); Optimizer optimizer1(fg, initial, solver);
// VectorConfig actual1 = optimizer1.linearizeAndOptimizeForDelta();
// // Check another CHECK(assert_equal(actual1,expected));
// shared_ptr<Ordering> ord2(new Ordering());
// *ord2 += "x1","x2","l1"; // Check another
// Optimizer optimizer2(fg, ord2, initial); shared_ptr<Ordering> ord2(new Ordering());
// VectorConfig actual2 = optimizer2.linearizeAndOptimizeForDelta(); *ord2 += "x1","x2","l1";
// CHECK(assert_equal(actual2,expected)); solver = Optimizer::shared_solver(new Optimizer::solver(ord2));
// Optimizer optimizer2(fg, initial, solver);
// // And yet another... VectorConfig actual2 = optimizer2.linearizeAndOptimizeForDelta();
// shared_ptr<Ordering> ord3(new Ordering()); CHECK(assert_equal(actual2,expected));
// *ord3 += "l1","x1","x2";
// Optimizer optimizer3(fg, ord3, initial); // And yet another...
// VectorConfig actual3 = optimizer3.linearizeAndOptimizeForDelta(); shared_ptr<Ordering> ord3(new Ordering());
// CHECK(assert_equal(actual3,expected)); *ord3 += "l1","x1","x2";
//} solver = Optimizer::shared_solver(new Optimizer::solver(ord3));
// Optimizer optimizer3(fg, initial, solver);
///* ************************************************************************* */ VectorConfig actual3 = optimizer3.linearizeAndOptimizeForDelta();
//TEST( NonlinearOptimizer, iterateLM ) CHECK(assert_equal(actual3,expected));
//{ }
// // really non-linear factor graph
// shared_ptr<Graph> fg(new Graph( /* ************************************************************************* */
// createReallyNonlinearFactorGraph())); TEST( NonlinearOptimizer, iterateLM )
// {
// // config far from minimum // really non-linear factor graph
// Point2 x0(3,0); shared_ptr<Graph> fg(new Graph(
// boost::shared_ptr<Config> config(new Config); createReallyNonlinearFactorGraph()));
// config->insert(simulated2D::PoseKey(1), x0);
// // config far from minimum
// // ordering Point2 x0(3,0);
// shared_ptr<Ordering> ord(new Ordering()); boost::shared_ptr<Config> config(new Config);
// ord->push_back("x1"); config->insert(simulated2D::PoseKey(1), x0);
//
// // create initial optimization state, with lambda=0 // ordering
// Optimizer::shared_solver solver(new Factorization< shared_ptr<Ordering> ord(new Ordering());
// Graph, Config> ); ord->push_back("x1");
// Optimizer optimizer(fg, ord, config, solver, 0.);
// // create initial optimization state, with lambda=0
// // normal iterate Optimizer::shared_solver solver(new Optimizer::solver(ord));
// Optimizer iterated1 = optimizer.iterate(); Optimizer optimizer(fg, config, solver, 0.);
//
// // LM iterate with lambda 0 should be the same // normal iterate
// Optimizer iterated2 = optimizer.iterateLM(); Optimizer iterated1 = optimizer.iterate();
//
// // Try successive iterates. TODO: ugly pointers, better way ? // LM iterate with lambda 0 should be the same
// Optimizer *pointer = new Optimizer(iterated2); Optimizer iterated2 = optimizer.iterateLM();
// for (int i=0;i<10;i++) {
// Optimizer* newOptimizer = new Optimizer(pointer->iterateLM()); // Try successive iterates. TODO: ugly pointers, better way ?
// delete pointer; Optimizer *pointer = new Optimizer(iterated2);
// pointer = newOptimizer; for (int i=0;i<10;i++) {
// } Optimizer* newOptimizer = new Optimizer(pointer->iterateLM());
// delete(pointer); delete pointer;
// pointer = newOptimizer;
// CHECK(assert_equal(*iterated1.config(), *iterated2.config(), 1e-9)); }
//} delete(pointer);
//
///* ************************************************************************* */ CHECK(assert_equal(*iterated1.config(), *iterated2.config(), 1e-9));
//TEST( NonlinearOptimizer, optimize ) }
//{
// shared_ptr<Graph> fg(new Graph( /* ************************************************************************* */
// createReallyNonlinearFactorGraph())); TEST( NonlinearOptimizer, optimize )
// {
// // test error at minimum shared_ptr<Graph> fg(new Graph(
// Point2 xstar(0,0); createReallyNonlinearFactorGraph()));
// Config cstar;
// cstar.insert(simulated2D::PoseKey(1), xstar); // test error at minimum
// DOUBLES_EQUAL(0.0,fg->error(cstar),0.0); Point2 xstar(0,0);
// Config cstar;
// // test error at initial = [(1-cos(3))^2 + (sin(3))^2]*50 = cstar.insert(simulated2D::PoseKey(1), xstar);
// Point2 x0(3,3); DOUBLES_EQUAL(0.0,fg->error(cstar),0.0);
// boost::shared_ptr<Config> c0(new Config);
// c0->insert(simulated2D::PoseKey(1), x0); // test error at initial = [(1-cos(3))^2 + (sin(3))^2]*50 =
// DOUBLES_EQUAL(199.0,fg->error(*c0),1e-3); Point2 x0(3,3);
// boost::shared_ptr<Config> c0(new Config);
// // optimize parameters c0->insert(simulated2D::PoseKey(1), x0);
// shared_ptr<Ordering> ord(new Ordering()); DOUBLES_EQUAL(199.0,fg->error(*c0),1e-3);
// ord->push_back("x1");
// double relativeThreshold = 1e-5; // optimize parameters
// double absoluteThreshold = 1e-5; shared_ptr<Ordering> ord(new Ordering());
// ord->push_back("x1");
// // initial optimization state is the same in both cases tested double relativeThreshold = 1e-5;
// Optimizer optimizer(fg, ord, c0); double absoluteThreshold = 1e-5;
//
// // Gauss-Newton // initial optimization state is the same in both cases tested
// Optimizer actual1 = optimizer.gaussNewton(relativeThreshold, Optimizer::shared_solver solver(new Optimizer::solver(ord));
// absoluteThreshold); Optimizer optimizer(fg, c0, solver);
// DOUBLES_EQUAL(0,fg->error(*(actual1.config())),1e-3);
// // Gauss-Newton
// // Levenberg-Marquardt Optimizer actual1 = optimizer.gaussNewton(relativeThreshold,
// Optimizer actual2 = optimizer.levenbergMarquardt(relativeThreshold, absoluteThreshold);
// absoluteThreshold, Optimizer::SILENT); DOUBLES_EQUAL(0,fg->error(*(actual1.config())),1e-3);
// DOUBLES_EQUAL(0,fg->error(*(actual2.config())),1e-3);
//} // Levenberg-Marquardt
// Optimizer actual2 = optimizer.levenbergMarquardt(relativeThreshold,
///* ************************************************************************* */ absoluteThreshold, Optimizer::SILENT);
//TEST( NonlinearOptimizer, Factorization ) DOUBLES_EQUAL(0,fg->error(*(actual2.config())),1e-3);
//{ }
// typedef NonlinearOptimizer<Pose2Graph, Pose2Config, GaussianFactorGraph, Factorization<Pose2Graph, Pose2Config> > Optimizer;
// /* ************************************************************************* */
// boost::shared_ptr<Pose2Config> config(new Pose2Config); TEST( NonlinearOptimizer, Factorization )
// config->insert(1, Pose2(0.,0.,0.)); {
// config->insert(2, Pose2(1.5,0.,0.)); typedef NonlinearOptimizer<Pose2Graph, Pose2Config, GaussianFactorGraph, Factorization<Pose2Graph, Pose2Config> > Optimizer;
//
// boost::shared_ptr<Pose2Graph> graph(new Pose2Graph); boost::shared_ptr<Pose2Config> config(new Pose2Config);
// graph->addPrior(1, Pose2(0.,0.,0.), Isotropic::Sigma(3, 1e-10)); config->insert(1, Pose2(0.,0.,0.));
// graph->addConstraint(1,2, Pose2(1.,0.,0.), Isotropic::Sigma(3, 1)); config->insert(2, Pose2(1.5,0.,0.));
//
// boost::shared_ptr<Ordering> ordering(new Ordering); boost::shared_ptr<Pose2Graph> graph(new Pose2Graph);
// ordering->push_back(Pose2Config::Key(1)); graph->addPrior(1, Pose2(0.,0.,0.), Isotropic::Sigma(3, 1e-10));
// ordering->push_back(Pose2Config::Key(2)); graph->addConstraint(1,2, Pose2(1.,0.,0.), Isotropic::Sigma(3, 1));
//
// Optimizer optimizer(graph, ordering, config); boost::shared_ptr<Ordering> ordering(new Ordering);
// Optimizer optimized = optimizer.iterateLM(); ordering->push_back(Pose2Config::Key(1));
// ordering->push_back(Pose2Config::Key(2));
// Pose2Config expected; Optimizer::shared_solver solver(new Factorization<Pose2Graph, Pose2Config>(ordering));
// expected.insert(1, Pose2(0.,0.,0.));
// expected.insert(2, Pose2(1.,0.,0.)); Optimizer optimizer(graph, config, solver);
// CHECK(assert_equal(expected, *optimized.config(), 1e-5)); Optimizer optimized = optimizer.iterateLM();
//}
Pose2Config expected;
expected.insert(1, Pose2(0.,0.,0.));
expected.insert(2, Pose2(1.,0.,0.));
CHECK(assert_equal(expected, *optimized.config(), 1e-5));
}
/* ************************************************************************* */ /* ************************************************************************* */
TEST( NonlinearOptimizer, SubgraphPCG ) TEST( NonlinearOptimizer, SubgraphPCG )
@ -195,14 +200,10 @@ TEST( NonlinearOptimizer, SubgraphPCG )
graph->addPrior(1, Pose2(0.,0.,0.), Isotropic::Sigma(3, 1e-10)); graph->addPrior(1, Pose2(0.,0.,0.), Isotropic::Sigma(3, 1e-10));
graph->addConstraint(1,2, Pose2(1.,0.,0.), Isotropic::Sigma(3, 1)); graph->addConstraint(1,2, Pose2(1.,0.,0.), Isotropic::Sigma(3, 1));
boost::shared_ptr<Ordering> ordering(new Ordering);
ordering->push_back(Pose2Config::Key(1));
ordering->push_back(Pose2Config::Key(2));
double relativeThreshold = 1e-5; double relativeThreshold = 1e-5;
double absoluteThreshold = 1e-5; double absoluteThreshold = 1e-5;
Optimizer::shared_solver solver(new SubgraphPCG<Pose2Graph, Pose2Config>(*graph, *config)); Optimizer::shared_solver solver(new SubgraphPCG<Pose2Graph, Pose2Config>(*graph, *config));
Optimizer optimizer(graph, ordering, config, solver); Optimizer optimizer(graph, config, solver);
Optimizer optimized = optimizer.gaussNewton(relativeThreshold, absoluteThreshold, Optimizer::SILENT); Optimizer optimized = optimizer.gaussNewton(relativeThreshold, absoluteThreshold, Optimizer::SILENT);
Pose2Config expected; Pose2Config expected;

View File

@ -101,7 +101,8 @@ TEST(Pose2Graph, optimize) {
shared_ptr<Ordering> ordering(new Ordering); shared_ptr<Ordering> ordering(new Ordering);
*ordering += "x0","x1"; *ordering += "x0","x1";
typedef NonlinearOptimizer<Pose2Graph, Pose2Config> Optimizer; typedef NonlinearOptimizer<Pose2Graph, Pose2Config> Optimizer;
Optimizer optimizer0(fg, ordering, initial); Optimizer::shared_solver solver(new Optimizer::solver(ordering));
Optimizer optimizer0(fg, initial, solver);
Optimizer::verbosityLevel verbosity = Optimizer::SILENT; Optimizer::verbosityLevel verbosity = Optimizer::SILENT;
//Optimizer::verbosityLevel verbosity = Optimizer::ERROR; //Optimizer::verbosityLevel verbosity = Optimizer::ERROR;
Optimizer optimizer = optimizer0.levenbergMarquardt(1e-15, 1e-15, verbosity); Optimizer optimizer = optimizer0.levenbergMarquardt(1e-15, 1e-15, verbosity);
@ -145,7 +146,8 @@ TEST(Pose2Graph, optimizeCircle) {
shared_ptr<Ordering> ordering(new Ordering); shared_ptr<Ordering> ordering(new Ordering);
*ordering += "x0","x1","x2","x3","x4","x5"; *ordering += "x0","x1","x2","x3","x4","x5";
typedef NonlinearOptimizer<Pose2Graph, Pose2Config> Optimizer; typedef NonlinearOptimizer<Pose2Graph, Pose2Config> Optimizer;
Optimizer optimizer0(fg, ordering, initial); Optimizer::shared_solver solver(new Optimizer::solver(ordering));
Optimizer optimizer0(fg, initial, solver);
Optimizer::verbosityLevel verbosity = Optimizer::SILENT; Optimizer::verbosityLevel verbosity = Optimizer::SILENT;
// Optimizer::verbosityLevel verbosity = Optimizer::ERROR; // Optimizer::verbosityLevel verbosity = Optimizer::ERROR;
Optimizer optimizer = optimizer0.levenbergMarquardt(1e-15, 1e-15, verbosity); Optimizer optimizer = optimizer0.levenbergMarquardt(1e-15, 1e-15, verbosity);

View File

@ -58,7 +58,8 @@ TEST(Pose3Graph, optimizeCircle) {
shared_ptr<Ordering> ordering(new Ordering); shared_ptr<Ordering> ordering(new Ordering);
*ordering += "x0","x1","x2","x3","x4","x5"; *ordering += "x0","x1","x2","x3","x4","x5";
typedef NonlinearOptimizer<Pose3Graph, Pose3Config> Optimizer; typedef NonlinearOptimizer<Pose3Graph, Pose3Config> Optimizer;
Optimizer optimizer0(fg, ordering, initial); Optimizer::shared_solver solver(new Optimizer::solver(ordering));
Optimizer optimizer0(fg, initial, solver);
Optimizer::verbosityLevel verbosity = Optimizer::SILENT; Optimizer::verbosityLevel verbosity = Optimizer::SILENT;
// Optimizer::verbosityLevel verbosity = Optimizer::ERROR; // Optimizer::verbosityLevel verbosity = Optimizer::ERROR;
Optimizer optimizer = optimizer0.levenbergMarquardt(1e-15, 1e-15, verbosity); Optimizer optimizer = optimizer0.levenbergMarquardt(1e-15, 1e-15, verbosity);

View File

@ -296,7 +296,8 @@ TEST (SQP, two_pose_truth ) {
// optimize the graph // optimize the graph
shared_ptr<Ordering> ordering(new Ordering()); shared_ptr<Ordering> ordering(new Ordering());
*ordering += "x1", "x2", "l1"; *ordering += "x1", "x2", "l1";
Optimizer optimizer(graph, ordering, initialEstimate); Optimizer::shared_solver solver(new Optimizer::solver(ordering));
Optimizer optimizer(graph, initialEstimate, solver);
// display solution // display solution
double relativeThreshold = 1e-5; double relativeThreshold = 1e-5;
@ -533,7 +534,8 @@ TEST (SQP, stereo_truth ) {
*ord += "x1", "x2", "l1"; *ord += "x1", "x2", "l1";
// create optimizer // create optimizer
VOptimizer optimizer(graph, ord, truthConfig); VOptimizer::shared_solver solver(new VOptimizer::solver(ord));
VOptimizer optimizer(graph, truthConfig, solver);
// optimize // optimize
VOptimizer afterOneIteration = optimizer.iterate(); VOptimizer afterOneIteration = optimizer.iterate();
@ -609,7 +611,8 @@ TEST (SQP, stereo_truth_noisy ) {
*ord += "x1", "x2", "l1"; *ord += "x1", "x2", "l1";
// create optimizer // create optimizer
VOptimizer optimizer0(graph, ord, noisyConfig); VOptimizer::shared_solver solver(new VOptimizer::solver(ord));
VOptimizer optimizer0(graph, noisyConfig, solver);
if (verbose) if (verbose)
cout << "Initial Error: " << optimizer0.error() << endl; cout << "Initial Error: " << optimizer0.error() << endl;

View File

@ -100,7 +100,8 @@ TEST( Graph, optimizeLM)
// Create an optimizer and check its error // Create an optimizer and check its error
// We expect the initial to be zero because config is the ground truth // We expect the initial to be zero because config is the ground truth
Optimizer optimizer(graph, ordering, initialEstimate); Optimizer::shared_solver solver(new Optimizer::solver(ordering));
Optimizer optimizer(graph, initialEstimate, solver);
DOUBLES_EQUAL(0.0, optimizer.error(), 1e-9); DOUBLES_EQUAL(0.0, optimizer.error(), 1e-9);
// Iterate once, and the config should not have changed because we started // Iterate once, and the config should not have changed because we started
@ -144,7 +145,8 @@ TEST( Graph, optimizeLM2)
// Create an optimizer and check its error // Create an optimizer and check its error
// We expect the initial to be zero because config is the ground truth // We expect the initial to be zero because config is the ground truth
Optimizer optimizer(graph, ordering, initialEstimate); Optimizer::shared_solver solver(new Optimizer::solver(ordering));
Optimizer optimizer(graph, initialEstimate, solver);
DOUBLES_EQUAL(0.0, optimizer.error(), 1e-9); DOUBLES_EQUAL(0.0, optimizer.error(), 1e-9);
// Iterate once, and the config should not have changed because we started // Iterate once, and the config should not have changed because we started
@ -179,7 +181,8 @@ TEST( Graph, CHECK_ORDERING)
// Create an optimizer and check its error // Create an optimizer and check its error
// We expect the initial to be zero because config is the ground truth // We expect the initial to be zero because config is the ground truth
Optimizer optimizer(graph, ordering, initialEstimate); Optimizer::shared_solver solver(new Optimizer::solver(ordering));
Optimizer optimizer(graph, initialEstimate, solver);
DOUBLES_EQUAL(0.0, optimizer.error(), 1e-9); DOUBLES_EQUAL(0.0, optimizer.error(), 1e-9);
// Iterate once, and the config should not have changed because we started // Iterate once, and the config should not have changed because we started