Merge branch 'feature/refinementsForPoseToPointFactor' into fix/gncOptimizer

release/4.3a0
lcarlone 2022-01-12 21:45:22 -05:00
commit f10dad2e39
15 changed files with 205 additions and 244 deletions

View File

@ -179,5 +179,6 @@ namespace gtsam {
};
// AlgebraicDecisionTree
+ template<typename T> struct traits<AlgebraicDecisionTree<T>> : public Testable<AlgebraicDecisionTree<T>> {};
}
// namespace gtsam

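With Potentials and its traits gone, this specialization is what keeps AlgebraicDecisionTree usable with GTSAM's Testable machinery, e.g. assert_equal on raw ADTs as in the Bayes net test further below. A minimal sketch (hypothetical key, inside a CppUnitLite test):

    AlgebraicDecisionTree<Key> a({DiscreteKey(0, 2)}, "0.6 0.4"), b = a;
    EXPECT(assert_equal(a, b));  // print/equals resolve through the new traits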
View File

@ -34,12 +34,13 @@ namespace gtsam {
/* ******************************************************************************** */
DecisionTreeFactor::DecisionTreeFactor(const DiscreteKeys& keys,
const ADT& potentials) :
- DiscreteFactor(keys.indices()), Potentials(keys, potentials) {
+ DiscreteFactor(keys.indices()), ADT(potentials),
+ cardinalities_(keys.cardinalities()) {
}
/* *************************************************************************/
DecisionTreeFactor::DecisionTreeFactor(const DiscreteConditional& c) :
- DiscreteFactor(c.keys()), Potentials(c) {
+ DiscreteFactor(c.keys()), AlgebraicDecisionTree<Key>(c), cardinalities_(c.cardinalities_) {
}
/* ************************************************************************* */
@ -48,16 +49,24 @@ namespace gtsam {
return false;
}
else {
- const DecisionTreeFactor& f(static_cast<const DecisionTreeFactor&>(other));
- return Potentials::equals(f, tol);
+ const auto& f(static_cast<const DecisionTreeFactor&>(other));
+ return ADT::equals(f, tol);
}
}
/* ************************************************************************* */
+ double DecisionTreeFactor::safe_div(const double &a, const double &b) {
+ // safe_div is used when we divide the product factor by the sum factor.
+ // If the product or sum is zero, we assign zero probability to the event.
+ return (a == 0 || b == 0) ? 0 : (a / b);
+ }
/* ************************************************************************* */
void DecisionTreeFactor::print(const string& s,
const KeyFormatter& formatter) const {
cout << s;
- Potentials::print("Potentials:",formatter);
+ ADT::print("Potentials:",formatter);
}
/* ************************************************************************* */
@ -162,20 +171,20 @@ namespace gtsam {
void DecisionTreeFactor::dot(std::ostream& os,
const KeyFormatter& keyFormatter,
bool showZero) const {
- Potentials::dot(os, keyFormatter, valueFormatter, showZero);
+ ADT::dot(os, keyFormatter, valueFormatter, showZero);
}
/** output to graphviz format, open a file */
void DecisionTreeFactor::dot(const std::string& name,
const KeyFormatter& keyFormatter,
bool showZero) const {
- Potentials::dot(name, keyFormatter, valueFormatter, showZero);
+ ADT::dot(name, keyFormatter, valueFormatter, showZero);
}
/** output to graphviz format string */
std::string DecisionTreeFactor::dot(const KeyFormatter& keyFormatter,
bool showZero) const {
- return Potentials::dot(keyFormatter, valueFormatter, showZero);
+ return ADT::dot(keyFormatter, valueFormatter, showZero);
}
/* ************************************************************************* */
@ -209,5 +218,15 @@ namespace gtsam {
return ss.str();
}
+ DecisionTreeFactor::DecisionTreeFactor(const DiscreteKeys &keys, const vector<double> &table) :
+ DiscreteFactor(keys.indices()), AlgebraicDecisionTree<Key>(keys, table),
+ cardinalities_(keys.cardinalities()) {
+ }
+ DecisionTreeFactor::DecisionTreeFactor(const DiscreteKeys &keys, const string &table) :
+ DiscreteFactor(keys.indices()), AlgebraicDecisionTree<Key>(keys, table),
+ cardinalities_(keys.cardinalities()) {
+ }
/* ************************************************************************* */
} // namespace gtsam

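safe_div, moved here from the deleted Potentials.cpp, guards the product-by-sum division used during normalization: a zero numerator or denominator yields probability zero rather than NaN or Inf. A toy sketch (hypothetical keys and tables, using the constructors and operators from this file):

    DiscreteKey A(0, 2), B(1, 2);
    DecisionTreeFactor f1(DiscreteKeys{A, B}, "0 1 2 3"), f2(B, "0 2");
    DecisionTreeFactor product = f1 * f2;    // apply(f2, ADT::Ring::mul)
    DecisionTreeFactor ratio = product / f2; // safe_div: entries where f2 is 0 come out 0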
View File

@ -19,7 +19,8 @@
#pragma once
#include <gtsam/discrete/DiscreteFactor.h>
- #include <gtsam/discrete/Potentials.h>
+ #include <gtsam/discrete/DiscreteKey.h>
+ #include <gtsam/discrete/AlgebraicDecisionTree.h>
#include <gtsam/inference/Ordering.h>
#include <boost/shared_ptr.hpp>
@ -35,7 +36,7 @@ namespace gtsam {
/**
* A discrete probabilistic factor
*/
- class GTSAM_EXPORT DecisionTreeFactor: public DiscreteFactor, public Potentials {
+ class GTSAM_EXPORT DecisionTreeFactor: public DiscreteFactor, public AlgebraicDecisionTree<Key> {
public:
@ -43,6 +44,10 @@ namespace gtsam {
typedef DecisionTreeFactor This;
typedef DiscreteFactor Base; ///< Typedef to base class
typedef boost::shared_ptr<DecisionTreeFactor> shared_ptr;
+ typedef AlgebraicDecisionTree<Key> ADT;
+ protected:
+ std::map<Key,size_t> cardinalities_;
+ public:
@ -55,11 +60,11 @@ namespace gtsam {
/** Constructor from Indices, Ordering, and AlgebraicDecisionDiagram */
DecisionTreeFactor(const DiscreteKeys& keys, const ADT& potentials);
- /** Constructor from Indices and (string or doubles) */
- template<class SOURCE>
- DecisionTreeFactor(const DiscreteKeys& keys, SOURCE table) :
- DiscreteFactor(keys.indices()), Potentials(keys, table) {
- }
+ /** Constructor from doubles */
+ DecisionTreeFactor(const DiscreteKeys& keys, const std::vector<double>& table);
+ /** Constructor from string */
+ DecisionTreeFactor(const DiscreteKeys& keys, const std::string& table);
/// Single-key specialization
template <class SOURCE>
@ -71,7 +76,7 @@ namespace gtsam {
: DecisionTreeFactor(DiscreteKeys{key}, row) {}
/** Construct from a DiscreteConditional type */
- DecisionTreeFactor(const DiscreteConditional& c);
+ explicit DecisionTreeFactor(const DiscreteConditional& c);
/// @}
/// @name Testable
@ -90,7 +95,7 @@ namespace gtsam {
/// Value is just a lookup in the AlgebraicDecisionTree
double operator()(const DiscreteValues& values) const override {
- return Potentials::operator()(values);
+ return ADT::operator()(values);
}
/// multiply two factors
@ -98,6 +103,10 @@ namespace gtsam {
return apply(f, ADT::Ring::mul);
}
+ static double safe_div(const double& a, const double& b);
+ size_t cardinality(Key j) const { return cardinalities_.at(j);}
/// divide by factor f (safely)
DecisionTreeFactor operator/(const DecisionTreeFactor& f) const {
return apply(f, safe_div);

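A minimal usage sketch of the reworked class (hypothetical keys and table values): the two new concrete constructors replace the old template, lookups go through ADT::operator(), and cardinality() is served by the new cardinalities_ member:

    DiscreteKey A(0, 2), B(1, 2);
    DecisionTreeFactor f(DiscreteKeys{A, B}, std::vector<double>{1, 2, 3, 4});
    DiscreteValues values;
    values[A.first] = 1;  // A = 1
    values[B.first] = 0;  // B = 0
    double v = f(values);               // ADT::operator() lookup
    size_t c = f.cardinality(A.first);  // 2, from cardinalities_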
View File

@ -80,7 +80,7 @@ void DiscreteConditional::print(const string& s,
}
}
cout << ")";
- Potentials::print("");
+ ADT::print("");
cout << endl;
}

View File

@ -128,7 +128,7 @@ public:
/// Evaluate: just a lookup in the AlgebraicDecisionTree
double operator()(const DiscreteValues& values) const override {
- return Potentials::operator()(values);
+ return ADT::operator()(values);
}
/** Convert to a factor */

View File

@ -1,96 +0,0 @@
/* ----------------------------------------------------------------------------
* GTSAM Copyright 2010, Georgia Tech Research Corporation,
* Atlanta, Georgia 30332-0415
* All Rights Reserved
* Authors: Frank Dellaert, et al. (see THANKS for the full author list)
* See LICENSE for the license information
* -------------------------------------------------------------------------- */
/**
* @file Potentials.cpp
* @date March 24, 2011
* @author Frank Dellaert
*/
#include <gtsam/discrete/DecisionTree-inl.h>
#include <gtsam/discrete/Potentials.h>
#include <boost/format.hpp>
#include <string>
using namespace std;
namespace gtsam {
/* ************************************************************************* */
double Potentials::safe_div(const double& a, const double& b) {
// cout << boost::format("%g / %g = %g\n") % a % b % ((a == 0) ? 0 : (a / b));
// The use for safe_div is when we divide the product factor by the sum
// factor. If the product or sum is zero, we accord zero probability to the
// event.
return (a == 0 || b == 0) ? 0 : (a / b);
}
/* ********************************************************************************
*/
Potentials::Potentials() : ADT(1.0) {}
/* ********************************************************************************
*/
Potentials::Potentials(const DiscreteKeys& keys, const ADT& decisionTree)
: ADT(decisionTree), cardinalities_(keys.cardinalities()) {}
/* ************************************************************************* */
bool Potentials::equals(const Potentials& other, double tol) const {
return ADT::equals(other, tol);
}
/* ************************************************************************* */
void Potentials::print(const string& s, const KeyFormatter& formatter) const {
cout << s << "\n Cardinalities: { ";
for (const std::pair<const Key,size_t>& key : cardinalities_)
cout << formatter(key.first) << ":" << key.second << ", ";
cout << "}" << endl;
ADT::print(" ", formatter);
}
//
// /* ************************************************************************* */
// template<class P>
// void Potentials::remapIndices(const P& remapping) {
// // Permute the _cardinalities (TODO: Inefficient Consider Improving)
// DiscreteKeys keys;
// map<Key, Key> ordering;
//
// // Get the original keys from cardinalities_
// for(const DiscreteKey& key: cardinalities_)
// keys & key;
//
// // Perform Permutation
// for(DiscreteKey& key: keys) {
// ordering[key.first] = remapping[key.first];
// key.first = ordering[key.first];
// }
//
// // Change *this
// AlgebraicDecisionTree<Key> permuted((*this), ordering);
// *this = permuted;
// cardinalities_ = keys.cardinalities();
// }
//
// /* ************************************************************************* */
// void Potentials::permuteWithInverse(const Permutation& inversePermutation) {
// remapIndices(inversePermutation);
// }
//
// /* ************************************************************************* */
// void Potentials::reduceWithInverse(const internal::Reduction& inverseReduction) {
// remapIndices(inverseReduction);
// }
/* ************************************************************************* */
} // namespace gtsam

View File

@ -1,97 +0,0 @@
/* ----------------------------------------------------------------------------
* GTSAM Copyright 2010, Georgia Tech Research Corporation,
* Atlanta, Georgia 30332-0415
* All Rights Reserved
* Authors: Frank Dellaert, et al. (see THANKS for the full author list)
* See LICENSE for the license information
* -------------------------------------------------------------------------- */
/**
* @file Potentials.h
* @date March 24, 2011
* @author Frank Dellaert
*/
#pragma once
#include <gtsam/discrete/AlgebraicDecisionTree.h>
#include <gtsam/discrete/DiscreteKey.h>
#include <gtsam/inference/Key.h>
#include <boost/shared_ptr.hpp>
#include <set>
namespace gtsam {
/**
* A base class for both DiscreteFactor and DiscreteConditional
*/
class GTSAM_EXPORT Potentials: public AlgebraicDecisionTree<Key> {
public:
typedef AlgebraicDecisionTree<Key> ADT;
protected:
/// Cardinality for each key, used in combine
std::map<Key,size_t> cardinalities_;
/** Constructor from ColumnIndex, and ADT */
Potentials(const ADT& potentials) :
ADT(potentials) {
}
// Safe division for probabilities
static double safe_div(const double& a, const double& b);
// // Apply either a permutation or a reduction
// template<class P>
// void remapIndices(const P& remapping);
public:
/** Default constructor for I/O */
Potentials();
/** Constructor from Indices and ADT */
Potentials(const DiscreteKeys& keys, const ADT& decisionTree);
/** Constructor from Indices and (string or doubles) */
template<class SOURCE>
Potentials(const DiscreteKeys& keys, SOURCE table) :
ADT(keys, table), cardinalities_(keys.cardinalities()) {
}
// Testable
bool equals(const Potentials& other, double tol = 1e-9) const;
void print(const std::string& s = "Potentials: ",
const KeyFormatter& formatter = DefaultKeyFormatter) const;
size_t cardinality(Key j) const { return cardinalities_.at(j);}
// /**
// * @brief Permutes the keys in Potentials
// *
// * This permutes the Indices and performs necessary re-ordering of ADD.
// * This is virtual so that derived types e.g. DecisionTreeFactor can
// * re-implement it.
// */
// GTSAM_EXPORT virtual void permuteWithInverse(const Permutation& inversePermutation);
//
// /**
// * Apply a reduction, which is a remapping of variable indices.
// */
// GTSAM_EXPORT virtual void reduceWithInverse(const internal::Reduction& inverseReduction);
}; // Potentials
// traits
template<> struct traits<Potentials> : public Testable<Potentials> {};
template<> struct traits<Potentials::ADT> : public Testable<Potentials::ADT> {};
} // namespace gtsam

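Both Potentials files are deleted outright rather than moved: safe_div and the cardinalities_ bookkeeping migrate into DecisionTreeFactor (above), and former Potentials users hold an AlgebraicDecisionTree<Key> directly. A minimal migration sketch (hypothetical key):

    using ADT = AlgebraicDecisionTree<Key>;
    DiscreteKey A(0, 2);
    ADT tree({A}, "0.6 0.4");   // was: Potentials({A}, "0.6 0.4")
    tree.print("potentials:");  // ADT::print replaces Potentials::print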
View File

@ -41,21 +41,23 @@ using namespace gtsam;
static const DiscreteKey Asia(0, 2), Smoking(4, 2), Tuberculosis(3, 2),
LungCancer(6, 2), Bronchitis(7, 2), Either(5, 2), XRay(2, 2), Dyspnea(1, 2);
+ using ADT = AlgebraicDecisionTree<Key>;
/* ************************************************************************* */
TEST(DiscreteBayesNet, bayesNet) {
DiscreteBayesNet bayesNet;
DiscreteKey Parent(0, 2), Child(1, 2);
auto prior = boost::make_shared<DiscreteConditional>(Parent % "6/4");
- CHECK(assert_equal(Potentials::ADT({Parent}, "0.6 0.4"),
- (Potentials::ADT)*prior));
+ CHECK(assert_equal(ADT({Parent}, "0.6 0.4"),
+ (ADT)*prior));
bayesNet.push_back(prior);
auto conditional =
boost::make_shared<DiscreteConditional>(Child | Parent = "7/3 8/2");
EXPECT_LONGS_EQUAL(1, *(conditional->beginFrontals()));
- Potentials::ADT expected(Child & Parent, "0.7 0.8 0.3 0.2");
- CHECK(assert_equal(expected, (Potentials::ADT)*conditional));
+ ADT expected(Child & Parent, "0.7 0.8 0.3 0.2");
+ CHECK(assert_equal(expected, (ADT)*conditional));
bayesNet.push_back(conditional);
DiscreteFactorGraph fg(bayesNet);

View File

@ -182,6 +182,94 @@ TEST(triangulation, fourPoses) {
#endif
}
//******************************************************************************
+ TEST(triangulation, threePoses_robustNoiseModel) {
+ Pose3 pose3 = pose1 * Pose3(Rot3::Ypr(0.1, 0.2, 0.1), Point3(0.1, -2, -.1));
+ PinholeCamera<Cal3_S2> camera3(pose3, *sharedCal);
+ Point2 z3 = camera3.project(landmark);
+ vector<Pose3> poses;
+ Point2Vector measurements;
+ poses += pose1, pose2, pose3;
+ measurements += z1, z2, z3;
+ // noise free, so should give exactly the landmark
+ boost::optional<Point3> actual =
+ triangulatePoint3<Cal3_S2>(poses, sharedCal, measurements);
+ EXPECT(assert_equal(landmark, *actual, 1e-2));
+ // Add outlier
+ measurements.at(0) += Point2(100, 120); // very large pixel noise!
+ // now estimate does not match landmark
+ boost::optional<Point3> actual2 = //
+ triangulatePoint3<Cal3_S2>(poses, sharedCal, measurements);
+ // DLT is surprisingly robust, but still off (actual error is around 0.26m):
+ EXPECT( (landmark - *actual2).norm() >= 0.2);
+ EXPECT( (landmark - *actual2).norm() <= 0.5);
+ // Again with nonlinear optimization
+ boost::optional<Point3> actual3 =
+ triangulatePoint3<Cal3_S2>(poses, sharedCal, measurements, 1e-9, true);
+ // result from nonlinear (but non-robust) optimization is close to DLT and still off
+ EXPECT(assert_equal(*actual2, *actual3, 0.1));
+ // Again with nonlinear optimization, this time with robust loss
+ auto model = noiseModel::Robust::Create(
+ noiseModel::mEstimator::Huber::Create(1.345), noiseModel::Unit::Create(2));
+ boost::optional<Point3> actual4 = triangulatePoint3<Cal3_S2>(
+ poses, sharedCal, measurements, 1e-9, true, model);
+ // using the Huber loss we now have quite a small error! Nice!
+ EXPECT(assert_equal(landmark, *actual4, 0.05));
+ }
//******************************************************************************
+ TEST(triangulation, fourPoses_robustNoiseModel) {
+ Pose3 pose3 = pose1 * Pose3(Rot3::Ypr(0.1, 0.2, 0.1), Point3(0.1, -2, -.1));
+ PinholeCamera<Cal3_S2> camera3(pose3, *sharedCal);
+ Point2 z3 = camera3.project(landmark);
+ vector<Pose3> poses;
+ Point2Vector measurements;
+ poses += pose1, pose1, pose2, pose3; // 2 measurements from pose 1
+ measurements += z1, z1, z2, z3;
+ // noise free, so should give exactly the landmark
+ boost::optional<Point3> actual =
+ triangulatePoint3<Cal3_S2>(poses, sharedCal, measurements);
+ EXPECT(assert_equal(landmark, *actual, 1e-2));
+ // Add outlier
+ measurements.at(0) += Point2(100, 120); // very large pixel noise!
+ // add noise on other measurements:
+ measurements.at(1) += Point2(0.1, 0.2); // small noise
+ measurements.at(2) += Point2(0.2, 0.2);
+ measurements.at(3) += Point2(0.3, 0.1);
+ // now estimate does not match landmark
+ boost::optional<Point3> actual2 = //
+ triangulatePoint3<Cal3_S2>(poses, sharedCal, measurements);
+ // DLT is surprisingly robust, but still off (actual error is around 0.17m):
+ EXPECT( (landmark - *actual2).norm() >= 0.1);
+ EXPECT( (landmark - *actual2).norm() <= 0.5);
+ // Again with nonlinear optimization
+ boost::optional<Point3> actual3 =
+ triangulatePoint3<Cal3_S2>(poses, sharedCal, measurements, 1e-9, true);
+ // result from nonlinear (but non-robust) optimization is close to DLT and still off
+ EXPECT(assert_equal(*actual2, *actual3, 0.1));
+ // Again with nonlinear optimization, this time with robust loss
+ auto model = noiseModel::Robust::Create(
+ noiseModel::mEstimator::Huber::Create(1.345), noiseModel::Unit::Create(2));
+ boost::optional<Point3> actual4 = triangulatePoint3<Cal3_S2>(
+ poses, sharedCal, measurements, 1e-9, true, model);
+ // using the Huber loss we now have quite a small error! Nice!
+ EXPECT(assert_equal(landmark, *actual4, 0.05));
+ }
//******************************************************************************
TEST(triangulation, fourPoses_distinct_Ks) {
Cal3_S2 K1(1500, 1200, 0, 640, 480);

View File

@ -14,6 +14,7 @@
* @brief Functions for triangulation
* @date July 31, 2013
* @author Chris Beall
+ * @author Luca Carlone
*/
#pragma once
@ -105,18 +106,18 @@ template<class CALIBRATION>
std::pair<NonlinearFactorGraph, Values> triangulationGraph(
const std::vector<Pose3>& poses, boost::shared_ptr<CALIBRATION> sharedCal,
const Point2Vector& measurements, Key landmarkKey,
- const Point3& initialEstimate) {
+ const Point3& initialEstimate,
+ const SharedNoiseModel& model = nullptr) {
Values values;
values.insert(landmarkKey, initialEstimate); // Initial landmark value
NonlinearFactorGraph graph;
static SharedNoiseModel unit2(noiseModel::Unit::Create(2));
- static SharedNoiseModel prior_model(noiseModel::Isotropic::Sigma(6, 1e-6));
for (size_t i = 0; i < measurements.size(); i++) {
const Pose3& pose_i = poses[i];
typedef PinholePose<CALIBRATION> Camera;
Camera camera_i(pose_i, sharedCal);
graph.emplace_shared<TriangulationFactor<Camera> > //
- (camera_i, measurements[i], unit2, landmarkKey);
+ (camera_i, measurements[i], model? model : unit2, landmarkKey);
}
return std::make_pair(graph, values);
}
@ -134,7 +135,8 @@ template<class CAMERA>
std::pair<NonlinearFactorGraph, Values> triangulationGraph(
const CameraSet<CAMERA>& cameras,
const typename CAMERA::MeasurementVector& measurements, Key landmarkKey,
- const Point3& initialEstimate) {
+ const Point3& initialEstimate,
+ const SharedNoiseModel& model = nullptr) {
Values values;
values.insert(landmarkKey, initialEstimate); // Initial landmark value
NonlinearFactorGraph graph;
@ -143,7 +145,7 @@ std::pair<NonlinearFactorGraph, Values> triangulationGraph(
for (size_t i = 0; i < measurements.size(); i++) {
const CAMERA& camera_i = cameras[i];
graph.emplace_shared<TriangulationFactor<CAMERA> > //
- (camera_i, measurements[i], unit, landmarkKey);
+ (camera_i, measurements[i], model? model : unit, landmarkKey);
}
return std::make_pair(graph, values);
}
@ -169,13 +171,14 @@ GTSAM_EXPORT Point3 optimize(const NonlinearFactorGraph& graph,
template<class CALIBRATION>
Point3 triangulateNonlinear(const std::vector<Pose3>& poses,
boost::shared_ptr<CALIBRATION> sharedCal,
- const Point2Vector& measurements, const Point3& initialEstimate) {
+ const Point2Vector& measurements, const Point3& initialEstimate,
+ const SharedNoiseModel& model = nullptr) {
// Create a factor graph and initial values
Values values;
NonlinearFactorGraph graph;
boost::tie(graph, values) = triangulationGraph<CALIBRATION> //
- (poses, sharedCal, measurements, Symbol('p', 0), initialEstimate);
+ (poses, sharedCal, measurements, Symbol('p', 0), initialEstimate, model);
return optimize(graph, values, Symbol('p', 0));
}
@ -190,13 +193,14 @@ Point3 triangulateNonlinear(const std::vector<Pose3>& poses,
template<class CAMERA>
Point3 triangulateNonlinear(
const CameraSet<CAMERA>& cameras,
- const typename CAMERA::MeasurementVector& measurements, const Point3& initialEstimate) {
+ const typename CAMERA::MeasurementVector& measurements, const Point3& initialEstimate,
+ const SharedNoiseModel& model = nullptr) {
// Create a factor graph and initial values
Values values;
NonlinearFactorGraph graph;
boost::tie(graph, values) = triangulationGraph<CAMERA> //
- (cameras, measurements, Symbol('p', 0), initialEstimate);
+ (cameras, measurements, Symbol('p', 0), initialEstimate, model);
return optimize(graph, values, Symbol('p', 0));
}
@ -239,7 +243,8 @@ template<class CALIBRATION>
Point3 triangulatePoint3(const std::vector<Pose3>& poses,
boost::shared_ptr<CALIBRATION> sharedCal,
const Point2Vector& measurements, double rank_tol = 1e-9,
- bool optimize = false) {
+ bool optimize = false,
+ const SharedNoiseModel& model = nullptr) {
assert(poses.size() == measurements.size());
if (poses.size() < 2)
@ -254,7 +259,7 @@ Point3 triangulatePoint3(const std::vector<Pose3>& poses,
// Then refine using non-linear optimization
if (optimize)
point = triangulateNonlinear<CALIBRATION> //
- (poses, sharedCal, measurements, point);
+ (poses, sharedCal, measurements, point, model);
#ifdef GTSAM_THROW_CHEIRALITY_EXCEPTION
// verify that the triangulated point lies in front of all cameras
@ -284,7 +289,8 @@ template<class CAMERA>
Point3 triangulatePoint3(
const CameraSet<CAMERA>& cameras,
const typename CAMERA::MeasurementVector& measurements, double rank_tol = 1e-9,
- bool optimize = false) {
+ bool optimize = false,
+ const SharedNoiseModel& model = nullptr) {
size_t m = cameras.size();
assert(measurements.size() == m);
@ -298,7 +304,7 @@ Point3 triangulatePoint3(
// Then refine using non-linear optimization
if (optimize)
- point = triangulateNonlinear<CAMERA>(cameras, measurements, point);
+ point = triangulateNonlinear<CAMERA>(cameras, measurements, point, model);
#ifdef GTSAM_THROW_CHEIRALITY_EXCEPTION
// verify that the triangulated point lies in front of all cameras
@ -317,9 +323,10 @@ template<class CALIBRATION>
Point3 triangulatePoint3(
const CameraSet<PinholeCamera<CALIBRATION> >& cameras,
const Point2Vector& measurements, double rank_tol = 1e-9,
- bool optimize = false) {
+ bool optimize = false,
+ const SharedNoiseModel& model = nullptr) {
return triangulatePoint3<PinholeCamera<CALIBRATION> > //
- (cameras, measurements, rank_tol, optimize);
+ (cameras, measurements, rank_tol, optimize, model);
}
struct GTSAM_EXPORT TriangulationParameters {
@ -341,20 +348,25 @@ struct GTSAM_EXPORT TriangulationParameters {
*/
double dynamicOutlierRejectionThreshold;
+ SharedNoiseModel noiseModel; ///< used in the nonlinear triangulation
/**
* Constructor
* @param rankTol tolerance used to check if point triangulation is degenerate
* @param enableEPI if true refine triangulation with embedded LM iterations
* @param landmarkDistanceThreshold flag as degenerate if point further than this
* @param dynamicOutlierRejectionThreshold or if average error larger than this
+ * @param noiseModel noise model to use during nonlinear triangulation
+ *
*/
TriangulationParameters(const double _rankTolerance = 1.0,
const bool _enableEPI = false, double _landmarkDistanceThreshold = -1,
- double _dynamicOutlierRejectionThreshold = -1) :
+ double _dynamicOutlierRejectionThreshold = -1,
+ const SharedNoiseModel& _noiseModel = nullptr) :
rankTolerance(_rankTolerance), enableEPI(_enableEPI), //
landmarkDistanceThreshold(_landmarkDistanceThreshold), //
- dynamicOutlierRejectionThreshold(_dynamicOutlierRejectionThreshold) {
+ dynamicOutlierRejectionThreshold(_dynamicOutlierRejectionThreshold),
+ noiseModel(_noiseModel){
}
// stream to output
@ -366,6 +378,7 @@ struct GTSAM_EXPORT TriangulationParameters {
<< std::endl;
os << "dynamicOutlierRejectionThreshold = "
<< p.dynamicOutlierRejectionThreshold << std::endl;
+ os << "noise model" << std::endl;
return os;
}
@ -468,8 +481,9 @@ TriangulationResult triangulateSafe(const CameraSet<CAMERA>& cameras,
else
// We triangulate the 3D position of the landmark
try {
- Point3 point = triangulatePoint3<CAMERA>(cameras, measured,
- params.rankTolerance, params.enableEPI);
+ Point3 point =
+ triangulatePoint3<CAMERA>(cameras, measured, params.rankTolerance,
+ params.enableEPI, params.noiseModel);
// Check landmark distance and re-projection errors to avoid outliers
size_t i = 0;

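The optional model threads through every triangulatePoint3 overload into triangulationGraph, and TriangulationParameters now carries it for triangulateSafe. A minimal sketch mirroring the tests above (poses, sharedCal, measurements, cameras, and measured are assumed to be set up as there):

    auto huber = noiseModel::Robust::Create(
        noiseModel::mEstimator::Huber::Create(1.345), noiseModel::Unit::Create(2));
    // direct call, using the new trailing parameter:
    Point3 p = triangulatePoint3<Cal3_S2>(poses, sharedCal, measurements,
                                          1e-9 /*rank_tol*/, true /*optimize*/, huber);
    // or via the parameter struct consumed by triangulateSafe:
    TriangulationParameters params(1.0, true, -1, -1, huber);
    TriangulationResult result = triangulateSafe(cameras, measured, params);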
View File

@ -143,7 +143,7 @@ namespace gtsam {
const Nodes& nodes() const { return nodes_; }
/** Access node by variable */
- const sharedNode operator[](Key j) const { return nodes_.at(j); }
+ sharedClique operator[](Key j) const { return nodes_.at(j); }
/** return root cliques */
const Roots& roots() const { return roots_; }

View File

@ -206,9 +206,11 @@ class GTSAM_EXPORT GncOptimizer {
std::cout << "GNC Optimizer stopped because all measurements are already known to be inliers or outliers"
<< std::endl;
}
+ if (params_.verbosity >= GncParameters::Verbosity::MU) {
+ std::cout << "mu: " << mu << std::endl;
+ }
if (params_.verbosity >= GncParameters::Verbosity::VALUES) {
result.print("result\n");
- std::cout << "mu: " << mu << std::endl;
}
return result;
}
@ -217,12 +219,16 @@ class GTSAM_EXPORT GncOptimizer {
for (iter = 0; iter < params_.maxIterations; iter++) {
// display info
- if (params_.verbosity >= GncParameters::Verbosity::VALUES) {
+ if (params_.verbosity >= GncParameters::Verbosity::MU) {
std::cout << "iter: " << iter << std::endl;
- result.print("result\n");
std::cout << "mu: " << mu << std::endl;
}
+ if (params_.verbosity >= GncParameters::Verbosity::WEIGHTS) {
+ std::cout << "weights: " << weights_ << std::endl;
+ }
+ if (params_.verbosity >= GncParameters::Verbosity::VALUES) {
+ result.print("result\n");
+ }
// weights update
weights_ = calculateWeights(result, mu);
@ -253,10 +259,12 @@ class GTSAM_EXPORT GncOptimizer {
if (params_.verbosity >= GncParameters::Verbosity::SUMMARY) {
std::cout << "final iterations: " << iter << std::endl;
std::cout << "final mu: " << mu << std::endl;
- std::cout << "final weights: " << weights_ << std::endl;
std::cout << "previous cost: " << prev_cost << std::endl;
std::cout << "current cost: " << cost << std::endl;
}
+ if (params_.verbosity >= GncParameters::Verbosity::WEIGHTS) {
+ std::cout << "final weights: " << weights_ << std::endl;
+ }
return result;
}
@ -291,6 +299,9 @@ class GTSAM_EXPORT GncOptimizer {
std::min(mu_init, barcSq_[k] / (2 * rk - barcSq_[k]) ) : mu_init;
}
}
+ if (mu_init >= 0 && mu_init < 1e-6)
+ mu_init = 1e-6; // if mu ~ 0 (but positive), that means we have measurements with large errors,
+ // i.e., rk > barcSq_[k] and rk very large, hence we threshold to 1e-6 to avoid mu = 0
return mu_init > 0 && !std::isinf(mu_init) ? mu_init : -1; // if mu <= 0 or mu = inf, return -1,
// which leads to termination of the main gnc loop. In this case, all residuals are already below the threshold
// and there is no need to robustify (TLS = least squares)
@ -338,8 +349,10 @@ class GTSAM_EXPORT GncOptimizer {
bool checkCostConvergence(const double cost, const double prev_cost) const {
bool costConverged = std::fabs(cost - prev_cost) / std::max(prev_cost, 1e-7)
< params_.relativeCostTol;
- if (costConverged && params_.verbosity >= GncParameters::Verbosity::SUMMARY)
- std::cout << "checkCostConvergence = true " << std::endl;
+ if (costConverged && params_.verbosity >= GncParameters::Verbosity::SUMMARY){
+ std::cout << "checkCostConvergence = true (prev. cost = " << prev_cost
+ << ", curr. cost = " << cost << ")" << std::endl;
+ }
return costConverged;
}

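For intuition on the TLS initialization above: mu_init = barcSq / (2*rk - barcSq) for the largest-residual measurement, so large outliers push mu toward zero (hence the 1e-6 floor), while if no residual exceeds the threshold the function returns -1 and GNC terminates immediately (plain least squares suffices). A worked instance (assumed numbers):

    double barcSq = 1.0, rk = 3.0;               // rk > barcSq: an apparent outlier
    double mu_init = barcSq / (2 * rk - barcSq); // = 0.2, GNC starts near the convex surrogate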
View File

@ -48,6 +48,8 @@ class GTSAM_EXPORT GncParams {
enum Verbosity {
SILENT = 0,
SUMMARY,
+ MU,
+ WEIGHTS,
VALUES
};

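MU and WEIGHTS slot between SUMMARY and VALUES, giving two intermediate levels: per-iteration mu only, then mu plus the weight vector, with VALUES still the most verbose. A minimal sketch (assuming the usual GncParams setter and an existing graph and initial values):

    GncParams<LevenbergMarquardtParams> gncParams;
    gncParams.setVerbosityGNC(GncParams<LevenbergMarquardtParams>::Verbosity::WEIGHTS);
    GncOptimizer<GncParams<LevenbergMarquardtParams>> gnc(graph, initial, gncParams);
    Values estimate = gnc.optimize();  // prints mu and weights every iteration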
View File

@ -130,9 +130,9 @@ void Scheduler::addStudentSpecificConstraints(size_t i,
// get all constraints then specialize to slot
size_t dummyIndex = maxNrStudents_ * 3 + maxNrStudents_;
DiscreteKey dummy(dummyIndex, nrTimeSlots());
- Potentials::ADT p(dummy & areaKey,
+ AlgebraicDecisionTree<Key> p(dummy & areaKey,
available_); // available_ is Doodle string
- Potentials::ADT q = p.choose(dummyIndex, *slot);
+ auto q = p.choose(dummyIndex, *slot);
CSP::add(areaKey, q);
} else {
DiscreteKeys keys {s.key_, areaKey};

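choose(label, index) conditions the decision tree on one fixed assignment, which is exactly what specializing the Doodle availability to a single time slot does above. A toy sketch (hypothetical keys and availability string):

    DiscreteKey slotKey(0, 3), areaKey(1, 2);
    AlgebraicDecisionTree<Key> p(slotKey & areaKey, "1 0 0 1 1 1");  // 3 slots x 2 areas
    auto q = p.choose(slotKey.first, 2);  // fix slot = 2; q depends on areaKey only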
View File

@ -61,6 +61,12 @@ class PoseToPointFactor : public NoiseModelFactor2<POSE, POINT> {
traits<POINT>::Equals(this->measured_, e->measured_, tol);
}
+ /// @return a deep copy of this factor
+ gtsam::NonlinearFactor::shared_ptr clone() const override {
+ return boost::static_pointer_cast<gtsam::NonlinearFactor>(
+ gtsam::NonlinearFactor::shared_ptr(new This(*this)));
+ }
/** implement functions needed to derive from Factor */
/** vector of errors