diff --git a/gtsam/base/StringFormatter.cpp b/gtsam/base/StringFormatter.cpp deleted file mode 100644 index 6674d3f5a..000000000 --- a/gtsam/base/StringFormatter.cpp +++ /dev/null @@ -1,23 +0,0 @@ -/* ---------------------------------------------------------------------------- - - * GTSAM Copyright 2010, Georgia Tech Research Corporation, - * Atlanta, Georgia 30332-0415 - * All Rights Reserved - * Authors: Frank Dellaert, et al. (see THANKS for the full author list) - - * See LICENSE for the license information - - * -------------------------------------------------------------------------- */ - -/** - * @file StringFormatter.cpp - * @brief - * @author Richard Roberts - * @date Feb 19, 2012 - */ - -#include "StringFormatter.h" - -namespace gtsam { - -} /* namespace gtsam */ diff --git a/gtsam/base/StringFormatter.h b/gtsam/base/StringFormatter.h deleted file mode 100644 index 26cea8881..000000000 --- a/gtsam/base/StringFormatter.h +++ /dev/null @@ -1,36 +0,0 @@ -/* ---------------------------------------------------------------------------- - - * GTSAM Copyright 2010, Georgia Tech Research Corporation, - * Atlanta, Georgia 30332-0415 - * All Rights Reserved - * Authors: Frank Dellaert, et al. (see THANKS for the full author list) - - * See LICENSE for the license information - - * -------------------------------------------------------------------------- */ - -/** - * @file StringFormatter.h - * @brief - * @author Richard Roberts - * @date Feb 19, 2012 - */ - -#pragma once - -namespace gtsam { - - /** - * - */ - class StringFormatter { - - public: - - virtual ~StringFormatter() {} - - virtual std::string keyToString(Key key) = 0; - - }; - -} /* namespace gtsam */ diff --git a/gtsam/base/Testable.h b/gtsam/base/Testable.h index a1130dfd9..bcc6c8957 100644 --- a/gtsam/base/Testable.h +++ b/gtsam/base/Testable.h @@ -23,7 +23,7 @@ * void print(const std::string& name) const = 0; * * equality up to tolerance - * tricky to implement, see NonlinearFactor1 for an example + * tricky to implement, see NoiseModelFactor1 for an example * equals is not supposed to print out *anything*, just return true|false * bool equals(const Derived& expected, double tol) const = 0; * diff --git a/gtsam/base/types.h b/gtsam/base/types.h index f3f1ae465..b6c5617a5 100644 --- a/gtsam/base/types.h +++ b/gtsam/base/types.h @@ -26,9 +26,6 @@ namespace gtsam { /// Integer variable index type typedef size_t Index; - /// Integer nonlinear key type - typedef size_t Key; - /** * Helper class that uses templates to select between two types based on * whether TEST_TYPE is const or not. 
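The Testable.h hunk above documents the print/equals concept that GTSAM types are expected to satisfy. As a minimal standalone sketch of that concept (the Point2Like type is hypothetical and not part of GTSAM): print() may report anything useful for debugging, while equals() compares up to a tolerance and prints nothing.

#include <cmath>
#include <iostream>
#include <string>

// Hypothetical example type; not GTSAM code.
struct Point2Like {
  double x, y;

  // print() may write anything useful for debugging.
  void print(const std::string& name = "") const {
    std::cout << name << "(" << x << ", " << y << ")\n";
  }

  // equals() only returns true/false up to tol and never prints,
  // so the test macros decide what to report.
  bool equals(const Point2Like& expected, double tol = 1e-9) const {
    return std::fabs(x - expected.x) < tol && std::fabs(y - expected.y) < tol;
  }
};

int main() {
  Point2Like a = {1.0, 2.0}, b = {1.0, 2.0 + 1e-12};
  a.print("a ");
  std::cout << std::boolalpha << a.equals(b) << std::endl;  // true within tolerance
  return 0;
}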
diff --git a/gtsam/inference/tests/testJunctionTree.cpp b/gtsam/inference/tests/testJunctionTree.cpp index 7374bbad8..16309abda 100644 --- a/gtsam/inference/tests/testJunctionTree.cpp +++ b/gtsam/inference/tests/testJunctionTree.cpp @@ -24,10 +24,6 @@ using namespace boost::assign; #include #include -// Magically casts strings like "x3" to a Symbol('x',3) key, see Symbol.h -#define GTSAM_MAGIC_KEY - -#include #include #include #include diff --git a/gtsam/inference/tests/testSerializationInference.cpp b/gtsam/inference/tests/testSerializationInference.cpp index 2db7014f1..d4c8ea62a 100644 --- a/gtsam/inference/tests/testSerializationInference.cpp +++ b/gtsam/inference/tests/testSerializationInference.cpp @@ -16,12 +16,9 @@ * @date Feb 7, 2012 */ -#define GTSAM_MAGIC_KEY - #include #include #include -#include #include #include @@ -32,13 +29,12 @@ using namespace gtsam::serializationTestHelpers; /* ************************************************************************* */ TEST (Serialization, symbolic_graph) { - Ordering o; o += "x1","l1","x2"; // construct expected symbolic graph SymbolicFactorGraph sfg; - sfg.push_factor(o["x1"]); - sfg.push_factor(o["x1"],o["x2"]); - sfg.push_factor(o["x1"],o["l1"]); - sfg.push_factor(o["l1"],o["x2"]); + sfg.push_factor(0); + sfg.push_factor(0,1); + sfg.push_factor(0,2); + sfg.push_factor(2,1); EXPECT(equalsObj(sfg)); EXPECT(equalsXML(sfg)); @@ -46,11 +42,9 @@ TEST (Serialization, symbolic_graph) { /* ************************************************************************* */ TEST (Serialization, symbolic_bn) { - Ordering o; o += "x2","l1","x1"; - - IndexConditional::shared_ptr x2(new IndexConditional(o["x2"], o["l1"], o["x1"])); - IndexConditional::shared_ptr l1(new IndexConditional(o["l1"], o["x1"])); - IndexConditional::shared_ptr x1(new IndexConditional(o["x1"])); + IndexConditional::shared_ptr x2(new IndexConditional(1, 2, 0)); + IndexConditional::shared_ptr l1(new IndexConditional(2, 0)); + IndexConditional::shared_ptr x1(new IndexConditional(0)); SymbolicBayesNet sbn; sbn.push_back(x2); diff --git a/gtsam/nonlinear/ISAM2-impl-inl.h b/gtsam/nonlinear/ISAM2-impl-inl.h index 0abea8e7f..c2483488c 100644 --- a/gtsam/nonlinear/ISAM2-impl-inl.h +++ b/gtsam/nonlinear/ISAM2-impl-inl.h @@ -44,8 +44,10 @@ struct ISAM2::Impl { * @param delta Current linear delta to be augmented with zeros * @param ordering Current ordering to be augmented with new variables * @param nodes Current BayesTree::Nodes index to be augmented with slots for new variables + * @param keyFormatter Formatter for printing nonlinear keys during debugging */ - static void AddVariables(const Values& newTheta, Values& theta, Permuted& delta, Ordering& ordering, typename Base::Nodes& nodes); + static void AddVariables(const Values& newTheta, Values& theta, Permuted& delta, Ordering& ordering, + typename Base::Nodes& nodes, const KeyFormatter& keyFormatter = DefaultKeyFormatter); /** * Extract the set of variable indices from a NonlinearFactorGraph. For each Symbol @@ -61,10 +63,12 @@ struct ISAM2::Impl { * Any variables in the VectorValues delta whose vector magnitude is greater than * or equal to relinearizeThreshold are returned. 
* @param delta The linear delta to check against the threshold + * @param keyFormatter Formatter for printing nonlinear keys during debugging * @return The set of variable indices in delta whose magnitude is greater than or * equal to relinearizeThreshold */ - static FastSet CheckRelinearization(const Permuted& delta, const Ordering& ordering, const ISAM2Params::RelinearizationThreshold& relinearizeThreshold); + static FastSet CheckRelinearization(const Permuted& delta, const Ordering& ordering, + const ISAM2Params::RelinearizationThreshold& relinearizeThreshold, const KeyFormatter& keyFormatter = DefaultKeyFormatter); /** * Recursively search this clique and its children for marked keys appearing @@ -94,10 +98,12 @@ struct ISAM2::Impl { * expmapped deltas will be set to an invalid value (infinity) to catch bugs * where we might expmap something twice, or expmap it but then not * recalculate its delta. + * @param keyFormatter Formatter for printing nonlinear keys during debugging */ static void ExpmapMasked(Values& values, const Permuted& delta, const Ordering& ordering, const std::vector& mask, - boost::optional&> invalidateIfDebug = boost::optional&>()); + boost::optional&> invalidateIfDebug = boost::optional&>(), + const KeyFormatter& keyFormatter = DefaultKeyFormatter); /** * Reorder and eliminate factors. These factors form a subset of the full @@ -120,7 +126,7 @@ struct ISAM2::Impl { /* ************************************************************************* */ template void ISAM2::Impl::AddVariables( - const Values& newTheta, Values& theta, Permuted& delta, Ordering& ordering, typename Base::Nodes& nodes) { + const Values& newTheta, Values& theta, Permuted& delta, Ordering& ordering, typename Base::Nodes& nodes, const KeyFormatter& keyFormatter) { const bool debug = ISDEBUG("ISAM2 AddVariables"); theta.insert(newTheta); @@ -139,7 +145,7 @@ void ISAM2::Impl::AddVariables( BOOST_FOREACH(const Values::ConstKeyValuePair& key_value, newTheta) { delta.permutation()[nextVar] = nextVar; ordering.insert(key_value.first, nextVar); - if(debug) cout << "Adding variable " << (string)key_value.first << " with order " << nextVar << endl; + if(debug) cout << "Adding variable " << keyFormatter(key_value.first) << " with order " << nextVar << endl; ++ nextVar; } assert(delta.permutation().size() == delta.container().size()); @@ -164,7 +170,8 @@ FastSet ISAM2::Impl::IndicesFromFactors(const Ordering /* ************************************************************************* */ template -FastSet ISAM2::Impl::CheckRelinearization(const Permuted& delta, const Ordering& ordering, const ISAM2Params::RelinearizationThreshold& relinearizeThreshold) { +FastSet ISAM2::Impl::CheckRelinearization(const Permuted& delta, const Ordering& ordering, + const ISAM2Params::RelinearizationThreshold& relinearizeThreshold, const KeyFormatter& keyFormatter) { FastSet relinKeys; if(relinearizeThreshold.type() == typeid(double)) { @@ -178,7 +185,7 @@ FastSet ISAM2::Impl::CheckRelinearization(const Permut } else if(relinearizeThreshold.type() == typeid(FastMap)) { const FastMap& thresholds = boost::get >(relinearizeThreshold); BOOST_FOREACH(const Ordering::value_type& key_index, ordering) { - const Vector& threshold = thresholds.find(key_index.first.chr())->second; + const Vector& threshold = thresholds.find(Symbol(key_index.first).chr())->second; Index j = key_index.second; if(threshold.rows() != delta[j].rows()) throw std::invalid_argument("Relinearization threshold vector dimensionality passed into iSAM2 parameters does 
not match actual variable dimensionality"); @@ -213,8 +220,8 @@ void ISAM2::Impl::FindAll(typename ISAM2Clique:: /* ************************************************************************* */ template -void ISAM2::Impl::ExpmapMasked(Values& values, const Permuted& delta, - const Ordering& ordering, const vector& mask, boost::optional&> invalidateIfDebug) { +void ISAM2::Impl::ExpmapMasked(Values& values, const Permuted& delta, const Ordering& ordering, + const vector& mask, boost::optional&> invalidateIfDebug, const KeyFormatter& keyFormatter) { // If debugging, invalidate if requested, otherwise do not invalidate. // Invalidating means setting expmapped entries to Inf, to trigger assertions // if we try to re-use them. @@ -231,9 +238,9 @@ void ISAM2::Impl::ExpmapMasked(Values& values, const Permute const Index var = key_index->second; if(ISDEBUG("ISAM2 update verbose")) { if(mask[var]) - cout << "expmap " << (string)key_value->first << " (j = " << var << "), delta = " << delta[var].transpose() << endl; + cout << "expmap " << keyFormatter(key_value->first) << " (j = " << var << "), delta = " << delta[var].transpose() << endl; else - cout << " " << (string)key_value->first << " (j = " << var << "), delta = " << delta[var].transpose() << endl; + cout << " " << keyFormatter(key_value->first) << " (j = " << var << "), delta = " << delta[var].transpose() << endl; } assert(delta[var].size() == (int)key_value->second.dim()); assert(delta[var].unaryExpr(&isfinite).all()); diff --git a/gtsam/nonlinear/ISAM2.h b/gtsam/nonlinear/ISAM2.h index 505aa2de5..4c8df2997 100644 --- a/gtsam/nonlinear/ISAM2.h +++ b/gtsam/nonlinear/ISAM2.h @@ -106,16 +106,19 @@ struct ISAM2Params { bool evaluateNonlinearError; ///< Whether to evaluate the nonlinear error before and after the update, to return in ISAM2Result from update() + KeyFormatter keyFormatter; ///< A KeyFormatter for when keys are printed during debugging (default: DefaultKeyFormatter) + /** Specify parameters as constructor arguments */ ISAM2Params( - OptimizationParams _optimizationParams = ISAM2GaussNewtonParams(), ///< see ISAM2Params public variables, ISAM2Params::optimizationParams - RelinearizationThreshold _relinearizeThreshold = 0.1, ///< see ISAM2Params public variables, ISAM2Params::relinearizeThreshold - int _relinearizeSkip = 10, ///< see ISAM2Params public variables, ISAM2Params::relinearizeSkip - bool _enableRelinearization = true, ///< see ISAM2Params public variables, ISAM2Params::enableRelinearization - bool _evaluateNonlinearError = false ///< see ISAM2Params public variables, ISAM2Params::evaluateNonlinearError + OptimizationParams _optimizationParams = ISAM2GaussNewtonParams(), ///< see ISAM2Params::optimizationParams + RelinearizationThreshold _relinearizeThreshold = 0.1, ///< see ISAM2Params::relinearizeThreshold + int _relinearizeSkip = 10, ///< see ISAM2Params::relinearizeSkip + bool _enableRelinearization = true, ///< see ISAM2Params::enableRelinearization + bool _evaluateNonlinearError = false, ///< see ISAM2Params::evaluateNonlinearError + const KeyFormatter& _keyFormatter = DefaultKeyFormatter ///< see ISAM2::Params::keyFormatter ) : optimizationParams(_optimizationParams), relinearizeThreshold(_relinearizeThreshold), relinearizeSkip(_relinearizeSkip), enableRelinearization(_enableRelinearization), - evaluateNonlinearError(_evaluateNonlinearError) {} + evaluateNonlinearError(_evaluateNonlinearError), keyFormatter(_keyFormatter) {} }; /** diff --git a/gtsam/nonlinear/Key.cpp b/gtsam/nonlinear/Key.cpp new file mode 100644 index 
000000000..821285558 --- /dev/null +++ b/gtsam/nonlinear/Key.cpp @@ -0,0 +1,34 @@ +/* ---------------------------------------------------------------------------- + + * GTSAM Copyright 2010, Georgia Tech Research Corporation, + * Atlanta, Georgia 30332-0415 + * All Rights Reserved + * Authors: Frank Dellaert, et al. (see THANKS for the full author list) + + * See LICENSE for the license information + + * -------------------------------------------------------------------------- */ + +/** + * @file Key.h + * @brief + * @author Richard Roberts + * @date Feb 20, 2012 + */ + +#include + +#include +#include + +namespace gtsam { + +std::string _defaultKeyFormatter(Key key) { + const Symbol asSymbol(key); + if(asSymbol.chr() > 0) + return (std::string)asSymbol; + else + return boost::lexical_cast(key); + } + +} diff --git a/gtsam/nonlinear/Key.h b/gtsam/nonlinear/Key.h new file mode 100644 index 000000000..65a65c6d4 --- /dev/null +++ b/gtsam/nonlinear/Key.h @@ -0,0 +1,40 @@ +/* ---------------------------------------------------------------------------- + + * GTSAM Copyright 2010, Georgia Tech Research Corporation, + * Atlanta, Georgia 30332-0415 + * All Rights Reserved + * Authors: Frank Dellaert, et al. (see THANKS for the full author list) + + * See LICENSE for the license information + + * -------------------------------------------------------------------------- */ + +/** + * @file Key.h + * @brief + * @author Richard Roberts + * @date Feb 20, 2012 + */ +#pragma once + +#include +#include + +namespace gtsam { + + /// Integer nonlinear key type + typedef size_t Key; + + /// Typedef for a function to format a key, i.e. to convert it to a string + typedef boost::function KeyFormatter; + + // Helper function for DefaultKeyFormatter + std::string _defaultKeyFormatter(Key key); + + /// The default KeyFormatter, which is used if no KeyFormatter is passed to + /// a nonlinear 'print' function. Automatically detects plain integer keys + /// and Symbol keys. 
+ static const KeyFormatter DefaultKeyFormatter = &_defaultKeyFormatter; + +} + diff --git a/gtsam/nonlinear/NonlinearEquality.h b/gtsam/nonlinear/NonlinearEquality.h index 3a37fa53d..a1c685668 100644 --- a/gtsam/nonlinear/NonlinearEquality.h +++ b/gtsam/nonlinear/NonlinearEquality.h @@ -108,8 +108,8 @@ namespace gtsam { /// @name Testable /// @{ - virtual void print(const std::string& s = "") const { - std::cout << "Constraint: " << s << " on [" << (std::string)this->key() << "]\n"; + virtual void print(const std::string& s = "", const KeyFormatter& keyFormatter = DefaultKeyFormatter) const { + std::cout << "Constraint: " << s << " on [" << keyFormatter(this->key()) << "]\n"; gtsam::print(feasible_,"Feasible Point"); std::cout << "Variable Dimension: " << feasible_.dim() << std::endl; } @@ -147,7 +147,7 @@ namespace gtsam { return zero(nj); // set error to zero if equal } else { if (H) throw std::invalid_argument( - "Linearization point not feasible for " + (std::string)(this->key()) + "!"); + "Linearization point not feasible for " + DefaultKeyFormatter(this->key()) + "!"); return repeat(nj, std::numeric_limits::infinity()); // set error to infinity if not equal } } @@ -217,9 +217,9 @@ namespace gtsam { } /** Print */ - virtual void print(const std::string& s = "") const { + virtual void print(const std::string& s = "", const KeyFormatter& keyFormatter = DefaultKeyFormatter) const { std::cout << s << ": NonlinearEquality1(" - << (std::string) this->key() << "),"<< "\n"; + << keyFormatter(this->key()) << "),"<< "\n"; this->noiseModel_->print(); value_.print("Value"); } diff --git a/gtsam/nonlinear/NonlinearFactor.h b/gtsam/nonlinear/NonlinearFactor.h index 60f29cbf6..e260f24db 100644 --- a/gtsam/nonlinear/NonlinearFactor.h +++ b/gtsam/nonlinear/NonlinearFactor.h @@ -79,9 +79,10 @@ public: /// @{ /** print */ - virtual void print(const std::string& s = "", - const boost::function& keyFormatter = &Symbol::format) const { - std::cout << s << ": NonlinearFactor\n"; + virtual void print(const std::string& s = "", const KeyFormatter& keyFormatter = DefaultKeyFormatter) const { + std::cout << s << "keys = { "; + BOOST_FOREACH(Key key, this->keys()) { std::cout << keyFormatter(key) << " "; } + std::cout << "}" << endl; } /** Check if two factors are equal */ @@ -187,11 +188,8 @@ protected: public: /** Print */ - virtual void print(const std::string& s = "") const { - std::cout << s << ": NoiseModelFactor\n"; - std::cout << " "; - BOOST_FOREACH(Key key, this->keys()) { std::cout << (std::string)key << " "; } - std::cout << "\n"; + virtual void print(const std::string& s = "", const KeyFormatter& keyFormatter = DefaultKeyFormatter) const { + Base::print(s, keyFormatter); this->noiseModel_->print(" noise model: "); } @@ -341,12 +339,6 @@ public: } } - /** Print */ - virtual void print(const std::string& s = "") const { - std::cout << s << ": NonlinearFactor1(" << (std::string) keys_[0] << ")\n"; - this->noiseModel_->print(" noise model: "); - } - /** * Override this method to finish implementing a unary factor. * If the optional Matrix reference argument is specified, it should compute @@ -425,14 +417,6 @@ public: } } - /** Print */ - virtual void print(const std::string& s = "") const { - std::cout << s << ": NonlinearFactor2(" - << (std::string) keys_[0] << "," - << (std::string) keys_[1] << ")\n"; - this->noiseModel_->print(" noise model: "); - } - /** * Override this method to finish implementing a binary factor. 
* If any of the optional Matrix reference arguments are specified, it should compute @@ -512,16 +496,6 @@ public: } } - /** Print */ - virtual void print(const std::string& s = "") const { - std::cout << s << ": NonlinearFactor3(" - << (std::string) this->keys_[0] << "," - << (std::string) this->keys_[1] << "," - << (std::string) this->keys_[2] << ")\n"; - this->noiseModel_->print(" noise model: "); - } - - /** * Override this method to finish implementing a trinary factor. * If any of the optional Matrix reference arguments are specified, it should compute @@ -607,16 +581,6 @@ public: } } - /** Print */ - virtual void print(const std::string& s = "") const { - std::cout << s << ": NonlinearFactor4(" - << (std::string) this->keys_[0] << "," - << (std::string) this->keys_[1] << "," - << (std::string) this->keys_[2] << "," - << (std::string) this->keys_[3] << ")\n"; - this->noiseModel_->print(" noise model: "); - } - /** * Override this method to finish implementing a 4-way factor. * If any of the optional Matrix reference arguments are specified, it should compute @@ -707,17 +671,6 @@ public: } } - /** Print */ - virtual void print(const std::string& s = "") const { - std::cout << s << ": NonlinearFactor5(" - << (std::string) this->keys_[0] << "," - << (std::string) this->keys_[1] << "," - << (std::string) this->keys_[2] << "," - << (std::string) this->keys_[3] << "," - << (std::string) this->keys_[4] << ")\n"; - this->noiseModel_->print(" noise model: "); - } - /** * Override this method to finish implementing a 5-way factor. * If any of the optional Matrix reference arguments are specified, it should compute @@ -813,18 +766,6 @@ public: } } - /** Print */ - virtual void print(const std::string& s = "") const { - std::cout << s << ": NonlinearFactor6(" - << (std::string) this->keys_[0] << "," - << (std::string) this->keys_[1] << "," - << (std::string) this->keys_[2] << "," - << (std::string) this->keys_[3] << "," - << (std::string) this->keys_[4] << "," - << (std::string) this->keys_[5] << ")\n"; - this->noiseModel_->print(" noise model: "); - } - /** * Override this method to finish implementing a 6-way factor. 
* If any of the optional Matrix reference arguments are specified, it should compute diff --git a/gtsam/nonlinear/NonlinearFactorGraph.cpp b/gtsam/nonlinear/NonlinearFactorGraph.cpp index 0c005ce3e..647afa2d5 100644 --- a/gtsam/nonlinear/NonlinearFactorGraph.cpp +++ b/gtsam/nonlinear/NonlinearFactorGraph.cpp @@ -33,8 +33,13 @@ namespace gtsam { } /* ************************************************************************* */ - void NonlinearFactorGraph::print(const std::string& str) const { - Base::print(str); + void NonlinearFactorGraph::print(const std::string& str, const KeyFormatter& keyFormatter) const { + cout << str << "size: " << size() << endl; + for (size_t i = 0; i < factors_.size(); i++) { + stringstream ss; + ss << "factor " << i << ": "; + if (factors_[i] != NULL) factors_[i]->print(ss.str(), keyFormatter); + } } /* ************************************************************************* */ diff --git a/gtsam/nonlinear/NonlinearFactorGraph.h b/gtsam/nonlinear/NonlinearFactorGraph.h index c2cd844be..3caeec680 100644 --- a/gtsam/nonlinear/NonlinearFactorGraph.h +++ b/gtsam/nonlinear/NonlinearFactorGraph.h @@ -44,7 +44,7 @@ namespace gtsam { typedef boost::shared_ptr sharedFactor; /** print just calls base class */ - void print(const std::string& str = "NonlinearFactorGraph: ") const; + void print(const std::string& str = "NonlinearFactorGraph: ", const KeyFormatter& keyFormatter = DefaultKeyFormatter) const; /** return keys in some random order */ std::set keys() const; diff --git a/gtsam/nonlinear/Ordering.cpp b/gtsam/nonlinear/Ordering.cpp index c35b07f43..10c3370cf 100644 --- a/gtsam/nonlinear/Ordering.cpp +++ b/gtsam/nonlinear/Ordering.cpp @@ -40,12 +40,12 @@ void Ordering::permuteWithInverse(const Permutation& inversePermutation) { } /* ************************************************************************* */ -void Ordering::print(const string& str) const { +void Ordering::print(const string& str, const KeyFormatter& keyFormatter) const { cout << str << " "; BOOST_FOREACH(const Ordering::value_type& key_order, *this) { if(key_order != *begin()) cout << ", "; - cout << (string)key_order.first << ":" << key_order.second; + cout << keyFormatter(key_order.first) << ":" << key_order.second; } cout << endl; } diff --git a/gtsam/nonlinear/Ordering.h b/gtsam/nonlinear/Ordering.h index 084d69ebd..fcb3d5bfb 100644 --- a/gtsam/nonlinear/Ordering.h +++ b/gtsam/nonlinear/Ordering.h @@ -20,6 +20,7 @@ #include #include #include +#include #include #include @@ -207,7 +208,7 @@ public: /// @{ /** print (from Testable) for testing and debugging */ - void print(const std::string& str = "Ordering:") const; + void print(const std::string& str = "Ordering:", const KeyFormatter& keyFormatter = DefaultKeyFormatter) const; /** equals (from Testable) for testing and debugging */ bool equals(const Ordering& rhs, double tol = 0.0) const; diff --git a/gtsam/nonlinear/Symbol.h b/gtsam/nonlinear/Symbol.h index 6a9d612f7..854e18203 100644 --- a/gtsam/nonlinear/Symbol.h +++ b/gtsam/nonlinear/Symbol.h @@ -27,6 +27,8 @@ #include #endif +#include + #define ALPHA '\224' namespace gtsam { @@ -94,7 +96,7 @@ public: const size_t indexBytes = keyBytes - chrBytes; const Key chrMask = std::numeric_limits::max() << indexBytes; const Key indexMask = ~chrMask; - c_ = key & chrMask; + c_ = (key & chrMask) >> indexBytes; j_ = key & indexMask; } @@ -119,11 +121,6 @@ public: return (*this) == expected; } - /** Format function that can be passed to print functions in nonlinear */ - static std::string format(Key key) { - 
return (std::string)Symbol(key); - } - /** Retrieve key character */ unsigned char chr() const { return c_; diff --git a/gtsam/nonlinear/Values.cpp b/gtsam/nonlinear/Values.cpp index 4c8d03d9d..00b42730b 100644 --- a/gtsam/nonlinear/Values.cpp +++ b/gtsam/nonlinear/Values.cpp @@ -40,10 +40,10 @@ namespace gtsam { } /* ************************************************************************* */ - void Values::print(const string& str) const { + void Values::print(const string& str, const KeyFormatter& keyFormatter) const { cout << str << "Values with " << size() << " values:\n" << endl; for(const_iterator key_value = begin(); key_value != end(); ++key_value) { - cout << " " << (string)key_value->first << ": "; + cout << " " << keyFormatter(key_value->first) << ": "; key_value->second.print(""); } } @@ -197,7 +197,7 @@ namespace gtsam { const char* ValuesKeyAlreadyExists::what() const throw() { if(message_.empty()) message_ = - "Attempting to add a key-value pair with key \"" + (std::string)key_ + "\", key already exists."; + "Attempting to add a key-value pair with key \"" + DefaultKeyFormatter(key_) + "\", key already exists."; return message_.c_str(); } @@ -206,7 +206,7 @@ namespace gtsam { if(message_.empty()) message_ = "Attempting to " + std::string(operation_) + " the key \"" + - (std::string)key_ + "\", which does not exist in the Values."; + DefaultKeyFormatter(key_) + "\", which does not exist in the Values."; return message_.c_str(); } @@ -214,7 +214,7 @@ namespace gtsam { const char* ValuesIncorrectType::what() const throw() { if(message_.empty()) message_ = - "Attempting to retrieve value with key \"" + (std::string)key_ + "\", type stored in Values is " + + "Attempting to retrieve value with key \"" + DefaultKeyFormatter(key_) + "\", type stored in Values is " + std::string(storedTypeId_.name()) + " but requested type was " + std::string(requestedTypeId_.name()); return message_.c_str(); } diff --git a/gtsam/nonlinear/Values.h b/gtsam/nonlinear/Values.h index 5074c2360..659779858 100644 --- a/gtsam/nonlinear/Values.h +++ b/gtsam/nonlinear/Values.h @@ -121,7 +121,7 @@ namespace gtsam { /// @{ /** print method for testing and debugging */ - void print(const std::string& str = "") const; + void print(const std::string& str = "", const KeyFormatter& keyFormatter = DefaultKeyFormatter) const; /** Test whether the sets of keys and values are identical */ bool equals(const Values& other, double tol=1e-9) const; diff --git a/gtsam/nonlinear/tests/testKey.cpp b/gtsam/nonlinear/tests/testKey.cpp index 19c3b6325..78688192f 100644 --- a/gtsam/nonlinear/tests/testKey.cpp +++ b/gtsam/nonlinear/tests/testKey.cpp @@ -24,6 +24,15 @@ using namespace boost::assign; using namespace std; using namespace gtsam; +/* ************************************************************************* */ +TEST(Key, KeySymbolConversion) { + Symbol expected('j', 4); + Key key(expected); + Symbol actual(key); + + EXPECT(assert_equal(expected, actual)) +} + /* ************************************************************************* */ int main() { TestResult tr; return TestRegistry::runAllTests(tr); } /* ************************************************************************* */ diff --git a/gtsam/nonlinear/tests/testValues.cpp b/gtsam/nonlinear/tests/testValues.cpp index 77e1155a7..1ea70e213 100644 --- a/gtsam/nonlinear/tests/testValues.cpp +++ b/gtsam/nonlinear/tests/testValues.cpp @@ -23,6 +23,7 @@ using namespace boost::assign; #define GTSAM_MAGIC_KEY #include +#include #include #include #include @@ -31,7 +32,7 
@@ using namespace gtsam; using namespace std; static double inf = std::numeric_limits::infinity(); -Key key1("v1"), key2("v2"), key3("v3"), key4("v4"); +Key key1(Symbol("v1")), key2(Symbol("v2")), key3(Symbol("v3")), key4(Symbol("v4")); /* ************************************************************************* */ TEST( Values, equals1 ) @@ -201,8 +202,8 @@ TEST(Values, expmap_d) CHECK(config0.equals(config0)); Values poseconfig; - poseconfig.insert("p1", Pose2(1,2,3)); - poseconfig.insert("p2", Pose2(0.3, 0.4, 0.5)); + poseconfig.insert(key1, Pose2(1,2,3)); + poseconfig.insert(key2, Pose2(0.3, 0.4, 0.5)); CHECK(equal(config0, config0)); CHECK(config0.equals(config0)); @@ -213,19 +214,19 @@ TEST(Values, extract_keys) { Values config; - config.insert("x1", Pose2()); - config.insert("x2", Pose2()); - config.insert("x4", Pose2()); - config.insert("x5", Pose2()); + config.insert(key1, Pose2()); + config.insert(key2, Pose2()); + config.insert(key3, Pose2()); + config.insert(key4, Pose2()); FastList expected, actual; - expected += "x1", "x2", "x4", "x5"; + expected += key1, key2, key3, key4; actual = config.keys(); CHECK(actual.size() == expected.size()); FastList::const_iterator itAct = actual.begin(), itExp = expected.begin(); for (; itAct != actual.end() && itExp != expected.end(); ++itAct, ++itExp) { - CHECK(assert_equal(*itExp, *itAct)); + LONGS_EQUAL(*itExp, *itAct); } } diff --git a/gtsam/slam/AntiFactor.h b/gtsam/slam/AntiFactor.h index 8ed7f284f..633360fed 100644 --- a/gtsam/slam/AntiFactor.h +++ b/gtsam/slam/AntiFactor.h @@ -53,9 +53,9 @@ namespace gtsam { /** implement functions needed for Testable */ /** print */ - virtual void print(const std::string& s) const { + virtual void print(const std::string& s, const KeyFormatter& keyFormatter = DefaultKeyFormatter) const { std::cout << s << "AntiFactor version of:" << std::endl; - factor_->print(s); + factor_->print(s, keyFormatter); } /** equals */ diff --git a/gtsam/slam/BearingRangeFactor.h b/gtsam/slam/BearingRangeFactor.h index 4ff792e23..4c170c811 100644 --- a/gtsam/slam/BearingRangeFactor.h +++ b/gtsam/slam/BearingRangeFactor.h @@ -58,10 +58,10 @@ namespace gtsam { virtual ~BearingRangeFactor() {} /** Print */ - virtual void print(const std::string& s = "") const { + virtual void print(const std::string& s = "", const KeyFormatter& keyFormatter = DefaultKeyFormatter) const { std::cout << s << ": BearingRangeFactor(" - << (std::string) this->key1() << "," - << (std::string) this->key2() << ")\n"; + << keyFormatter(this->key1()) << "," + << keyFormatter(this->key2()) << ")\n"; measuredBearing_.print(" measured bearing"); std::cout << " measured range: " << measuredRange_ << std::endl; this->noiseModel_->print(" noise model"); diff --git a/gtsam/slam/BetweenFactor.h b/gtsam/slam/BetweenFactor.h index c88af0d42..2df0b98de 100644 --- a/gtsam/slam/BetweenFactor.h +++ b/gtsam/slam/BetweenFactor.h @@ -65,10 +65,10 @@ namespace gtsam { /** implement functions needed for Testable */ /** print */ - virtual void print(const std::string& s) const { + virtual void print(const std::string& s, const KeyFormatter& keyFormatter = DefaultKeyFormatter) const { std::cout << s << "BetweenFactor(" - << (std::string) this->key1() << "," - << (std::string) this->key2() << ")\n"; + << keyFormatter(this->key1()) << "," + << keyFormatter(this->key2()) << ")\n"; measured_.print(" measured"); this->noiseModel_->print(" noise model"); } diff --git a/gtsam/slam/GeneralSFMFactor.h b/gtsam/slam/GeneralSFMFactor.h index 4a8b76739..333465b71 100644 --- 
a/gtsam/slam/GeneralSFMFactor.h +++ b/gtsam/slam/GeneralSFMFactor.h @@ -65,8 +65,8 @@ namespace gtsam { * print * @param s optional string naming the factor */ - void print(const std::string& s = "SFMFactor") const { - Base::print(s); + void print(const std::string& s = "SFMFactor", const KeyFormatter& keyFormatter = DefaultKeyFormatter) const { + Base::print(s, keyFormatter); measured_.print(s + ".z"); } diff --git a/gtsam/slam/PartialPriorFactor.h b/gtsam/slam/PartialPriorFactor.h index a7c39cf77..4c7069174 100644 --- a/gtsam/slam/PartialPriorFactor.h +++ b/gtsam/slam/PartialPriorFactor.h @@ -89,8 +89,8 @@ namespace gtsam { /** implement functions needed for Testable */ /** print */ - virtual void print(const std::string& s) const { - Base::print(s); + virtual void print(const std::string& s, const KeyFormatter& keyFormatter = DefaultKeyFormatter) const { + Base::print(s, keyFormatter); gtsam::print(prior_, "prior"); } diff --git a/gtsam/slam/PriorFactor.h b/gtsam/slam/PriorFactor.h index fd0cc590e..2ab011bf5 100644 --- a/gtsam/slam/PriorFactor.h +++ b/gtsam/slam/PriorFactor.h @@ -59,8 +59,8 @@ namespace gtsam { /** implement functions needed for Testable */ /** print */ - virtual void print(const std::string& s) const { - std::cout << s << "PriorFactor(" << (std::string) this->key() << ")\n"; + virtual void print(const std::string& s, const KeyFormatter& keyFormatter = DefaultKeyFormatter) const { + std::cout << s << "PriorFactor(" << keyFormatter(this->key()) << ")\n"; prior_.print(" prior"); this->noiseModel_->print(" noise model"); } diff --git a/gtsam/slam/ProjectionFactor.h b/gtsam/slam/ProjectionFactor.h index 1eb298134..fcde33629 100644 --- a/gtsam/slam/ProjectionFactor.h +++ b/gtsam/slam/ProjectionFactor.h @@ -71,8 +71,8 @@ namespace gtsam { * print * @param s optional string naming the factor */ - void print(const std::string& s = "ProjectionFactor") const { - Base::print(s); + void print(const std::string& s = "ProjectionFactor", const KeyFormatter& keyFormatter = DefaultKeyFormatter) const { + Base::print(s, keyFormatter); measured_.print(s + ".z"); } @@ -92,8 +92,8 @@ namespace gtsam { } catch( CheiralityException& e) { if (H1) *H1 = zeros(2,6); if (H2) *H2 = zeros(2,3); - cout << e.what() << ": Landmark "<< this->key2().index() << - " moved behind camera " << this->key1().index() << endl; + cout << e.what() << ": Landmark "<< DefaultKeyFormatter(this->key2()) << + " moved behind camera " << DefaultKeyFormatter(this->key1()) << endl; return ones(2) * 2.0 * K_->fx(); } } diff --git a/gtsam/slam/RangeFactor.h b/gtsam/slam/RangeFactor.h index 29fdb9160..489f751db 100644 --- a/gtsam/slam/RangeFactor.h +++ b/gtsam/slam/RangeFactor.h @@ -69,8 +69,8 @@ namespace gtsam { } /** print contents */ - void print(const std::string& s="") const { - Base::print(s + std::string(" range: ") + boost::lexical_cast(measured_)); + void print(const std::string& s="", const KeyFormatter& keyFormatter = DefaultKeyFormatter) const { + Base::print(s + std::string(" range: ") + boost::lexical_cast(measured_), keyFormatter); } private: diff --git a/gtsam/slam/StereoFactor.h b/gtsam/slam/StereoFactor.h index d18f2ed22..195056615 100644 --- a/gtsam/slam/StereoFactor.h +++ b/gtsam/slam/StereoFactor.h @@ -60,8 +60,8 @@ public: * print * @param s optional string naming the factor */ - void print(const std::string& s) const { - Base::print(s); + void print(const std::string& s = "", const KeyFormatter& keyFormatter = DefaultKeyFormatter) const { + Base::print(s, keyFormatter); measured_.print(s + ".z"); } 
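All of the print() overloads above follow the same pattern introduced by this change: they take a KeyFormatter (per the new Key.h, a boost::function mapping a Key to a std::string) and default to DefaultKeyFormatter. Below is a standalone sketch of that mechanism assuming only Boost; the formatter bodies and every name other than Key/KeyFormatter are illustrative, not GTSAM's.

#include <boost/function.hpp>
#include <cstddef>
#include <iostream>
#include <sstream>
#include <string>

typedef std::size_t Key;                                // as in gtsam/nonlinear/Key.h
typedef boost::function<std::string(Key)> KeyFormatter;

// Stand-in for the default formatter: plain integer keys print as-is.
std::string defaultFormatter(Key key) {
  std::ostringstream ss;
  ss << key;
  return ss.str();
}

// Hypothetical user-supplied formatter, e.g. to recover human-readable names.
std::string verboseFormatter(Key key) {
  std::ostringstream ss;
  ss << "var" << key;
  return ss.str();
}

// Mirrors the print(s, keyFormatter) signatures added throughout this patch.
void printKey(Key key, const KeyFormatter& keyFormatter = &defaultFormatter) {
  std::cout << keyFormatter(key) << std::endl;
}

int main() {
  printKey(7);                     // "7" via the default formatter
  printKey(7, &verboseFormatter);  // "var7" via the caller-supplied formatter
  return 0;
}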
diff --git a/gtsam/slam/dataset.cpp b/gtsam/slam/dataset.cpp index 3f0551863..dae437b79 100644 --- a/gtsam/slam/dataset.cpp +++ b/gtsam/slam/dataset.cpp @@ -156,7 +156,7 @@ void save2D(const pose2SLAM::Graph& graph, const Values& config, const SharedDia // save poses BOOST_FOREACH(const Values::ConstKeyValuePair& key_value, config) { const Pose2& pose = dynamic_cast(key_value.second); - stream << "VERTEX2 " << key_value.first.index() << " " << pose.x() << " " << pose.y() << " " << pose.theta() << endl; + stream << "VERTEX2 " << key_value.first << " " << pose.x() << " " << pose.y() << " " << pose.theta() << endl; } // save edges @@ -167,7 +167,7 @@ void save2D(const pose2SLAM::Graph& graph, const Values& config, const SharedDia if (!factor) continue; Pose2 pose = factor->measured().inverse(); - stream << "EDGE2 " << factor->key2().index() << " " << factor->key1().index() + stream << "EDGE2 " << factor->key2() << " " << factor->key1() << " " << pose.x() << " " << pose.y() << " " << pose.theta() << " " << RR(0, 0) << " " << RR(0, 1) << " " << RR(1, 1) << " " << RR(2, 2) << " " << RR(0, 2) << " " << RR(1, 2) << endl; diff --git a/gtsam/slam/smallExample.cpp b/gtsam/slam/smallExample.cpp index 722e5ff2a..f48d363b5 100644 --- a/gtsam/slam/smallExample.cpp +++ b/gtsam/slam/smallExample.cpp @@ -24,7 +24,7 @@ using namespace std; -// Magically casts strings like "x3" to a Symbol('x',3) key, see Symbol.h +// Magically casts strings like Symbol("x3") to a Symbol('x',3) key, see Symbol.h #define GTSAM_MAGIC_KEY #include @@ -121,18 +121,18 @@ namespace example { /* ************************************************************************* */ VectorValues createCorrectDelta(const Ordering& ordering) { VectorValues c(vector(3,2)); - c[ordering["l1"]] = Vector_(2, -0.1, 0.1); - c[ordering["x1"]] = Vector_(2, -0.1, -0.1); - c[ordering["x2"]] = Vector_(2, 0.1, -0.2); + c[ordering[Symbol(Symbol("l1"))]] = Vector_(2, -0.1, 0.1); + c[ordering[Symbol("x1")]] = Vector_(2, -0.1, -0.1); + c[ordering[Symbol("x2")]] = Vector_(2, 0.1, -0.2); return c; } /* ************************************************************************* */ VectorValues createZeroDelta(const Ordering& ordering) { VectorValues c(vector(3,2)); - c[ordering["l1"]] = zero(2); - c[ordering["x1"]] = zero(2); - c[ordering["x2"]] = zero(2); + c[ordering[Symbol(Symbol("l1"))]] = zero(2); + c[ordering[Symbol("x1")]] = zero(2); + c[ordering[Symbol("x2")]] = zero(2); return c; } @@ -144,16 +144,16 @@ namespace example { SharedDiagonal unit2 = noiseModel::Unit::Create(2); // linearized prior on x1: c[_x1_]+x1=0 i.e. 
x1=-c[_x1_] - fg.add(ordering["x1"], 10*eye(2), -1.0*ones(2), unit2); + fg.add(ordering[Symbol("x1")], 10*eye(2), -1.0*ones(2), unit2); // odometry between x1 and x2: x2-x1=[0.2;-0.1] - fg.add(ordering["x1"], -10*eye(2),ordering["x2"], 10*eye(2), Vector_(2, 2.0, -1.0), unit2); + fg.add(ordering[Symbol("x1")], -10*eye(2),ordering[Symbol("x2")], 10*eye(2), Vector_(2, 2.0, -1.0), unit2); // measurement between x1 and l1: l1-x1=[0.0;0.2] - fg.add(ordering["x1"], -5*eye(2), ordering["l1"], 5*eye(2), Vector_(2, 0.0, 1.0), unit2); + fg.add(ordering[Symbol("x1")], -5*eye(2), ordering[Symbol("l1")], 5*eye(2), Vector_(2, 0.0, 1.0), unit2); // measurement between x2 and l1: l1-x2=[-0.2;0.3] - fg.add(ordering["x2"], -5*eye(2), ordering["l1"], 5*eye(2), Vector_(2, -1.0, 1.5), unit2); + fg.add(ordering[Symbol("x2")], -5*eye(2), ordering[Symbol("l1")], 5*eye(2), Vector_(2, -1.0, 1.5), unit2); return *fg.dynamicCastFactors >(); } diff --git a/gtsam/slam/tests/testGeneralSFMFactor.cpp b/gtsam/slam/tests/testGeneralSFMFactor.cpp index 1f1e616f1..7833bce18 100644 --- a/gtsam/slam/tests/testGeneralSFMFactor.cpp +++ b/gtsam/slam/tests/testGeneralSFMFactor.cpp @@ -89,14 +89,14 @@ TEST( GeneralSFMFactor, equals ) TEST( GeneralSFMFactor, error ) { Point2 z(3.,0.); const SharedNoiseModel sigma(noiseModel::Unit::Create(1)); - boost::shared_ptr factor(new Projection(z, sigma, "x1", "l1")); + boost::shared_ptr factor(new Projection(z, sigma, Symbol("x1"), Symbol("l1"))); // For the following configuration, the factor predicts 320,240 Values values; Rot3 R; Point3 t1(0,0,-6); Pose3 x1(R,t1); - values.insert("x1", GeneralCamera(x1)); - Point3 l1; values.insert("l1", l1); + values.insert(Symbol("x1"), GeneralCamera(x1)); + Point3 l1; values.insert(Symbol("l1"), l1); EXPECT(assert_equal(Vector_(2, -3.0, 0.0), factor->unwhitenedError(values))); } @@ -137,10 +137,9 @@ vector genCameraVariableCalibration() { } shared_ptr getOrdering(const vector& X, const vector& L) { - list keys; - for ( size_t i = 0 ; i < L.size() ; ++i ) keys.push_back(Symbol('l', i)) ; - for ( size_t i = 0 ; i < X.size() ; ++i ) keys.push_back(Symbol('x', i)) ; - shared_ptr ordering(new Ordering(keys)); + shared_ptr ordering(new Ordering); + for ( size_t i = 0 ; i < L.size() ; ++i ) ordering->push_back(Symbol('l', i)) ; + for ( size_t i = 0 ; i < X.size() ; ++i ) ordering->push_back(Symbol('x', i)) ; return ordering ; } diff --git a/gtsam/slam/tests/testGeneralSFMFactor_Cal3Bundler.cpp b/gtsam/slam/tests/testGeneralSFMFactor_Cal3Bundler.cpp index 89dbf15ea..3f6d698d5 100644 --- a/gtsam/slam/tests/testGeneralSFMFactor_Cal3Bundler.cpp +++ b/gtsam/slam/tests/testGeneralSFMFactor_Cal3Bundler.cpp @@ -91,14 +91,14 @@ TEST( GeneralSFMFactor, error ) { Point2 z(3.,0.); const SharedNoiseModel sigma(noiseModel::Unit::Create(1)); boost::shared_ptr - factor(new Projection(z, sigma, "x1", "l1")); + factor(new Projection(z, sigma, Symbol("x1"), Symbol("l1"))); // For the following configuration, the factor predicts 320,240 Values values; Rot3 R; Point3 t1(0,0,-6); Pose3 x1(R,t1); - values.insert("x1", GeneralCamera(x1)); - Point3 l1; values.insert("l1", l1); + values.insert(Symbol("x1"), GeneralCamera(x1)); + Point3 l1; values.insert(Symbol("l1"), l1); EXPECT(assert_equal(Vector_(2, -3.0, 0.0), factor->unwhitenedError(values))); } @@ -140,10 +140,9 @@ vector genCameraVariableCalibration() { } shared_ptr getOrdering(const vector& X, const vector& L) { - list keys; - for ( size_t i = 0 ; i < L.size() ; ++i ) keys.push_back(Symbol('l', i)) ; - for ( size_t i = 0 ; i < 
X.size() ; ++i ) keys.push_back(Symbol('x', i)) ; - shared_ptr ordering(new Ordering(keys)); + shared_ptr ordering(new Ordering); + for ( size_t i = 0 ; i < L.size() ; ++i ) ordering->push_back(Symbol('l', i)) ; + for ( size_t i = 0 ; i < X.size() ; ++i ) ordering->push_back(Symbol('x', i)) ; return ordering ; } diff --git a/gtsam/slam/tests/testPose2SLAM.cpp b/gtsam/slam/tests/testPose2SLAM.cpp index 4646bb5d7..3bccccca3 100644 --- a/gtsam/slam/tests/testPose2SLAM.cpp +++ b/gtsam/slam/tests/testPose2SLAM.cpp @@ -46,6 +46,8 @@ static noiseModel::Gaussian::shared_ptr covariance( ))); //static noiseModel::Gaussian::shared_ptr I3(noiseModel::Unit::Create(3)); +const Key kx0 = Symbol("x0"), kx1 = Symbol("x1"), kx2 = Symbol("x2"), kx3 = Symbol("x3"), kx4 = Symbol("x4"), kx5 = Symbol("x5"), kl1 = Symbol("l1"); + /* ************************************************************************* */ // Test constraint, small displacement Vector f1(const Pose2& pose1, const Pose2& pose2) { @@ -140,7 +142,7 @@ TEST( Pose2SLAM, linearization ) Vector b = Vector_(3,-0.1/sx,0.1/sy,0.0); SharedDiagonal probModel1 = noiseModel::Unit::Create(3); - lfg_expected.push_back(JacobianFactor::shared_ptr(new JacobianFactor(ordering["x1"], A1, ordering["x2"], A2, b, probModel1))); + lfg_expected.push_back(JacobianFactor::shared_ptr(new JacobianFactor(ordering[kx1], A1, ordering[kx2], A2, b, probModel1))); CHECK(assert_equal(lfg_expected, *lfg_linearized)); } @@ -160,7 +162,7 @@ TEST(Pose2Graph, optimize) { // Choose an ordering and optimize shared_ptr ordering(new Ordering); - *ordering += "x0","x1"; + *ordering += kx0, kx1; typedef NonlinearOptimizer Optimizer; NonlinearOptimizationParameters::shared_ptr params = NonlinearOptimizationParameters::newDecreaseThresholds(1e-15, 1e-15); @@ -198,7 +200,7 @@ TEST(Pose2Graph, optimizeThreePoses) { // Choose an ordering shared_ptr ordering(new Ordering); - *ordering += "x0","x1","x2"; + *ordering += kx0,kx1,kx2; // optimize NonlinearOptimizationParameters::shared_ptr params = NonlinearOptimizationParameters::newDecreaseThresholds(1e-15, 1e-15); @@ -241,7 +243,7 @@ TEST_UNSAFE(Pose2SLAM, optimizeCircle) { // Choose an ordering shared_ptr ordering(new Ordering); - *ordering += "x0","x1","x2","x3","x4","x5"; + *ordering += kx0,kx1,kx2,kx3,kx4,kx5; // optimize NonlinearOptimizationParameters::shared_ptr params = NonlinearOptimizationParameters::newDecreaseThresholds(1e-15, 1e-15); @@ -400,14 +402,14 @@ TEST( Pose2Prior, error ) // Check error at x0, i.e. delta = zero ! VectorValues delta(VectorValues::Zero(x0.dims(ordering))); - delta[ordering["x1"]] = zero(3); + delta[ordering[kx1]] = zero(3); Vector error_at_zero = Vector_(3,0.0,0.0,0.0); CHECK(assert_equal(error_at_zero,factor.whitenedError(x0))); CHECK(assert_equal(-error_at_zero,linear->error_vector(delta))); // Check error after increasing p2 VectorValues addition(VectorValues::Zero(x0.dims(ordering))); - addition[ordering["x1"]] = Vector_(3, 0.1, 0.0, 0.0); + addition[ordering[kx1]] = Vector_(3, 0.1, 0.0, 0.0); VectorValues plus = delta + addition; pose2SLAM::Values x1 = x0.retract(plus, ordering); Vector error_at_plus = Vector_(3,0.1/sx,0.0,0.0); // h(x)-z = 0.1 ! 
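The test updates above consistently replace GTSAM_MAGIC_KEY string keys with Key constants built from Symbols, then index the Ordering by those keys. A condensed sketch of that pattern, restricted to calls visible in this diff (Symbol construction, push_back, operator[]); header paths follow the diff, but treat this as a sketch rather than a drop-in test.

#include <cstddef>
#include <gtsam/nonlinear/Key.h>
#include <gtsam/nonlinear/Ordering.h>
#include <gtsam/nonlinear/Symbol.h>

using namespace gtsam;

int main() {
  // Build keys once from Symbols instead of relying on GTSAM_MAGIC_KEY strings.
  const Key kx1 = Symbol('x', 1), kx2 = Symbol('x', 2), kl1 = Symbol('l', 1);

  // Fill an ordering and look up the integer index assigned to a nonlinear key,
  // mirroring ordering->push_back(Symbol('l', i)) and ordering[kx1] above.
  Ordering ordering;
  ordering.push_back(kx1);
  ordering.push_back(kx2);
  ordering.push_back(kl1);

  std::size_t jl1 = ordering[kl1];
  (void)jl1;
  return 0;
}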
@@ -441,7 +443,7 @@ TEST( Pose2Prior, linearize ) // Test with numerical derivative Matrix numericalH = numericalDerivative11(hprior, priorVal); - CHECK(assert_equal(numericalH,actual->getA(actual->find(ordering["x1"])))); + CHECK(assert_equal(numericalH,actual->getA(actual->find(ordering[kx1])))); } /* ************************************************************************* */ @@ -466,15 +468,15 @@ TEST( Pose2Factor, error ) // Check error at x0, i.e. delta = zero ! VectorValues delta(x0.dims(ordering)); - delta[ordering["x1"]] = zero(3); - delta[ordering["x2"]] = zero(3); + delta[ordering[kx1]] = zero(3); + delta[ordering[kx2]] = zero(3); Vector error_at_zero = Vector_(3,0.0,0.0,0.0); CHECK(assert_equal(error_at_zero,factor.unwhitenedError(x0))); CHECK(assert_equal(-error_at_zero, linear->error_vector(delta))); // Check error after increasing p2 VectorValues plus = delta; - plus[ordering["x2"]] = Vector_(3, 0.1, 0.0, 0.0); + plus[ordering[kx2]] = Vector_(3, 0.1, 0.0, 0.0); pose2SLAM::Values x1 = x0.retract(plus, ordering); Vector error_at_plus = Vector_(3,0.1/sx,0.0,0.0); // h(x)-z = 0.1 ! CHECK(assert_equal(error_at_plus,factor.whitenedError(x1))); @@ -542,7 +544,7 @@ TEST( Pose2Factor, linearize ) // expected linear factor Ordering ordering(*x0.orderingArbitrary()); SharedDiagonal probModel1 = noiseModel::Unit::Create(3); - JacobianFactor expected(ordering["x1"], expectedH1, ordering["x2"], expectedH2, expected_b, probModel1); + JacobianFactor expected(ordering[kx1], expectedH1, ordering[kx2], expectedH2, expected_b, probModel1); // Actual linearization boost::shared_ptr actual = diff --git a/gtsam/slam/tests/testPose3SLAM.cpp b/gtsam/slam/tests/testPose3SLAM.cpp index 9eb6b3c3c..c04bc2191 100644 --- a/gtsam/slam/tests/testPose3SLAM.cpp +++ b/gtsam/slam/tests/testPose3SLAM.cpp @@ -43,6 +43,8 @@ static Matrix covariance = eye(6); const double tol=1e-5; +const Key kx0 = Symbol("x0"), kx1 = Symbol("x1"), kx2 = Symbol("x2"), kx3 = Symbol("x3"), kx4 = Symbol("x4"), kx5 = Symbol("x5"); + /* ************************************************************************* */ // test optimization with 6 poses arranged in a hexagon and a loop closure TEST(Pose3Graph, optimizeCircle) { @@ -76,7 +78,7 @@ TEST(Pose3Graph, optimizeCircle) { // Choose an ordering and optimize shared_ptr ordering(new Ordering); - *ordering += "x0","x1","x2","x3","x4","x5"; + *ordering += kx0,kx1,kx2,kx3,kx4,kx5; NonlinearOptimizationParameters::shared_ptr params = NonlinearOptimizationParameters::newDecreaseThresholds(1e-15, 1e-15); pose3SLAM::Optimizer optimizer0(fg, initial, ordering, params); pose3SLAM::Optimizer optimizer = optimizer0.levenbergMarquardt(); diff --git a/gtsam/slam/tests/testProjectionFactor.cpp b/gtsam/slam/tests/testProjectionFactor.cpp index 00c41aaad..ab8621cac 100644 --- a/gtsam/slam/tests/testProjectionFactor.cpp +++ b/gtsam/slam/tests/testProjectionFactor.cpp @@ -40,6 +40,8 @@ static Cal3_S2 K(fov,w,h); static SharedNoiseModel sigma(noiseModel::Unit::Create(1)); static shared_ptrK sK(new Cal3_S2(K)); +const Key kx1 = Symbol("x1"), kl1 = Symbol("l1"); + // make cameras /* ************************************************************************* */ @@ -62,12 +64,12 @@ TEST( ProjectionFactor, error ) DOUBLES_EQUAL(4.5,factor->error(config),1e-9); // Check linearize - Ordering ordering; ordering += "x1","l1"; + Ordering ordering; ordering += kx1,kl1; Matrix Ax1 = Matrix_(2, 6, 0., -554.256, 0., -92.376, 0., 0., 554.256, 0., 0., 0., -92.376, 0.); Matrix Al1 = Matrix_(2, 3, 92.376, 0., 0., 0., 92.376, 
0.); Vector b = Vector_(2,3.,0.); SharedDiagonal probModel1 = noiseModel::Unit::Create(2); - JacobianFactor expected(ordering["x1"], Ax1, ordering["l1"], Al1, b, probModel1); + JacobianFactor expected(ordering[kx1], Ax1, ordering[kl1], Al1, b, probModel1); JacobianFactor::shared_ptr actual = boost::dynamic_pointer_cast(factor->linearize(config, ordering)); CHECK(assert_equal(expected,*actual,1e-3)); @@ -85,8 +87,8 @@ TEST( ProjectionFactor, error ) Point3 t2(1,1,-5); Pose3 x2(R,t2); expected_config.insert(PoseKey(1), x2); Point3 l2(1,2,3); expected_config.insert(PointKey(1), l2); VectorValues delta(expected_config.dims(ordering)); - delta[ordering["x1"]] = Vector_(6, 0.,0.,0., 1.,1.,1.); - delta[ordering["l1"]] = Vector_(3, 1.,2.,3.); + delta[ordering[kx1]] = Vector_(6, 0.,0.,0., 1.,1.,1.); + delta[ordering[kl1]] = Vector_(3, 1.,2.,3.); Values actual_config = config.retract(delta, ordering); CHECK(assert_equal(expected_config,actual_config,1e-9)); } diff --git a/gtsam/slam/tests/testSerializationSLAM.cpp b/gtsam/slam/tests/testSerializationSLAM.cpp index c75e0a148..afd759110 100644 --- a/gtsam/slam/tests/testSerializationSLAM.cpp +++ b/gtsam/slam/tests/testSerializationSLAM.cpp @@ -51,7 +51,7 @@ BOOST_CLASS_EXPORT_GUID(gtsam::SharedDiagonal, "gtsam_SharedDiagonal"); TEST (Serialization, smallExample_linear) { using namespace example; - Ordering ordering; ordering += "x1","x2","l1"; + Ordering ordering; ordering += Symbol("x1"),Symbol("x2"),Symbol("l1"); GaussianFactorGraph fg = createGaussianFactorGraph(ordering); EXPECT(equalsObj(ordering)); EXPECT(equalsXML(ordering)); diff --git a/gtsam/slam/tests/testVSLAM.cpp b/gtsam/slam/tests/testVSLAM.cpp index fb53d4b65..c4779a1cf 100644 --- a/gtsam/slam/tests/testVSLAM.cpp +++ b/gtsam/slam/tests/testVSLAM.cpp @@ -101,7 +101,7 @@ TEST( Graph, optimizeLM) // Create an ordering of the variables shared_ptr ordering(new Ordering); - *ordering += "l1","l2","l3","l4","x1","x2"; + *ordering += PointKey(1),PointKey(2),PointKey(3),PointKey(4),PoseKey(1),PoseKey(2); // Create an optimizer and check its error // We expect the initial to be zero because config is the ground truth @@ -138,7 +138,7 @@ TEST( Graph, optimizeLM2) // Create an ordering of the variables shared_ptr ordering(new Ordering); - *ordering += "l1","l2","l3","l4","x1","x2"; + *ordering += PointKey(1),PointKey(2),PointKey(3),PointKey(4),PoseKey(1),PoseKey(2); // Create an optimizer and check its error // We expect the initial to be zero because config is the ground truth @@ -204,11 +204,11 @@ TEST( Values, update_with_large_delta) { Ordering largeOrdering; Values largeValues = init; largeValues.insert(PoseKey(2), Pose3()); - largeOrdering += "x1","l1","x2"; + largeOrdering += PoseKey(1),PointKey(1),PoseKey(2); VectorValues delta(largeValues.dims(largeOrdering)); - delta[largeOrdering["x1"]] = Vector_(6, 0.0, 0.0, 0.0, 0.1, 0.1, 0.1); - delta[largeOrdering["l1"]] = Vector_(3, 0.1, 0.1, 0.1); - delta[largeOrdering["x2"]] = Vector_(6, 0.0, 0.0, 0.0, 100.1, 4.1, 9.1); + delta[largeOrdering[PoseKey(1)]] = Vector_(6, 0.0, 0.0, 0.0, 0.1, 0.1, 0.1); + delta[largeOrdering[PointKey(1)]] = Vector_(3, 0.1, 0.1, 0.1); + delta[largeOrdering[PoseKey(2)]] = Vector_(6, 0.0, 0.0, 0.0, 100.1, 4.1, 9.1); Values actual = init.retract(delta, largeOrdering); CHECK(assert_equal(expected,actual)); diff --git a/gtsam/slam/visualSLAM.h b/gtsam/slam/visualSLAM.h index 40e5eafcc..5416e2142 100644 --- a/gtsam/slam/visualSLAM.h +++ b/gtsam/slam/visualSLAM.h @@ -67,8 +67,8 @@ namespace visualSLAM { } /// print out graph - 
void print(const std::string& s = "") const { - NonlinearFactorGraph::print(s); + void print(const std::string& s = "", const KeyFormatter& keyFormatter = DefaultKeyFormatter) const { + NonlinearFactorGraph::print(s, keyFormatter); } /// check if two graphs are equal diff --git a/tests/testExtendedKalmanFilter.cpp b/tests/testExtendedKalmanFilter.cpp index 4d3279250..b38e7e3fe 100644 --- a/tests/testExtendedKalmanFilter.cpp +++ b/tests/testExtendedKalmanFilter.cpp @@ -155,8 +155,8 @@ public: /* print */ virtual void print(const std::string& s = "") const { std::cout << s << ": NonlinearMotionModel\n"; - std::cout << " TestKey1: " << (std::string) key1() << std::endl; - std::cout << " TestKey2: " << (std::string) key2() << std::endl; + std::cout << " TestKey1: " << DefaultKeyFormatter(key1()) << std::endl; + std::cout << " TestKey2: " << DefaultKeyFormatter(key2()) << std::endl; } /** Check if two factors are equal. Note type is IndexFactor and needs cast. */ @@ -293,7 +293,7 @@ public: /* print */ virtual void print(const std::string& s = "") const { std::cout << s << ": NonlinearMeasurementModel\n"; - std::cout << " TestKey: " << (std::string) key() << std::endl; + std::cout << " TestKey: " << DefaultKeyFormatter(key()) << std::endl; } /** Check if two factors are equal. Note type is IndexFactor and needs cast. */ diff --git a/tests/testGaussianFactor.cpp b/tests/testGaussianFactor.cpp index f6cf7d54b..f39af5c3d 100644 --- a/tests/testGaussianFactor.cpp +++ b/tests/testGaussianFactor.cpp @@ -26,7 +26,7 @@ using namespace boost::assign; #include -// Magically casts strings like "x3" to a Symbol('x',3) key, see Symbol.h +// Magically casts strings like kx3 to a Symbol('x',3) key, see Symbol.h #define GTSAM_MAGIC_KEY #include @@ -44,19 +44,21 @@ static SharedDiagonal sigma0_1 = sharedSigma(2,0.1), sigma_02 = sharedSigma(2,0.2), constraintModel = noiseModel::Constrained::All(2); +const Key kx1 = Symbol("x1"), kx2 = Symbol("x2"), kl1 = Symbol("l1"); + /* ************************************************************************* */ TEST( GaussianFactor, linearFactor ) { - Ordering ordering; ordering += "x1","x2","l1"; + Ordering ordering; ordering += kx1,kx2,kl1; Matrix I = eye(2); Vector b = Vector_(2, 2.0, -1.0); - JacobianFactor expected(ordering["x1"], -10*I,ordering["x2"], 10*I, b, noiseModel::Unit::Create(2)); + JacobianFactor expected(ordering[kx1], -10*I,ordering[kx2], 10*I, b, noiseModel::Unit::Create(2)); // create a small linear factor graph FactorGraph fg = createGaussianFactorGraph(ordering); - // get the factor "f2" from the factor graph + // get the factor kf2 from the factor graph JacobianFactor::shared_ptr lf = fg[1]; // check if the two factors are the same @@ -66,26 +68,26 @@ TEST( GaussianFactor, linearFactor ) ///* ************************************************************************* */ // SL-FIX TEST( GaussianFactor, keys ) //{ -// // get the factor "f2" from the small linear factor graph -// Ordering ordering; ordering += "x1","x2","l1"; +// // get the factor kf2 from the small linear factor graph +// Ordering ordering; ordering += kx1,kx2,kl1; // GaussianFactorGraph fg = createGaussianFactorGraph(ordering); // GaussianFactor::shared_ptr lf = fg[1]; // list expected; -// expected.push_back("x1"); -// expected.push_back("x2"); +// expected.push_back(kx1); +// expected.push_back(kx2); // EXPECT(lf->keys() == expected); //} ///* ************************************************************************* */ // SL-FIX TEST( GaussianFactor, dimensions ) //{ -// // get the 
factor "f2" from the small linear factor graph -// Ordering ordering; ordering += "x1","x2","l1"; +// // get the factor kf2 from the small linear factor graph +// Ordering ordering; ordering += kx1,kx2,kl1; // GaussianFactorGraph fg = createGaussianFactorGraph(ordering); // // // Check a single factor // Dimensions expected; -// insert(expected)("x1", 2)("x2", 2); +// insert(expected)(kx1, 2)(kx2, 2); // Dimensions actual = fg[1]->dimensions(); // EXPECT(expected==actual); //} @@ -94,12 +96,12 @@ TEST( GaussianFactor, linearFactor ) TEST( GaussianFactor, getDim ) { // get a factor - Ordering ordering; ordering += "x1","x2","l1"; + Ordering ordering; ordering += kx1,kx2,kl1; GaussianFactorGraph fg = createGaussianFactorGraph(ordering); GaussianFactor::shared_ptr factor = fg[0]; // get the size of a variable - size_t actual = factor->getDim(factor->find(ordering["x1"])); + size_t actual = factor->getDim(factor->find(ordering[kx1])); // verify size_t expected = 2; @@ -110,7 +112,7 @@ TEST( GaussianFactor, getDim ) // SL-FIX TEST( GaussianFactor, combine ) //{ // // create a small linear factor graph -// Ordering ordering; ordering += "x1","x2","l1"; +// Ordering ordering; ordering += kx1,kx2,kl1; // GaussianFactorGraph fg = createGaussianFactorGraph(ordering); // // // get two factors from it and insert the factors into a vector @@ -155,9 +157,9 @@ TEST( GaussianFactor, getDim ) // // // use general constructor for making arbitrary factors // vector > meas; -// meas.push_back(make_pair("x2", Ax2)); -// meas.push_back(make_pair("l1", Al1)); -// meas.push_back(make_pair("x1", Ax1)); +// meas.push_back(make_pair(kx2, Ax2)); +// meas.push_back(make_pair(kl1, Al1)); +// meas.push_back(make_pair(kx1, Ax1)); // GaussianFactor expected(meas, b2, noiseModel::Diagonal::Sigmas(ones(4))); // EXPECT(assert_equal(expected,combined)); //} @@ -166,7 +168,7 @@ TEST( GaussianFactor, getDim ) TEST( GaussianFactor, error ) { // create a small linear factor graph - Ordering ordering; ordering += "x1","x2","l1"; + Ordering ordering; ordering += kx1,kx2,kl1; GaussianFactorGraph fg = createGaussianFactorGraph(ordering); // get the first factor from the factor graph @@ -175,7 +177,7 @@ TEST( GaussianFactor, error ) // check the error of the first factor with noisy config VectorValues cfg = createZeroDelta(ordering); - // calculate the error from the factor "f1" + // calculate the error from the factor kf1 // note the error is the same as in testNonlinearFactor double actual = lf->error(cfg); DOUBLES_EQUAL( 1.0, actual, 0.00000001 ); @@ -185,7 +187,7 @@ TEST( GaussianFactor, error ) // SL-FIX TEST( GaussianFactor, eliminate ) //{ // // create a small linear factor graph -// Ordering ordering; ordering += "x1","x2","l1"; +// Ordering ordering; ordering += kx1,kx2,kl1; // GaussianFactorGraph fg = createGaussianFactorGraph(ordering); // // // get two factors from it and insert the factors into a vector @@ -199,7 +201,7 @@ TEST( GaussianFactor, error ) // // eliminate the combined factor // GaussianConditional::shared_ptr actualCG; // GaussianFactor::shared_ptr actualLF; -// boost::tie(actualCG,actualLF) = combined.eliminate("x2"); +// boost::tie(actualCG,actualLF) = combined.eliminate(kx2); // // // create expected Conditional Gaussian // Matrix I = eye(2)*sqrt(125.0); @@ -208,14 +210,14 @@ TEST( GaussianFactor, error ) // // // Check the conditional Gaussian // GaussianConditional -// expectedCG("x2", d, R11, "l1", S12, "x1", S13, repeat(2, 1.0)); +// expectedCG(kx2, d, R11, kl1, S12, kx1, S13, repeat(2, 1.0)); // // // 
the expected linear factor // I = eye(2)/0.2236; // Matrix Bl1 = I, Bx1 = -I; // Vector b1 = I*Vector_(2,0.0,0.2); // -// GaussianFactor expectedLF("l1", Bl1, "x1", Bx1, b1, repeat(2,1.0)); +// GaussianFactor expectedLF(kl1, Bl1, kx1, Bx1, b1, repeat(2,1.0)); // // // check if the result matches // EXPECT(assert_equal(expectedCG,*actualCG,1e-3)); @@ -226,17 +228,17 @@ TEST( GaussianFactor, error ) TEST( GaussianFactor, matrix ) { // create a small linear factor graph - Ordering ordering; ordering += "x1","x2","l1"; + Ordering ordering; ordering += kx1,kx2,kl1; FactorGraph fg = createGaussianFactorGraph(ordering); - // get the factor "f2" from the factor graph + // get the factor kf2 from the factor graph //GaussianFactor::shared_ptr lf = fg[1]; // NOTE: using the older version Vector b2 = Vector_(2, 0.2, -0.1); Matrix I = eye(2); // render with a given ordering Ordering ord; - ord += "x1","x2"; - JacobianFactor::shared_ptr lf(new JacobianFactor(ord["x1"], -I, ord["x2"], I, b2, sigma0_1)); + ord += kx1,kx2; + JacobianFactor::shared_ptr lf(new JacobianFactor(ord[kx1], -I, ord[kx2], I, b2, sigma0_1)); // Test whitened version Matrix A_act1; Vector b_act1; @@ -274,17 +276,17 @@ TEST( GaussianFactor, matrix ) TEST( GaussianFactor, matrix_aug ) { // create a small linear factor graph - Ordering ordering; ordering += "x1","x2","l1"; + Ordering ordering; ordering += kx1,kx2,kl1; FactorGraph fg = createGaussianFactorGraph(ordering); - // get the factor "f2" from the factor graph + // get the factor kf2 from the factor graph //GaussianFactor::shared_ptr lf = fg[1]; Vector b2 = Vector_(2, 0.2, -0.1); Matrix I = eye(2); // render with a given ordering Ordering ord; - ord += "x1","x2"; - JacobianFactor::shared_ptr lf(new JacobianFactor(ord["x1"], -I, ord["x2"], I, b2, sigma0_1)); + ord += kx1,kx2; + JacobianFactor::shared_ptr lf(new JacobianFactor(ord[kx1], -I, ord[kx2], I, b2, sigma0_1)); // Test unwhitened version @@ -325,15 +327,15 @@ void print(const list& i) { // SL-FIX TEST( GaussianFactor, sparse ) //{ // // create a small linear factor graph -// Ordering ordering; ordering += "x1","x2","l1"; +// Ordering ordering; ordering += kx1,kx2,kl1; // GaussianFactorGraph fg = createGaussianFactorGraph(ordering); // -// // get the factor "f2" from the factor graph +// // get the factor kf2 from the factor graph // GaussianFactor::shared_ptr lf = fg[1]; // // // render with a given ordering // Ordering ord; -// ord += "x1","x2"; +// ord += kx1,kx2; // // list i,j; // list s; @@ -355,15 +357,15 @@ void print(const list& i) { // SL-FIX TEST( GaussianFactor, sparse2 ) //{ // // create a small linear factor graph -// Ordering ordering; ordering += "x1","x2","l1"; +// Ordering ordering; ordering += kx1,kx2,kl1; // GaussianFactorGraph fg = createGaussianFactorGraph(ordering); // -// // get the factor "f2" from the factor graph +// // get the factor kf2 from the factor graph // GaussianFactor::shared_ptr lf = fg[1]; // // // render with a given ordering // Ordering ord; -// ord += "x2","l1","x1"; +// ord += kx2,kl1,kx1; // // list i,j; // list s; @@ -385,7 +387,7 @@ void print(const list& i) { TEST( GaussianFactor, size ) { // create a linear factor graph - Ordering ordering; ordering += "x1","x2","l1"; + Ordering ordering; ordering += kx1,kx2,kl1; GaussianFactorGraph fg = createGaussianFactorGraph(ordering); // get some factors from the graph diff --git a/tests/testGaussianFactorGraph.cpp b/tests/testGaussianFactorGraph.cpp index 73b1f2d63..bec246854 100644 --- a/tests/testGaussianFactorGraph.cpp +++ 
b/tests/testGaussianFactorGraph.cpp @@ -28,7 +28,7 @@ using namespace boost::assign; #include -// Magically casts strings like "x3" to a Symbol('x',3) key, see Symbol.h +// Magically casts strings like kx(3) to a Symbol('x',3) key, see Symbol.h #define GTSAM_MAGIC_KEY #include @@ -44,10 +44,13 @@ using namespace example; double tol=1e-5; +Key kx(size_t i) { return Symbol('x',i); } +Key kl(size_t i) { return Symbol('l',i); } + /* ************************************************************************* */ TEST( GaussianFactorGraph, equals ) { - Ordering ordering; ordering += "x1","x2","l1"; + Ordering ordering; ordering += kx(1),kx(2),kl(1); GaussianFactorGraph fg = createGaussianFactorGraph(ordering); GaussianFactorGraph fg2 = createGaussianFactorGraph(ordering); EXPECT(fg.equals(fg2)); @@ -55,7 +58,7 @@ TEST( GaussianFactorGraph, equals ) { /* ************************************************************************* */ //TEST( GaussianFactorGraph, error ) { -// Ordering ordering; ordering += "x1","x2","l1"; +// Ordering ordering; ordering += kx(1),kx(2),kl(1); // FactorGraph fg = createGaussianFactorGraph(ordering); // VectorValues cfg = createZeroDelta(ordering); // @@ -73,10 +76,10 @@ TEST( GaussianFactorGraph, equals ) { //{ // GaussianFactorGraph fg = createGaussianFactorGraph(); // -// set separator = fg.find_separator("x2"); +// set separator = fg.find_separator(kx(2)); // set expected; -// expected.insert("x1"); -// expected.insert("l1"); +// expected.insert(kx(1)); +// expected.insert(kl(1)); // // EXPECT(separator.size()==expected.size()); // set::iterator it1 = separator.begin(), it2 = expected.begin(); @@ -91,7 +94,7 @@ TEST( GaussianFactorGraph, equals ) { // GaussianFactorGraph fg = createGaussianFactorGraph(); // // // combine all factors -// GaussianFactor::shared_ptr actual = removeAndCombineFactors(fg,"x1"); +// GaussianFactor::shared_ptr actual = removeAndCombineFactors(fg,kx(1)); // // // the expected linear factor // Matrix Al1 = Matrix_(6,2, @@ -131,11 +134,11 @@ TEST( GaussianFactorGraph, equals ) { // b(5) = 1; // // vector > meas; -// meas.push_back(make_pair("l1", Al1)); -// meas.push_back(make_pair("x1", Ax1)); -// meas.push_back(make_pair("x2", Ax2)); +// meas.push_back(make_pair(kl(1), Al1)); +// meas.push_back(make_pair(kx(1), Ax1)); +// meas.push_back(make_pair(kx(2), Ax2)); // GaussianFactor expected(meas, b, ones(6)); -// //GaussianFactor expected("l1", Al1, "x1", Ax1, "x2", Ax2, b); +// //GaussianFactor expected(kl(1), Al1, kx(1), Ax1, kx(2), Ax2, b); // // // check if the two factors are the same // EXPECT(assert_equal(expected,*actual)); @@ -148,7 +151,7 @@ TEST( GaussianFactorGraph, equals ) { // GaussianFactorGraph fg = createGaussianFactorGraph(); // // // combine all factors -// GaussianFactor::shared_ptr actual = removeAndCombineFactors(fg,"x2"); +// GaussianFactor::shared_ptr actual = removeAndCombineFactors(fg,kx(2)); // // // the expected linear factor // Matrix Al1 = Matrix_(4,2, @@ -183,9 +186,9 @@ TEST( GaussianFactorGraph, equals ) { // b(3) = 1.5; // // vector > meas; -// meas.push_back(make_pair("l1", Al1)); -// meas.push_back(make_pair("x1", Ax1)); -// meas.push_back(make_pair("x2", Ax2)); +// meas.push_back(make_pair(kl(1), Al1)); +// meas.push_back(make_pair(kx(1), Ax1)); +// meas.push_back(make_pair(kx(2), Ax2)); // GaussianFactor expected(meas, b, ones(4)); // // // check if the two factors are the same @@ -195,14 +198,14 @@ TEST( GaussianFactorGraph, equals ) { ///* 
************************************************************************* */ //TEST( GaussianFactorGraph, eliminateOne_x1 ) //{ -// Ordering ordering; ordering += "x1","l1","x2"; +// Ordering ordering; ordering += kx(1),kl(1),kx(2); // GaussianFactorGraph fg = createGaussianFactorGraph(ordering); // GaussianConditional::shared_ptr actual = GaussianSequentialSolver::EliminateUntil(fg, 1); // // // create expected Conditional Gaussian // Matrix I = 15*eye(2), R11 = I, S12 = -0.111111*I, S13 = -0.444444*I; // Vector d = Vector_(2, -0.133333, -0.0222222), sigma = ones(2); -// GaussianConditional expected(ordering["x1"],15*d,R11,ordering["l1"],S12,ordering["x2"],S13,sigma); +// GaussianConditional expected(ordering[kx(1)],15*d,R11,ordering[kl(1)],S12,ordering[kx(2)],S13,sigma); // // EXPECT(assert_equal(expected,*actual,tol)); //} @@ -210,7 +213,7 @@ TEST( GaussianFactorGraph, equals ) { ///* ************************************************************************* */ //TEST( GaussianFactorGraph, eliminateOne_x2 ) //{ -// Ordering ordering; ordering += "x2","l1","x1"; +// Ordering ordering; ordering += kx(2),kl(1),kx(1); // GaussianFactorGraph fg = createGaussianFactorGraph(ordering); // GaussianConditional::shared_ptr actual = GaussianSequentialSolver::EliminateUntil(fg, 1); // @@ -218,7 +221,7 @@ TEST( GaussianFactorGraph, equals ) { // double sig = 0.0894427; // Matrix I = eye(2)/sig, R11 = I, S12 = -0.2*I, S13 = -0.8*I; // Vector d = Vector_(2, 0.2, -0.14)/sig, sigma = ones(2); -// GaussianConditional expected(ordering["x2"],d,R11,ordering["l1"],S12,ordering["x1"],S13,sigma); +// GaussianConditional expected(ordering[kx(2)],d,R11,ordering[kl(1)],S12,ordering[kx(1)],S13,sigma); // // EXPECT(assert_equal(expected,*actual,tol)); //} @@ -226,7 +229,7 @@ TEST( GaussianFactorGraph, equals ) { ///* ************************************************************************* */ //TEST( GaussianFactorGraph, eliminateOne_l1 ) //{ -// Ordering ordering; ordering += "l1","x1","x2"; +// Ordering ordering; ordering += kl(1),kx(1),kx(2); // GaussianFactorGraph fg = createGaussianFactorGraph(ordering); // GaussianConditional::shared_ptr actual = GaussianSequentialSolver::EliminateUntil(fg, 1); // @@ -234,7 +237,7 @@ TEST( GaussianFactorGraph, equals ) { // double sig = sqrt(2)/10.; // Matrix I = eye(2)/sig, R11 = I, S12 = -0.5*I, S13 = -0.5*I; // Vector d = Vector_(2, -0.1, 0.25)/sig, sigma = ones(2); -// GaussianConditional expected(ordering["l1"],d,R11,ordering["x1"],S12,ordering["x2"],S13,sigma); +// GaussianConditional expected(ordering[kl(1)],d,R11,ordering[kx(1)],S12,ordering[kx(2)],S13,sigma); // // EXPECT(assert_equal(expected,*actual,tol)); //} @@ -243,12 +246,12 @@ TEST( GaussianFactorGraph, equals ) { //TEST( GaussianFactorGraph, eliminateOne_x1_fast ) //{ // GaussianFactorGraph fg = createGaussianFactorGraph(); -// GaussianConditional::shared_ptr actual = fg.eliminateOne("x1", false); +// GaussianConditional::shared_ptr actual = fg.eliminateOne(kx(1), false); // // // create expected Conditional Gaussian // Matrix I = 15*eye(2), R11 = I, S12 = -0.111111*I, S13 = -0.444444*I; // Vector d = Vector_(2, -0.133333, -0.0222222), sigma = ones(2); -// GaussianConditional expected("x1",15*d,R11,"l1",S12,"x2",S13,sigma); +// GaussianConditional expected(kx(1),15*d,R11,kl(1),S12,kx(2),S13,sigma); // // EXPECT(assert_equal(expected,*actual,tol)); //} @@ -257,13 +260,13 @@ TEST( GaussianFactorGraph, equals ) { //TEST( GaussianFactorGraph, eliminateOne_x2_fast ) //{ // GaussianFactorGraph fg = 
createGaussianFactorGraph(); -// GaussianConditional::shared_ptr actual = fg.eliminateOne("x2", false); +// GaussianConditional::shared_ptr actual = fg.eliminateOne(kx(2), false); // // // create expected Conditional Gaussian // double sig = 0.0894427; // Matrix I = eye(2)/sig, R11 = I, S12 = -0.2*I, S13 = -0.8*I; // Vector d = Vector_(2, 0.2, -0.14)/sig, sigma = ones(2); -// GaussianConditional expected("x2",d,R11,"l1",S12,"x1",S13,sigma); +// GaussianConditional expected(kx(2),d,R11,kl(1),S12,kx(1),S13,sigma); // // EXPECT(assert_equal(expected,*actual,tol)); //} @@ -272,13 +275,13 @@ TEST( GaussianFactorGraph, equals ) { //TEST( GaussianFactorGraph, eliminateOne_l1_fast ) //{ // GaussianFactorGraph fg = createGaussianFactorGraph(); -// GaussianConditional::shared_ptr actual = fg.eliminateOne("l1", false); +// GaussianConditional::shared_ptr actual = fg.eliminateOne(kl(1), false); // // // create expected Conditional Gaussian // double sig = sqrt(2)/10.; // Matrix I = eye(2)/sig, R11 = I, S12 = -0.5*I, S13 = -0.5*I; // Vector d = Vector_(2, -0.1, 0.25)/sig, sigma = ones(2); -// GaussianConditional expected("l1",d,R11,"x1",S12,"x2",S13,sigma); +// GaussianConditional expected(kl(1),d,R11,kx(1),S12,kx(2),S13,sigma); // // EXPECT(assert_equal(expected,*actual,tol)); //} @@ -290,18 +293,18 @@ TEST( GaussianFactorGraph, eliminateAll ) Matrix I = eye(2); Ordering ordering; - ordering += "x2","l1","x1"; + ordering += kx(2),kl(1),kx(1); Vector d1 = Vector_(2, -0.1,-0.1); - GaussianBayesNet expected = simpleGaussian(ordering["x1"],d1,0.1); + GaussianBayesNet expected = simpleGaussian(ordering[kx(1)],d1,0.1); double sig1 = 0.149071; Vector d2 = Vector_(2, 0.0, 0.2)/sig1, sigma2 = ones(2); - push_front(expected,ordering["l1"],d2, I/sig1,ordering["x1"], (-1)*I/sig1,sigma2); + push_front(expected,ordering[kl(1)],d2, I/sig1,ordering[kx(1)], (-1)*I/sig1,sigma2); double sig2 = 0.0894427; Vector d3 = Vector_(2, 0.2, -0.14)/sig2, sigma3 = ones(2); - push_front(expected,ordering["x2"],d3, I/sig2,ordering["l1"], (-0.2)*I/sig2, ordering["x1"], (-0.8)*I/sig2, sigma3); + push_front(expected,ordering[kx(2)],d3, I/sig2,ordering[kl(1)], (-0.2)*I/sig2, ordering[kx(1)], (-0.8)*I/sig2, sigma3); // Check one ordering GaussianFactorGraph fg1 = createGaussianFactorGraph(ordering); @@ -319,20 +322,20 @@ TEST( GaussianFactorGraph, eliminateAll ) // Matrix I = eye(2); // // Vector d1 = Vector_(2, -0.1,-0.1); -// GaussianBayesNet expected = simpleGaussian("x1",d1,0.1); +// GaussianBayesNet expected = simpleGaussian(kx(1),d1,0.1); // // double sig1 = 0.149071; // Vector d2 = Vector_(2, 0.0, 0.2)/sig1, sigma2 = ones(2); -// push_front(expected,"l1",d2, I/sig1,"x1", (-1)*I/sig1,sigma2); +// push_front(expected,kl(1),d2, I/sig1,kx(1), (-1)*I/sig1,sigma2); // // double sig2 = 0.0894427; // Vector d3 = Vector_(2, 0.2, -0.14)/sig2, sigma3 = ones(2); -// push_front(expected,"x2",d3, I/sig2,"l1", (-0.2)*I/sig2, "x1", (-0.8)*I/sig2, sigma3); +// push_front(expected,kx(2),d3, I/sig2,kl(1), (-0.2)*I/sig2, kx(1), (-0.8)*I/sig2, sigma3); // // // Check one ordering // GaussianFactorGraph fg1 = createGaussianFactorGraph(); // Ordering ordering; -// ordering += "x2","l1","x1"; +// ordering += kx(2),kl(1),kx(1); // GaussianBayesNet actual = fg1.eliminate(ordering, false); // EXPECT(assert_equal(expected,actual,tol)); //} @@ -340,16 +343,16 @@ TEST( GaussianFactorGraph, eliminateAll ) ///* ************************************************************************* */ //TEST( GaussianFactorGraph, add_priors ) //{ -// Ordering ordering; 
ordering += "l1","x1","x2"; +// Ordering ordering; ordering += kl(1),kx(1),kx(2); // GaussianFactorGraph fg = createGaussianFactorGraph(ordering); // GaussianFactorGraph actual = fg.add_priors(3, vector(3,2)); // GaussianFactorGraph expected = createGaussianFactorGraph(ordering); // Matrix A = eye(2); // Vector b = zero(2); // SharedDiagonal sigma = sharedSigma(2,3.0); -// expected.push_back(GaussianFactor::shared_ptr(new JacobianFactor(ordering["l1"],A,b,sigma))); -// expected.push_back(GaussianFactor::shared_ptr(new JacobianFactor(ordering["x1"],A,b,sigma))); -// expected.push_back(GaussianFactor::shared_ptr(new JacobianFactor(ordering["x2"],A,b,sigma))); +// expected.push_back(GaussianFactor::shared_ptr(new JacobianFactor(ordering[kl(1)],A,b,sigma))); +// expected.push_back(GaussianFactor::shared_ptr(new JacobianFactor(ordering[kx(1)],A,b,sigma))); +// expected.push_back(GaussianFactor::shared_ptr(new JacobianFactor(ordering[kx(2)],A,b,sigma))); // EXPECT(assert_equal(expected,actual)); //} @@ -357,7 +360,7 @@ TEST( GaussianFactorGraph, eliminateAll ) TEST( GaussianFactorGraph, copying ) { // Create a graph - Ordering ordering; ordering += "x2","l1","x1"; + Ordering ordering; ordering += kx(2),kl(1),kx(1); GaussianFactorGraph actual = createGaussianFactorGraph(ordering); // Copy the graph ! @@ -378,7 +381,7 @@ TEST( GaussianFactorGraph, copying ) //{ // // render with a given ordering // Ordering ord; -// ord += "x2","l1","x1"; +// ord += kx(2),kl(1),kx(1); // // // Create a graph // GaussianFactorGraph fg = createGaussianFactorGraph(ordering); @@ -417,7 +420,7 @@ TEST( GaussianFactorGraph, copying ) TEST( GaussianFactorGraph, CONSTRUCTOR_GaussianBayesNet ) { Ordering ord; - ord += "x2","l1","x1"; + ord += kx(2),kl(1),kx(1); GaussianFactorGraph fg = createGaussianFactorGraph(ord); // render with a given ordering @@ -437,20 +440,20 @@ TEST( GaussianFactorGraph, CONSTRUCTOR_GaussianBayesNet ) /* ************************************************************************* */ TEST( GaussianFactorGraph, getOrdering) { - Ordering original; original += "l1","x1","x2"; + Ordering original; original += kl(1),kx(1),kx(2); FactorGraph symbolic(createGaussianFactorGraph(original)); Permutation perm(*Inference::PermutationCOLAMD(VariableIndex(symbolic))); Ordering actual = original; actual.permuteWithInverse((*perm.inverse())); - Ordering expected; expected += "l1","x2","x1"; + Ordering expected; expected += kl(1),kx(2),kx(1); EXPECT(assert_equal(expected,actual)); } // SL-FIX TEST( GaussianFactorGraph, getOrdering2) //{ // Ordering expected; -// expected += "l1","x1"; +// expected += kl(1),kx(1); // GaussianFactorGraph fg = createGaussianFactorGraph(); -// set interested; interested += "l1","x1"; +// set interested; interested += kl(1),kx(1); // Ordering actual = fg.getOrdering(interested); // EXPECT(assert_equal(expected,actual)); //} @@ -459,7 +462,7 @@ TEST( GaussianFactorGraph, getOrdering) TEST( GaussianFactorGraph, optimize_LDL ) { // create an ordering - Ordering ord; ord += "x2","l1","x1"; + Ordering ord; ord += kx(2),kl(1),kx(1); // create a graph GaussianFactorGraph fg = createGaussianFactorGraph(ord); @@ -477,7 +480,7 @@ TEST( GaussianFactorGraph, optimize_LDL ) TEST( GaussianFactorGraph, optimize_QR ) { // create an ordering - Ordering ord; ord += "x2","l1","x1"; + Ordering ord; ord += kx(2),kl(1),kx(1); // create a graph GaussianFactorGraph fg = createGaussianFactorGraph(ord); @@ -495,7 +498,7 @@ TEST( GaussianFactorGraph, optimize_QR ) // SL-FIX TEST( GaussianFactorGraph, 
optimizeMultiFrontlas ) //{ // // create an ordering -// Ordering ord; ord += "x2","l1","x1"; +// Ordering ord; ord += kx(2),kl(1),kx(1); // // // create a graph // GaussianFactorGraph fg = createGaussianFactorGraph(ord); @@ -513,7 +516,7 @@ TEST( GaussianFactorGraph, optimize_QR ) TEST( GaussianFactorGraph, combine) { // create an ordering - Ordering ord; ord += "x2","l1","x1"; + Ordering ord; ord += kx(2),kl(1),kx(1); // create a test graph GaussianFactorGraph fg1 = createGaussianFactorGraph(ord); @@ -535,7 +538,7 @@ TEST( GaussianFactorGraph, combine) TEST( GaussianFactorGraph, combine2) { // create an ordering - Ordering ord; ord += "x2","l1","x1"; + Ordering ord; ord += kx(2),kl(1),kx(1); // create a test graph GaussianFactorGraph fg1 = createGaussianFactorGraph(ord); @@ -568,13 +571,13 @@ void print(vector v) { // GaussianFactorGraph fg = createGaussianFactorGraph(); // // // ask for all factor indices connected to x1 -// list x1_factors = fg.factors("x1"); +// list x1_factors = fg.factors(kx(1)); // size_t x1_indices[] = { 0, 1, 2 }; // list x1_expected(x1_indices, x1_indices + 3); // EXPECT(x1_factors==x1_expected); // // // ask for all factor indices connected to x2 -// list x2_factors = fg.factors("x2"); +// list x2_factors = fg.factors(kx(2)); // size_t x2_indices[] = { 1, 3 }; // list x2_expected(x2_indices, x2_indices + 2); // EXPECT(x2_factors==x2_expected); @@ -592,7 +595,7 @@ void print(vector v) { // GaussianFactor::shared_ptr f2 = fg[2]; // // // call the function -// vector factors = fg.findAndRemoveFactors("x1"); +// vector factors = fg.findAndRemoveFactors(kx(1)); // // // Check the factors // EXPECT(f0==factors[0]); @@ -617,7 +620,7 @@ TEST(GaussianFactorGraph, createSmoother) //{ // GaussianFactorGraph fg = createGaussianFactorGraph(); // Dimensions expected; -// insert(expected)("l1", 2)("x1", 2)("x2", 2); +// insert(expected)(kl(1), 2)(kx(1), 2)(kx(2), 2); // Dimensions actual = fg.dimensions(); // EXPECT(expected==actual); //} @@ -627,7 +630,7 @@ TEST(GaussianFactorGraph, createSmoother) //{ // GaussianFactorGraph fg = createGaussianFactorGraph(); // Ordering expected; -// expected += "l1","x1","x2"; +// expected += kl(1),kx(1),kx(2); // EXPECT(assert_equal(expected,fg.keys())); //} @@ -635,16 +638,16 @@ TEST(GaussianFactorGraph, createSmoother) // SL-NEEDED? 
TEST( GaussianFactorGraph, involves ) //{ // GaussianFactorGraph fg = createGaussianFactorGraph(); -// EXPECT(fg.involves("l1")); -// EXPECT(fg.involves("x1")); -// EXPECT(fg.involves("x2")); -// EXPECT(!fg.involves("x3")); +// EXPECT(fg.involves(kl(1))); +// EXPECT(fg.involves(kx(1))); +// EXPECT(fg.involves(kx(2))); +// EXPECT(!fg.involves(kx(3))); //} /* ************************************************************************* */ double error(const VectorValues& x) { // create an ordering - Ordering ord; ord += "x2","l1","x1"; + Ordering ord; ord += kx(2),kl(1),kx(1); GaussianFactorGraph fg = createGaussianFactorGraph(ord); return fg.error(x); @@ -658,11 +661,11 @@ double error(const VectorValues& x) { // // Construct expected gradient // VectorValues expected; // -// // 2*f(x) = 100*(x1+c["x1"])^2 + 100*(x2-x1-[0.2;-0.1])^2 + 25*(l1-x1-[0.0;0.2])^2 + 25*(l1-x2-[-0.2;0.3])^2 +// // 2*f(x) = 100*(x1+c[kx(1)])^2 + 100*(x2-x1-[0.2;-0.1])^2 + 25*(l1-x1-[0.0;0.2])^2 + 25*(l1-x2-[-0.2;0.3])^2 // // worked out: df/dx1 = 100*[0.1;0.1] + 100*[0.2;-0.1]) + 25*[0.0;0.2] = [10+20;10-10+5] = [30;5] -// expected.insert("l1",Vector_(2, 5.0,-12.5)); -// expected.insert("x1",Vector_(2, 30.0, 5.0)); -// expected.insert("x2",Vector_(2,-25.0, 17.5)); +// expected.insert(kl(1),Vector_(2, 5.0,-12.5)); +// expected.insert(kx(1),Vector_(2, 30.0, 5.0)); +// expected.insert(kx(2),Vector_(2,-25.0, 17.5)); // // // Check the gradient at delta=0 // VectorValues zero = createZeroDelta(); @@ -675,7 +678,7 @@ double error(const VectorValues& x) { // // // Check the gradient at the solution (should be zero) // Ordering ord; -// ord += "x2","l1","x1"; +// ord += kx(2),kl(1),kx(1); // GaussianFactorGraph fg2 = createGaussianFactorGraph(); // VectorValues solution = fg2.optimize(ord); // destructive // VectorValues actual2 = fg.gradient(solution); @@ -686,7 +689,7 @@ double error(const VectorValues& x) { TEST( GaussianFactorGraph, multiplication ) { // create an ordering - Ordering ord; ord += "x2","l1","x1"; + Ordering ord; ord += kx(2),kl(1),kx(1); FactorGraph A = createGaussianFactorGraph(ord); VectorValues x = createCorrectDelta(ord); @@ -703,7 +706,7 @@ TEST( GaussianFactorGraph, multiplication ) // SL-NEEDED? TEST( GaussianFactorGraph, transposeMultiplication ) //{ // // create an ordering -// Ordering ord; ord += "x2","l1","x1"; +// Ordering ord; ord += kx(2),kl(1),kx(1); // // GaussianFactorGraph A = createGaussianFactorGraph(ord); // Errors e; @@ -713,9 +716,9 @@ TEST( GaussianFactorGraph, multiplication ) // e += Vector_(2,-7.5,-5.0); // // VectorValues expected = createZeroDelta(ord), actual = A ^ e; -// expected[ord["l1"]] = Vector_(2, -37.5,-50.0); -// expected[ord["x1"]] = Vector_(2,-150.0, 25.0); -// expected[ord["x2"]] = Vector_(2, 187.5, 25.0); +// expected[ord[kl(1)]] = Vector_(2, -37.5,-50.0); +// expected[ord[kx(1)]] = Vector_(2,-150.0, 25.0); +// expected[ord[kx(2)]] = Vector_(2, 187.5, 25.0); // EXPECT(assert_equal(expected,actual)); //} @@ -723,7 +726,7 @@ TEST( GaussianFactorGraph, multiplication ) // SL-NEEDED? 
TEST( GaussianFactorGraph, rhs ) //{ // // create an ordering -// Ordering ord; ord += "x2","l1","x1"; +// Ordering ord; ord += kx(2),kl(1),kx(1); // // GaussianFactorGraph Ab = createGaussianFactorGraph(ord); // Errors expected = createZeroDelta(ord), actual = Ab.rhs(); @@ -739,21 +742,21 @@ TEST( GaussianFactorGraph, multiplication ) TEST( GaussianFactorGraph, elimination ) { Ordering ord; - ord += "x1", "x2"; + ord += kx(1), kx(2); // Create Gaussian Factor Graph GaussianFactorGraph fg; Matrix Ap = eye(1), An = eye(1) * -1; Vector b = Vector_(1, 0.0); SharedDiagonal sigma = sharedSigma(1,2.0); - fg.add(ord["x1"], An, ord["x2"], Ap, b, sigma); - fg.add(ord["x1"], Ap, b, sigma); - fg.add(ord["x2"], Ap, b, sigma); + fg.add(ord[kx(1)], An, ord[kx(2)], Ap, b, sigma); + fg.add(ord[kx(1)], Ap, b, sigma); + fg.add(ord[kx(2)], Ap, b, sigma); // Eliminate GaussianBayesNet bayesNet = *GaussianSequentialSolver(fg).eliminate(); // Check sigma - EXPECT_DOUBLES_EQUAL(1.0,bayesNet[ord["x2"]]->get_sigmas()(0),1e-5); + EXPECT_DOUBLES_EQUAL(1.0,bayesNet[ord[kx(2)]]->get_sigmas()(0),1e-5); // Check matrix Matrix R;Vector d; @@ -808,7 +811,7 @@ TEST( GaussianFactorGraph, constrained_single ) // // // eliminate and solve // Ordering ord; -// ord += "y", "x"; +// ord += "y", "x"; // VectorValues actual = fg.optimize(ord); // // // verify @@ -839,7 +842,7 @@ TEST( GaussianFactorGraph, constrained_multi1 ) // // // eliminate and solve // Ordering ord; -// ord += "z", "x", "y"; +// ord += "z", "x", "y"; // VectorValues actual = fg.optimize(ord); // // // verify @@ -856,18 +859,18 @@ SharedDiagonal model = sharedSigma(2,1); // GaussianFactorGraph g; // Matrix I = eye(2); // Vector b = Vector_(0, 0, 0); -// g.add("x1", I, "x2", I, b, model); -// g.add("x1", I, "x3", I, b, model); -// g.add("x1", I, "x4", I, b, model); -// g.add("x2", I, "x3", I, b, model); -// g.add("x2", I, "x4", I, b, model); -// g.add("x3", I, "x4", I, b, model); +// g.add(kx(1), I, kx(2), I, b, model); +// g.add(kx(1), I, kx(3), I, b, model); +// g.add(kx(1), I, kx(4), I, b, model); +// g.add(kx(2), I, kx(3), I, b, model); +// g.add(kx(2), I, kx(4), I, b, model); +// g.add(kx(3), I, kx(4), I, b, model); // // map tree = g.findMinimumSpanningTree(); -// EXPECT(tree["x1"].compare("x1")==0); -// EXPECT(tree["x2"].compare("x1")==0); -// EXPECT(tree["x3"].compare("x1")==0); -// EXPECT(tree["x4"].compare("x1")==0); +// EXPECT(tree[kx(1)].compare(kx(1))==0); +// EXPECT(tree[kx(2)].compare(kx(1))==0); +// EXPECT(tree[kx(3)].compare(kx(1))==0); +// EXPECT(tree[kx(4)].compare(kx(1))==0); //} ///* ************************************************************************* */ @@ -876,17 +879,17 @@ SharedDiagonal model = sharedSigma(2,1); // GaussianFactorGraph g; // Matrix I = eye(2); // Vector b = Vector_(0, 0, 0); -// g.add("x1", I, "x2", I, b, model); -// g.add("x1", I, "x3", I, b, model); -// g.add("x1", I, "x4", I, b, model); -// g.add("x2", I, "x3", I, b, model); -// g.add("x2", I, "x4", I, b, model); +// g.add(kx(1), I, kx(2), I, b, model); +// g.add(kx(1), I, kx(3), I, b, model); +// g.add(kx(1), I, kx(4), I, b, model); +// g.add(kx(2), I, kx(3), I, b, model); +// g.add(kx(2), I, kx(4), I, b, model); // // PredecessorMap tree; -// tree["x1"] = "x1"; -// tree["x2"] = "x1"; -// tree["x3"] = "x1"; -// tree["x4"] = "x1"; +// tree[kx(1)] = kx(1); +// tree[kx(2)] = kx(1); +// tree[kx(3)] = kx(1); +// tree[kx(4)] = kx(1); // // GaussianFactorGraph Ab1, Ab2; // g.split(tree, Ab1, Ab2); @@ -897,17 +900,17 @@ SharedDiagonal model = sharedSigma(2,1); /* 
************************************************************************* */ TEST(GaussianFactorGraph, replace) { - Ordering ord; ord += "x1","x2","x3","x4","x5","x6"; + Ordering ord; ord += kx(1),kx(2),kx(3),kx(4),kx(5),kx(6); SharedDiagonal noise(sharedSigma(3, 1.0)); GaussianFactorGraph::sharedFactor f1(new JacobianFactor( - ord["x1"], eye(3,3), ord["x2"], eye(3,3), zero(3), noise)); + ord[kx(1)], eye(3,3), ord[kx(2)], eye(3,3), zero(3), noise)); GaussianFactorGraph::sharedFactor f2(new JacobianFactor( - ord["x2"], eye(3,3), ord["x3"], eye(3,3), zero(3), noise)); + ord[kx(2)], eye(3,3), ord[kx(3)], eye(3,3), zero(3), noise)); GaussianFactorGraph::sharedFactor f3(new JacobianFactor( - ord["x3"], eye(3,3), ord["x4"], eye(3,3), zero(3), noise)); + ord[kx(3)], eye(3,3), ord[kx(4)], eye(3,3), zero(3), noise)); GaussianFactorGraph::sharedFactor f4(new JacobianFactor( - ord["x5"], eye(3,3), ord["x6"], eye(3,3), zero(3), noise)); + ord[kx(5)], eye(3,3), ord[kx(6)], eye(3,3), zero(3), noise)); GaussianFactorGraph actual; actual.push_back(f1); @@ -943,7 +946,7 @@ TEST(GaussianFactorGraph, replace) // // // combine in a factor // Matrix Ab; SharedDiagonal noise; -// Ordering order; order += "x2", "l1", "x1"; +// Ordering order; order += kx(2), kl(1), kx(1); // boost::tie(Ab, noise) = combineFactorsAndCreateMatrix(lfg, order, dimensions); // // // the expected augmented matrix @@ -971,26 +974,26 @@ TEST(GaussianFactorGraph, replace) // typedef GaussianFactorGraph::sharedFactor Factor; // SharedDiagonal model(Vector_(1, 0.5)); // GaussianFactorGraph fg; -// Factor factor1(new JacobianFactor("x1", Matrix_(1,1,1.), "x2", Matrix_(1,1,1.), Vector_(1,1.), model)); -// Factor factor2(new JacobianFactor("x2", Matrix_(1,1,1.), "x3", Matrix_(1,1,1.), Vector_(1,1.), model)); -// Factor factor3(new JacobianFactor("x3", Matrix_(1,1,1.), "x3", Matrix_(1,1,1.), Vector_(1,1.), model)); +// Factor factor1(new JacobianFactor(kx(1), Matrix_(1,1,1.), kx(2), Matrix_(1,1,1.), Vector_(1,1.), model)); +// Factor factor2(new JacobianFactor(kx(2), Matrix_(1,1,1.), kx(3), Matrix_(1,1,1.), Vector_(1,1.), model)); +// Factor factor3(new JacobianFactor(kx(3), Matrix_(1,1,1.), kx(3), Matrix_(1,1,1.), Vector_(1,1.), model)); // fg.push_back(factor1); // fg.push_back(factor2); // fg.push_back(factor3); // -// Ordering frontals; frontals += "x2", "x1"; +// Ordering frontals; frontals += kx(2), kx(1); // GaussianBayesNet bn = fg.eliminateFrontals(frontals); // // GaussianBayesNet bn_expected; -// GaussianBayesNet::sharedConditional conditional1(new GaussianConditional("x2", Vector_(1, 2.), Matrix_(1, 1, 2.), -// "x1", Matrix_(1, 1, 1.), "x3", Matrix_(1, 1, 1.), Vector_(1, 1.))); -// GaussianBayesNet::sharedConditional conditional2(new GaussianConditional("x1", Vector_(1, 0.), Matrix_(1, 1, -1.), -// "x3", Matrix_(1, 1, 1.), Vector_(1, 1.))); +// GaussianBayesNet::sharedConditional conditional1(new GaussianConditional(kx(2), Vector_(1, 2.), Matrix_(1, 1, 2.), +// kx(1), Matrix_(1, 1, 1.), kx(3), Matrix_(1, 1, 1.), Vector_(1, 1.))); +// GaussianBayesNet::sharedConditional conditional2(new GaussianConditional(kx(1), Vector_(1, 0.), Matrix_(1, 1, -1.), +// kx(3), Matrix_(1, 1, 1.), Vector_(1, 1.))); // bn_expected.push_back(conditional1); // bn_expected.push_back(conditional2); // EXPECT(assert_equal(bn_expected, bn)); // -// GaussianFactorGraph::sharedFactor factor_expected(new JacobianFactor("x3", Matrix_(1, 1, 2.), Vector_(1, 2.), SharedDiagonal(Vector_(1, 1.)))); +// GaussianFactorGraph::sharedFactor factor_expected(new 
JacobianFactor(kx(3), Matrix_(1, 1, 2.), Vector_(1, 2.), SharedDiagonal(Vector_(1, 1.)))); // GaussianFactorGraph fg_expected; // fg_expected.push_back(factor_expected); // EXPECT(assert_equal(fg_expected, fg)); @@ -1006,8 +1009,8 @@ TEST(GaussianFactorGraph, createSmoother2) LONGS_EQUAL(5,fg2.size()); // eliminate - vector x3var; x3var.push_back(ordering["x3"]); - vector x1var; x1var.push_back(ordering["x1"]); + vector x3var; x3var.push_back(ordering[kx(3)]); + vector x1var; x1var.push_back(ordering[kx(1)]); GaussianBayesNet p_x3 = *GaussianSequentialSolver( *GaussianSequentialSolver(fg2).jointFactorGraph(x3var)).eliminate(); GaussianBayesNet p_x1 = *GaussianSequentialSolver( @@ -1024,7 +1027,7 @@ TEST(GaussianFactorGraph, hasConstraints) FactorGraph fgc2 = createSimpleConstraintGraph() ; EXPECT(hasConstraints(fgc2)); - Ordering ordering; ordering += "x1", "x2", "l1"; + Ordering ordering; ordering += kx(1), kx(2), kl(1); FactorGraph fg = createGaussianFactorGraph(ordering); EXPECT(!hasConstraints(fg)); } diff --git a/tests/testGaussianISAM.cpp b/tests/testGaussianISAM.cpp index bd592fc68..40ad8b428 100644 --- a/tests/testGaussianISAM.cpp +++ b/tests/testGaussianISAM.cpp @@ -21,7 +21,7 @@ using namespace boost::assign; #include -// Magically casts strings like "x3" to a Symbol('x',3) key, see Symbol.h +// Magically casts strings like kx(3) to a Symbol('x',3) key, see Symbol.h #define GTSAM_MAGIC_KEY #include @@ -37,6 +37,9 @@ using namespace std; using namespace gtsam; using namespace example; +Key kx(size_t i) { return Symbol('x',i); } +Key kl(size_t i) { return Symbol('l',i); } + /* ************************************************************************* */ // Some numbers that should be consistent among all smoother tests @@ -49,7 +52,7 @@ const double tol = 1e-4; TEST_UNSAFE( ISAM, iSAM_smoother ) { Ordering ordering; - for (int t = 1; t <= 7; t++) ordering += Symbol('x', t); + for (int t = 1; t <= 7; t++) ordering += kx(t); // Create smoother with 7 nodes GaussianFactorGraph smoother = createSmoother(7, ordering).first; @@ -82,7 +85,7 @@ TEST_UNSAFE( ISAM, iSAM_smoother ) // GaussianFactorGraph smoother = createSmoother(7); // // // Create initial tree from first 4 timestamps in reverse order ! 
-// Ordering ord; ord += "x4","x3","x2","x1"; +// Ordering ord; ord += kx(4),kx(3),kx(2),kx(1); // GaussianFactorGraph factors1; // for (int i=0;i<7;i++) factors1.push_back(smoother[i]); // GaussianISAM actual(*Inference::Eliminate(factors1)); @@ -129,7 +132,7 @@ TEST_UNSAFE( BayesTree, linear_smoother_shortcuts ) EXPECT(assert_equal(empty,actual1,tol)); // Check the conditional P(C2|Root) - GaussianISAM::sharedClique C2 = isamTree[ordering["x5"]]; + GaussianISAM::sharedClique C2 = isamTree[ordering[kx(5)]]; GaussianBayesNet actual2 = GaussianISAM::shortcut(C2,R); EXPECT(assert_equal(empty,actual2,tol)); @@ -137,8 +140,8 @@ TEST_UNSAFE( BayesTree, linear_smoother_shortcuts ) double sigma3 = 0.61808; Matrix A56 = Matrix_(2,2,-0.382022,0.,0.,-0.382022); GaussianBayesNet expected3; - push_front(expected3,ordering["x5"], zero(2), eye(2)/sigma3, ordering["x6"], A56/sigma3, ones(2)); - GaussianISAM::sharedClique C3 = isamTree[ordering["x4"]]; + push_front(expected3,ordering[kx(5)], zero(2), eye(2)/sigma3, ordering[kx(6)], A56/sigma3, ones(2)); + GaussianISAM::sharedClique C3 = isamTree[ordering[kx(4)]]; GaussianBayesNet actual3 = GaussianISAM::shortcut(C3,R); EXPECT(assert_equal(expected3,actual3,tol)); @@ -146,8 +149,8 @@ TEST_UNSAFE( BayesTree, linear_smoother_shortcuts ) double sigma4 = 0.661968; Matrix A46 = Matrix_(2,2,-0.146067,0.,0.,-0.146067); GaussianBayesNet expected4; - push_front(expected4, ordering["x4"], zero(2), eye(2)/sigma4, ordering["x6"], A46/sigma4, ones(2)); - GaussianISAM::sharedClique C4 = isamTree[ordering["x3"]]; + push_front(expected4, ordering[kx(4)], zero(2), eye(2)/sigma4, ordering[kx(6)], A46/sigma4, ones(2)); + GaussianISAM::sharedClique C4 = isamTree[ordering[kx(3)]]; GaussianBayesNet actual4 = GaussianISAM::shortcut(C4,R); EXPECT(assert_equal(expected4,actual4,tol)); } @@ -175,7 +178,7 @@ TEST_UNSAFE( BayesTree, balanced_smoother_marginals ) { // Create smoother with 7 nodes Ordering ordering; - ordering += "x1","x3","x5","x7","x2","x6","x4"; + ordering += kx(1),kx(3),kx(5),kx(7),kx(2),kx(6),kx(4); GaussianFactorGraph smoother = createSmoother(7, ordering).first; // Create the Bayes tree @@ -192,48 +195,48 @@ TEST_UNSAFE( BayesTree, balanced_smoother_marginals ) double tol=1e-5; // Check marginal on x1 - GaussianBayesNet expected1 = simpleGaussian(ordering["x1"], zero(2), sigmax1); - GaussianBayesNet actual1 = *bayesTree.marginalBayesNet(ordering["x1"]); + GaussianBayesNet expected1 = simpleGaussian(ordering[kx(1)], zero(2), sigmax1); + GaussianBayesNet actual1 = *bayesTree.marginalBayesNet(ordering[kx(1)]); Matrix expectedCovarianceX1 = eye(2,2) * (sigmax1 * sigmax1); Matrix actualCovarianceX1; - actualCovarianceX1 = bayesTree.marginalCovariance(ordering["x1"]); + actualCovarianceX1 = bayesTree.marginalCovariance(ordering[kx(1)]); EXPECT(assert_equal(expectedCovarianceX1, actualCovarianceX1, tol)); EXPECT(assert_equal(expected1,actual1,tol)); // Check marginal on x2 double sigx2 = 0.68712938; // FIXME: this should be corrected analytically - GaussianBayesNet expected2 = simpleGaussian(ordering["x2"], zero(2), sigx2); - GaussianBayesNet actual2 = *bayesTree.marginalBayesNet(ordering["x2"]); + GaussianBayesNet expected2 = simpleGaussian(ordering[kx(2)], zero(2), sigx2); + GaussianBayesNet actual2 = *bayesTree.marginalBayesNet(ordering[kx(2)]); Matrix expectedCovarianceX2 = eye(2,2) * (sigx2 * sigx2); Matrix actualCovarianceX2; - actualCovarianceX2 = bayesTree.marginalCovariance(ordering["x2"]); + actualCovarianceX2 = bayesTree.marginalCovariance(ordering[kx(2)]); 
EXPECT(assert_equal(expectedCovarianceX2, actualCovarianceX2, tol)); EXPECT(assert_equal(expected2,actual2,tol)); // Check marginal on x3 - GaussianBayesNet expected3 = simpleGaussian(ordering["x3"], zero(2), sigmax3); - GaussianBayesNet actual3 = *bayesTree.marginalBayesNet(ordering["x3"]); + GaussianBayesNet expected3 = simpleGaussian(ordering[kx(3)], zero(2), sigmax3); + GaussianBayesNet actual3 = *bayesTree.marginalBayesNet(ordering[kx(3)]); Matrix expectedCovarianceX3 = eye(2,2) * (sigmax3 * sigmax3); Matrix actualCovarianceX3; - actualCovarianceX3 = bayesTree.marginalCovariance(ordering["x3"]); + actualCovarianceX3 = bayesTree.marginalCovariance(ordering[kx(3)]); EXPECT(assert_equal(expectedCovarianceX3, actualCovarianceX3, tol)); EXPECT(assert_equal(expected3,actual3,tol)); // Check marginal on x4 - GaussianBayesNet expected4 = simpleGaussian(ordering["x4"], zero(2), sigmax4); - GaussianBayesNet actual4 = *bayesTree.marginalBayesNet(ordering["x4"]); + GaussianBayesNet expected4 = simpleGaussian(ordering[kx(4)], zero(2), sigmax4); + GaussianBayesNet actual4 = *bayesTree.marginalBayesNet(ordering[kx(4)]); Matrix expectedCovarianceX4 = eye(2,2) * (sigmax4 * sigmax4); Matrix actualCovarianceX4; - actualCovarianceX4 = bayesTree.marginalCovariance(ordering["x4"]); + actualCovarianceX4 = bayesTree.marginalCovariance(ordering[kx(4)]); EXPECT(assert_equal(expectedCovarianceX4, actualCovarianceX4, tol)); EXPECT(assert_equal(expected4,actual4,tol)); // Check marginal on x7 (should be equal to x1) - GaussianBayesNet expected7 = simpleGaussian(ordering["x7"], zero(2), sigmax7); - GaussianBayesNet actual7 = *bayesTree.marginalBayesNet(ordering["x7"]); + GaussianBayesNet expected7 = simpleGaussian(ordering[kx(7)], zero(2), sigmax7); + GaussianBayesNet actual7 = *bayesTree.marginalBayesNet(ordering[kx(7)]); Matrix expectedCovarianceX7 = eye(2,2) * (sigmax7 * sigmax7); Matrix actualCovarianceX7; - actualCovarianceX7 = bayesTree.marginalCovariance(ordering["x7"]); + actualCovarianceX7 = bayesTree.marginalCovariance(ordering[kx(7)]); EXPECT(assert_equal(expectedCovarianceX7, actualCovarianceX7, tol)); EXPECT(assert_equal(expected7,actual7,tol)); } @@ -243,7 +246,7 @@ TEST_UNSAFE( BayesTree, balanced_smoother_shortcuts ) { // Create smoother with 7 nodes Ordering ordering; - ordering += "x1","x3","x5","x7","x2","x6","x4"; + ordering += kx(1),kx(3),kx(5),kx(7),kx(2),kx(6),kx(4); GaussianFactorGraph smoother = createSmoother(7, ordering).first; // Create the Bayes tree @@ -257,19 +260,19 @@ TEST_UNSAFE( BayesTree, balanced_smoother_shortcuts ) EXPECT(assert_equal(empty,actual1,tol)); // Check the conditional P(C2|Root) - GaussianISAM::sharedClique C2 = isamTree[ordering["x3"]]; + GaussianISAM::sharedClique C2 = isamTree[ordering[kx(3)]]; GaussianBayesNet actual2 = GaussianISAM::shortcut(C2,R); EXPECT(assert_equal(empty,actual2,tol)); // Check the conditional P(C3|Root), which should be equal to P(x2|x4) /** TODO: Note for multifrontal conditional: - * p_x2_x4 is now an element conditional of the multifrontal conditional bayesTree[ordering["x2"]]->conditional() + * p_x2_x4 is now an element conditional of the multifrontal conditional bayesTree[ordering[kx(2)]]->conditional() * We don't know yet how to take it out. 
*/ -// GaussianConditional::shared_ptr p_x2_x4 = bayesTree[ordering["x2"]]->conditional(); +// GaussianConditional::shared_ptr p_x2_x4 = bayesTree[ordering[kx(2)]]->conditional(); // p_x2_x4->print("Conditional p_x2_x4: "); // GaussianBayesNet expected3(p_x2_x4); -// GaussianISAM::sharedClique C3 = isamTree[ordering["x1"]]; +// GaussianISAM::sharedClique C3 = isamTree[ordering[kx(1)]]; // GaussianBayesNet actual3 = GaussianISAM::shortcut(C3,R); // EXPECT(assert_equal(expected3,actual3,tol)); } @@ -279,7 +282,7 @@ TEST_UNSAFE( BayesTree, balanced_smoother_shortcuts ) //{ // // Create smoother with 7 nodes // Ordering ordering; -// ordering += "x1","x3","x5","x7","x2","x6","x4"; +// ordering += kx(1),kx(3),kx(5),kx(7),kx(2),kx(6),kx(4); // GaussianFactorGraph smoother = createSmoother(7, ordering).first; // // // Create the Bayes tree @@ -288,9 +291,9 @@ TEST_UNSAFE( BayesTree, balanced_smoother_shortcuts ) // // // Check the clique marginal P(C3) // double sigmax2_alt = 1/1.45533; // THIS NEEDS TO BE CHECKED! -// GaussianBayesNet expected = simpleGaussian(ordering["x2"],zero(2),sigmax2_alt); -// push_front(expected,ordering["x1"], zero(2), eye(2)*sqrt(2), ordering["x2"], -eye(2)*sqrt(2)/2, ones(2)); -// GaussianISAM::sharedClique R = bayesTree.root(), C3 = bayesTree[ordering["x1"]]; +// GaussianBayesNet expected = simpleGaussian(ordering[kx(2)],zero(2),sigmax2_alt); +// push_front(expected,ordering[kx(1)], zero(2), eye(2)*sqrt(2), ordering[kx(2)], -eye(2)*sqrt(2)/2, ones(2)); +// GaussianISAM::sharedClique R = bayesTree.root(), C3 = bayesTree[ordering[kx(1)]]; // GaussianFactorGraph marginal = C3->marginal(R); // GaussianVariableIndex varIndex(marginal); // Permutation toFront(Permutation::PullToFront(C3->keys(), varIndex.size())); @@ -308,7 +311,7 @@ TEST_UNSAFE( BayesTree, balanced_smoother_joint ) { // Create smoother with 7 nodes Ordering ordering; - ordering += "x1","x3","x5","x7","x2","x6","x4"; + ordering += kx(1),kx(3),kx(5),kx(7),kx(2),kx(6),kx(4); GaussianFactorGraph smoother = createSmoother(7, ordering).first; // Create the Bayes tree, expected to look like: @@ -327,41 +330,41 @@ TEST_UNSAFE( BayesTree, balanced_smoother_joint ) GaussianBayesNet expected1; // Why does the sign get flipped on the prior? 
GaussianConditional::shared_ptr - parent1(new GaussianConditional(ordering["x7"], zero(2), -1*I/sigmax7, ones(2))); + parent1(new GaussianConditional(ordering[kx(7)], zero(2), -1*I/sigmax7, ones(2))); expected1.push_front(parent1); - push_front(expected1,ordering["x1"], zero(2), I/sigmax7, ordering["x7"], A/sigmax7, sigma); - GaussianBayesNet actual1 = *bayesTree.jointBayesNet(ordering["x1"],ordering["x7"]); + push_front(expected1,ordering[kx(1)], zero(2), I/sigmax7, ordering[kx(7)], A/sigmax7, sigma); + GaussianBayesNet actual1 = *bayesTree.jointBayesNet(ordering[kx(1)],ordering[kx(7)]); EXPECT(assert_equal(expected1,actual1,tol)); // // Check the joint density P(x7,x1) factored as P(x7|x1)P(x1) // GaussianBayesNet expected2; // GaussianConditional::shared_ptr -// parent2(new GaussianConditional(ordering["x1"], zero(2), -1*I/sigmax1, ones(2))); +// parent2(new GaussianConditional(ordering[kx(1)], zero(2), -1*I/sigmax1, ones(2))); // expected2.push_front(parent2); -// push_front(expected2,ordering["x7"], zero(2), I/sigmax1, ordering["x1"], A/sigmax1, sigma); -// GaussianBayesNet actual2 = *bayesTree.jointBayesNet(ordering["x7"],ordering["x1"]); +// push_front(expected2,ordering[kx(7)], zero(2), I/sigmax1, ordering[kx(1)], A/sigmax1, sigma); +// GaussianBayesNet actual2 = *bayesTree.jointBayesNet(ordering[kx(7)],ordering[kx(1)]); // EXPECT(assert_equal(expected2,actual2,tol)); // Check the joint density P(x1,x4), i.e. with a root variable GaussianBayesNet expected3; GaussianConditional::shared_ptr - parent3(new GaussianConditional(ordering["x4"], zero(2), I/sigmax4, ones(2))); + parent3(new GaussianConditional(ordering[kx(4)], zero(2), I/sigmax4, ones(2))); expected3.push_front(parent3); double sig14 = 0.784465; Matrix A14 = -0.0769231*I; - push_front(expected3,ordering["x1"], zero(2), I/sig14, ordering["x4"], A14/sig14, sigma); - GaussianBayesNet actual3 = *bayesTree.jointBayesNet(ordering["x1"],ordering["x4"]); + push_front(expected3,ordering[kx(1)], zero(2), I/sig14, ordering[kx(4)], A14/sig14, sigma); + GaussianBayesNet actual3 = *bayesTree.jointBayesNet(ordering[kx(1)],ordering[kx(4)]); EXPECT(assert_equal(expected3,actual3,tol)); // // Check the joint density P(x4,x1), i.e. 
with a root variable, factored the other way // GaussianBayesNet expected4; // GaussianConditional::shared_ptr -// parent4(new GaussianConditional(ordering["x1"], zero(2), -1.0*I/sigmax1, ones(2))); +// parent4(new GaussianConditional(ordering[kx(1)], zero(2), -1.0*I/sigmax1, ones(2))); // expected4.push_front(parent4); // double sig41 = 0.668096; // Matrix A41 = -0.055794*I; -// push_front(expected4,ordering["x4"], zero(2), I/sig41, ordering["x1"], A41/sig41, sigma); -// GaussianBayesNet actual4 = *bayesTree.jointBayesNet(ordering["x4"],ordering["x1"]); +// push_front(expected4,ordering[kx(4)], zero(2), I/sig41, ordering[kx(1)], A41/sig41, sigma); +// GaussianBayesNet actual4 = *bayesTree.jointBayesNet(ordering[kx(4)],ordering[kx(1)]); // EXPECT(assert_equal(expected4,actual4,tol)); } diff --git a/tests/testGaussianISAM2.cpp b/tests/testGaussianISAM2.cpp index b0a955484..443bc41fe 100644 --- a/tests/testGaussianISAM2.cpp +++ b/tests/testGaussianISAM2.cpp @@ -788,7 +788,7 @@ TEST(ISAM2, constrained_ordering) planarSLAM::Graph fullgraph; // We will constrain x3 and x4 to the end - FastSet constrained; constrained.insert(planarSLAM::PoseKey(3)); constrained.insert(planarSLAM::PoseKey(4)); + FastSet constrained; constrained.insert(planarSLAM::PoseKey(3)); constrained.insert(planarSLAM::PoseKey(4)); // i keeps track of the time step size_t i = 0; diff --git a/tests/testGaussianJunctionTree.cpp b/tests/testGaussianJunctionTree.cpp index 19b0c676d..a00afae11 100644 --- a/tests/testGaussianJunctionTree.cpp +++ b/tests/testGaussianJunctionTree.cpp @@ -25,7 +25,7 @@ #include using namespace boost::assign; -// Magically casts strings like "x3" to a Symbol('x',3) key, see Symbol.h +// Magically casts strings like kx(3) to a Symbol('x',3) key, see Symbol.h #define GTSAM_MAGIC_KEY #include @@ -43,6 +43,9 @@ using namespace std; using namespace gtsam; using namespace example; +Key kx(size_t i) { return Symbol('x',i); } +Key kl(size_t i) { return Symbol('l',i); } + /* ************************************************************************* * Bayes tree for smoother with "nested dissection" ordering: C1 x5 x6 x4 @@ -53,20 +56,20 @@ using namespace example; TEST( GaussianJunctionTree, constructor2 ) { // create a graph - Ordering ordering; ordering += "x1","x3","x5","x7","x2","x6","x4"; + Ordering ordering; ordering += kx(1),kx(3),kx(5),kx(7),kx(2),kx(6),kx(4); GaussianFactorGraph fg = createSmoother(7, ordering).first; // create an ordering GaussianJunctionTree actual(fg); - vector frontal1; frontal1 += ordering["x5"], ordering["x6"], ordering["x4"]; - vector frontal2; frontal2 += ordering["x3"], ordering["x2"]; - vector frontal3; frontal3 += ordering["x1"]; - vector frontal4; frontal4 += ordering["x7"]; + vector frontal1; frontal1 += ordering[kx(5)], ordering[kx(6)], ordering[kx(4)]; + vector frontal2; frontal2 += ordering[kx(3)], ordering[kx(2)]; + vector frontal3; frontal3 += ordering[kx(1)]; + vector frontal4; frontal4 += ordering[kx(7)]; vector sep1; - vector sep2; sep2 += ordering["x4"]; - vector sep3; sep3 += ordering["x2"]; - vector sep4; sep4 += ordering["x6"]; + vector sep2; sep2 += ordering[kx(4)]; + vector sep3; sep3 += ordering[kx(2)]; + vector sep4; sep4 += ordering[kx(6)]; EXPECT(assert_equal(frontal1, actual.root()->frontal)); EXPECT(assert_equal(sep1, actual.root()->separator)); LONGS_EQUAL(5, actual.root()->size()); @@ -111,7 +114,7 @@ TEST( GaussianJunctionTree, optimizeMultiFrontal2) // create a graph example::Graph nlfg = createNonlinearFactorGraph(); Values noisy = 
createNoisyValues(); - Ordering ordering; ordering += "x1","x2","l1"; + Ordering ordering; ordering += kx(1),kx(2),kl(1); GaussianFactorGraph fg = *nlfg.linearize(noisy, ordering); // optimize the graph @@ -203,7 +206,7 @@ TEST(GaussianJunctionTree, simpleMarginal) { init.insert(pose2SLAM::PoseKey(1), Pose2(1.0, 0.0, 0.0)); Ordering ordering; - ordering += "x1", "x0"; + ordering += kx(1), kx(0); GaussianFactorGraph gfg = *fg.linearize(init, ordering); diff --git a/tests/testGraph.cpp b/tests/testGraph.cpp index 2ac2ce691..9131d801d 100644 --- a/tests/testGraph.cpp +++ b/tests/testGraph.cpp @@ -34,26 +34,28 @@ using namespace boost::assign; using namespace std; using namespace gtsam; +Key kx(size_t i) { return Symbol('x',i); } + /* ************************************************************************* */ // x1 -> x2 // -> x3 -> x4 // -> x5 TEST ( Ordering, predecessorMap2Keys ) { - PredecessorMap p_map; - p_map.insert("x1","x1"); - p_map.insert("x2","x1"); - p_map.insert("x3","x1"); - p_map.insert("x4","x3"); - p_map.insert("x5","x1"); + PredecessorMap p_map; + p_map.insert(kx(1),kx(1)); + p_map.insert(kx(2),kx(1)); + p_map.insert(kx(3),kx(1)); + p_map.insert(kx(4),kx(3)); + p_map.insert(kx(5),kx(1)); - list expected; - expected += "x4","x5","x3","x2","x1";//PoseKey(4), PoseKey(5), PoseKey(3), PoseKey(2), PoseKey(1); + list expected; + expected += kx(4),kx(5),kx(3),kx(2),kx(1);//PoseKey(4), PoseKey(5), PoseKey(3), PoseKey(2), PoseKey(1); - list actual = predecessorMap2Keys(p_map); + list actual = predecessorMap2Keys(p_map); LONGS_EQUAL(expected.size(), actual.size()); - list::const_iterator it1 = expected.begin(); - list::const_iterator it2 = actual.begin(); + list::const_iterator it1 = expected.begin(); + list::const_iterator it2 = actual.begin(); for(; it1!=expected.end(); it1++, it2++) CHECK(*it1 == *it2) } @@ -62,18 +64,18 @@ TEST ( Ordering, predecessorMap2Keys ) { TEST( Graph, predecessorMap2Graph ) { typedef SGraph::Vertex SVertex; - SGraph graph; + SGraph graph; SVertex root; - map key2vertex; + map key2vertex; - PredecessorMap p_map; - p_map.insert("x1", "x2"); - p_map.insert("x2", "x2"); - p_map.insert("x3", "x2"); - tie(graph, root, key2vertex) = predecessorMap2Graph, SVertex, string>(p_map); + PredecessorMap p_map; + p_map.insert(kx(1), kx(2)); + p_map.insert(kx(2), kx(2)); + p_map.insert(kx(3), kx(2)); + tie(graph, root, key2vertex) = predecessorMap2Graph, SVertex, Key>(p_map); LONGS_EQUAL(3, boost::num_vertices(graph)); - CHECK(root == key2vertex["x2"]); + CHECK(root == key2vertex[kx(2)]); } /* ************************************************************************* */ @@ -87,7 +89,7 @@ TEST( Graph, composePoses ) graph.addOdometry(2,3, p23, cov); graph.addOdometry(4,3, p43, cov); - PredecessorMap tree; + PredecessorMap tree; tree.insert(pose2SLAM::PoseKey(1),pose2SLAM::PoseKey(2)); tree.insert(pose2SLAM::PoseKey(2),pose2SLAM::PoseKey(2)); tree.insert(pose2SLAM::PoseKey(3),pose2SLAM::PoseKey(2)); @@ -96,7 +98,7 @@ TEST( Graph, composePoses ) Pose2 rootPose = p2; boost::shared_ptr actual = composePoses (graph, tree, rootPose); + Pose2, Key> (graph, tree, rootPose); Values expected; expected.insert(pose2SLAM::PoseKey(1), p1); diff --git a/tests/testNonlinearFactor.cpp b/tests/testNonlinearFactor.cpp index 5b4050997..a12857ed5 100644 --- a/tests/testNonlinearFactor.cpp +++ b/tests/testNonlinearFactor.cpp @@ -25,7 +25,7 @@ // TODO: DANGEROUS, create shared pointers #define GTSAM_MAGIC_GAUSSIAN 2 -// Magically casts strings like "x3" to a Symbol('x',3) key, see Symbol.h +// 
Magically casts strings like PoseKey(3) to a Symbol('x',3) key, see Symbol.h #define GTSAM_MAGIC_KEY #include @@ -211,7 +211,7 @@ TEST( NonlinearFactor, linearize_constraint1 ) // create expected Ordering ord(*config.orderingArbitrary()); Vector b = Vector_(2, 0., -3.); - JacobianFactor expected(ord["x1"], Matrix_(2,2, 5.0, 0.0, 0.0, 1.0), b, constraint); + JacobianFactor expected(ord[PoseKey(1)], Matrix_(2,2, 5.0, 0.0, 0.0, 1.0), b, constraint); CHECK(assert_equal((const GaussianFactor&)expected, *actual)); } @@ -233,15 +233,15 @@ TEST( NonlinearFactor, linearize_constraint2 ) Ordering ord(*config.orderingArbitrary()); Matrix A = Matrix_(2,2, 5.0, 0.0, 0.0, 1.0); Vector b = Vector_(2, -15., -3.); - JacobianFactor expected(ord["x1"], -1*A, ord["l1"], A, b, constraint); + JacobianFactor expected(ord[PoseKey(1)], -1*A, ord[PointKey(1)], A, b, constraint); CHECK(assert_equal((const GaussianFactor&)expected, *actual)); } /* ************************************************************************* */ class TestFactor4 : public NoiseModelFactor4 { public: - typedef NoiseModeFactor4 Base; - TestFactor4() : Base(sharedSigmas(Vector_(1, 2.0)), "x1", "x2", "x3", "x4") {} + typedef NoiseModelFactor4 Base; + TestFactor4() : Base(sharedSigmas(Vector_(1, 2.0)), PoseKey(1), PoseKey(2), PoseKey(3), PoseKey(4)) {} virtual Vector evaluateError(const LieVector& x1, const LieVector& x2, const LieVector& x3, const LieVector& x4, @@ -263,13 +263,13 @@ public: TEST(NonlinearFactor, NoiseModelFactor4) { TestFactor4 tf; Values tv; - tv.insert("x1", LieVector(1, 1.0)); - tv.insert("x2", LieVector(1, 2.0)); - tv.insert("x3", LieVector(1, 3.0)); - tv.insert("x4", LieVector(1, 4.0)); + tv.insert(PoseKey(1), LieVector(1, 1.0)); + tv.insert(PoseKey(2), LieVector(1, 2.0)); + tv.insert(PoseKey(3), LieVector(1, 3.0)); + tv.insert(PoseKey(4), LieVector(1, 4.0)); EXPECT(assert_equal(Vector_(1, 10.0), tf.unwhitenedError(tv))); DOUBLES_EQUAL(25.0/2.0, tf.error(tv), 1e-9); - Ordering ordering; ordering += "x1", "x2", "x3", "x4"; + Ordering ordering; ordering += PoseKey(1), PoseKey(2), PoseKey(3), PoseKey(4); JacobianFactor jf(*boost::dynamic_pointer_cast(tf.linearize(tv, ordering))); LONGS_EQUAL(jf.keys()[0], 0); LONGS_EQUAL(jf.keys()[1], 1); @@ -286,7 +286,7 @@ TEST(NonlinearFactor, NoiseModelFactor4) { class TestFactor5 : public NoiseModelFactor5 { public: typedef NoiseModelFactor5 Base; - TestFactor5() : Base(sharedSigmas(Vector_(1, 2.0)), "x1", "x2", "x3", "x4", "x5") {} + TestFactor5() : Base(sharedSigmas(Vector_(1, 2.0)), PoseKey(1), PoseKey(2), PoseKey(3), PoseKey(4), PoseKey(5)) {} virtual Vector evaluateError(const X1& x1, const X2& x2, const X3& x3, const X4& x4, const X5& x5, @@ -310,14 +310,14 @@ public: TEST(NonlinearFactor, NoiseModelFactor5) { TestFactor5 tf; Values tv; - tv.insert("x1", LieVector(1, 1.0)); - tv.insert("x2", LieVector(1, 2.0)); - tv.insert("x3", LieVector(1, 3.0)); - tv.insert("x4", LieVector(1, 4.0)); - tv.insert("x5", LieVector(1, 5.0)); + tv.insert(PoseKey(1), LieVector(1, 1.0)); + tv.insert(PoseKey(2), LieVector(1, 2.0)); + tv.insert(PoseKey(3), LieVector(1, 3.0)); + tv.insert(PoseKey(4), LieVector(1, 4.0)); + tv.insert(PoseKey(5), LieVector(1, 5.0)); EXPECT(assert_equal(Vector_(1, 15.0), tf.unwhitenedError(tv))); DOUBLES_EQUAL(56.25/2.0, tf.error(tv), 1e-9); - Ordering ordering; ordering += "x1", "x2", "x3", "x4", "x5"; + Ordering ordering; ordering += PoseKey(1), PoseKey(2), PoseKey(3), PoseKey(4), PoseKey(5); JacobianFactor jf(*boost::dynamic_pointer_cast(tf.linearize(tv, ordering))); 
 LONGS_EQUAL(jf.keys()[0], 0);
 LONGS_EQUAL(jf.keys()[1], 1);
@@ -336,7 +336,7 @@ TEST(NonlinearFactor, NoiseModelFactor5) {
 class TestFactor6 : public NoiseModelFactor6 {
 public:
 typedef NoiseModelFactor6 Base;
- TestFactor6() : Base(sharedSigmas(Vector_(1, 2.0)), "x1", "x2", "x3", "x4", "x5", "x6") {}
+ TestFactor6() : Base(sharedSigmas(Vector_(1, 2.0)), PoseKey(1), PoseKey(2), PoseKey(3), PoseKey(4), PoseKey(5), PoseKey(6)) {}
 virtual Vector evaluateError(const X1& x1, const X2& x2, const X3& x3, const X4& x4, const X5& x5, const X6& x6,
@@ -362,15 +362,15 @@ public:
 TEST(NonlinearFactor, NoiseModelFactor6) {
 TestFactor6 tf;
 Values tv;
- tv.insert("x1", LieVector(1, 1.0));
- tv.insert("x2", LieVector(1, 2.0));
- tv.insert("x3", LieVector(1, 3.0));
- tv.insert("x4", LieVector(1, 4.0));
- tv.insert("x5", LieVector(1, 5.0));
- tv.insert("x6", LieVector(1, 6.0));
+ tv.insert(PoseKey(1), LieVector(1, 1.0));
+ tv.insert(PoseKey(2), LieVector(1, 2.0));
+ tv.insert(PoseKey(3), LieVector(1, 3.0));
+ tv.insert(PoseKey(4), LieVector(1, 4.0));
+ tv.insert(PoseKey(5), LieVector(1, 5.0));
+ tv.insert(PoseKey(6), LieVector(1, 6.0));
 EXPECT(assert_equal(Vector_(1, 21.0), tf.unwhitenedError(tv)));
 DOUBLES_EQUAL(110.25/2.0, tf.error(tv), 1e-9);
- Ordering ordering; ordering += "x1", "x2", "x3", "x4", "x5", "x6";
+ Ordering ordering; ordering += PoseKey(1), PoseKey(2), PoseKey(3), PoseKey(4), PoseKey(5), PoseKey(6);
 JacobianFactor jf(*boost::dynamic_pointer_cast(tf.linearize(tv, ordering)));
 LONGS_EQUAL(jf.keys()[0], 0);
 LONGS_EQUAL(jf.keys()[1], 1);
diff --git a/tests/testNonlinearFactorGraph.cpp b/tests/testNonlinearFactorGraph.cpp
index da8d8a114..08d50afb6 100644
--- a/tests/testNonlinearFactorGraph.cpp
+++ b/tests/testNonlinearFactorGraph.cpp
@@ -39,6 +39,9 @@ using namespace boost::assign;
 using namespace gtsam;
 using namespace example;
+Key kx(size_t i) { return Symbol('x',i); }
+Key kl(size_t i) { return Symbol('l',i); }
+
 /* ************************************************************************* */
 TEST( Graph, equals )
 {
@@ -67,16 +70,16 @@ TEST( Graph, keys )
 set actual = fg.keys();
 LONGS_EQUAL(3, actual.size());
 set::const_iterator it = actual.begin();
- CHECK(assert_equal(Symbol('l', 1), *(it++)));
- CHECK(assert_equal(Symbol('x', 1), *(it++)));
- CHECK(assert_equal(Symbol('x', 2), *(it++)));
+ LONGS_EQUAL(kl(1), *(it++));
+ LONGS_EQUAL(kx(1), *(it++));
+ LONGS_EQUAL(kx(2), *(it++));
 }
 /* ************************************************************************* */
 TEST( Graph, GET_ORDERING)
 {
 // Ordering expected; expected += "x1","l1","x2"; // For starting with x1,x2,l1
- Ordering expected; expected += "l1","x2","x1"; // For starting with l1,x1,x2
+ Ordering expected; expected += kl(1), kx(2), kx(1); // For starting with l1,x1,x2
 Graph nlfg = createNonlinearFactorGraph();
 SymbolicFactorGraph::shared_ptr symbolic;
 Ordering::shared_ptr ordering;
diff --git a/tests/testNonlinearOptimizer.cpp b/tests/testNonlinearOptimizer.cpp
index f7a09f95f..41db76e1e 100644
--- a/tests/testNonlinearOptimizer.cpp
+++ b/tests/testNonlinearOptimizer.cpp
@@ -26,7 +26,7 @@ using namespace boost::assign;
 #include
 using namespace boost;
-// Magically casts strings like "x3" to a Symbol('x',3) key, see Symbol.h
+// Magically casts strings like kx(3) to a Symbol('x',3) key, see Symbol.h
 #define GTSAM_MAGIC_KEY
 #include
@@ -44,6 +44,9 @@ using namespace gtsam;
 const double tol = 1e-5;
+Key kx(size_t i) { return Symbol('x',i); }
+Key kl(size_t i) { return Symbol('l',i); }
+
 typedef NonlinearOptimizer Optimizer;
 /* ************************************************************************* */
@@ -55,20 +58,20 @@ TEST( NonlinearOptimizer, linearizeAndOptimizeForDelta )
 // Expected values structure is the difference between the noisy config
 // and the ground-truth config. One step only because it's linear !
- Ordering ord1; ord1 += "x2","l1","x1";
+ Ordering ord1; ord1 += kx(2),kl(1),kx(1);
 VectorValues expected(initial->dims(ord1));
 Vector dl1(2);
 dl1(0) = -0.1;
 dl1(1) = 0.1;
- expected[ord1["l1"]] = dl1;
+ expected[ord1[kl(1)]] = dl1;
 Vector dx1(2);
 dx1(0) = -0.1;
 dx1(1) = -0.1;
- expected[ord1["x1"]] = dx1;
+ expected[ord1[kx(1)]] = dx1;
 Vector dx2(2);
 dx2(0) = 0.1;
 dx2(1) = -0.2;
- expected[ord1["x2"]] = dx2;
+ expected[ord1[kx(2)]] = dx2;
 // Check one ordering
 Optimizer optimizer1(fg, initial, Optimizer::shared_ordering(new Ordering(ord1)));
@@ -78,7 +81,7 @@ TEST( NonlinearOptimizer, linearizeAndOptimizeForDelta )
 // SL-FIX
 // Check another
 // shared_ptr ord2(new Ordering());
-// *ord2 += "x1","x2","l1";
+// *ord2 += kx(1),kx(2),kl(1);
 // solver = Optimizer::shared_solver(new Optimizer::solver(ord2));
 // Optimizer optimizer2(fg, initial, solver);
 //
@@ -87,7 +90,7 @@
 //
 // // And yet another...
 // shared_ptr ord3(new Ordering());
-// *ord3 += "l1","x1","x2";
+// *ord3 += kl(1),kx(1),kx(2);
 // solver = Optimizer::shared_solver(new Optimizer::solver(ord3));
 // Optimizer optimizer3(fg, initial, solver);
 //
@@ -96,7 +99,7 @@
 //
 // // More...
 // shared_ptr ord4(new Ordering());
-// *ord4 += "x1","x2", "l1";
+// *ord4 += kx(1),kx(2), kl(1);
 // solver = Optimizer::shared_solver(new Optimizer::solver(ord4));
 // Optimizer optimizer4(fg, initial, solver);
 //
@@ -118,7 +121,7 @@ TEST( NonlinearOptimizer, iterateLM )
 // ordering
 shared_ptr ord(new Ordering());
- ord->push_back("x1");
+ ord->push_back(kx(1));
 // create initial optimization state, with lambda=0
 Optimizer optimizer(fg, config, ord, NonlinearOptimizationParameters::newLambda(0.));
@@ -161,7 +164,7 @@ TEST( NonlinearOptimizer, optimize )
 // optimize parameters
 shared_ptr ord(new Ordering());
- ord->push_back("x1");
+ ord->push_back(kx(1));
 // initial optimization state is the same in both cases tested
 boost::shared_ptr params = boost::make_shared();
@@ -299,7 +302,7 @@ TEST_UNSAFE(NonlinearOptimizer, NullFactor) {
 // optimize parameters
 shared_ptr ord(new Ordering());
- ord->push_back("x1");
+ ord->push_back(kx(1));
 // initial optimization state is the same in both cases tested
 boost::shared_ptr params = boost::make_shared();
@@ -367,7 +370,7 @@ TEST_UNSAFE(NonlinearOptimizer, NullFactor) {
 //
 // // Check one ordering
 // shared_ptr ord1(new Ordering());
-// *ord1 += "x2","l1","x1";
+// *ord1 += kx(2),kl(1),kx(1);
 // solver = Optimizer::shared_solver(new Optimizer::solver(ord1));
 // Optimizer optimizer1(fg, initial, solver);
 //
diff --git a/tests/testSymbolicBayesNet.cpp b/tests/testSymbolicBayesNet.cpp
index efa794a25..adddfab34 100644
--- a/tests/testSymbolicBayesNet.cpp
+++ b/tests/testSymbolicBayesNet.cpp
@@ -21,7 +21,7 @@ using namespace boost::assign;
 #include
-// Magically casts strings like "x3" to a Symbol('x',3) key, see Symbol.h
+// Magically casts strings like kx(3) to a Symbol('x',3) key, see Symbol.h
 #define GTSAM_MAGIC_KEY
 #include
@@ -34,6 +34,9 @@ using namespace std;
 using namespace gtsam;
 using namespace example;
+Key kx(size_t i) { return Symbol('x',i); }
+Key kl(size_t i) { return Symbol('l',i); }
+
 //Symbol _B_('B', 0), _L_('L', 0);
 //IndexConditional::shared_ptr
 // B(new IndexConditional(_B_)),
@@ -42,12 +45,12 @@ using namespace example;
 /* ************************************************************************* */
 TEST( SymbolicBayesNet, constructor )
 {
- Ordering o; o += "x2","l1","x1";
+ Ordering o; o += kx(2),kl(1),kx(1);
 // Create manually
 IndexConditional::shared_ptr
- x2(new IndexConditional(o["x2"],o["l1"], o["x1"])),
- l1(new IndexConditional(o["l1"],o["x1"])),
- x1(new IndexConditional(o["x1"]));
+ x2(new IndexConditional(o[kx(2)],o[kl(1)], o[kx(1)])),
+ l1(new IndexConditional(o[kl(1)],o[kx(1)])),
+ x1(new IndexConditional(o[kx(1)]));
 BayesNet expected;
 expected.push_back(x2);
 expected.push_back(l1);
diff --git a/tests/testSymbolicFactorGraph.cpp b/tests/testSymbolicFactorGraph.cpp
index 4295a5263..596a715cb 100644
--- a/tests/testSymbolicFactorGraph.cpp
+++ b/tests/testSymbolicFactorGraph.cpp
@@ -20,7 +20,7 @@ using namespace boost::assign;
 #include
-// Magically casts strings like "x3" to a Symbol('x',3) key, see Symbol.h
+// Magically casts strings like kx(3) to a Symbol('x',3) key, see Symbol.h
 #define GTSAM_MAGIC_KEY
 #include
@@ -33,16 +33,19 @@ using namespace std;
 using namespace gtsam;
 using namespace example;
+Key kx(size_t i) { return Symbol('x',i); }
+Key kl(size_t i) { return Symbol('l',i); }
+
 /* ************************************************************************* */
 TEST( SymbolicFactorGraph, symbolicFactorGraph )
 {
- Ordering o; o += "x1","l1","x2";
+ Ordering o; o += kx(1),kl(1),kx(2);
 // construct expected symbolic graph
 SymbolicFactorGraph expected;
- expected.push_factor(o["x1"]);
- expected.push_factor(o["x1"],o["x2"]);
- expected.push_factor(o["x1"],o["l1"]);
- expected.push_factor(o["x2"],o["l1"]);
+ expected.push_factor(o[kx(1)]);
+ expected.push_factor(o[kx(1)],o[kx(2)]);
+ expected.push_factor(o[kx(1)],o[kl(1)]);
+ expected.push_factor(o[kx(2)],o[kl(1)]);
 // construct it from the factor graph
 GaussianFactorGraph factorGraph = createGaussianFactorGraph(o);
@@ -59,7 +62,7 @@ TEST( SymbolicFactorGraph, symbolicFactorGraph )
 // SymbolicFactorGraph actual(factorGraph);
 // SymbolicFactor::shared_ptr f1 = actual[0];
 // SymbolicFactor::shared_ptr f3 = actual[2];
-// actual.findAndRemoveFactors("x2");
+// actual.findAndRemoveFactors(kx(2));
 //
 // // construct expected graph after find_factors_and_remove
 // SymbolicFactorGraph expected;
@@ -79,13 +82,13 @@ TEST( SymbolicFactorGraph, symbolicFactorGraph )
 // SymbolicFactorGraph fg(factorGraph);
 //
 // // ask for all factor indices connected to x1
-// list x1_factors = fg.factors("x1");
+// list x1_factors = fg.factors(kx(1));
 // int x1_indices[] = { 0, 1, 2 };
 // list x1_expected(x1_indices, x1_indices + 3);
 // CHECK(x1_factors==x1_expected);
 //
 // // ask for all factor indices connected to x2
-// list x2_factors = fg.factors("x2");
+// list x2_factors = fg.factors(kx(2));
 // int x2_indices[] = { 1, 3 };
 // list x2_expected(x2_indices, x2_indices + 2);
 // CHECK(x2_factors==x2_expected);
@@ -99,26 +102,26 @@ TEST( SymbolicFactorGraph, symbolicFactorGraph )
 // SymbolicFactorGraph fg(factorGraph);
 //
 // // combine all factors connected to x1
-// SymbolicFactor::shared_ptr actual = removeAndCombineFactors(fg,"x1");
+// SymbolicFactor::shared_ptr actual = removeAndCombineFactors(fg,kx(1));
 //
 // // check result
-// SymbolicFactor expected("l1","x1","x2");
+// SymbolicFactor expected(kl(1),kx(1),kx(2));
 // CHECK(assert_equal(expected,*actual));
 //}
 ///* ************************************************************************* */
 //TEST( SymbolicFactorGraph, eliminateOne )
 //{
-// Ordering o; o += "x1","l1","x2";
+// Ordering o; o += kx(1),kl(1),kx(2);
 // // create a test graph
 // GaussianFactorGraph factorGraph = createGaussianFactorGraph(o);
 // SymbolicFactorGraph fg(factorGraph);
 //
 // // eliminate
-// IndexConditional::shared_ptr actual = GaussianSequentialSolver::EliminateUntil(fg, o["x1"]+1);
+// IndexConditional::shared_ptr actual = GaussianSequentialSolver::EliminateUntil(fg, o[kx(1)]+1);
 //
 // // create expected symbolic IndexConditional
-// IndexConditional expected(o["x1"],o["l1"],o["x2"]);
+// IndexConditional expected(o[kx(1)],o[kl(1)],o[kx(2)]);
 //
 // CHECK(assert_equal(expected,*actual));
 //}
@@ -126,12 +129,12 @@ TEST( SymbolicFactorGraph, symbolicFactorGraph )
 /* ************************************************************************* */
 TEST( SymbolicFactorGraph, eliminate )
 {
- Ordering o; o += "x2","l1","x1";
+ Ordering o; o += kx(2),kl(1),kx(1);
 // create expected Chordal bayes Net
- IndexConditional::shared_ptr x2(new IndexConditional(o["x2"], o["l1"], o["x1"]));
- IndexConditional::shared_ptr l1(new IndexConditional(o["l1"], o["x1"]));
- IndexConditional::shared_ptr x1(new IndexConditional(o["x1"]));
+ IndexConditional::shared_ptr x2(new IndexConditional(o[kx(2)], o[kl(1)], o[kx(1)]));
+ IndexConditional::shared_ptr l1(new IndexConditional(o[kl(1)], o[kx(1)]));
+ IndexConditional::shared_ptr x1(new IndexConditional(o[kx(1)]));
 SymbolicBayesNet expected;
 expected.push_back(x2);
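The hunks above all apply the same mechanical migration: string keys that relied on GTSAM_MAGIC_KEY (e.g. "x1") become integer Keys built from Symbol('x',1), either directly or through small helpers such as kx()/kl(). A minimal stand-alone sketch of that pattern follows; it assumes the GTSAM 2.x-era Values/Symbol/LieVector API used in these tests, and the include paths and main() wrapper are illustrative only, not part of the patch.

#include <gtsam/base/LieVector.h>
#include <gtsam/nonlinear/Symbol.h>
#include <gtsam/nonlinear/Values.h>

using namespace gtsam;

// Helpers mirroring the kx()/kl() functions added to the tests above.
Key kx(size_t i) { return Symbol('x', i); }
Key kl(size_t i) { return Symbol('l', i); }

int main() {
  Values values;
  // Old style, removed by this patch (required the GTSAM_MAGIC_KEY string cast):
  //   values.insert("x1", LieVector(1, 1.0));
  // New style: explicit Symbol-based keys.
  values.insert(kx(1), LieVector(1, 1.0));
  values.insert(kl(1), LieVector(1, 2.0));
  values.print("values keyed by Symbol");
  return 0;
}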