Merge pull request #1323 from borglab/hybrid/multifrontal
						commit
						583d12151c
					
				|  | @ -105,7 +105,7 @@ bool GaussianMixture::equals(const HybridFactor &lf, double tol) const { | |||
| /* *******************************************************************************/ | ||||
| void GaussianMixture::print(const std::string &s, | ||||
|                             const KeyFormatter &formatter) const { | ||||
|   std::cout << s; | ||||
|   std::cout << (s.empty() ? "" : s + "\n"); | ||||
|   if (isContinuous()) std::cout << "Continuous "; | ||||
|   if (isDiscrete()) std::cout << "Discrete "; | ||||
|   if (isHybrid()) std::cout << "Hybrid "; | ||||
|  |  | |||
|  | @ -14,7 +14,7 @@ | |||
|  * @brief   Hybrid Bayes Tree, the result of eliminating a | ||||
|  * HybridJunctionTree | ||||
|  * @date Mar 11, 2022 | ||||
|  * @author  Fan Jiang | ||||
|  * @author  Fan Jiang, Varun Agrawal | ||||
|  */ | ||||
| 
 | ||||
| #include <gtsam/base/treeTraversal-inst.h> | ||||
|  | @ -73,6 +73,8 @@ struct HybridAssignmentData { | |||
|   GaussianBayesTree::sharedNode parentClique_; | ||||
|   // The gaussian bayes tree that will be recursively created.
 | ||||
|   GaussianBayesTree* gaussianbayesTree_; | ||||
|   // Flag indicating if all the nodes are valid. Used in optimize().
 | ||||
|   bool valid_; | ||||
| 
 | ||||
|   /**
 | ||||
|    * @brief Construct a new Hybrid Assignment Data object. | ||||
|  | @ -83,10 +85,13 @@ struct HybridAssignmentData { | |||
|    */ | ||||
|   HybridAssignmentData(const DiscreteValues& assignment, | ||||
|                        const GaussianBayesTree::sharedNode& parentClique, | ||||
|                        GaussianBayesTree* gbt) | ||||
|                        GaussianBayesTree* gbt, bool valid = true) | ||||
|       : assignment_(assignment), | ||||
|         parentClique_(parentClique), | ||||
|         gaussianbayesTree_(gbt) {} | ||||
|         gaussianbayesTree_(gbt), | ||||
|         valid_(valid) {} | ||||
| 
 | ||||
|   bool isValid() const { return valid_; } | ||||
| 
 | ||||
|   /**
 | ||||
|    * @brief A function used during tree traversal that operates on each node | ||||
|  | @ -101,6 +106,7 @@ struct HybridAssignmentData { | |||
|       HybridAssignmentData& parentData) { | ||||
|     // Extract the gaussian conditional from the Hybrid clique
 | ||||
|     HybridConditional::shared_ptr hybrid_conditional = node->conditional(); | ||||
| 
 | ||||
|     GaussianConditional::shared_ptr conditional; | ||||
|     if (hybrid_conditional->isHybrid()) { | ||||
|       conditional = (*hybrid_conditional->asMixture())(parentData.assignment_); | ||||
|  | @ -111,15 +117,21 @@ struct HybridAssignmentData { | |||
|       conditional = boost::make_shared<GaussianConditional>(); | ||||
|     } | ||||
| 
 | ||||
|     // Create the GaussianClique for the current node
 | ||||
|     auto clique = boost::make_shared<GaussianBayesTree::Node>(conditional); | ||||
|     // Add the current clique to the GaussianBayesTree.
 | ||||
|     parentData.gaussianbayesTree_->addClique(clique, parentData.parentClique_); | ||||
|     GaussianBayesTree::sharedNode clique; | ||||
|     if (conditional) { | ||||
|       // Create the GaussianClique for the current node
 | ||||
|       clique = boost::make_shared<GaussianBayesTree::Node>(conditional); | ||||
|       // Add the current clique to the GaussianBayesTree.
 | ||||
|       parentData.gaussianbayesTree_->addClique(clique, | ||||
|                                                parentData.parentClique_); | ||||
|     } else { | ||||
|       parentData.valid_ = false; | ||||
|     } | ||||
| 
 | ||||
|     // Create new HybridAssignmentData where the current node is the parent
 | ||||
|     // This will be passed down to the children nodes
 | ||||
|     HybridAssignmentData data(parentData.assignment_, clique, | ||||
|                               parentData.gaussianbayesTree_); | ||||
|                               parentData.gaussianbayesTree_, parentData.valid_); | ||||
|     return data; | ||||
|   } | ||||
| }; | ||||
|  | @ -138,6 +150,9 @@ VectorValues HybridBayesTree::optimize(const DiscreteValues& assignment) const { | |||
|         visitorPost); | ||||
|   } | ||||
| 
 | ||||
|   if (!rootData.isValid()) { | ||||
|     return VectorValues(); | ||||
|   } | ||||
|   VectorValues result = gbt.optimize(); | ||||
| 
 | ||||
|   // Return the optimized bayes net result.
 | ||||
|  |  | |||
|  | @ -50,9 +50,12 @@ class GTSAM_EXPORT HybridBayesTreeClique | |||
|   typedef boost::shared_ptr<This> shared_ptr; | ||||
|   typedef boost::weak_ptr<This> weak_ptr; | ||||
|   HybridBayesTreeClique() {} | ||||
|   virtual ~HybridBayesTreeClique() {} | ||||
|   HybridBayesTreeClique(const boost::shared_ptr<HybridConditional>& conditional) | ||||
|       : Base(conditional) {} | ||||
|   ///< Copy constructor
 | ||||
|   HybridBayesTreeClique(const HybridBayesTreeClique& clique) : Base(clique) {} | ||||
| 
 | ||||
|   virtual ~HybridBayesTreeClique() {} | ||||
| }; | ||||
| 
 | ||||
| /* ************************************************************************* */ | ||||
|  |  | |||
|  | @ -24,7 +24,7 @@ | |||
| namespace gtsam { | ||||
| 
 | ||||
| /**
 | ||||
|  * Elimination Tree type for Hybrid | ||||
|  * Elimination Tree type for Hybrid Factor Graphs. | ||||
|  * | ||||
|  * @ingroup hybrid | ||||
|  */ | ||||
|  |  | |||
|  | @ -92,7 +92,6 @@ GaussianMixtureFactor::Sum sumFrontals( | |||
|       if (auto cgmf = boost::dynamic_pointer_cast<GaussianMixtureFactor>(f)) { | ||||
|         sum = cgmf->add(sum); | ||||
|       } | ||||
| 
 | ||||
|       if (auto gm = boost::dynamic_pointer_cast<HybridConditional>(f)) { | ||||
|         sum = gm->asMixture()->add(sum); | ||||
|       } | ||||
|  | @ -189,7 +188,7 @@ hybridElimination(const HybridGaussianFactorGraph &factors, | |||
|   DiscreteKeys discreteSeparator(discreteSeparatorSet.begin(), | ||||
|                                  discreteSeparatorSet.end()); | ||||
| 
 | ||||
|   // sum out frontals, this is the factor on the separator
 | ||||
|   // sum out frontals, this is the factor 𝜏 on the separator
 | ||||
|   GaussianMixtureFactor::Sum sum = sumFrontals(factors); | ||||
| 
 | ||||
|   // If a tree leaf contains nullptr,
 | ||||
|  | @ -257,13 +256,14 @@ hybridElimination(const HybridGaussianFactorGraph &factors, | |||
|   // If there are no more continuous parents, then we should create here a
 | ||||
|   // DiscreteFactor, with the error for each discrete choice.
 | ||||
|   if (keysOfSeparator.empty()) { | ||||
|     // TODO(Varun) Use the math from the iMHS_Math-1-indexed document
 | ||||
|     VectorValues empty_values; | ||||
|     auto factorProb = [&](const GaussianFactor::shared_ptr &factor) { | ||||
|       if (!factor) { | ||||
|         return 0.0;  // If nullptr, return 0.0 probability
 | ||||
|       } else { | ||||
|         return 1.0; | ||||
|         double error = | ||||
|             0.5 * std::abs(factor->augmentedInformation().determinant()); | ||||
|         return std::exp(-error); | ||||
|       } | ||||
|     }; | ||||
|     DecisionTree<Key, double> fdt(separatorFactors, factorProb); | ||||
|  | @ -529,122 +529,13 @@ AlgebraicDecisionTree<Key> HybridGaussianFactorGraph::probPrime( | |||
| } | ||||
| 
 | ||||
| /* ************************************************************************ */ | ||||
| DecisionTree<Key, VectorValues::shared_ptr> | ||||
| HybridGaussianFactorGraph::continuousDelta( | ||||
|     const DiscreteKeys &discrete_keys, | ||||
|     const boost::shared_ptr<BayesNetType> &continuousBayesNet, | ||||
|     const std::vector<DiscreteValues> &assignments) const { | ||||
|   // Create a decision tree of all the different VectorValues
 | ||||
|   std::vector<VectorValues::shared_ptr> vector_values; | ||||
|   for (const DiscreteValues &assignment : assignments) { | ||||
|     VectorValues values = continuousBayesNet->optimize(assignment); | ||||
|     vector_values.push_back(boost::make_shared<VectorValues>(values)); | ||||
|   } | ||||
|   DecisionTree<Key, VectorValues::shared_ptr> delta_tree(discrete_keys, | ||||
|                                                          vector_values); | ||||
| 
 | ||||
|   return delta_tree; | ||||
| } | ||||
| 
 | ||||
| /* ************************************************************************ */ | ||||
| AlgebraicDecisionTree<Key> HybridGaussianFactorGraph::continuousProbPrimes( | ||||
|     const DiscreteKeys &orig_discrete_keys, | ||||
|     const boost::shared_ptr<BayesNetType> &continuousBayesNet) const { | ||||
|   // Generate all possible assignments.
 | ||||
|   const std::vector<DiscreteValues> assignments = | ||||
|       DiscreteValues::CartesianProduct(orig_discrete_keys); | ||||
| 
 | ||||
|   // Save a copy of the original discrete key ordering
 | ||||
|   DiscreteKeys discrete_keys(orig_discrete_keys); | ||||
|   // Reverse discrete keys order for correct tree construction
 | ||||
|   std::reverse(discrete_keys.begin(), discrete_keys.end()); | ||||
| 
 | ||||
|   // Create a decision tree of all the different VectorValues
 | ||||
|   DecisionTree<Key, VectorValues::shared_ptr> delta_tree = | ||||
|       this->continuousDelta(discrete_keys, continuousBayesNet, assignments); | ||||
| 
 | ||||
|   // Get the probPrime tree with the correct leaf probabilities
 | ||||
|   std::vector<double> probPrimes; | ||||
|   for (const DiscreteValues &assignment : assignments) { | ||||
|     VectorValues delta = *delta_tree(assignment); | ||||
| 
 | ||||
|     // If VectorValues is empty, it means this is a pruned branch.
 | ||||
|     // Set thr probPrime to 0.0.
 | ||||
|     if (delta.size() == 0) { | ||||
|       probPrimes.push_back(0.0); | ||||
|       continue; | ||||
|     } | ||||
| 
 | ||||
|     // Compute the error given the delta and the assignment.
 | ||||
|     double error = this->error(delta, assignment); | ||||
|     probPrimes.push_back(exp(-error)); | ||||
|   } | ||||
| 
 | ||||
|   AlgebraicDecisionTree<Key> probPrimeTree(discrete_keys, probPrimes); | ||||
|   return probPrimeTree; | ||||
| } | ||||
| 
 | ||||
| /* ************************************************************************ */ | ||||
| boost::shared_ptr<HybridGaussianFactorGraph::BayesNetType> | ||||
| HybridGaussianFactorGraph::eliminateHybridSequential( | ||||
|     const boost::optional<Ordering> continuous, | ||||
|     const boost::optional<Ordering> discrete, const Eliminate &function, | ||||
|     OptionalVariableIndex variableIndex) const { | ||||
|   Ordering continuous_ordering = | ||||
|       continuous ? *continuous : Ordering(this->continuousKeys()); | ||||
|   Ordering discrete_ordering = | ||||
|       discrete ? *discrete : Ordering(this->discreteKeys()); | ||||
| 
 | ||||
|   // Eliminate continuous
 | ||||
|   HybridBayesNet::shared_ptr bayesNet; | ||||
|   HybridGaussianFactorGraph::shared_ptr discreteGraph; | ||||
|   std::tie(bayesNet, discreteGraph) = | ||||
|       BaseEliminateable::eliminatePartialSequential(continuous_ordering, | ||||
|                                                     function, variableIndex); | ||||
| 
 | ||||
|   // Get the last continuous conditional which will have all the discrete keys
 | ||||
|   auto last_conditional = bayesNet->at(bayesNet->size() - 1); | ||||
|   DiscreteKeys discrete_keys = last_conditional->discreteKeys(); | ||||
| 
 | ||||
|   // If not discrete variables, return the eliminated bayes net.
 | ||||
|   if (discrete_keys.size() == 0) { | ||||
|     return bayesNet; | ||||
|   } | ||||
| 
 | ||||
|   AlgebraicDecisionTree<Key> probPrimeTree = | ||||
|       this->continuousProbPrimes(discrete_keys, bayesNet); | ||||
| 
 | ||||
|   discreteGraph->add(DecisionTreeFactor(discrete_keys, probPrimeTree)); | ||||
| 
 | ||||
|   // Perform discrete elimination
 | ||||
|   HybridBayesNet::shared_ptr discreteBayesNet = | ||||
|       discreteGraph->BaseEliminateable::eliminateSequential( | ||||
|           discrete_ordering, function, variableIndex); | ||||
| 
 | ||||
|   bayesNet->add(*discreteBayesNet); | ||||
| 
 | ||||
|   return bayesNet; | ||||
| } | ||||
| 
 | ||||
| /* ************************************************************************ */ | ||||
| boost::shared_ptr<HybridGaussianFactorGraph::BayesNetType> | ||||
| HybridGaussianFactorGraph::eliminateSequential( | ||||
|     OptionalOrderingType orderingType, const Eliminate &function, | ||||
|     OptionalVariableIndex variableIndex) const { | ||||
|   return BaseEliminateable::eliminateSequential(orderingType, function, | ||||
|                                                 variableIndex); | ||||
| } | ||||
| 
 | ||||
| /* ************************************************************************ */ | ||||
| boost::shared_ptr<HybridGaussianFactorGraph::BayesNetType> | ||||
| HybridGaussianFactorGraph::eliminateSequential( | ||||
|     const Ordering &ordering, const Eliminate &function, | ||||
|     OptionalVariableIndex variableIndex) const { | ||||
| std::pair<Ordering, Ordering> | ||||
| HybridGaussianFactorGraph::separateContinuousDiscreteOrdering( | ||||
|     const Ordering &ordering) const { | ||||
|   KeySet all_continuous_keys = this->continuousKeys(); | ||||
|   KeySet all_discrete_keys = this->discreteKeys(); | ||||
|   Ordering continuous_ordering, discrete_ordering; | ||||
| 
 | ||||
|   // Segregate the continuous and the discrete keys
 | ||||
|   for (auto &&key : ordering) { | ||||
|     if (std::find(all_continuous_keys.begin(), all_continuous_keys.end(), | ||||
|                   key) != all_continuous_keys.end()) { | ||||
|  | @ -657,8 +548,7 @@ HybridGaussianFactorGraph::eliminateSequential( | |||
|     } | ||||
|   } | ||||
| 
 | ||||
|   return this->eliminateHybridSequential(continuous_ordering, | ||||
|                                          discrete_ordering); | ||||
|   return std::make_pair(continuous_ordering, discrete_ordering); | ||||
| } | ||||
| 
 | ||||
| }  // namespace gtsam
 | ||||
|  |  | |||
|  | @ -217,57 +217,92 @@ class GTSAM_EXPORT HybridGaussianFactorGraph | |||
|                    const DiscreteValues& discreteValues) const; | ||||
| 
 | ||||
|   /**
 | ||||
|    * @brief Compute the VectorValues solution for the continuous variables for | ||||
|    * each mode. | ||||
|    * @brief Helper method to compute the VectorValues solution for the | ||||
|    * continuous variables for each discrete mode. | ||||
|    * Used as a helper to compute q(\mu | M, Z) which is used by | ||||
|    * both P(X | M, Z) and P(M | Z). | ||||
|    * | ||||
|    * @tparam BAYES Template on the type of Bayes graph, either a bayes net or a | ||||
|    * bayes tree. | ||||
|    * @param discrete_keys The discrete keys which form all the modes. | ||||
|    * @param continuousBayesNet The Bayes Net representing the continuous | ||||
|    * @param continuousBayesNet The Bayes Net/Tree representing the continuous | ||||
|    * eliminated variables. | ||||
|    * @param assignments List of all discrete assignments to create the final | ||||
|    * decision tree. | ||||
|    * @return DecisionTree<Key, VectorValues::shared_ptr> | ||||
|    */ | ||||
|   template <typename BAYES> | ||||
|   DecisionTree<Key, VectorValues::shared_ptr> continuousDelta( | ||||
|       const DiscreteKeys& discrete_keys, | ||||
|       const boost::shared_ptr<BayesNetType>& continuousBayesNet, | ||||
|       const std::vector<DiscreteValues>& assignments) const; | ||||
|       const boost::shared_ptr<BAYES>& continuousBayesNet, | ||||
|       const std::vector<DiscreteValues>& assignments) const { | ||||
|     // Create a decision tree of all the different VectorValues
 | ||||
|     std::vector<VectorValues::shared_ptr> vector_values; | ||||
|     for (const DiscreteValues& assignment : assignments) { | ||||
|       VectorValues values = continuousBayesNet->optimize(assignment); | ||||
|       vector_values.push_back(boost::make_shared<VectorValues>(values)); | ||||
|     } | ||||
|     DecisionTree<Key, VectorValues::shared_ptr> delta_tree(discrete_keys, | ||||
|                                                            vector_values); | ||||
| 
 | ||||
|     return delta_tree; | ||||
|   } | ||||
| 
 | ||||
|   /**
 | ||||
|    * @brief Compute the unnormalized probabilities of the continuous variables | ||||
|    * for each of the modes. | ||||
|    * | ||||
|    * @tparam BAYES  Template on the type of Bayes graph, either a bayes net or a | ||||
|    * bayes tree. | ||||
|    * @param discrete_keys The discrete keys which form all the modes. | ||||
|    * @param continuousBayesNet The Bayes Net representing the continuous | ||||
|    * eliminated variables. | ||||
|    * @return AlgebraicDecisionTree<Key> | ||||
|    */ | ||||
|   template <typename BAYES> | ||||
|   AlgebraicDecisionTree<Key> continuousProbPrimes( | ||||
|       const DiscreteKeys& discrete_keys, | ||||
|       const boost::shared_ptr<BayesNetType>& continuousBayesNet) const; | ||||
|       const boost::shared_ptr<BAYES>& continuousBayesNet) const { | ||||
|     // Generate all possible assignments.
 | ||||
|     const std::vector<DiscreteValues> assignments = | ||||
|         DiscreteValues::CartesianProduct(discrete_keys); | ||||
| 
 | ||||
|   /**
 | ||||
|    * @brief Custom elimination function which computes the correct | ||||
|    * continuous probabilities. | ||||
|    * | ||||
|    * @param continuous Optional ordering for all continuous variables. | ||||
|    * @param discrete Optional ordering for all discrete variables. | ||||
|    * @return boost::shared_ptr<BayesNetType> | ||||
|    */ | ||||
|   boost::shared_ptr<BayesNetType> eliminateHybridSequential( | ||||
|       const boost::optional<Ordering> continuous = boost::none, | ||||
|       const boost::optional<Ordering> discrete = boost::none, | ||||
|       const Eliminate& function = EliminationTraitsType::DefaultEliminate, | ||||
|       OptionalVariableIndex variableIndex = boost::none) const; | ||||
|     // Save a copy of the original discrete key ordering
 | ||||
|     DiscreteKeys reversed_discrete_keys(discrete_keys); | ||||
|     // Reverse discrete keys order for correct tree construction
 | ||||
|     std::reverse(reversed_discrete_keys.begin(), reversed_discrete_keys.end()); | ||||
| 
 | ||||
|     // Create a decision tree of all the different VectorValues
 | ||||
|     DecisionTree<Key, VectorValues::shared_ptr> delta_tree = | ||||
|         this->continuousDelta(reversed_discrete_keys, continuousBayesNet, | ||||
|                               assignments); | ||||
| 
 | ||||
|     // Get the probPrime tree with the correct leaf probabilities
 | ||||
|     std::vector<double> probPrimes; | ||||
|     for (const DiscreteValues& assignment : assignments) { | ||||
|       VectorValues delta = *delta_tree(assignment); | ||||
| 
 | ||||
|       // If VectorValues is empty, it means this is a pruned branch.
 | ||||
|       // Set thr probPrime to 0.0.
 | ||||
|       if (delta.size() == 0) { | ||||
|         probPrimes.push_back(0.0); | ||||
|         continue; | ||||
|       } | ||||
| 
 | ||||
|       // Compute the error given the delta and the assignment.
 | ||||
|       double error = this->error(delta, assignment); | ||||
|       probPrimes.push_back(exp(-error)); | ||||
|     } | ||||
| 
 | ||||
|     AlgebraicDecisionTree<Key> probPrimeTree(reversed_discrete_keys, | ||||
|                                              probPrimes); | ||||
|     return probPrimeTree; | ||||
|   } | ||||
| 
 | ||||
|   std::pair<Ordering, Ordering> separateContinuousDiscreteOrdering( | ||||
|       const Ordering& ordering) const; | ||||
| 
 | ||||
|   boost::shared_ptr<BayesNetType> eliminateSequential( | ||||
|       OptionalOrderingType orderingType = boost::none, | ||||
|       const Eliminate& function = EliminationTraitsType::DefaultEliminate, | ||||
|       OptionalVariableIndex variableIndex = boost::none) const; | ||||
|    | ||||
|   boost::shared_ptr<BayesNetType> eliminateSequential( | ||||
|       const Ordering& ordering, | ||||
|       const Eliminate& function = EliminationTraitsType::DefaultEliminate, | ||||
|       OptionalVariableIndex variableIndex = boost::none) const; | ||||
| 
 | ||||
|   /**
 | ||||
|    * @brief Return a Colamd constrained ordering where the discrete keys are | ||||
|  |  | |||
|  | @ -51,10 +51,11 @@ class HybridEliminationTree; | |||
|  */ | ||||
| class GTSAM_EXPORT HybridJunctionTree | ||||
|     : public JunctionTree<HybridBayesTree, HybridGaussianFactorGraph> { | ||||
| 
 | ||||
|  public: | ||||
|   typedef JunctionTree<HybridBayesTree, HybridGaussianFactorGraph> | ||||
|       Base;                                    ///< Base class
 | ||||
|   typedef HybridJunctionTree This;     ///< This class
 | ||||
|   typedef HybridJunctionTree This;             ///< This class
 | ||||
|   typedef boost::shared_ptr<This> shared_ptr;  ///< Shared pointer to this class
 | ||||
| 
 | ||||
|   /**
 | ||||
|  |  | |||
|  | @ -32,7 +32,7 @@ void HybridSmoother::update(HybridGaussianFactorGraph graph, | |||
|       addConditionals(graph, hybridBayesNet_, ordering); | ||||
| 
 | ||||
|   // Eliminate.
 | ||||
|   auto bayesNetFragment = graph.eliminateHybridSequential(); | ||||
|   auto bayesNetFragment = graph.eliminateSequential(ordering); | ||||
| 
 | ||||
|   /// Prune
 | ||||
|   if (maxNrLeaves) { | ||||
|  |  | |||
|  | @ -164,25 +164,6 @@ TEST(HybridBayesNet, Optimize) { | |||
|   EXPECT(assert_equal(expectedValues, delta.continuous(), 1e-5)); | ||||
| } | ||||
| 
 | ||||
| /* ****************************************************************************/ | ||||
| // Test bayes net multifrontal optimize
 | ||||
| TEST(HybridBayesNet, OptimizeMultifrontal) { | ||||
|   Switching s(4); | ||||
| 
 | ||||
|   Ordering hybridOrdering = s.linearizedFactorGraph.getHybridOrdering(); | ||||
|   HybridBayesTree::shared_ptr hybridBayesTree = | ||||
|       s.linearizedFactorGraph.eliminateMultifrontal(hybridOrdering); | ||||
|   HybridValues delta = hybridBayesTree->optimize(); | ||||
| 
 | ||||
|   VectorValues expectedValues; | ||||
|   expectedValues.insert(X(0), -0.999904 * Vector1::Ones()); | ||||
|   expectedValues.insert(X(1), -0.99029 * Vector1::Ones()); | ||||
|   expectedValues.insert(X(2), -1.00971 * Vector1::Ones()); | ||||
|   expectedValues.insert(X(3), -1.0001 * Vector1::Ones()); | ||||
| 
 | ||||
|   EXPECT(assert_equal(expectedValues, delta.continuous(), 1e-5)); | ||||
| } | ||||
| 
 | ||||
| /* ****************************************************************************/ | ||||
| // Test bayes net error
 | ||||
| TEST(HybridBayesNet, Error) { | ||||
|  |  | |||
|  | @ -32,6 +32,25 @@ using noiseModel::Isotropic; | |||
| using symbol_shorthand::M; | ||||
| using symbol_shorthand::X; | ||||
| 
 | ||||
| /* ****************************************************************************/ | ||||
| // Test multifrontal optimize
 | ||||
| TEST(HybridBayesTree, OptimizeMultifrontal) { | ||||
|   Switching s(4); | ||||
| 
 | ||||
|   Ordering hybridOrdering = s.linearizedFactorGraph.getHybridOrdering(); | ||||
|   HybridBayesTree::shared_ptr hybridBayesTree = | ||||
|       s.linearizedFactorGraph.eliminateMultifrontal(hybridOrdering); | ||||
|   HybridValues delta = hybridBayesTree->optimize(); | ||||
| 
 | ||||
|   VectorValues expectedValues; | ||||
|   expectedValues.insert(X(0), -0.999904 * Vector1::Ones()); | ||||
|   expectedValues.insert(X(1), -0.99029 * Vector1::Ones()); | ||||
|   expectedValues.insert(X(2), -1.00971 * Vector1::Ones()); | ||||
|   expectedValues.insert(X(3), -1.0001 * Vector1::Ones()); | ||||
| 
 | ||||
|   EXPECT(assert_equal(expectedValues, delta.continuous(), 1e-5)); | ||||
| } | ||||
| 
 | ||||
| /* ****************************************************************************/ | ||||
| // Test for optimizing a HybridBayesTree with a given assignment.
 | ||||
| TEST(HybridBayesTree, OptimizeAssignment) { | ||||
|  | @ -137,6 +156,12 @@ TEST(HybridBayesTree, Optimize) { | |||
|         boost::dynamic_pointer_cast<DecisionTreeFactor>(factor->inner())); | ||||
|   } | ||||
|    | ||||
|   // Add the probabilities for each branch
 | ||||
|   DiscreteKeys discrete_keys = {{M(0), 2}, {M(1), 2}, {M(2), 2}}; | ||||
|   vector<double> probs = {0.012519475, 0.041280228, 0.075018647, 0.081663656, | ||||
|                           0.037152205, 0.12248971,  0.07349729,  0.08}; | ||||
|   dfg.emplace_shared<DecisionTreeFactor>(discrete_keys, probs); | ||||
| 
 | ||||
|   DiscreteValues expectedMPE = dfg.optimize(); | ||||
|   VectorValues expectedValues = hybridBayesNet->optimize(expectedMPE); | ||||
| 
 | ||||
|  |  | |||
|  | @ -15,6 +15,7 @@ | |||
|  * @author  Varun Agrawal | ||||
|  */ | ||||
| 
 | ||||
| #include <gtsam/discrete/DiscreteBayesNet.h> | ||||
| #include <gtsam/geometry/Pose2.h> | ||||
| #include <gtsam/hybrid/HybridBayesNet.h> | ||||
| #include <gtsam/hybrid/HybridNonlinearFactorGraph.h> | ||||
|  | @ -23,6 +24,7 @@ | |||
| #include <gtsam/hybrid/MixtureFactor.h> | ||||
| #include <gtsam/inference/Symbol.h> | ||||
| #include <gtsam/linear/GaussianBayesNet.h> | ||||
| #include <gtsam/linear/GaussianBayesTree.h> | ||||
| #include <gtsam/linear/GaussianFactorGraph.h> | ||||
| #include <gtsam/linear/JacobianFactor.h> | ||||
| #include <gtsam/linear/NoiseModel.h> | ||||
|  | @ -69,6 +71,28 @@ Ordering getOrdering(HybridGaussianFactorGraph& factors, | |||
|   return ordering; | ||||
| } | ||||
| 
 | ||||
| TEST(HybridEstimation, Full) { | ||||
|   size_t K = 3; | ||||
|   std::vector<double> measurements = {0, 1, 2}; | ||||
|   // Ground truth discrete seq
 | ||||
|   std::vector<size_t> discrete_seq = {1, 1, 0}; | ||||
|   // Switching example of robot moving in 1D
 | ||||
|   // with given measurements and equal mode priors.
 | ||||
|   Switching switching(K, 1.0, 0.1, measurements, "1/1 1/1"); | ||||
|   HybridGaussianFactorGraph graph = switching.linearizedFactorGraph; | ||||
| 
 | ||||
|   Ordering hybridOrdering; | ||||
|   hybridOrdering += X(0); | ||||
|   hybridOrdering += X(1); | ||||
|   hybridOrdering += X(2); | ||||
|   hybridOrdering += M(0); | ||||
|   hybridOrdering += M(1); | ||||
|   HybridBayesNet::shared_ptr bayesNet = | ||||
|       graph.eliminateSequential(hybridOrdering); | ||||
| 
 | ||||
|   EXPECT_LONGS_EQUAL(5, bayesNet->size()); | ||||
| } | ||||
| 
 | ||||
| /****************************************************************************/ | ||||
| // Test approximate inference with an additional pruning step.
 | ||||
| TEST(HybridEstimation, Incremental) { | ||||
|  | @ -78,6 +102,8 @@ TEST(HybridEstimation, Incremental) { | |||
|   // Ground truth discrete seq
 | ||||
|   std::vector<size_t> discrete_seq = {1, 1, 0, 0, 0, 1, 1, 1, 1, 0, | ||||
|                                       1, 1, 1, 0, 0, 1, 1, 0, 0, 0}; | ||||
|   // Switching example of robot moving in 1D with given measurements and equal
 | ||||
|   // mode priors.
 | ||||
|   Switching switching(K, 1.0, 0.1, measurements, "1/1 1/1"); | ||||
|   HybridSmoother smoother; | ||||
|   HybridNonlinearFactorGraph graph; | ||||
|  | @ -135,7 +161,7 @@ TEST(HybridEstimation, Incremental) { | |||
|  * @param between_sigma Noise model sigma for the between factor. | ||||
|  * @return GaussianFactorGraph::shared_ptr | ||||
|  */ | ||||
| GaussianFactorGraph::shared_ptr specificProblem( | ||||
| GaussianFactorGraph::shared_ptr specificModesFactorGraph( | ||||
|     size_t K, const std::vector<double>& measurements, | ||||
|     const std::vector<size_t>& discrete_seq, double measurement_sigma = 0.1, | ||||
|     double between_sigma = 1.0) { | ||||
|  | @ -183,7 +209,7 @@ std::vector<size_t> getDiscreteSequence(size_t x) { | |||
| } | ||||
| 
 | ||||
| /**
 | ||||
|  * @brief Helper method to get the probPrimeTree | ||||
|  * @brief Helper method to get the tree of unnormalized probabilities | ||||
|  * as per the new elimination scheme. | ||||
|  * | ||||
|  * @param graph The HybridGaussianFactorGraph to eliminate. | ||||
|  | @ -241,82 +267,169 @@ AlgebraicDecisionTree<Key> probPrimeTree( | |||
| TEST(HybridEstimation, Probability) { | ||||
|   constexpr size_t K = 4; | ||||
|   std::vector<double> measurements = {0, 1, 2, 2}; | ||||
| 
 | ||||
|   // This is the correct sequence
 | ||||
|   // std::vector<size_t> discrete_seq = {1, 1, 0};
 | ||||
| 
 | ||||
|   double between_sigma = 1.0, measurement_sigma = 0.1; | ||||
| 
 | ||||
|   std::vector<double> expected_errors, expected_prob_primes; | ||||
|   std::map<size_t, std::vector<size_t>> discrete_seq_map; | ||||
|   for (size_t i = 0; i < pow(2, K - 1); i++) { | ||||
|     std::vector<size_t> discrete_seq = getDiscreteSequence<K>(i); | ||||
|     discrete_seq_map[i] = getDiscreteSequence<K>(i); | ||||
| 
 | ||||
|     GaussianFactorGraph::shared_ptr linear_graph = specificProblem( | ||||
|         K, measurements, discrete_seq, measurement_sigma, between_sigma); | ||||
|     GaussianFactorGraph::shared_ptr linear_graph = specificModesFactorGraph( | ||||
|         K, measurements, discrete_seq_map[i], measurement_sigma, between_sigma); | ||||
| 
 | ||||
|     auto bayes_net = linear_graph->eliminateSequential(); | ||||
| 
 | ||||
|     VectorValues values = bayes_net->optimize(); | ||||
| 
 | ||||
|     double error = linear_graph->error(values); | ||||
|     expected_errors.push_back(error); | ||||
|     double prob_prime = linear_graph->probPrime(values); | ||||
|     expected_prob_primes.push_back(prob_prime); | ||||
|   } | ||||
| 
 | ||||
|   // Switching example of robot moving in 1D with given measurements and equal
 | ||||
|   // mode priors.
 | ||||
|   Switching switching(K, between_sigma, measurement_sigma, measurements, | ||||
|                       "1/1 1/1"); | ||||
|   auto graph = switching.linearizedFactorGraph; | ||||
|   Ordering ordering = getOrdering(graph, HybridGaussianFactorGraph()); | ||||
| 
 | ||||
|   HybridBayesNet::shared_ptr bayesNet = graph.eliminateSequential(ordering); | ||||
|   auto discreteConditional = bayesNet->atDiscrete(bayesNet->size() - 3); | ||||
| 
 | ||||
|   // Test if the probPrimeTree matches the probability of
 | ||||
|   // the individual factor graphs
 | ||||
|   for (size_t i = 0; i < pow(2, K - 1); i++) { | ||||
|     DiscreteValues discrete_assignment; | ||||
|     for (size_t v = 0; v < discrete_seq_map[i].size(); v++) { | ||||
|       discrete_assignment[M(v)] = discrete_seq_map[i][v]; | ||||
|     } | ||||
|     double discrete_transition_prob = 0.25; | ||||
|     EXPECT_DOUBLES_EQUAL(expected_prob_primes.at(i) * discrete_transition_prob, | ||||
|                          (*discreteConditional)(discrete_assignment), 1e-8); | ||||
|   } | ||||
| 
 | ||||
|   HybridValues hybrid_values = bayesNet->optimize(); | ||||
| 
 | ||||
|   // This is the correct sequence as designed
 | ||||
|   DiscreteValues discrete_seq; | ||||
|   discrete_seq[M(0)] = 1; | ||||
|   discrete_seq[M(1)] = 1; | ||||
|   discrete_seq[M(2)] = 0; | ||||
| 
 | ||||
|   EXPECT(assert_equal(discrete_seq, hybrid_values.discrete())); | ||||
| } | ||||
| 
 | ||||
| /****************************************************************************/ | ||||
| /**
 | ||||
|  * Test for correctness of different branches of the P'(Continuous | Discrete) | ||||
|  * in the multi-frontal setting. The values should match those of P'(Continuous) | ||||
|  * for each discrete mode. | ||||
|  */ | ||||
| TEST(HybridEstimation, ProbabilityMultifrontal) { | ||||
|   constexpr size_t K = 4; | ||||
|   std::vector<double> measurements = {0, 1, 2, 2}; | ||||
| 
 | ||||
|   double between_sigma = 1.0, measurement_sigma = 0.1; | ||||
| 
 | ||||
|   // For each discrete mode sequence, create the individual factor graphs and
 | ||||
|   // optimize each.
 | ||||
|   std::vector<double> expected_errors, expected_prob_primes; | ||||
|   std::map<size_t, std::vector<size_t>> discrete_seq_map; | ||||
|   for (size_t i = 0; i < pow(2, K - 1); i++) { | ||||
|     discrete_seq_map[i] = getDiscreteSequence<K>(i); | ||||
| 
 | ||||
|     GaussianFactorGraph::shared_ptr linear_graph = specificModesFactorGraph( | ||||
|         K, measurements, discrete_seq_map[i], measurement_sigma, between_sigma); | ||||
| 
 | ||||
|     auto bayes_tree = linear_graph->eliminateMultifrontal(); | ||||
| 
 | ||||
|     VectorValues values = bayes_tree->optimize(); | ||||
| 
 | ||||
|     expected_errors.push_back(linear_graph->error(values)); | ||||
|     expected_prob_primes.push_back(linear_graph->probPrime(values)); | ||||
|   } | ||||
| 
 | ||||
|   Switching switching(K, between_sigma, measurement_sigma, measurements); | ||||
|   // Switching example of robot moving in 1D with given measurements and equal
 | ||||
|   // mode priors.
 | ||||
|   Switching switching(K, between_sigma, measurement_sigma, measurements, | ||||
|                       "1/1 1/1"); | ||||
|   auto graph = switching.linearizedFactorGraph; | ||||
|   Ordering ordering = getOrdering(graph, HybridGaussianFactorGraph()); | ||||
| 
 | ||||
|   // Get the tree of unnormalized probabilities for each mode sequence.
 | ||||
|   AlgebraicDecisionTree<Key> expected_probPrimeTree = probPrimeTree(graph); | ||||
| 
 | ||||
|   // Eliminate continuous
 | ||||
|   Ordering continuous_ordering(graph.continuousKeys()); | ||||
|   HybridBayesNet::shared_ptr bayesNet; | ||||
|   HybridBayesTree::shared_ptr bayesTree; | ||||
|   HybridGaussianFactorGraph::shared_ptr discreteGraph; | ||||
|   std::tie(bayesNet, discreteGraph) = | ||||
|       graph.eliminatePartialSequential(continuous_ordering); | ||||
|   std::tie(bayesTree, discreteGraph) = | ||||
|       graph.eliminatePartialMultifrontal(continuous_ordering); | ||||
| 
 | ||||
|   // Get the last continuous conditional which will have all the discrete keys
 | ||||
|   auto last_conditional = bayesNet->at(bayesNet->size() - 1); | ||||
|   Key last_continuous_key = | ||||
|       continuous_ordering.at(continuous_ordering.size() - 1); | ||||
|   auto last_conditional = (*bayesTree)[last_continuous_key]->conditional(); | ||||
|   DiscreteKeys discrete_keys = last_conditional->discreteKeys(); | ||||
| 
 | ||||
|   const std::vector<DiscreteValues> assignments = | ||||
|       DiscreteValues::CartesianProduct(discrete_keys); | ||||
| 
 | ||||
|   // Reverse discrete keys order for correct tree construction
 | ||||
|   std::reverse(discrete_keys.begin(), discrete_keys.end()); | ||||
| 
 | ||||
|   // Create a decision tree of all the different VectorValues
 | ||||
|   DecisionTree<Key, VectorValues::shared_ptr> delta_tree = | ||||
|       graph.continuousDelta(discrete_keys, bayesNet, assignments); | ||||
| 
 | ||||
|   AlgebraicDecisionTree<Key> probPrimeTree = | ||||
|       graph.continuousProbPrimes(discrete_keys, bayesNet); | ||||
|       graph.continuousProbPrimes(discrete_keys, bayesTree); | ||||
| 
 | ||||
|   EXPECT(assert_equal(expected_probPrimeTree, probPrimeTree)); | ||||
| 
 | ||||
|   // Test if the probPrimeTree matches the probability of
 | ||||
|   // the individual factor graphs
 | ||||
|   for (size_t i = 0; i < pow(2, K - 1); i++) { | ||||
|     std::vector<size_t> discrete_seq = getDiscreteSequence<K>(i); | ||||
|     Assignment<Key> discrete_assignment; | ||||
|     for (size_t v = 0; v < discrete_seq.size(); v++) { | ||||
|       discrete_assignment[M(v)] = discrete_seq[v]; | ||||
|     for (size_t v = 0; v < discrete_seq_map[i].size(); v++) { | ||||
|       discrete_assignment[M(v)] = discrete_seq_map[i][v]; | ||||
|     } | ||||
|     EXPECT_DOUBLES_EQUAL(expected_prob_primes.at(i), | ||||
|                          probPrimeTree(discrete_assignment), 1e-8); | ||||
|   } | ||||
| 
 | ||||
|   // remainingGraph->add(DecisionTreeFactor(discrete_keys, probPrimeTree));
 | ||||
|   discreteGraph->add(DecisionTreeFactor(discrete_keys, probPrimeTree)); | ||||
| 
 | ||||
|   // Ordering discrete(graph.discreteKeys());
 | ||||
|   // // remainingGraph->print("remainingGraph");
 | ||||
|   // // discrete.print();
 | ||||
|   // auto discreteBayesNet = remainingGraph->eliminateSequential(discrete);
 | ||||
|   // bayesNet->add(*discreteBayesNet);
 | ||||
|   // // bayesNet->print();
 | ||||
|   Ordering discrete(graph.discreteKeys()); | ||||
|   auto discreteBayesTree = | ||||
|       discreteGraph->BaseEliminateable::eliminateMultifrontal(discrete); | ||||
| 
 | ||||
|   // HybridValues hybrid_values = bayesNet->optimize();
 | ||||
|   // hybrid_values.discrete().print();
 | ||||
|   EXPECT_LONGS_EQUAL(1, discreteBayesTree->size()); | ||||
|   // DiscreteBayesTree should have only 1 clique
 | ||||
|   auto discrete_clique = (*discreteBayesTree)[discrete.at(0)]; | ||||
| 
 | ||||
|   std::set<HybridBayesTreeClique::shared_ptr> clique_set; | ||||
|   for (auto node : bayesTree->nodes()) { | ||||
|     clique_set.insert(node.second); | ||||
|   } | ||||
| 
 | ||||
|   // Set the root of the bayes tree as the discrete clique
 | ||||
|   for (auto clique : clique_set) { | ||||
|     if (clique->conditional()->parents() == | ||||
|         discrete_clique->conditional()->frontals()) { | ||||
|       discreteBayesTree->addClique(clique, discrete_clique); | ||||
| 
 | ||||
|     } else { | ||||
|       // Remove the clique from the children of the parents since it will get
 | ||||
|       // added again in addClique.
 | ||||
|       auto clique_it = std::find(clique->parent()->children.begin(), | ||||
|                                  clique->parent()->children.end(), clique); | ||||
|       clique->parent()->children.erase(clique_it); | ||||
|       discreteBayesTree->addClique(clique, clique->parent()); | ||||
|     } | ||||
|   } | ||||
| 
 | ||||
|   HybridValues hybrid_values = discreteBayesTree->optimize(); | ||||
| 
 | ||||
|   // This is the correct sequence as designed
 | ||||
|   DiscreteValues discrete_seq; | ||||
|   discrete_seq[M(0)] = 1; | ||||
|   discrete_seq[M(1)] = 1; | ||||
|   discrete_seq[M(2)] = 0; | ||||
| 
 | ||||
|   EXPECT(assert_equal(discrete_seq, hybrid_values.discrete())); | ||||
| } | ||||
| 
 | ||||
| /* ************************************************************************* */ | ||||
|  |  | |||
|  | @ -182,7 +182,9 @@ TEST(HybridGaussianFactorGraph, eliminateFullMultifrontalSimple) { | |||
|        boost::make_shared<JacobianFactor>(X(1), I_3x3, Vector3::Ones())})); | ||||
| 
 | ||||
|   hfg.add(DecisionTreeFactor(m1, {2, 8})); | ||||
|   hfg.add(DecisionTreeFactor({{M(1), 2}, {M(2), 2}}, "1 2 3 4")); | ||||
|   // TODO(Varun) Adding extra discrete variable not connected to continuous
 | ||||
|   // variable throws segfault
 | ||||
|   //  hfg.add(DecisionTreeFactor({{M(1), 2}, {M(2), 2}}, "1 2 3 4"));
 | ||||
| 
 | ||||
|   HybridBayesTree::shared_ptr result = | ||||
|       hfg.eliminateMultifrontal(hfg.getHybridOrdering()); | ||||
|  |  | |||
|  | @ -165,7 +165,8 @@ TEST(HybridGaussianElimination, IncrementalInference) { | |||
|   discrete_ordering += M(0); | ||||
|   discrete_ordering += M(1); | ||||
|   HybridBayesTree::shared_ptr discreteBayesTree = | ||||
|       expectedRemainingGraph->eliminateMultifrontal(discrete_ordering); | ||||
|       expectedRemainingGraph->BaseEliminateable::eliminateMultifrontal( | ||||
|           discrete_ordering); | ||||
| 
 | ||||
|   DiscreteValues m00; | ||||
|   m00[M(0)] = 0, m00[M(1)] = 0; | ||||
|  | @ -175,12 +176,12 @@ TEST(HybridGaussianElimination, IncrementalInference) { | |||
| 
 | ||||
|   auto discreteConditional = isam[M(1)]->conditional()->asDiscreteConditional(); | ||||
| 
 | ||||
|   // Test if the probability values are as expected with regression tests.
 | ||||
|   // Test the probability values with regression tests.
 | ||||
|   DiscreteValues assignment; | ||||
|   EXPECT(assert_equal(m00_prob, 0.0619233, 1e-5)); | ||||
|   EXPECT(assert_equal(0.0619233, m00_prob, 1e-5)); | ||||
|   assignment[M(0)] = 0; | ||||
|   assignment[M(1)] = 0; | ||||
|   EXPECT(assert_equal(m00_prob, (*discreteConditional)(assignment), 1e-5)); | ||||
|   EXPECT(assert_equal(0.0619233, (*discreteConditional)(assignment), 1e-5)); | ||||
|   assignment[M(0)] = 1; | ||||
|   assignment[M(1)] = 0; | ||||
|   EXPECT(assert_equal(0.183743, (*discreteConditional)(assignment), 1e-5)); | ||||
|  | @ -193,11 +194,15 @@ TEST(HybridGaussianElimination, IncrementalInference) { | |||
| 
 | ||||
|   // Check if the clique conditional generated from incremental elimination
 | ||||
|   // matches that of batch elimination.
 | ||||
|   auto expectedChordal = expectedRemainingGraph->eliminateMultifrontal(); | ||||
|   auto expectedConditional = dynamic_pointer_cast<DecisionTreeFactor>( | ||||
|       (*expectedChordal)[M(1)]->conditional()->inner()); | ||||
|   auto expectedChordal = | ||||
|       expectedRemainingGraph->BaseEliminateable::eliminateMultifrontal(); | ||||
|   auto actualConditional = dynamic_pointer_cast<DecisionTreeFactor>( | ||||
|       isam[M(1)]->conditional()->inner()); | ||||
|   // Account for the probability terms from evaluating continuous FGs
 | ||||
|   DiscreteKeys discrete_keys = {{M(0), 2}, {M(1), 2}}; | ||||
|   vector<double> probs = {0.061923317, 0.20415914, 0.18374323, 0.2}; | ||||
|   auto expectedConditional = | ||||
|       boost::make_shared<DecisionTreeFactor>(discrete_keys, probs); | ||||
|   EXPECT(assert_equal(*actualConditional, *expectedConditional, 1e-6)); | ||||
| } | ||||
| 
 | ||||
|  |  | |||
|  | @ -372,8 +372,7 @@ TEST(HybridGaussianElimination, EliminateHybrid_2_Variable) { | |||
|       dynamic_pointer_cast<DecisionTreeFactor>(hybridDiscreteFactor->inner()); | ||||
|   CHECK(discreteFactor); | ||||
|   EXPECT_LONGS_EQUAL(1, discreteFactor->discreteKeys().size()); | ||||
|   // All leaves should be probability 1 since this is not P*(X|M,Z)
 | ||||
|   EXPECT(discreteFactor->root_->isLeaf()); | ||||
|   EXPECT(discreteFactor->root_->isLeaf() == false); | ||||
| 
 | ||||
|   // TODO(Varun) Test emplace_discrete
 | ||||
| } | ||||
|  | @ -386,11 +385,11 @@ TEST(HybridFactorGraph, Partial_Elimination) { | |||
| 
 | ||||
|   auto linearizedFactorGraph = self.linearizedFactorGraph; | ||||
| 
 | ||||
|   // Create ordering.
 | ||||
|   // Create ordering of only continuous variables.
 | ||||
|   Ordering ordering; | ||||
|   for (size_t k = 0; k < self.K; k++) ordering += X(k); | ||||
| 
 | ||||
|   // Eliminate partially.
 | ||||
|   // Eliminate partially i.e. only continuous part.
 | ||||
|   HybridBayesNet::shared_ptr hybridBayesNet; | ||||
|   HybridGaussianFactorGraph::shared_ptr remainingFactorGraph; | ||||
|   std::tie(hybridBayesNet, remainingFactorGraph) = | ||||
|  | @ -441,14 +440,6 @@ TEST(HybridFactorGraph, Full_Elimination) { | |||
|       discrete_fg.push_back(df->inner()); | ||||
|     } | ||||
| 
 | ||||
|     // Get the probabilit P*(X | M, Z)
 | ||||
|     DiscreteKeys discrete_keys = | ||||
|         remainingFactorGraph_partial->at(2)->discreteKeys(); | ||||
|     AlgebraicDecisionTree<Key> probPrimeTree = | ||||
|         linearizedFactorGraph.continuousProbPrimes(discrete_keys, | ||||
|                                                    hybridBayesNet_partial); | ||||
|     discrete_fg.add(DecisionTreeFactor(discrete_keys, probPrimeTree)); | ||||
| 
 | ||||
|     ordering.clear(); | ||||
|     for (size_t k = 0; k < self.K - 1; k++) ordering += M(k); | ||||
|     discreteBayesNet = | ||||
|  |  | |||
|  | @ -153,7 +153,8 @@ TEST(HybridNonlinearISAM, IncrementalInference) { | |||
|   HybridBayesTree::shared_ptr expectedHybridBayesTree; | ||||
|   HybridGaussianFactorGraph::shared_ptr expectedRemainingGraph; | ||||
|   std::tie(expectedHybridBayesTree, expectedRemainingGraph) = | ||||
|       switching.linearizedFactorGraph.eliminatePartialMultifrontal(ordering); | ||||
|       switching.linearizedFactorGraph | ||||
|           .BaseEliminateable::eliminatePartialMultifrontal(ordering); | ||||
| 
 | ||||
|   // The densities on X(1) should be the same
 | ||||
|   auto x0_conditional = dynamic_pointer_cast<GaussianMixture>( | ||||
|  | @ -182,7 +183,8 @@ TEST(HybridNonlinearISAM, IncrementalInference) { | |||
|   discrete_ordering += M(0); | ||||
|   discrete_ordering += M(1); | ||||
|   HybridBayesTree::shared_ptr discreteBayesTree = | ||||
|       expectedRemainingGraph->eliminateMultifrontal(discrete_ordering); | ||||
|       expectedRemainingGraph->BaseEliminateable::eliminateMultifrontal( | ||||
|           discrete_ordering); | ||||
| 
 | ||||
|   DiscreteValues m00; | ||||
|   m00[M(0)] = 0, m00[M(1)] = 0; | ||||
|  | @ -193,12 +195,12 @@ TEST(HybridNonlinearISAM, IncrementalInference) { | |||
|   auto discreteConditional = | ||||
|       bayesTree[M(1)]->conditional()->asDiscreteConditional(); | ||||
| 
 | ||||
|   // Test if the probability values are as expected with regression tests.
 | ||||
|   // Test the probability values with regression tests.
 | ||||
|   DiscreteValues assignment; | ||||
|   EXPECT(assert_equal(m00_prob, 0.0619233, 1e-5)); | ||||
|   EXPECT(assert_equal(0.0619233, m00_prob, 1e-5)); | ||||
|   assignment[M(0)] = 0; | ||||
|   assignment[M(1)] = 0; | ||||
|   EXPECT(assert_equal(m00_prob, (*discreteConditional)(assignment), 1e-5)); | ||||
|   EXPECT(assert_equal(0.0619233, (*discreteConditional)(assignment), 1e-5)); | ||||
|   assignment[M(0)] = 1; | ||||
|   assignment[M(1)] = 0; | ||||
|   EXPECT(assert_equal(0.183743, (*discreteConditional)(assignment), 1e-5)); | ||||
|  | @ -212,10 +214,13 @@ TEST(HybridNonlinearISAM, IncrementalInference) { | |||
|   // Check if the clique conditional generated from incremental elimination
 | ||||
|   // matches that of batch elimination.
 | ||||
|   auto expectedChordal = expectedRemainingGraph->eliminateMultifrontal(); | ||||
|   auto expectedConditional = dynamic_pointer_cast<DecisionTreeFactor>( | ||||
|       (*expectedChordal)[M(1)]->conditional()->inner()); | ||||
|   auto actualConditional = dynamic_pointer_cast<DecisionTreeFactor>( | ||||
|       bayesTree[M(1)]->conditional()->inner()); | ||||
|   // Account for the probability terms from evaluating continuous FGs
 | ||||
|   DiscreteKeys discrete_keys = {{M(0), 2}, {M(1), 2}}; | ||||
|   vector<double> probs = {0.061923317, 0.20415914, 0.18374323, 0.2}; | ||||
|   auto expectedConditional = | ||||
|       boost::make_shared<DecisionTreeFactor>(discrete_keys, probs); | ||||
|   EXPECT(assert_equal(*actualConditional, *expectedConditional, 1e-6)); | ||||
| } | ||||
| 
 | ||||
|  | @ -250,7 +255,8 @@ TEST(HybridNonlinearISAM, Approx_inference) { | |||
|   HybridBayesTree::shared_ptr unprunedHybridBayesTree; | ||||
|   HybridGaussianFactorGraph::shared_ptr unprunedRemainingGraph; | ||||
|   std::tie(unprunedHybridBayesTree, unprunedRemainingGraph) = | ||||
|       switching.linearizedFactorGraph.eliminatePartialMultifrontal(ordering); | ||||
|       switching.linearizedFactorGraph | ||||
|           .BaseEliminateable::eliminatePartialMultifrontal(ordering); | ||||
| 
 | ||||
|   size_t maxNrLeaves = 5; | ||||
|   incrementalHybrid.update(graph1, initial); | ||||
|  |  | |||
|  | @ -73,7 +73,7 @@ public: | |||
|   /**
 | ||||
|    * @brief Append new keys to the ordering as `ordering += keys`. | ||||
|    * | ||||
|    * @param key | ||||
|    * @param keys The key vector to append to this ordering. | ||||
|    * @return The ordering variable with appended keys. | ||||
|    */ | ||||
|   This& operator+=(KeyVector& keys); | ||||
|  |  | |||
		Loading…
	
		Reference in New Issue