provide logNormalizers directly to the augment method

commit 79c7c6a8b6 (parent c38756c9f2)
Changes to the GaussianMixtureFactor implementation:

@@ -35,45 +35,17 @@ namespace gtsam {
  * the `b` vector as an additional row.
  *
  * @param factors DecisionTree of GaussianFactor shared pointers.
- * @param varyingNormalizers Flag indicating the normalizers are different for
- * each component.
+ * @param logNormalizers Tree of log-normalizers corresponding to each
+ * Gaussian factor in factors.
  * @return GaussianMixtureFactor::Factors
  */
 GaussianMixtureFactor::Factors augment(
-    const GaussianMixtureFactor::Factors &factors, bool varyingNormalizers) {
-  if (!varyingNormalizers) {
-    return factors;
-  }
-
-  // First compute all the sqrt(|2 pi Sigma|) terms
-  auto computeNormalizers = [](const GaussianMixtureFactor::sharedFactor &gf) {
-    auto jf = std::dynamic_pointer_cast<JacobianFactor>(gf);
-    // If we have, say, a Hessian factor, then no need to do anything
-    if (!jf) return 0.0;
-
-    auto model = jf->get_model();
-    // If there is no noise model, there is nothing to do.
-    if (!model) {
-      return 0.0;
-    }
-    // Since noise models are Gaussian, we can get the logDeterminant using the
-    // same trick as in GaussianConditional
-    double logDetR =
-        model->R().diagonal().unaryExpr([](double x) { return log(x); }).sum();
-    double logDeterminantSigma = -2.0 * logDetR;
-
-    size_t n = model->dim();
-    constexpr double log2pi = 1.8378770664093454835606594728112;
-    return n * log2pi + logDeterminantSigma;
-  };
-
-  AlgebraicDecisionTree<Key> log_normalizers =
-      DecisionTree<Key, double>(factors, computeNormalizers);
-
+    const GaussianMixtureFactor::Factors &factors,
+    const AlgebraicDecisionTree<Key> &logNormalizers) {
   // Find the minimum value so we can "proselytize" to positive values.
   // Done because we can't have sqrt of negative numbers.
-  double min_log_normalizer = log_normalizers.min();
-  log_normalizers = log_normalizers.apply(
+  double min_log_normalizer = logNormalizers.min();
+  AlgebraicDecisionTree<Key> log_normalizers = logNormalizers.apply(
       [&min_log_normalizer](double n) { return n - min_log_normalizer; });
 
   // Finally, update the [A|b] matrices.
@@ -82,8 +54,6 @@ GaussianMixtureFactor::Factors augment(
                     const GaussianMixtureFactor::sharedFactor &gf) {
     auto jf = std::dynamic_pointer_cast<JacobianFactor>(gf);
     if (!jf) return gf;
-    // If there is no noise model, there is nothing to do.
-    if (!jf->get_model()) return gf;
     // If the log_normalizer is 0, do nothing
     if (log_normalizers(assignment) == 0.0) return gf;
 
@@ -102,12 +72,11 @@ GaussianMixtureFactor::Factors augment(
 }
 
 /* *******************************************************************************/
-GaussianMixtureFactor::GaussianMixtureFactor(const KeyVector &continuousKeys,
-                                             const DiscreteKeys &discreteKeys,
-                                             const Factors &factors,
-                                             bool varyingNormalizers)
+GaussianMixtureFactor::GaussianMixtureFactor(
+    const KeyVector &continuousKeys, const DiscreteKeys &discreteKeys,
+    const Factors &factors, const AlgebraicDecisionTree<Key> &logNormalizers)
     : Base(continuousKeys, discreteKeys),
-      factors_(augment(factors, varyingNormalizers)) {}
+      factors_(augment(factors, logNormalizers)) {}
 
 /* *******************************************************************************/
 bool GaussianMixtureFactor::equals(const HybridFactor &lf, double tol) const {
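A note on the unchanged tail of augment (the "[A|b]" hunk above), where c_i denotes the i-th component's log-normalizer (notation introduced here only for illustration): appending the square root of the shifted value as an extra row of that component's b vector adds exactly the shifted log-normalizer to its squared norm, which is why the values are first shifted by the minimum so the argument of the square root is nonnegative:

```latex
\left\lVert \begin{bmatrix} b_i \\ \sqrt{c_i - \min_j c_j} \end{bmatrix} \right\rVert^2
  = \lVert b_i \rVert^2 + \left( c_i - \min_j c_j \right).
```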
@@ -194,6 +163,21 @@ double GaussianMixtureFactor::error(const HybridValues &values) const {
   const sharedFactor gf = factors_(values.discrete());
   return gf->error(values.continuous());
 }
+
 /* *******************************************************************************/
+double ComputeLogNormalizer(
+    const noiseModel::Gaussian::shared_ptr &noise_model) {
+  // Since noise models are Gaussian, we can get the logDeterminant using
+  // the same trick as in GaussianConditional
+  double logDetR = noise_model->R()
+                       .diagonal()
+                       .unaryExpr([](double x) { return log(x); })
+                       .sum();
+  double logDeterminantSigma = -2.0 * logDetR;
+
+  size_t n = noise_model->dim();
+  constexpr double log2pi = 1.8378770664093454835606594728112;
+  return n * log2pi + logDeterminantSigma;
+}
 
 }  // namespace gtsam
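For reference, the quantity returned by the new ComputeLogNormalizer helper (previously computed inside augment) is the log of the squared sqrt(|2 pi Sigma|) term mentioned in the removed comment, with the log-determinant obtained from the noise model's (triangular) square-root information matrix R, where Sigma^{-1} = R^T R:

```latex
n \log(2\pi) + \log\lvert\Sigma\rvert
  = \log\!\left( (2\pi)^{n} \lvert\Sigma\rvert \right)
  = 2 \log \sqrt{\lvert 2\pi\Sigma \rvert},
\qquad
\log\lvert\Sigma\rvert = -2 \sum_{i} \log R_{ii}.
```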
Changes to the GaussianMixtureFactor header:

@@ -82,13 +82,14 @@ class GTSAM_EXPORT GaussianMixtureFactor : public HybridFactor {
    * their cardinalities.
    * @param factors The decision tree of Gaussian factors stored as the mixture
    * density.
-   * @param varyingNormalizers Flag indicating factor components have varying
-   * normalizer values.
+   * @param logNormalizers Tree of log-normalizers corresponding to each
+   * Gaussian factor in factors.
    */
   GaussianMixtureFactor(const KeyVector &continuousKeys,
                         const DiscreteKeys &discreteKeys,
                         const Factors &factors,
-                        bool varyingNormalizers = false);
+                        const AlgebraicDecisionTree<Key> &logNormalizers =
+                            AlgebraicDecisionTree<Key>(0.0));
 
   /**
    * @brief Construct a new GaussianMixtureFactor object using a vector of
@@ -97,16 +98,16 @@ class GTSAM_EXPORT GaussianMixtureFactor : public HybridFactor {
    * @param continuousKeys Vector of keys for continuous factors.
    * @param discreteKeys Vector of discrete keys.
    * @param factors Vector of gaussian factor shared pointers.
-   * @param varyingNormalizers Flag indicating factor components have varying
-   * normalizer values.
+   * @param logNormalizers Tree of log-normalizers corresponding to each
+   * Gaussian factor in factors.
    */
   GaussianMixtureFactor(const KeyVector &continuousKeys,
                         const DiscreteKeys &discreteKeys,
                         const std::vector<sharedFactor> &factors,
-                        bool varyingNormalizers = false)
+                        const AlgebraicDecisionTree<Key> &logNormalizers =
+                            AlgebraicDecisionTree<Key>(0.0))
       : GaussianMixtureFactor(continuousKeys, discreteKeys,
-                              Factors(discreteKeys, factors),
-                              varyingNormalizers) {}
+                              Factors(discreteKeys, factors), logNormalizers) {}
 
   /// @}
   /// @name Testable
@@ -178,4 +179,7 @@ template <>
 struct traits<GaussianMixtureFactor> : public Testable<GaussianMixtureFactor> {
 };
 
+double ComputeLogNormalizer(
+    const noiseModel::Gaussian::shared_ptr &noise_model);
+
 }  // namespace gtsam
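A minimal usage sketch of the updated API (the keys, Jacobians, and noise sigmas below are made up for illustration; only the GaussianMixtureFactor constructor and ComputeLogNormalizer come from this commit):

```cpp
#include <gtsam/hybrid/GaussianMixtureFactor.h>
#include <gtsam/inference/Symbol.h>
#include <gtsam/linear/JacobianFactor.h>

#include <memory>
#include <vector>

using namespace gtsam;
using symbol_shorthand::M;
using symbol_shorthand::X;

int main() {
  // One binary mode M(1) selecting between two Gaussian components on X(1).
  DiscreteKey mode(M(1), 2);
  auto model0 = noiseModel::Isotropic::Sigma(2, 0.5);
  auto model1 = noiseModel::Isotropic::Sigma(2, 2.0);

  // Two toy unary Jacobian factors sharing the same [A|b] but different noise.
  auto f0 = std::make_shared<JacobianFactor>(X(1), I_2x2, Vector2::Zero(), model0);
  auto f1 = std::make_shared<JacobianFactor>(X(1), I_2x2, Vector2::Zero(), model1);
  std::vector<GaussianFactor::shared_ptr> components{f0, f1};

  // The caller now computes the per-component log-normalizers up front...
  AlgebraicDecisionTree<Key> logNormalizers(M(1), ComputeLogNormalizer(model0),
                                            ComputeLogNormalizer(model1));

  // ...and passes the tree directly; augment() folds the (shifted) values
  // into each component's [A|b].
  GaussianMixtureFactor mixture({X(1)}, DiscreteKeys(mode), components,
                                logNormalizers);
  return 0;
}
```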