Provide logNormalizers directly to the augment method
commit 79c7c6a8b6 (parent c38756c9f2)
GaussianMixtureFactor.cpp
@@ -35,45 +35,17 @@ namespace gtsam {
  * the `b` vector as an additional row.
  *
  * @param factors DecisionTree of GaussianFactor shared pointers.
- * @param varyingNormalizers Flag indicating the normalizers are different for
- * each component.
+ * @param logNormalizers Tree of log-normalizers corresponding to each
+ * Gaussian factor in factors.
  * @return GaussianMixtureFactor::Factors
  */
 GaussianMixtureFactor::Factors augment(
-    const GaussianMixtureFactor::Factors &factors, bool varyingNormalizers) {
-  if (!varyingNormalizers) {
-    return factors;
-  }
-
-  // First compute all the sqrt(|2 pi Sigma|) terms
-  auto computeNormalizers = [](const GaussianMixtureFactor::sharedFactor &gf) {
-    auto jf = std::dynamic_pointer_cast<JacobianFactor>(gf);
-    // If we have, say, a Hessian factor, then no need to do anything
-    if (!jf) return 0.0;
-
-    auto model = jf->get_model();
-    // If there is no noise model, there is nothing to do.
-    if (!model) {
-      return 0.0;
-    }
-    // Since noise models are Gaussian, we can get the logDeterminant using the
-    // same trick as in GaussianConditional
-    double logDetR =
-        model->R().diagonal().unaryExpr([](double x) { return log(x); }).sum();
-    double logDeterminantSigma = -2.0 * logDetR;
-
-    size_t n = model->dim();
-    constexpr double log2pi = 1.8378770664093454835606594728112;
-    return n * log2pi + logDeterminantSigma;
-  };
-
-  AlgebraicDecisionTree<Key> log_normalizers =
-      DecisionTree<Key, double>(factors, computeNormalizers);
-
+    const GaussianMixtureFactor::Factors &factors,
+    const AlgebraicDecisionTree<Key> &logNormalizers) {
   // Find the minimum value so we can "proselytize" to positive values.
   // Done because we can't have sqrt of negative numbers.
-  double min_log_normalizer = log_normalizers.min();
-  log_normalizers = log_normalizers.apply(
+  double min_log_normalizer = logNormalizers.min();
+  AlgebraicDecisionTree<Key> log_normalizers = logNormalizers.apply(
       [&min_log_normalizer](double n) { return n - min_log_normalizer; });
 
   // Finally, update the [A|b] matrices.
@@ -82,8 +54,6 @@ GaussianMixtureFactor::Factors augment(
                     const GaussianMixtureFactor::sharedFactor &gf) {
     auto jf = std::dynamic_pointer_cast<JacobianFactor>(gf);
     if (!jf) return gf;
-    // If there is no noise model, there is nothing to do.
-    if (!jf->get_model()) return gf;
     // If the log_normalizer is 0, do nothing
     if (log_normalizers(assignment) == 0.0) return gf;
 
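The min-shift above exists because augment() folds each component's constant into the factor as the square root of an extra row appended to [A|b] (see the docstring), and the square root of a negative log-normalizer would be undefined. A minimal self-contained sketch of just that shift, with an illustrative helper name that is not part of this diff:

#include <algorithm>
#include <vector>

// Illustrative: subtracting the common minimum makes every entry >= 0 so
// sqrt() is defined, and shifts every component's error by the same constant.
std::vector<double> shiftToNonNegative(std::vector<double> logNorms) {
  const double mn = *std::min_element(logNorms.begin(), logNorms.end());
  for (double &v : logNorms) v -= mn;
  return logNorms;  // e.g. {2.3, 1.1, 1.9} -> {1.2, 0.0, 0.8}
}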
@@ -102,12 +72,11 @@ GaussianMixtureFactor::Factors augment(
 }
 
 /* *******************************************************************************/
-GaussianMixtureFactor::GaussianMixtureFactor(const KeyVector &continuousKeys,
-                                             const DiscreteKeys &discreteKeys,
-                                             const Factors &factors,
-                                             bool varyingNormalizers)
+GaussianMixtureFactor::GaussianMixtureFactor(
+    const KeyVector &continuousKeys, const DiscreteKeys &discreteKeys,
+    const Factors &factors, const AlgebraicDecisionTree<Key> &logNormalizers)
     : Base(continuousKeys, discreteKeys),
-      factors_(augment(factors, varyingNormalizers)) {}
+      factors_(augment(factors, logNormalizers)) {}
 
 /* *******************************************************************************/
 bool GaussianMixtureFactor::equals(const HybridFactor &lf, double tol) const {
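With the computation hoisted out of augment(), a caller that wants the old varyingNormalizers = true behavior now builds the tree itself. A hedged sketch of such a call site, mirroring the removed lambda and reusing only names visible in this diff:

// Sketch: per-component log-normalizers derived from the factor tree itself.
// Non-Jacobian factors and factors without a noise model contribute 0.0,
// exactly as the removed computeNormalizers lambda did.
AlgebraicDecisionTree<Key> logNormalizers = DecisionTree<Key, double>(
    factors, [](const GaussianMixtureFactor::sharedFactor &gf) {
      auto jf = std::dynamic_pointer_cast<JacobianFactor>(gf);
      if (!jf || !jf->get_model()) return 0.0;
      return ComputeLogNormalizer(jf->get_model());
    });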
@@ -194,6 +163,21 @@ double GaussianMixtureFactor::error(const HybridValues &values) const {
   const sharedFactor gf = factors_(values.discrete());
   return gf->error(values.continuous());
 }
 
+/* *******************************************************************************/
+double ComputeLogNormalizer(
+    const noiseModel::Gaussian::shared_ptr &noise_model) {
+  // Since noise models are Gaussian, we can get the logDeterminant using
+  // the same trick as in GaussianConditional
+  double logDetR = noise_model->R()
+                       .diagonal()
+                       .unaryExpr([](double x) { return log(x); })
+                       .sum();
+  double logDeterminantSigma = -2.0 * logDetR;
+
+  size_t n = noise_model->dim();
+  constexpr double log2pi = 1.8378770664093454835606594728112;
+  return n * log2pi + logDeterminantSigma;
+}
+
 }  // namespace gtsam
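ComputeLogNormalizer returns log|2πΣ| = n·log(2π) + log|Σ|: a Gaussian noise model stores the square-root information matrix R with Σ⁻¹ = RᵀR, so log|Σ| = −2·Σᵢ log Rᵢᵢ, and the constant 1.83787… is log(2π). A hedged sanity check on an isotropic model (the chosen dimension and sigma are illustrative):

// Sketch: for Sigma = sigma^2 * I_n, log|2*pi*Sigma| = n*(log(2*pi) + 2*log(sigma)).
// noiseModel::Isotropic derives from noiseModel::Gaussian, so the call is valid.
auto model = noiseModel::Isotropic::Sigma(3, 0.5);
double expected = 3 * (std::log(2 * M_PI) + 2 * std::log(0.5));
double actual = ComputeLogNormalizer(model);  // matches expected up to rounding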
GaussianMixtureFactor.h
@@ -82,13 +82,14 @@ class GTSAM_EXPORT GaussianMixtureFactor : public HybridFactor {
    * their cardinalities.
    * @param factors The decision tree of Gaussian factors stored as the mixture
    * density.
-   * @param varyingNormalizers Flag indicating factor components have varying
-   * normalizer values.
+   * @param logNormalizers Tree of log-normalizers corresponding to each
+   * Gaussian factor in factors.
    */
   GaussianMixtureFactor(const KeyVector &continuousKeys,
                         const DiscreteKeys &discreteKeys,
                         const Factors &factors,
-                        bool varyingNormalizers = false);
+                        const AlgebraicDecisionTree<Key> &logNormalizers =
+                            AlgebraicDecisionTree<Key>(0.0));
 
   /**
    * @brief Construct a new GaussianMixtureFactor object using a vector of
@@ -97,16 +98,16 @@ class GTSAM_EXPORT GaussianMixtureFactor : public HybridFactor {
    * @param continuousKeys Vector of keys for continuous factors.
    * @param discreteKeys Vector of discrete keys.
    * @param factors Vector of gaussian factor shared pointers.
-   * @param varyingNormalizers Flag indicating factor components have varying
-   * normalizer values.
+   * @param logNormalizers Tree of log-normalizers corresponding to each
+   * Gaussian factor in factors.
    */
   GaussianMixtureFactor(const KeyVector &continuousKeys,
                         const DiscreteKeys &discreteKeys,
                         const std::vector<sharedFactor> &factors,
-                        bool varyingNormalizers = false)
+                        const AlgebraicDecisionTree<Key> &logNormalizers =
+                            AlgebraicDecisionTree<Key>(0.0))
       : GaussianMixtureFactor(continuousKeys, discreteKeys,
-                              Factors(discreteKeys, factors),
-                              varyingNormalizers) {}
+                              Factors(discreteKeys, factors), logNormalizers) {}
 
   /// @}
   /// @name Testable
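A hedged end-to-end sketch of the new API: the component factors f0/f1, their noise models m0/m1, and the symbol_shorthand keys are hypothetical, not part of this change:

using gtsam::symbol_shorthand::M;
using gtsam::symbol_shorthand::X;

// Hypothetical: f0, f1 are GaussianFactor::shared_ptr components over X(1);
// m0, m1 are their noiseModel::Gaussian::shared_ptr noise models.
DiscreteKeys discreteKeys{{M(1), 2}};
std::vector<GaussianFactor::shared_ptr> components{f0, f1};
AlgebraicDecisionTree<Key> logNormalizers(
    discreteKeys,
    std::vector<double>{ComputeLogNormalizer(m0), ComputeLogNormalizer(m1)});
GaussianMixtureFactor gmf({X(1)}, discreteKeys, components, logNormalizers);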
@@ -178,4 +179,7 @@ template <>
 struct traits<GaussianMixtureFactor> : public Testable<GaussianMixtureFactor> {
 };
 
+double ComputeLogNormalizer(
+    const noiseModel::Gaussian::shared_ptr &noise_model);
+
 }  // namespace gtsam