better naming

commit 9b3176e5ef (parent 92b829dd55)
@@ -51,14 +51,14 @@ HybridGaussianConditional::HybridGaussianConditional(
                  discreteParents, GetFactorValuePairs(conditionals)),
       BaseConditional(continuousFrontals.size()),
       conditionals_(conditionals) {
-  // Calculate logConstant_ as the minimum of the negative-log normalizers of
+  // Calculate negLogConstant_ as the minimum of the negative-log normalizers of
   // the conditionals, by visiting the decision tree:
-  logConstant_ = std::numeric_limits<double>::infinity();
+  negLogConstant_ = std::numeric_limits<double>::infinity();
   conditionals_.visit(
       [this](const GaussianConditional::shared_ptr &conditional) {
         if (conditional) {
-          this->logConstant_ =
-              std::min(this->logConstant_, conditional->negLogConstant());
+          this->negLogConstant_ =
+              std::min(this->negLogConstant_, conditional->negLogConstant());
         }
       });
 }
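
Note: the quantity being renamed is the smallest negative-log normalizer over all leaves of the decision tree. A minimal standalone sketch of that computation, assuming 1-D Gaussians with standard deviation sigma (the helper names below are hypothetical, not GTSAM API):

    #include <algorithm>
    #include <cmath>
    #include <limits>
    #include <vector>

    // Negative log of the Gaussian normalization constant, log(sqrt(|2*pi*Sigma|)),
    // specialized here to a 1-D Gaussian with standard deviation sigma.
    double negLogNormalizer(double sigma) {
      const double kPi = 3.141592653589793;
      return 0.5 * std::log(2.0 * kPi * sigma * sigma);
    }

    // Mirror of the visit() above: take the minimum over all modes so that every
    // per-mode excess (negLogConstant_m - minimum) is nonnegative.
    double minNegLogConstant(const std::vector<double>& sigmas) {
      double negLogConstant = std::numeric_limits<double>::infinity();
      for (double sigma : sigmas)
        negLogConstant = std::min(negLogConstant, negLogNormalizer(sigma));
      return negLogConstant;
    }
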
@@ -84,7 +84,7 @@ GaussianFactorGraphTree HybridGaussianConditional::asGaussianFactorGraphTree()
   auto wrap = [this](const GaussianConditional::shared_ptr &gc) {
     // First check if conditional has not been pruned
     if (gc) {
-      const double Cgm_Kgcm = gc->negLogConstant() - this->logConstant_;
+      const double Cgm_Kgcm = gc->negLogConstant() - this->negLogConstant_;
       // If there is a difference in the covariances, we need to account for
       // that since the error is dependent on the mode.
       if (Cgm_Kgcm > 0.0) {
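
Note, using the notation of the header comment further down (log(√|2πΣ|)); the formula is a paraphrase, not text from the PR: because negLogConstant_ is the minimum over modes,

    Cgm_Kgcm = log(√|2πΣ_m|) − min over m' of log(√|2πΣ_m'|) ≥ 0,

so it is zero for the mode(s) with the smallest normalizer and positive only for modes whose normalizer exceeds that minimum, which is why the correction is applied only in the Cgm_Kgcm > 0.0 branch.
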
@@ -155,8 +155,7 @@ void HybridGaussianConditional::print(const std::string &s,
     std::cout << "(" << formatter(dk.first) << ", " << dk.second << "), ";
   }
   std::cout << std::endl
-            << " logNormalizationConstant: " << -negLogConstant()
-            << std::endl
+            << " logNormalizationConstant: " << -negLogConstant() << std::endl
             << std::endl;
   conditionals_.print(
       "", [&](Key k) { return formatter(k); },
@@ -214,7 +213,7 @@ std::shared_ptr<HybridGaussianFactor> HybridGaussianConditional::likelihood(
       [&](const GaussianConditional::shared_ptr &conditional)
           -> GaussianFactorValuePair {
         const auto likelihood_m = conditional->likelihood(given);
-        const double Cgm_Kgcm = conditional->negLogConstant() - logConstant_;
+        const double Cgm_Kgcm = conditional->negLogConstant() - negLogConstant_;
         if (Cgm_Kgcm == 0.0) {
           return {likelihood_m, 0.0};
         } else {
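
The same invariant makes the equality check above exact: for the minimizing mode the subtraction is identically zero, and the offset stored next to the likelihood factor is never negative. A tiny sketch (FactorValuePair and wrapWithOffset are hypothetical stand-ins, not the GTSAM types):

    #include <cassert>
    #include <utility>

    // Stand-in for a (factor, extra-constant) pair: the double is the constant
    // added to that mode's negative-log error.
    using FactorValuePair = std::pair<int, double>;

    FactorValuePair wrapWithOffset(int factorId, double negLogConstant_m,
                                   double minNegLogConstant) {
      const double offset = negLogConstant_m - minNegLogConstant;
      assert(offset >= 0.0);  // holds because the minimum is taken over all modes
      return {factorId, offset};
    }
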
@@ -66,7 +66,7 @@ class GTSAM_EXPORT HybridGaussianConditional
   Conditionals conditionals_;  ///< a decision tree of Gaussian conditionals.
   ///< Negative-log of the normalization constant (log(\sqrt(|2πΣ|))).
   ///< Take advantage of the neg-log space so everything is a minimization
-  double logConstant_;
+  double negLogConstant_;

   /**
    * @brief Convert a HybridGaussianConditional of conditionals into
@@ -158,7 +158,7 @@ class GTSAM_EXPORT HybridGaussianConditional
    *
    * @return double
    */
-  inline double negLogConstant() const override { return logConstant_; }
+  inline double negLogConstant() const override { return negLogConstant_; }

   /**
    * Create a likelihood factor for a hybrid Gaussian conditional,
@@ -233,13 +233,13 @@ continuousElimination(const HybridGaussianFactorGraph &factors,

 /* ************************************************************************ */
 /**
- * @brief Exponentiate (not necessarily normalized) log-values, normalize, and
- * then return as AlgebraicDecisionTree<Key>.
+ * @brief Exponentiate (not necessarily normalized) negative log-values,
+ * normalize, and then return as AlgebraicDecisionTree<Key>.
  *
  * @param logValues DecisionTree of (unnormalized) log values.
  * @return AlgebraicDecisionTree<Key>
  */
-static AlgebraicDecisionTree<Key> probabilitiesFromLogValues(
+static AlgebraicDecisionTree<Key> probabilitiesFromNegativeLogValues(
     const AlgebraicDecisionTree<Key> &logValues) {
   // Perform normalization
   double min_log = logValues.min();
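
For reference, a minimal standalone sketch of the exponentiate-and-normalize step described in the updated @brief, using a plain std::vector instead of AlgebraicDecisionTree (names are mine; shifting by the minimum keeps exp() from underflowing):

    #include <algorithm>
    #include <cmath>
    #include <vector>

    // Convert unnormalized negative log-values v_i into probabilities
    // p_i proportional to exp(-v_i); assumes v is non-empty.
    std::vector<double> probabilitiesFromNegLogValues(std::vector<double> v) {
      const double vmin = *std::min_element(v.begin(), v.end());
      double sum = 0.0;
      for (double& x : v) {
        x = std::exp(-(x - vmin));  // in (0, 1]; the minimizing entry maps to 1
        sum += x;
      }
      for (double& x : v) x /= sum;  // normalize so the probabilities sum to one
      return v;
    }
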
@@ -271,7 +271,7 @@ discreteElimination(const HybridGaussianFactorGraph &factors,
           DecisionTree<Key, double>(gmf->factors(), logProbability);

       AlgebraicDecisionTree<Key> probabilities =
-          probabilitiesFromLogValues(logProbabilities);
+          probabilitiesFromNegativeLogValues(logProbabilities);
       dfg.emplace_shared<DecisionTreeFactor>(gmf->discreteKeys(),
                                              probabilities);

@@ -337,7 +337,7 @@ static std::shared_ptr<Factor> createDiscreteFactor(
   AlgebraicDecisionTree<Key> negLogProbabilities(
       DecisionTree<Key, double>(eliminationResults, negLogProbability));
   AlgebraicDecisionTree<Key> probabilities =
-      probabilitiesFromLogValues(negLogProbabilities);
+      probabilitiesFromNegativeLogValues(negLogProbabilities);

   return std::make_shared<DecisionTreeFactor>(discreteSeparator, probabilities);
 }