operator returns pairs, extensive switching test
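HybridGaussianFactor::operator() now returns the stored GaussianFactorValuePair (the Gaussian component together with its assignment-specific scalar) instead of stripping the scalar with .first. HybridGaussianConditional::likelihood loses a redundant zero-scalar branch, elimination passes the collected scalar through unmodified, and call sites that only need the component take .first. An extensive TEST(HybridBayesNet, Switching) exercises the new API on the smallest switching network, and the Tiny test now checks product-factor scalars.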
commit e1c0d0e227 (parent 584a71fb94)
@@ -273,13 +273,7 @@ std::shared_ptr<HybridGaussianFactor> HybridGaussianConditional::likelihood(
       [&](const GaussianConditional::shared_ptr& conditional) -> GaussianFactorValuePair {
         const auto likelihood_m = conditional->likelihood(given);
         const double Cgm_Kgcm = conditional->negLogConstant() - negLogConstant_;
-        if (Cgm_Kgcm == 0.0) {
-          return {likelihood_m, 0.0};
-        } else {
-          // Add a constant to the likelihood in case the noise models
-          // are not all equal.
-          return {likelihood_m, Cgm_Kgcm};
-        }
+        return {likelihood_m, Cgm_Kgcm};
       });
   return std::make_shared<HybridGaussianFactor>(discreteParentKeys, likelihoods);
 }
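Why the branch could go: the scalar attached to each component is a difference of negative-log normalization constants, so the `Cgm_Kgcm == 0.0` case already returns exactly what the unconditional return produces. A toy sketch in plain C++ (not GTSAM code; the sigma values are illustrative):

    #include <cassert>
    #include <cmath>

    int main() {
      // negLogConstant of a 1-D Gaussian with standard deviation sigma:
      auto negLogConstant = [](double sigma) {
        return 0.5 * std::log(2.0 * std::acos(-1.0) * sigma * sigma);
      };
      const double mixtureConstant = negLogConstant(0.5);  // shared reference value
      // Per-component scalar, mirroring Cgm_Kgcm above:
      auto scalar = [&](double sigma) { return negLogConstant(sigma) - mixtureConstant; };
      assert(scalar(0.5) == 0.0);  // equal noise models: the scalar vanishes exactly
      assert(scalar(3.0) > 0.0);   // unequal models: a positive constant is carried along
      return 0;
    }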
@@ -187,9 +187,9 @@ void HybridGaussianFactor::print(const std::string& s, const KeyFormatter& forma
 }
 
 /* *******************************************************************************/
-HybridGaussianFactor::sharedFactor HybridGaussianFactor::operator()(
+GaussianFactorValuePair HybridGaussianFactor::operator()(
     const DiscreteValues& assignment) const {
-  return factors_(assignment).first;
+  return factors_(assignment);
 }
 
 /* *******************************************************************************/
@@ -129,7 +129,7 @@ public:
   /// @{
 
   /// Get factor at a given discrete assignment.
-  sharedFactor operator()(const DiscreteValues &assignment) const;
+  GaussianFactorValuePair operator()(const DiscreteValues &assignment) const;
 
   /**
    * @brief Compute error of the HybridGaussianFactor as a tree.
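A consumer's view of the new declaration, as a minimal sketch (assumes GTSAM is on the include path; the pair-vector constructor shape is inferred from this change's API and the tests further down, so treat it as an assumption rather than the definitive signature):

    #include <gtsam/hybrid/HybridGaussianFactor.h>
    #include <gtsam/inference/Symbol.h>
    #include <gtsam/linear/JacobianFactor.h>

    using namespace gtsam;
    using symbol_shorthand::M;
    using symbol_shorthand::X;

    int main() {
      // Two unary components on x0, with an assumed scalar on mode 1:
      auto f0 = std::make_shared<JacobianFactor>(X(0), I_1x1, Vector1(0.0));
      auto f1 = std::make_shared<JacobianFactor>(X(0), I_1x1, Vector1(1.0));
      std::vector<GaussianFactorValuePair> pairs{{f0, 0.0}, {f1, 1.791759}};
      HybridGaussianFactor hgf(DiscreteKey(M(0), 2), pairs);

      // operator() now yields the pair; a structured binding reads naturally:
      auto [factor, scalar] = hgf({{M(0), 1}});
      factor->print("component for mode 1:");  // the Gaussian component
      // Call sites that only need the component take .first, as in the hunks below.
      return 0;
    }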
@@ -81,7 +81,7 @@ static void printFactor(const std::shared_ptr<Factor>& factor,
     if (assignment.empty())
       hgf->print("HybridGaussianFactor:", keyFormatter);
     else
-      hgf->operator()(assignment)->print("HybridGaussianFactor, component:", keyFormatter);
+      hgf->operator()(assignment).first->print("HybridGaussianFactor, component:", keyFormatter);
   } else if (auto gf = std::dynamic_pointer_cast<GaussianFactor>(factor)) {
     factor->print("GaussianFactor:\n", keyFormatter);
 
@@ -329,6 +329,7 @@ static std::shared_ptr<Factor> createHybridGaussianFactor(const ResultTree& elim
       const double negLogK = conditional->negLogConstant();
       hf->constantTerm() += -2.0 * negLogK;
-      return {factor, negLogK};
+      return {factor, scalar + negLogK};
     } else if (!conditional && !factor) {
       return {nullptr, 0.0};  // TODO(frank): or should this be infinity?
     } else {
@@ -355,7 +356,9 @@ HybridGaussianFactorGraph::eliminate(const Ordering& keys) const {
   DiscreteKeys discreteSeparator = GetDiscreteKeys(*this);
 
   // Collect all the factors to create a set of Gaussian factor graphs in a
-  // decision tree indexed by all discrete keys involved.
+  // decision tree indexed by all discrete keys involved. Just like any hybrid factor, every
+  // assignment also has a scalar error, in this case the sum of all errors in the graph. This error
+  // is assignment-specific and accounts for any difference in noise models used.
   HybridGaussianProductFactor productFactor = collectProductFactor();
 
   // Convert factor graphs with a nullptr to an empty factor graph.
@@ -374,11 +377,12 @@ HybridGaussianFactorGraph::eliminate(const Ordering& keys) const {
     }
 
     // Expensive elimination of product factor.
-    auto result = EliminatePreferCholesky(graph, keys);
+    auto result = EliminatePreferCholesky(graph, keys);  /// <<<<<< MOST COMPUTE IS HERE
 
     // Record whether there any continuous variables left
     someContinuousLeft |= !result.second->empty();
 
+    // We pass on the scalar unmodified.
     return {result, scalar};
   };
 
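The flow the two hunks above describe, sketched end to end. Assumptions are flagged inline: the pair-vector constructor mirrors the one used earlier, and the scalar 1.791759 is an arbitrary stand-in for a negLogConstant difference, not a value computed by this code:

    #include <gtsam/hybrid/HybridGaussianFactor.h>
    #include <gtsam/hybrid/HybridGaussianFactorGraph.h>
    #include <gtsam/inference/Symbol.h>
    #include <gtsam/linear/JacobianFactor.h>
    #include <iostream>

    using namespace gtsam;
    using symbol_shorthand::M;
    using symbol_shorthand::X;

    int main() {
      // A prior on x0 plus a two-mode factor between x0 and x1:
      HybridGaussianFactorGraph factors_x0;
      factors_x0.emplace_shared<JacobianFactor>(X(0), I_1x1, Vector1(0.0));
      auto f0 = std::make_shared<JacobianFactor>(X(0), -I_1x1, X(1), I_1x1, Vector1(0.0));
      auto f1 = std::make_shared<JacobianFactor>(X(0), -I_1x1, X(1), I_1x1, Vector1(1.0));
      std::vector<GaussianFactorValuePair> pairs{{f0, 0.0}, {f1, 1.791759}};
      factors_x0.emplace_shared<HybridGaussianFactor>(DiscreteKey(M(0), 2), pairs);

      // collectProductFactor(): per assignment, {GaussianFactorGraph, scalar},
      // with the scalar summed over the components of that assignment.
      auto productFactor = factors_x0.collectProductFactor();
      std::cout << productFactor({{M(0), 1}}).second << "\n";  // 1.791759

      // eliminate() runs EliminatePreferCholesky per assignment and, per the
      // comment added above, passes the scalar through unmodified.
      auto [conditional, remaining] = factors_x0.eliminate({X(0)});
      return 0;
    }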
@@ -548,7 +552,7 @@ GaussianFactorGraph HybridGaussianFactorGraph::choose(const DiscreteValues& assi
     } else if (auto gc = std::dynamic_pointer_cast<GaussianConditional>(f)) {
       gfg.push_back(gf);
     } else if (auto hgf = std::dynamic_pointer_cast<HybridGaussianFactor>(f)) {
-      gfg.push_back((*hgf)(assignment));
+      gfg.push_back((*hgf)(assignment).first);
     } else if (auto hgc = std::dynamic_pointer_cast<HybridGaussianConditional>(f)) {
       gfg.push_back((*hgc)(assignment));
     } else if (auto hc = std::dynamic_pointer_cast<HybridConditional>(f)) {
@@ -18,9 +18,12 @@
  * @date    December 2021
  */
 
 #include <gtsam/base/Testable.h>
+#include <gtsam/discrete/DiscreteFactor.h>
 #include <gtsam/hybrid/HybridBayesNet.h>
 #include <gtsam/hybrid/HybridBayesTree.h>
 #include <gtsam/hybrid/HybridConditional.h>
+#include <gtsam/hybrid/HybridGaussianFactorGraph.h>
+#include <gtsam/nonlinear/NonlinearFactorGraph.h>
 
 #include "Switching.h"
@@ -28,6 +31,7 @@
 
 // Include for test suite
 #include <CppUnitLite/TestHarness.h>
+#include <memory>
 
 using namespace std;
 using namespace gtsam;
@@ -113,7 +117,7 @@ TEST(HybridBayesNet, EvaluatePureDiscrete) {
 }
 
 /* ****************************************************************************/
-// Test creation of a tiny hybrid Bayes net.
+// Test API for a tiny hybrid Bayes net.
 TEST(HybridBayesNet, Tiny) {
   auto bayesNet = tiny::createHybridBayesNet();  // P(z|x,mode)P(x)P(mode)
   EXPECT_LONGS_EQUAL(3, bayesNet.size());
@@ -164,10 +168,8 @@ TEST(HybridBayesNet, Tiny) {
   EXPECT(!pruned.equals(bayesNet));
 
   // error
-  const double error0 = chosen0.error(vv) + gc0->negLogConstant() -
-                        px->negLogConstant() - log(0.4);
-  const double error1 = chosen1.error(vv) + gc1->negLogConstant() -
-                        px->negLogConstant() - log(0.6);
+  const double error0 = chosen0.error(vv) + gc0->negLogConstant() - px->negLogConstant() - log(0.4);
+  const double error1 = chosen1.error(vv) + gc1->negLogConstant() - px->negLogConstant() - log(0.6);
   // print errors:
   EXPECT_DOUBLES_EQUAL(error0, bayesNet.error(zero), 1e-9);
   EXPECT_DOUBLES_EQUAL(error1, bayesNet.error(one), 1e-9);
@@ -188,6 +190,23 @@ TEST(HybridBayesNet, Tiny) {
   // toFactorGraph
   auto fg = bayesNet.toFactorGraph({{Z(0), Vector1(5.0)}});
   EXPECT_LONGS_EQUAL(3, fg.size());
+  GTSAM_PRINT(fg);
+
+  // Create the product factor for eliminating x0:
+  HybridGaussianFactorGraph factors_x0;
+  factors_x0.push_back(fg.at(0));
+  factors_x0.push_back(fg.at(1));
+  auto productFactor = factors_x0.collectProductFactor();
+
+  // Check that scalars are 0 and 1.79
+  EXPECT_DOUBLES_EQUAL(0.0, productFactor({{M(0), 0}}).second, 1e-9);
+  EXPECT_DOUBLES_EQUAL(1.791759, productFactor({{M(0), 1}}).second, 1e-5);
+
+  // Call eliminate and check scalar:
+  auto result = factors_x0.eliminate({X(0)});
+  GTSAM_PRINT(*result.first);
+  auto df = std::dynamic_pointer_cast<DecisionTreeFactor>(result.second);
+  GTSAM_PRINT(df->errorTree());
+
   // Check that the ratio of probPrime to evaluate is the same for all modes.
   std::vector<double> ratio(2);
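A quick sanity check on the regression value above: 1.791759 is ln 6 to six decimals, which is exactly what a 6x ratio between the two measurement sigmas would contribute as a negLogConstant difference (the tiny example's sigmas are not shown in this diff, so the ratio is inferred rather than read off the source):

    #include <cmath>
    #include <cstdio>

    int main() {
      std::printf("%.6f\n", std::log(6.0));  // prints 1.791759
      return 0;
    }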
@@ -209,17 +228,13 @@
 /* ****************************************************************************/
 // Hybrid Bayes net P(X0|X1) P(X1|Asia) P(Asia).
 namespace different_sigmas {
-const auto gc = GaussianConditional::sharedMeanAndStddev(X(0), 2 * I_1x1, X(1),
-                                                         Vector1(-4.0), 5.0);
+const auto gc = GaussianConditional::sharedMeanAndStddev(X(0), 2 * I_1x1, X(1), Vector1(-4.0), 5.0);
 
-const std::vector<std::pair<Vector, double>> parms{{Vector1(5), 2.0},
-                                                   {Vector1(2), 3.0}};
+const std::vector<std::pair<Vector, double>> parms{{Vector1(5), 2.0}, {Vector1(2), 3.0}};
 const auto hgc = std::make_shared<HybridGaussianConditional>(Asia, X(1), parms);
 
 const auto prior = std::make_shared<DiscreteConditional>(Asia, "99/1");
-auto wrap = [](const auto& c) {
-  return std::make_shared<HybridConditional>(c);
-};
+auto wrap = [](const auto& c) { return std::make_shared<HybridConditional>(c); };
 const HybridBayesNet bayesNet{wrap(gc), wrap(hgc), wrap(prior)};
 
 // Create values at which to evaluate.
@@ -233,8 +248,8 @@ TEST(HybridBayesNet, evaluateHybrid) {
 
   const double conditionalProbability = gc->evaluate(values.continuous());
   const double mixtureProbability = hgc->evaluate(values);
-  EXPECT_DOUBLES_EQUAL(conditionalProbability * mixtureProbability * 0.99,
-                       bayesNet.evaluate(values), 1e-9);
+  EXPECT_DOUBLES_EQUAL(
+      conditionalProbability * mixtureProbability * 0.99, bayesNet.evaluate(values), 1e-9);
 }
 
 /* ****************************************************************************/
@@ -256,14 +271,10 @@ TEST(HybridBayesNet, Choose) {
 
   EXPECT_LONGS_EQUAL(4, gbn.size());
 
-  EXPECT(assert_equal(*(*hybridBayesNet->at(0)->asHybrid())(assignment),
-                      *gbn.at(0)));
-  EXPECT(assert_equal(*(*hybridBayesNet->at(1)->asHybrid())(assignment),
-                      *gbn.at(1)));
-  EXPECT(assert_equal(*(*hybridBayesNet->at(2)->asHybrid())(assignment),
-                      *gbn.at(2)));
-  EXPECT(assert_equal(*(*hybridBayesNet->at(3)->asHybrid())(assignment),
-                      *gbn.at(3)));
+  EXPECT(assert_equal(*(*hybridBayesNet->at(0)->asHybrid())(assignment), *gbn.at(0)));
+  EXPECT(assert_equal(*(*hybridBayesNet->at(1)->asHybrid())(assignment), *gbn.at(1)));
+  EXPECT(assert_equal(*(*hybridBayesNet->at(2)->asHybrid())(assignment), *gbn.at(2)));
+  EXPECT(assert_equal(*(*hybridBayesNet->at(3)->asHybrid())(assignment), *gbn.at(3)));
 }
 
 /* ****************************************************************************/
@@ -300,8 +311,7 @@ TEST(HybridBayesNet, OptimizeAssignment) {
 TEST(HybridBayesNet, Optimize) {
   Switching s(4, 1.0, 0.1, {0, 1, 2, 3}, "1/1 1/1");
 
-  HybridBayesNet::shared_ptr hybridBayesNet =
-      s.linearizedFactorGraph.eliminateSequential();
+  HybridBayesNet::shared_ptr hybridBayesNet = s.linearizedFactorGraph.eliminateSequential();
 
   HybridValues delta = hybridBayesNet->optimize();
 
@@ -328,8 +338,7 @@ TEST(HybridBayesNet, Pruning) {
   // ϕ(x0) ϕ(x0,x1,m0) ϕ(x1,x2,m1) ϕ(x0;z0) ϕ(x1;z1) ϕ(x2;z2) ϕ(m0) ϕ(m0,m1)
   Switching s(3);
 
-  HybridBayesNet::shared_ptr posterior =
-      s.linearizedFactorGraph.eliminateSequential();
+  HybridBayesNet::shared_ptr posterior = s.linearizedFactorGraph.eliminateSequential();
   EXPECT_LONGS_EQUAL(5, posterior->size());
 
   // Optimize
@@ -355,12 +364,9 @@ TEST(HybridBayesNet, Pruning) {
   logProbability += posterior->at(0)->asHybrid()->logProbability(hybridValues);
   logProbability += posterior->at(1)->asHybrid()->logProbability(hybridValues);
   logProbability += posterior->at(2)->asHybrid()->logProbability(hybridValues);
-  logProbability +=
-      posterior->at(3)->asDiscrete()->logProbability(hybridValues);
-  logProbability +=
-      posterior->at(4)->asDiscrete()->logProbability(hybridValues);
-  EXPECT_DOUBLES_EQUAL(logProbability, posterior->logProbability(hybridValues),
-                       1e-9);
+  logProbability += posterior->at(3)->asDiscrete()->logProbability(hybridValues);
+  logProbability += posterior->at(4)->asDiscrete()->logProbability(hybridValues);
+  EXPECT_DOUBLES_EQUAL(logProbability, posterior->logProbability(hybridValues), 1e-9);
 
   // Check agreement with discrete posterior
   // double density = exp(logProbability);
@@ -381,8 +387,7 @@ TEST(HybridBayesNet, Pruning) {
 TEST(HybridBayesNet, Prune) {
   Switching s(4);
 
-  HybridBayesNet::shared_ptr posterior =
-      s.linearizedFactorGraph.eliminateSequential();
+  HybridBayesNet::shared_ptr posterior = s.linearizedFactorGraph.eliminateSequential();
   EXPECT_LONGS_EQUAL(7, posterior->size());
 
   HybridValues delta = posterior->optimize();
| TEST(HybridBayesNet, UpdateDiscreteConditionals) { | ||||
|   Switching s(4); | ||||
| 
 | ||||
|   HybridBayesNet::shared_ptr posterior = | ||||
|       s.linearizedFactorGraph.eliminateSequential(); | ||||
|   HybridBayesNet::shared_ptr posterior = s.linearizedFactorGraph.eliminateSequential(); | ||||
|   EXPECT_LONGS_EQUAL(7, posterior->size()); | ||||
| 
 | ||||
|   DiscreteConditional joint; | ||||
|  | @ -412,8 +416,7 @@ TEST(HybridBayesNet, UpdateDiscreteConditionals) { | |||
|   auto prunedDecisionTree = joint.prune(maxNrLeaves); | ||||
| 
 | ||||
| #ifdef GTSAM_DT_MERGING | ||||
|   EXPECT_LONGS_EQUAL(maxNrLeaves + 2 /*2 zero leaves*/, | ||||
|                      prunedDecisionTree.nrLeaves()); | ||||
|   EXPECT_LONGS_EQUAL(maxNrLeaves + 2 /*2 zero leaves*/, prunedDecisionTree.nrLeaves()); | ||||
| #else | ||||
|   EXPECT_LONGS_EQUAL(8 /*full tree*/, prunedDecisionTree.nrLeaves()); | ||||
| #endif | ||||
@@ -421,16 +424,14 @@ TEST(HybridBayesNet, UpdateDiscreteConditionals) {
   // regression
   // NOTE(Frank): I had to include *three* non-zeroes here now.
   DecisionTreeFactor::ADT potentials(
-      s.modes,
-      std::vector<double>{0, 0, 0, 0.28739288, 0, 0.43106901, 0, 0.2815381});
+      s.modes, std::vector<double>{0, 0, 0, 0.28739288, 0, 0.43106901, 0, 0.2815381});
   DiscreteConditional expectedConditional(3, s.modes, potentials);
 
   // Prune!
   auto pruned = posterior->prune(maxNrLeaves);
 
   // Functor to verify values against the expectedConditional
-  auto checker = [&](const Assignment<Key>& assignment,
-                     double probability) -> double {
+  auto checker = [&](const Assignment<Key>& assignment, double probability) -> double {
     // typecast so we can use this to get probability value
     DiscreteValues choices(assignment);
     if (prunedDecisionTree(choices) == 0) {
@@ -445,8 +446,7 @@ TEST(HybridBayesNet, UpdateDiscreteConditionals) {
   CHECK(pruned.at(0)->asDiscrete());
   auto pruned_discrete_conditionals = pruned.at(0)->asDiscrete();
   auto discrete_conditional_tree =
-      std::dynamic_pointer_cast<DecisionTreeFactor::ADT>(
-          pruned_discrete_conditionals);
+      std::dynamic_pointer_cast<DecisionTreeFactor::ADT>(pruned_discrete_conditionals);
 
   // The checker functor verifies the values for us.
   discrete_conditional_tree->apply(checker);
@@ -460,13 +460,10 @@ TEST(HybridBayesNet, Sampling) {
   auto noise_model = noiseModel::Diagonal::Sigmas(Vector1(1.0));
   nfg.emplace_shared<PriorFactor<double>>(X(0), 0.0, noise_model);
 
-  auto zero_motion =
-      std::make_shared<BetweenFactor<double>>(X(0), X(1), 0, noise_model);
-  auto one_motion =
-      std::make_shared<BetweenFactor<double>>(X(0), X(1), 1, noise_model);
+  auto zero_motion = std::make_shared<BetweenFactor<double>>(X(0), X(1), 0, noise_model);
+  auto one_motion = std::make_shared<BetweenFactor<double>>(X(0), X(1), 1, noise_model);
   nfg.emplace_shared<HybridNonlinearFactor>(
-      DiscreteKey(M(0), 2),
-      std::vector<NoiseModelFactor::shared_ptr>{zero_motion, one_motion});
+      DiscreteKey(M(0), 2), std::vector<NoiseModelFactor::shared_ptr>{zero_motion, one_motion});
 
   DiscreteKey mode(M(0), 2);
   nfg.emplace_shared<DiscreteDistribution>(mode, "1/1");
@@ -538,18 +535,17 @@ TEST(HybridBayesNet, ErrorTreeWithConditional) {
   hbn.emplace_shared<GaussianConditional>(x0, Vector1(0.0), I_1x1, prior_model);
 
   // Add measurement P(z0 | x0)
-  hbn.emplace_shared<GaussianConditional>(z0, Vector1(0.0), -I_1x1, x0, I_1x1,
-                                          measurement_model);
+  hbn.emplace_shared<GaussianConditional>(z0, Vector1(0.0), -I_1x1, x0, I_1x1, measurement_model);
 
   // Add hybrid motion model
   double mu = 0.0;
   double sigma0 = 1e2, sigma1 = 1e-2;
   auto model0 = noiseModel::Isotropic::Sigma(1, sigma0);
   auto model1 = noiseModel::Isotropic::Sigma(1, sigma1);
-  auto c0 = make_shared<GaussianConditional>(f01, Vector1(mu), I_1x1, x1, I_1x1,
-                                             x0, -I_1x1, model0),
-       c1 = make_shared<GaussianConditional>(f01, Vector1(mu), I_1x1, x1, I_1x1,
-                                             x0, -I_1x1, model1);
+  auto c0 =
+           make_shared<GaussianConditional>(f01, Vector1(mu), I_1x1, x1, I_1x1, x0, -I_1x1, model0),
+       c1 =
+           make_shared<GaussianConditional>(f01, Vector1(mu), I_1x1, x1, I_1x1, x0, -I_1x1, model1);
   DiscreteKey m1(M(2), 2);
   hbn.emplace_shared<HybridGaussianConditional>(m1, std::vector{c0, c1});
 
@@ -217,7 +217,7 @@ TEST(HybridGaussianConditional, Likelihood2) {
   // Check the detailed JacobianFactor calculation for mode==1.
   {
     // We have a JacobianFactor
-    const auto gf1 = (*likelihood)(assignment1);
+    const auto [gf1, _] = (*likelihood)(assignment1);
     const auto jf1 = std::dynamic_pointer_cast<JacobianFactor>(gf1);
     CHECK(jf1);
 
@@ -165,8 +165,119 @@ TEST(HybridGaussianFactorGraph, eliminateFullSequentialSimple) {
   EXPECT_LONGS_EQUAL(4, result->size());
 }
 
-/*
-****************************************************************************/
+/* ************************************************************************* */
+// Test API for the smallest switching network.
+// None of these are regression tests.
+TEST(HybridBayesNet, Switching) {
+  const double betweenSigma = 0.3, priorSigma = 0.1;
+  Switching s(2, betweenSigma, priorSigma);
+  const HybridGaussianFactorGraph& graph = s.linearizedFactorGraph;
+  EXPECT_LONGS_EQUAL(4, graph.size());
+
+  // Create some continuous and discrete values
+  VectorValues continuousValues{{X(0), Vector1(0.1)}, {X(1), Vector1(1.2)}};
+  DiscreteValues modeZero{{M(0), 0}}, modeOne{{M(0), 1}};
+
+  // Get the hybrid gaussian factor and check it is as expected
+  auto hgf = std::dynamic_pointer_cast<HybridGaussianFactor>(graph.at(1));
+  CHECK(hgf);
+
+  // Get factors and scalars for both modes
+  auto [factor0, scalar0] = (*hgf)(modeZero);
+  auto [factor1, scalar1] = (*hgf)(modeOne);
+  CHECK(factor0);
+  CHECK(factor1);
+
+  // Check scalars against negLogConstant of noise model
+  auto betweenModel = noiseModel::Isotropic::Sigma(1, betweenSigma);
+  EXPECT_DOUBLES_EQUAL(betweenModel->negLogConstant(), scalar0, 1e-9);
+  EXPECT_DOUBLES_EQUAL(betweenModel->negLogConstant(), scalar1, 1e-9);
+
+  // Check error for M(0) = 0
+  HybridValues values0{continuousValues, modeZero};
+  double expectedError0 = 0;
+  for (const auto& factor : graph) expectedError0 += factor->error(values0);
+  EXPECT_DOUBLES_EQUAL(expectedError0, graph.error(values0), 1e-5);
+
+  // Check error for M(0) = 1
+  HybridValues values1{continuousValues, modeOne};
+  double expectedError1 = 0;
+  for (const auto& factor : graph) expectedError1 += factor->error(values1);
+  EXPECT_DOUBLES_EQUAL(expectedError1, graph.error(values1), 1e-5);
+
+  // Check errorTree
+  AlgebraicDecisionTree<Key> actualErrors = graph.errorTree(continuousValues);
+  // Create expected error tree
+  AlgebraicDecisionTree<Key> expectedErrors(M(0), expectedError0, expectedError1);
+
+  // Check that the actual error tree matches the expected one
+  EXPECT(assert_equal(expectedErrors, actualErrors, 1e-5));
+
+  // Check probPrime
+  double probPrime0 = graph.probPrime(values0);
+  EXPECT_DOUBLES_EQUAL(std::exp(-expectedError0), probPrime0, 1e-5);
+
+  double probPrime1 = graph.probPrime(values1);
+  EXPECT_DOUBLES_EQUAL(std::exp(-expectedError1), probPrime1, 1e-5);
+
+  // Check discretePosterior
+  AlgebraicDecisionTree<Key> posterior = graph.discretePosterior(continuousValues);
+  double sum = probPrime0 + probPrime1;
+  AlgebraicDecisionTree<Key> expectedPosterior(M(0), probPrime0 / sum, probPrime1 / sum);
+  EXPECT(assert_equal(expectedPosterior, posterior, 1e-5));
+
+  // Make the clique of factors connected to x0:
+  HybridGaussianFactorGraph factors_x0;
+  factors_x0.push_back(graph.at(0));
+  factors_x0.push_back(hgf);
+
+  // Test collectProductFactor
+  auto productFactor = factors_x0.collectProductFactor();
+
+  // For M(0) = 0
+  auto [gaussianFactor0, actualScalar0] = productFactor(modeZero);
+  EXPECT(gaussianFactor0.size() == 2);
+  EXPECT_DOUBLES_EQUAL((*hgf)(modeZero).second, actualScalar0, 1e-5);
+
+  // For M(0) = 1
+  auto [gaussianFactor1, actualScalar1] = productFactor(modeOne);
+  EXPECT(gaussianFactor1.size() == 2);
+  EXPECT_DOUBLES_EQUAL((*hgf)(modeOne).second, actualScalar1, 1e-5);
+
+  // Test eliminate
+  Ordering ordering{X(0)};
+  auto [conditional, factor] = factors_x0.eliminate(ordering);
+
+  // Check the conditional
+  CHECK(conditional);
+  EXPECT(conditional->isHybrid());
+  auto hybridConditional = conditional->asHybrid();
+  CHECK(hybridConditional);
+  EXPECT_LONGS_EQUAL(1, hybridConditional->nrFrontals());  // x0
+  EXPECT_LONGS_EQUAL(2, hybridConditional->nrParents());   // x1, m0
+
+  // Check the remaining factor
+  EXPECT(factor);
+  EXPECT(std::dynamic_pointer_cast<HybridGaussianFactor>(factor));
+  auto hybridFactor = std::dynamic_pointer_cast<HybridGaussianFactor>(factor);
+  EXPECT_LONGS_EQUAL(2, hybridFactor->keys().size());  // x1, m0
+
+  // Check that the conditional and remaining factor are consistent for both modes
+  for (auto&& mode : {modeZero, modeOne}) {
+    auto gc = (*hybridConditional)(mode);
+    auto gf = (*hybridFactor)(mode);
+
+    // The error of the original factors should equal the sum of errors of the conditional and
+    // remaining factor, modulo the normalization constant of the conditional.
+    double originalError = factors_x0.error({continuousValues, mode});
+    EXPECT_DOUBLES_EQUAL(
+        originalError,
+        gc->negLogConstant() + gc->error(continuousValues) + gf.first->error(continuousValues),
+        1e-9);
+  }
+}
+
+/* ************************************************************************* */
 // Select a particular continuous factor graph given a discrete assignment
 TEST(HybridGaussianFactorGraph, DiscreteSelection) {
   Switching s(3);
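The mode-consistency loop at the end of the new test pins down the invariant elimination must preserve for every discrete assignment m: the clique error splits into the conditional's normalization constant, the conditional's error, and the remaining factor's error. In informal notation (C(m) is gc->negLogConstant() for mode m):

    \operatorname{err}_{\text{graph}}(x; m) \;=\; C(m)
        \;+\; \operatorname{err}_{P(x_0 \mid x_1,\, m)}(x)
        \;+\; \operatorname{err}_{\phi(x_1;\, m)}(x)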
@@ -410,12 +521,12 @@ TEST(HybridGaussianFactorGraph, collectProductFactor) {
 
   // Expected decision tree with two factor graphs:
   // f(x0;mode=0)P(x0)
-  GaussianFactorGraph expectedFG0{(*hybrid)(d0), prior};
+  GaussianFactorGraph expectedFG0{(*hybrid)(d0).first, prior};
   EXPECT(assert_equal(expectedFG0, actual(d0).first, 1e-5));
   EXPECT(assert_equal(0.0, actual(d0).second, 1e-5));
 
   // f(x0;mode=1)P(x0)
-  GaussianFactorGraph expectedFG1{(*hybrid)(d1), prior};
+  GaussianFactorGraph expectedFG1{(*hybrid)(d1).first, prior};
   EXPECT(assert_equal(expectedFG1, actual(d1).first, 1e-5));
   EXPECT(assert_equal(1.79176, actual(d1).second, 1e-5));
 }