Merge pull request #1319 from borglab/hybrid/tests
commit 6466b03b5c
gtsam/hybrid/GaussianMixture.cpp
@@ -103,7 +103,7 @@ bool GaussianMixture::equals(const HybridFactor &lf, double tol) const {
 /* *******************************************************************************/
 void GaussianMixture::print(const std::string &s,
                             const KeyFormatter &formatter) const {
-  std::cout << s;
+  std::cout << (s.empty() ? "" : s + "\n");
   if (isContinuous()) std::cout << "Continuous ";
   if (isDiscrete()) std::cout << "Discrete ";
   if (isHybrid()) std::cout << "Hybrid ";

gtsam/hybrid/HybridBayesNet.cpp
@@ -99,6 +99,7 @@ std::function<double(const Assignment<Key> &, double)> prunerFunc(
 }
 
 /* ************************************************************************* */
+// TODO(dellaert): what is this non-const method used for? Abolish it?
 void HybridBayesNet::updateDiscreteConditionals(
     const DecisionTreeFactor::shared_ptr &prunedDecisionTree) {
   KeyVector prunedTreeKeys = prunedDecisionTree->keys();
@@ -107,7 +108,6 @@ void HybridBayesNet::updateDiscreteConditionals(
   for (size_t i = 0; i < this->size(); i++) {
     HybridConditional::shared_ptr conditional = this->at(i);
     if (conditional->isDiscrete()) {
-      // std::cout << demangle(typeid(conditional).name()) << std::endl;
       auto discrete = conditional->asDiscrete();
       KeyVector frontals(discrete->frontals().begin(),
                          discrete->frontals().end());
@@ -217,13 +217,18 @@ HybridValues HybridBayesNet::optimize() const {
   DiscreteValues mpe = DiscreteFactorGraph(discrete_bn).optimize();
 
   // Given the MPE, compute the optimal continuous values.
-  GaussianBayesNet gbn = choose(mpe);
-  return HybridValues(gbn.optimize(), mpe);
+  return HybridValues(optimize(mpe), mpe);
 }
 
+/* ************************************************************************* */
+VectorValues HybridBayesNet::optimize(const DiscreteValues &assignment) const {
+  GaussianBayesNet gbn = choose(assignment);
+
+  // Check if there exists a nullptr in the GaussianBayesNet
+  // If yes, return an empty VectorValues
+  if (std::find(gbn.begin(), gbn.end(), nullptr) != gbn.end()) {
+    return VectorValues();
+  }
+  return gbn.optimize();
+}
 
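The new optimize(assignment) overload makes it safe to ask for the continuous solution under a fixed mode sequence even after pruning has removed branches. A minimal usage sketch (editor's illustration, not part of the commit; the Bayes net hbn and the chosen mode values are assumptions):

    #include <gtsam/hybrid/HybridBayesNet.h>
    #include <gtsam/inference/Symbol.h>

    using namespace gtsam;
    using symbol_shorthand::M;

    VectorValues optimizeForMode(const HybridBayesNet& hbn) {
      DiscreteValues assignment;
      assignment[M(0)] = 1;  // fix a hypothetical mode sequence
      assignment[M(1)] = 0;

      // choose(assignment) picks one GaussianConditional per mixture; if
      // pruning removed this branch, the overload returns an empty result.
      VectorValues delta = hbn.optimize(assignment);
      if (delta.size() == 0) {
        // Branch was pruned away; the caller must pick another assignment.
      }
      return delta;
    }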
gtsam/hybrid/HybridBayesTree.cpp
@@ -14,7 +14,7 @@
  * @brief   Hybrid Bayes Tree, the result of eliminating a
  * HybridJunctionTree
  * @date Mar 11, 2022
- * @author  Fan Jiang
+ * @author  Fan Jiang, Varun Agrawal
  */
 
 #include <gtsam/base/treeTraversal-inst.h>
@@ -73,6 +73,8 @@ struct HybridAssignmentData {
   GaussianBayesTree::sharedNode parentClique_;
   // The gaussian bayes tree that will be recursively created.
   GaussianBayesTree* gaussianbayesTree_;
+  // Flag indicating if all the nodes are valid. Used in optimize().
+  bool valid_;
 
   /**
    * @brief Construct a new Hybrid Assignment Data object.
@@ -83,10 +85,13 @@ struct HybridAssignmentData {
    */
   HybridAssignmentData(const DiscreteValues& assignment,
                        const GaussianBayesTree::sharedNode& parentClique,
-                       GaussianBayesTree* gbt)
+                       GaussianBayesTree* gbt, bool valid = true)
       : assignment_(assignment),
         parentClique_(parentClique),
-        gaussianbayesTree_(gbt) {}
+        gaussianbayesTree_(gbt),
+        valid_(valid) {}
+
+  bool isValid() const { return valid_; }
 
   /**
    * @brief A function used during tree traversal that operates on each node
@@ -101,6 +106,7 @@ struct HybridAssignmentData {
       HybridAssignmentData& parentData) {
     // Extract the gaussian conditional from the Hybrid clique
     HybridConditional::shared_ptr hybrid_conditional = node->conditional();
+
     GaussianConditional::shared_ptr conditional;
     if (hybrid_conditional->isHybrid()) {
       conditional = (*hybrid_conditional->asMixture())(parentData.assignment_);
@@ -111,15 +117,21 @@ struct HybridAssignmentData {
       conditional = boost::make_shared<GaussianConditional>();
     }
 
-    // Create the GaussianClique for the current node
-    auto clique = boost::make_shared<GaussianBayesTree::Node>(conditional);
-    // Add the current clique to the GaussianBayesTree.
-    parentData.gaussianbayesTree_->addClique(clique, parentData.parentClique_);
+    GaussianBayesTree::sharedNode clique;
+    if (conditional) {
+      // Create the GaussianClique for the current node
+      clique = boost::make_shared<GaussianBayesTree::Node>(conditional);
+      // Add the current clique to the GaussianBayesTree.
+      parentData.gaussianbayesTree_->addClique(clique,
+                                               parentData.parentClique_);
+    } else {
+      parentData.valid_ = false;
+    }
 
     // Create new HybridAssignmentData where the current node is the parent
     // This will be passed down to the children nodes
     HybridAssignmentData data(parentData.assignment_, clique,
-                              parentData.gaussianbayesTree_);
+                              parentData.gaussianbayesTree_, parentData.valid_);
    return data;
   }
 };
@@ -138,6 +150,9 @@ VectorValues HybridBayesTree::optimize(const DiscreteValues& assignment) const {
         visitorPost);
   }
 
+  if (!rootData.isValid()) {
+    return VectorValues();
+  }
   VectorValues result = gbt.optimize();
 
   // Return the optimized bayes net result.

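The traversal above threads HybridAssignmentData from parent to child and lets a missing (nullptr) conditional mark the whole result invalid via valid_. The idea, in stripped-down form (editor's sketch with a hypothetical Node type standing in for the GTSAM clique, not GTSAM code):

    #include <memory>
    #include <vector>

    struct Node {
      bool ok;  // stands in for "a conditional exists for this assignment"
      std::vector<std::shared_ptr<Node>> children;
    };

    struct AssignmentData {
      bool valid = true;
    };

    // Pre-order: each node receives its parent's data; one bad node
    // invalidates the aggregate result, mirroring valid_ = false above.
    AssignmentData visit(const Node& node, AssignmentData parentData) {
      if (!node.ok) parentData.valid = false;
      for (const auto& child : node.children) {
        parentData.valid = parentData.valid && visit(*child, parentData).valid;
      }
      return parentData;
    }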
gtsam/hybrid/HybridBayesTree.h
@@ -50,9 +50,12 @@ class GTSAM_EXPORT HybridBayesTreeClique
   typedef boost::shared_ptr<This> shared_ptr;
   typedef boost::weak_ptr<This> weak_ptr;
   HybridBayesTreeClique() {}
-  virtual ~HybridBayesTreeClique() {}
   HybridBayesTreeClique(const boost::shared_ptr<HybridConditional>& conditional)
       : Base(conditional) {}
+  ///< Copy constructor
+  HybridBayesTreeClique(const HybridBayesTreeClique& clique) : Base(clique) {}
+
+  virtual ~HybridBayesTreeClique() {}
 };
 
 /* ************************************************************************* */

gtsam/hybrid/HybridEliminationTree.h
@@ -24,7 +24,7 @@
 namespace gtsam {
 
 /**
- * Elimination Tree type for Hybrid
+ * Elimination Tree type for Hybrid Factor Graphs.
  *
  * @ingroup hybrid
 */

gtsam/hybrid/HybridGaussianFactorGraph.cpp
@@ -51,6 +51,8 @@
 #include <utility>
 #include <vector>
 
+// #define HYBRID_TIMING
+
 namespace gtsam {
 
 template class EliminateableFactorGraph<HybridGaussianFactorGraph>;
@@ -90,7 +92,6 @@ GaussianMixtureFactor::Sum sumFrontals(
       if (auto cgmf = boost::dynamic_pointer_cast<GaussianMixtureFactor>(f)) {
         sum = cgmf->add(sum);
       }
-
       if (auto gm = boost::dynamic_pointer_cast<HybridConditional>(f)) {
         sum = gm->asMixture()->add(sum);
       }
@@ -187,7 +188,7 @@ hybridElimination(const HybridGaussianFactorGraph &factors,
   DiscreteKeys discreteSeparator(discreteSeparatorSet.begin(),
                                  discreteSeparatorSet.end());
 
-  // sum out frontals, this is the factor on the separator
+  // sum out frontals, this is the factor 𝜏 on the separator
   GaussianMixtureFactor::Sum sum = sumFrontals(factors);
 
   // If a tree leaf contains nullptr,
@@ -214,24 +215,35 @@ hybridElimination(const HybridGaussianFactorGraph &factors,
     if (graph.empty()) {
       return {nullptr, nullptr};
     }
 
+#ifdef HYBRID_TIMING
+    gttic_(hybrid_eliminate);
+#endif
+
     std::pair<boost::shared_ptr<GaussianConditional>,
               boost::shared_ptr<GaussianFactor>>
         result = EliminatePreferCholesky(graph, frontalKeys);
 
-    if (keysOfEliminated.empty()) {
-      // Initialize the keysOfEliminated to be the keys of the
-      // eliminated GaussianConditional
-      keysOfEliminated = result.first->keys();
-    }
-    if (keysOfSeparator.empty()) {
-      keysOfSeparator = result.second->keys();
-    }
+    // Initialize the keysOfEliminated to be the keys of the
+    // eliminated GaussianConditional
+    keysOfEliminated = result.first->keys();
+    keysOfSeparator = result.second->keys();
+
+#ifdef HYBRID_TIMING
+    gttoc_(hybrid_eliminate);
+#endif
 
     return result;
   };
 
   // Perform elimination!
   DecisionTree<Key, EliminationPair> eliminationResults(sum, eliminate);
 
+#ifdef HYBRID_TIMING
+  tictoc_print_();
+  tictoc_reset_();
+#endif
+
   // Separate out decision tree into conditionals and remaining factors.
   auto pair = unzip(eliminationResults);
 
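The HYBRID_TIMING guard enables GTSAM's scoped timing macros from gtsam/base/timing.h, the same gttic_/gttoc_/tictoc_print_/tictoc_reset_ calls used in the hunk above. A minimal sketch of the idiom (editor's illustration):

    #include <gtsam/base/timing.h>

    void timedWork() {
      gttic_(expensive_step);   // start a named timer
      // ... work to be profiled ...
      gttoc_(expensive_step);   // stop the timer
      tictoc_print_();          // dump accumulated timings to stdout
      tictoc_reset_();          // clear timers for the next run
    }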
@@ -245,11 +257,16 @@ hybridElimination(const HybridGaussianFactorGraph &factors,
   // DiscreteFactor, with the error for each discrete choice.
   if (keysOfSeparator.empty()) {
     VectorValues empty_values;
-    auto factorError = [&](const GaussianFactor::shared_ptr &factor) {
-      if (!factor) return 0.0;  // TODO(fan): does this make sense?
-      return exp(-factor->error(empty_values));
+    auto factorProb = [&](const GaussianFactor::shared_ptr &factor) {
+      if (!factor) {
+        return 0.0;  // If nullptr, return 0.0 probability
+      } else {
+        double error =
+            0.5 * std::abs(factor->augmentedInformation().determinant());
+        return std::exp(-error);
+      }
     };
-    DecisionTree<Key, double> fdt(separatorFactors, factorError);
+    DecisionTree<Key, double> fdt(separatorFactors, factorProb);
 
     auto discreteFactor =
         boost::make_shared<DecisionTreeFactor>(discreteSeparator, fdt);
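When no continuous separator remains, each leaf's Gaussian factor is collapsed to a scalar: half the absolute determinant of its augmented information matrix is treated as an error, and exp(-error) as the leaf's unnormalized probability. A standalone sketch of that computation (editor's illustration; the matrix argument stands in for factor->augmentedInformation()):

    #include <Eigen/Dense>
    #include <cmath>

    double leafProbability(const Eigen::MatrixXd& augmentedInformation) {
      // Mirrors factorProb above; nullptr leaves map to 0.0 instead.
      const double error = 0.5 * std::abs(augmentedInformation.determinant());
      return std::exp(-error);
    }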
@@ -327,18 +344,20 @@ EliminateHybrid(const HybridGaussianFactorGraph &factors,
   // However this is also the case with iSAM2, so no pressure :)
 
   // PREPROCESS: Identify the nature of the current elimination
-  std::unordered_map<Key, DiscreteKey> mapFromKeyToDiscreteKey;
-  std::set<DiscreteKey> discreteSeparatorSet;
-  std::set<DiscreteKey> discreteFrontals;
+
+  // First, identify the separator keys, i.e. all keys that are not frontal.
   KeySet separatorKeys;
-  KeySet allContinuousKeys;
-  KeySet continuousFrontals;
-  KeySet continuousSeparator;
-
-  // This initializes separatorKeys and mapFromKeyToDiscreteKey
   for (auto &&factor : factors) {
     separatorKeys.insert(factor->begin(), factor->end());
   }
+  // remove frontals from separator
+  for (auto &k : frontalKeys) {
+    separatorKeys.erase(k);
+  }
+
+  // Build a map from keys to DiscreteKeys
+  std::unordered_map<Key, DiscreteKey> mapFromKeyToDiscreteKey;
+  for (auto &&factor : factors) {
     if (!factor->isContinuous()) {
       for (auto &k : factor->discreteKeys()) {
         mapFromKeyToDiscreteKey[k.first] = k;
       }
     }
   }
 
-  // remove frontals from separator
-  for (auto &k : frontalKeys) {
-    separatorKeys.erase(k);
-  }
-
-  // Fill in discrete frontals and continuous frontals for the end result
+  // Fill in discrete frontals and continuous frontals.
+  std::set<DiscreteKey> discreteFrontals;
+  KeySet continuousFrontals;
   for (auto &k : frontalKeys) {
     if (mapFromKeyToDiscreteKey.find(k) != mapFromKeyToDiscreteKey.end()) {
       discreteFrontals.insert(mapFromKeyToDiscreteKey.at(k));
     } else {
       continuousFrontals.insert(k);
-      allContinuousKeys.insert(k);
     }
   }
 
-  // Fill in discrete frontals and continuous frontals for the end result
+  // Fill in discrete separator keys and continuous separator keys.
+  std::set<DiscreteKey> discreteSeparatorSet;
+  KeySet continuousSeparator;
   for (auto &k : separatorKeys) {
     if (mapFromKeyToDiscreteKey.find(k) != mapFromKeyToDiscreteKey.end()) {
       discreteSeparatorSet.insert(mapFromKeyToDiscreteKey.at(k));
     } else {
       continuousSeparator.insert(k);
-      allContinuousKeys.insert(k);
     }
   }
 
+  // Check if we have any continuous keys:
+  const bool discrete_only =
+      continuousFrontals.empty() && continuousSeparator.empty();
+
   // NOTE: We should really defer the product here because of pruning
 
-  // Case 1: we are only dealing with continuous
-  if (mapFromKeyToDiscreteKey.empty() && !allContinuousKeys.empty()) {
-    return continuousElimination(factors, frontalKeys);
-  }
-
-  // Case 2: we are only dealing with discrete
-  if (allContinuousKeys.empty()) {
+  if (discrete_only) {
+    // Case 1: we are only dealing with discrete
     return discreteElimination(factors, frontalKeys);
+  } else {
+    // Case 2: we are only dealing with continuous
+    if (mapFromKeyToDiscreteKey.empty()) {
+      return continuousElimination(factors, frontalKeys);
+    } else {
+      // Case 3: We are now in the hybrid land!
+#ifdef HYBRID_TIMING
+      tictoc_reset_();
+#endif
+      return hybridElimination(factors, frontalKeys, continuousSeparator,
+                               discreteSeparatorSet);
+    }
   }
-
-  // Case 3: We are now in the hybrid land!
-  return hybridElimination(factors, frontalKeys, continuousSeparator,
-                           discreteSeparatorSet);
 }
 
 /* ************************************************************************ */

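EliminateHybrid now dispatches on the kinds of keys actually present rather than on the old allContinuousKeys set. The decision logic reduces to the following sketch (editor's illustration with a hypothetical helper, not library code):

    enum class EliminationKind { Discrete, Continuous, Hybrid };

    // anyContinuous: some frontal or separator key is continuous;
    // anyDiscrete: mapFromKeyToDiscreteKey is non-empty.
    EliminationKind classify(bool anyContinuous, bool anyDiscrete) {
      if (!anyContinuous) return EliminationKind::Discrete;  // Case 1
      if (!anyDiscrete) return EliminationKind::Continuous;  // Case 2
      return EliminationKind::Hybrid;                        // Case 3
    }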
gtsam/hybrid/HybridGaussianFactorGraph.h
@@ -25,6 +25,7 @@
 #include <gtsam/inference/FactorGraph.h>
 #include <gtsam/inference/Ordering.h>
 #include <gtsam/linear/GaussianFactor.h>
+#include <gtsam/linear/VectorValues.h>
 
 namespace gtsam {
 

gtsam/hybrid/HybridJunctionTree.h
@@ -51,10 +51,11 @@ class HybridEliminationTree;
  */
 class GTSAM_EXPORT HybridJunctionTree
     : public JunctionTree<HybridBayesTree, HybridGaussianFactorGraph> {
  public:
   typedef JunctionTree<HybridBayesTree, HybridGaussianFactorGraph>
       Base;                                    ///< Base class
-  typedef HybridJunctionTree This;     ///< This class
+  typedef HybridJunctionTree This;             ///< This class
   typedef boost::shared_ptr<This> shared_ptr;  ///< Shared pointer to this class
 
   /**

gtsam/hybrid/tests/Switching.h
@@ -130,9 +130,11 @@ struct Switching {
    * @param K The total number of timesteps.
    * @param between_sigma The stddev between poses.
    * @param prior_sigma The stddev on priors (also used for measurements).
+   * @param measurements Vector of measurements for each timestep.
    */
   Switching(size_t K, double between_sigma = 1.0, double prior_sigma = 0.1,
-            std::vector<double> measurements = {})
+            std::vector<double> measurements = {},
+            std::string discrete_transition_prob = "1/2 3/2")
       : K(K) {
     // Create DiscreteKeys for binary K modes.
     for (size_t k = 0; k < K; k++) {
@@ -147,7 +149,7 @@ struct Switching {
     }
 
     // Create hybrid factor graph.
-    // Add a prior on X(1).
+    // Add a prior on X(0).
     auto prior = boost::make_shared<PriorFactor<double>>(
         X(0), measurements.at(0), noiseModel::Isotropic::Sigma(1, prior_sigma));
     nonlinearFactorGraph.push_nonlinear(prior);
@@ -172,7 +174,7 @@ struct Switching {
     }
 
     // Add "mode chain"
-    addModeChain(&nonlinearFactorGraph);
+    addModeChain(&nonlinearFactorGraph, discrete_transition_prob);
 
     // Create the linearization point.
     for (size_t k = 0; k < K; k++) {
@@ -201,13 +203,14 @@ struct Switching {
    *
    * @param fg The nonlinear factor graph to which the mode chain is added.
    */
-  void addModeChain(HybridNonlinearFactorGraph *fg) {
+  void addModeChain(HybridNonlinearFactorGraph *fg,
+                    std::string discrete_transition_prob = "1/2 3/2") {
     auto prior = boost::make_shared<DiscreteDistribution>(modes[0], "1/1");
     fg->push_discrete(prior);
     for (size_t k = 0; k < K - 2; k++) {
       auto parents = {modes[k]};
       auto conditional = boost::make_shared<DiscreteConditional>(
-          modes[k + 1], parents, "1/2 3/2");
+          modes[k + 1], parents, discrete_transition_prob);
       fg->push_discrete(conditional);
     }
   }
@@ -218,13 +221,14 @@ struct Switching {
    *
    * @param fg The gaussian factor graph to which the mode chain is added.
    */
-  void addModeChain(HybridGaussianFactorGraph *fg) {
+  void addModeChain(HybridGaussianFactorGraph *fg,
+                    std::string discrete_transition_prob = "1/2 3/2") {
     auto prior = boost::make_shared<DiscreteDistribution>(modes[0], "1/1");
     fg->push_discrete(prior);
     for (size_t k = 0; k < K - 2; k++) {
       auto parents = {modes[k]};
       auto conditional = boost::make_shared<DiscreteConditional>(
-          modes[k + 1], parents, "1/2 3/2");
+          modes[k + 1], parents, discrete_transition_prob);
       fg->push_discrete(conditional);
     }
   }

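The new discrete_transition_prob parameter lets a test replace the default "1/2 3/2" mode-transition table. Usage as in the tests added in this PR (assumes the Switching fixture above):

    // A 1D switching problem with uniform mode priors instead of the
    // default "1/2 3/2" transition table.
    std::vector<double> measurements = {0, 1, 2, 2};
    Switching switching(4, 1.0, 0.1, measurements, "1/1 1/1");
    auto graph = switching.linearizedFactorGraph;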
gtsam/hybrid/tests/testHybridBayesNet.cpp
@@ -203,25 +203,6 @@ TEST(HybridBayesNet, Optimize) {
   EXPECT(assert_equal(expectedValues, delta.continuous(), 1e-5));
 }
 
-/* ****************************************************************************/
-// Test bayes net multifrontal optimize
-TEST(HybridBayesNet, OptimizeMultifrontal) {
-  Switching s(4);
-
-  Ordering hybridOrdering = s.linearizedFactorGraph.getHybridOrdering();
-  HybridBayesTree::shared_ptr hybridBayesTree =
-      s.linearizedFactorGraph.eliminateMultifrontal(hybridOrdering);
-  HybridValues delta = hybridBayesTree->optimize();
-
-  VectorValues expectedValues;
-  expectedValues.insert(X(0), -0.999904 * Vector1::Ones());
-  expectedValues.insert(X(1), -0.99029 * Vector1::Ones());
-  expectedValues.insert(X(2), -1.00971 * Vector1::Ones());
-  expectedValues.insert(X(3), -1.0001 * Vector1::Ones());
-
-  EXPECT(assert_equal(expectedValues, delta.continuous(), 1e-5));
-}
-
 /* ****************************************************************************/
 // Test bayes net error
 TEST(HybridBayesNet, Error) {

gtsam/hybrid/tests/testHybridBayesTree.cpp
@@ -32,6 +32,25 @@ using noiseModel::Isotropic;
 using symbol_shorthand::M;
 using symbol_shorthand::X;
 
+/* ****************************************************************************/
+// Test multifrontal optimize
+TEST(HybridBayesTree, OptimizeMultifrontal) {
+  Switching s(4);
+
+  Ordering hybridOrdering = s.linearizedFactorGraph.getHybridOrdering();
+  HybridBayesTree::shared_ptr hybridBayesTree =
+      s.linearizedFactorGraph.eliminateMultifrontal(hybridOrdering);
+  HybridValues delta = hybridBayesTree->optimize();
+
+  VectorValues expectedValues;
+  expectedValues.insert(X(0), -0.999904 * Vector1::Ones());
+  expectedValues.insert(X(1), -0.99029 * Vector1::Ones());
+  expectedValues.insert(X(2), -1.00971 * Vector1::Ones());
+  expectedValues.insert(X(3), -1.0001 * Vector1::Ones());
+
+  EXPECT(assert_equal(expectedValues, delta.continuous(), 1e-5));
+}
+
 /* ****************************************************************************/
 // Test for optimizing a HybridBayesTree with a given assignment.
 TEST(HybridBayesTree, OptimizeAssignment) {
@@ -105,7 +124,7 @@ TEST(HybridBayesTree, Optimize) {
     graph1.push_back(s.linearizedFactorGraph.at(i));
   }
 
-  // Add the Gaussian factors, 1 prior on X(1),
+  // Add the Gaussian factors, 1 prior on X(0),
   // 3 measurements on X(2), X(3), X(4)
   graph1.push_back(s.linearizedFactorGraph.at(0));
   for (size_t i = 4; i <= 6; i++) {
@@ -136,6 +155,12 @@ TEST(HybridBayesTree, Optimize) {
     dfg.push_back(
         boost::dynamic_pointer_cast<DecisionTreeFactor>(factor->inner()));
   }
+
+  // Add the probabilities for each branch
+  DiscreteKeys discrete_keys = {{M(0), 2}, {M(1), 2}, {M(2), 2}};
+  vector<double> probs = {0.012519475, 0.041280228, 0.075018647, 0.081663656,
+                          0.037152205, 0.12248971,  0.07349729,  0.08};
+  dfg.emplace_shared<DecisionTreeFactor>(discrete_keys, probs);
 
   DiscreteValues expectedMPE = dfg.optimize();
   VectorValues expectedValues = hybridBayesNet->optimize(expectedMPE);

gtsam/hybrid/tests/testHybridEstimation.cpp
@@ -15,6 +15,7 @@
  * @author  Varun Agrawal
  */
 
+#include <gtsam/discrete/DiscreteBayesNet.h>
 #include <gtsam/geometry/Pose2.h>
 #include <gtsam/hybrid/HybridBayesNet.h>
 #include <gtsam/hybrid/HybridNonlinearFactorGraph.h>
@@ -23,6 +24,7 @@
 #include <gtsam/hybrid/MixtureFactor.h>
 #include <gtsam/inference/Symbol.h>
 #include <gtsam/linear/GaussianBayesNet.h>
+#include <gtsam/linear/GaussianBayesTree.h>
 #include <gtsam/linear/GaussianFactorGraph.h>
 #include <gtsam/linear/JacobianFactor.h>
 #include <gtsam/linear/NoiseModel.h>
@@ -69,15 +71,40 @@ Ordering getOrdering(HybridGaussianFactorGraph& factors,
   return ordering;
 }
 
+TEST(HybridEstimation, Full) {
+  size_t K = 3;
+  std::vector<double> measurements = {0, 1, 2};
+  // Ground truth discrete seq
+  std::vector<size_t> discrete_seq = {1, 1, 0};
+  // Switching example of robot moving in 1D
+  // with given measurements and equal mode priors.
+  Switching switching(K, 1.0, 0.1, measurements, "1/1 1/1");
+  HybridGaussianFactorGraph graph = switching.linearizedFactorGraph;
+
+  Ordering hybridOrdering;
+  hybridOrdering += X(0);
+  hybridOrdering += X(1);
+  hybridOrdering += X(2);
+  hybridOrdering += M(0);
+  hybridOrdering += M(1);
+  HybridBayesNet::shared_ptr bayesNet =
+      graph.eliminateSequential(hybridOrdering);
+
+  EXPECT_LONGS_EQUAL(5, bayesNet->size());
+}
+
 /****************************************************************************/
 // Test approximate inference with an additional pruning step.
-TEST(HybridNonlinearISAM, Incremental) {
-  size_t K = 10;
-  std::vector<double> measurements = {0, 1, 2, 2, 2, 2, 3, 4, 5, 6, 6};
+TEST(HybridEstimation, Incremental) {
+  size_t K = 15;
+  std::vector<double> measurements = {0, 1, 2, 2, 2, 2,  3,  4,  5,  6, 6,
+                                      7, 8, 9, 9, 9, 10, 11, 11, 11, 11};
   // Ground truth discrete seq
-  std::vector<size_t> discrete_seq = {1, 1, 0, 0, 0, 1, 1, 1, 1, 0};
-  Switching switching(K, 1.0, 0.1, measurements);
-  // HybridNonlinearISAM smoother;
+  std::vector<size_t> discrete_seq = {1, 1, 0, 0, 0, 1, 1, 1, 1, 0,
+                                      1, 1, 1, 0, 0, 1, 1, 0, 0, 0};
+  // Switching example of robot moving in 1D with given measurements and equal
+  // mode priors.
+  Switching switching(K, 1.0, 0.1, measurements, "1/1 1/1");
   HybridSmoother smoother;
   HybridNonlinearFactorGraph graph;
   Values initial;
@@ -90,7 +117,6 @@ TEST(HybridNonlinearISAM, Incremental) {
   HybridGaussianFactorGraph bayesNet;
 
   for (size_t k = 1; k < K; k++) {
-    std::cout << ">>>>>>>>>>>>>>>>>>> k=" << k << std::endl;
     // Motion Model
     graph.push_back(switching.nonlinearFactorGraph.at(k));
     // Measurement
@@ -105,6 +131,7 @@ TEST(HybridNonlinearISAM, Incremental) {
     smoother.update(linearized, ordering, 3);
     graph.resize(0);
   }
+
   HybridValues delta = smoother.hybridBayesNet().optimize();
 
   Values result = initial.retract(delta.continuous());
@@ -122,6 +149,333 @@ TEST(HybridNonlinearISAM, Incremental) {
   EXPECT(assert_equal(expected_continuous, result));
 }
 
+/**
+ * @brief A function to get a specific 1D robot motion problem as a linearized
+ * factor graph. This is the problem P(X|Z, M), i.e. estimating the continuous
+ * positions given the measurements and discrete sequence.
+ *
+ * @param K The number of timesteps.
+ * @param measurements The vector of measurements for each timestep.
+ * @param discrete_seq The discrete sequence governing the motion of the robot.
+ * @param measurement_sigma Noise model sigma for measurements.
+ * @param between_sigma Noise model sigma for the between factor.
+ * @return GaussianFactorGraph::shared_ptr
+ */
+GaussianFactorGraph::shared_ptr specificModesFactorGraph(
+    size_t K, const std::vector<double>& measurements,
+    const std::vector<size_t>& discrete_seq, double measurement_sigma = 0.1,
+    double between_sigma = 1.0) {
+  NonlinearFactorGraph graph;
+  Values linearizationPoint;
+
+  // Add measurement factors
+  auto measurement_noise = noiseModel::Isotropic::Sigma(1, measurement_sigma);
+  for (size_t k = 0; k < K; k++) {
+    graph.emplace_shared<PriorFactor<double>>(X(k), measurements.at(k),
+                                              measurement_noise);
+    linearizationPoint.insert<double>(X(k), static_cast<double>(k + 1));
+  }
+
+  using MotionModel = BetweenFactor<double>;
+
+  // Add "motion models".
+  auto motion_noise_model = noiseModel::Isotropic::Sigma(1, between_sigma);
+  for (size_t k = 0; k < K - 1; k++) {
+    auto motion_model = boost::make_shared<MotionModel>(
+        X(k), X(k + 1), discrete_seq.at(k), motion_noise_model);
+    graph.push_back(motion_model);
+  }
+  GaussianFactorGraph::shared_ptr linear_graph =
+      graph.linearize(linearizationPoint);
+  return linear_graph;
+}
+
 | ||||
|  * @brief Get the discrete sequence from the integer `x`. | ||||
|  * | ||||
|  * @tparam K Template parameter so we can set the correct bitset size. | ||||
|  * @param x The integer to convert to a discrete binary sequence. | ||||
|  * @return std::vector<size_t> | ||||
|  */ | ||||
| template <size_t K> | ||||
| std::vector<size_t> getDiscreteSequence(size_t x) { | ||||
|   std::bitset<K - 1> seq = x; | ||||
|   std::vector<size_t> discrete_seq(K - 1); | ||||
|   for (size_t i = 0; i < K - 1; i++) { | ||||
|     // Save to discrete vector in reverse order
 | ||||
|     discrete_seq[K - 2 - i] = seq[i]; | ||||
|   } | ||||
|   return discrete_seq; | ||||
| } | ||||
| 
 | ||||
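A worked example of this helper (editor's illustration): with K = 4 there are K - 1 = 3 modes, and x = 5 is binary 101, which is written out most-significant bit first:

    std::vector<size_t> seq = getDiscreteSequence<4>(5);
    // seq == {1, 0, 1}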
+/**
+ * @brief Helper method to get the tree of unnormalized probabilities
+ * as per the new elimination scheme.
+ *
+ * @param graph The HybridGaussianFactorGraph to eliminate.
+ * @return AlgebraicDecisionTree<Key>
+ */
+AlgebraicDecisionTree<Key> probPrimeTree(
+    const HybridGaussianFactorGraph& graph) {
+  HybridBayesNet::shared_ptr bayesNet;
+  HybridGaussianFactorGraph::shared_ptr remainingGraph;
+  Ordering continuous(graph.continuousKeys());
+  std::tie(bayesNet, remainingGraph) =
+      graph.eliminatePartialSequential(continuous);
+
+  auto last_conditional = bayesNet->at(bayesNet->size() - 1);
+  DiscreteKeys discrete_keys = last_conditional->discreteKeys();
+
+  const std::vector<DiscreteValues> assignments =
+      DiscreteValues::CartesianProduct(discrete_keys);
+
+  std::reverse(discrete_keys.begin(), discrete_keys.end());
+
+  vector<VectorValues::shared_ptr> vector_values;
+  for (const DiscreteValues& assignment : assignments) {
+    VectorValues values = bayesNet->optimize(assignment);
+    vector_values.push_back(boost::make_shared<VectorValues>(values));
+  }
+  DecisionTree<Key, VectorValues::shared_ptr> delta_tree(discrete_keys,
+                                                         vector_values);
+
+  std::vector<double> probPrimes;
+  for (const DiscreteValues& assignment : assignments) {
+    double error = 0.0;
+    VectorValues delta = *delta_tree(assignment);
+    for (auto factor : graph) {
+      if (factor->isHybrid()) {
+        auto f = boost::static_pointer_cast<GaussianMixtureFactor>(factor);
+        error += f->error(delta, assignment);
+
+      } else if (factor->isContinuous()) {
+        auto f = boost::static_pointer_cast<HybridGaussianFactor>(factor);
+        error += f->inner()->error(delta);
+      }
+    }
+    probPrimes.push_back(exp(-error));
+  }
+  AlgebraicDecisionTree<Key> probPrimeTree(discrete_keys, probPrimes);
+  return probPrimeTree;
+}
+
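In symbols, probPrimeTree tabulates, for every discrete assignment m, the unnormalized probability of the graph at the mode-specific optimum (editor's summary of the loop above):

    P'(m) = \exp\bigl(-E(\delta_m^*;\, m)\bigr), \qquad
    E(\delta; m) = \sum_i E_i(\delta; m), \qquad
    \delta_m^* = \arg\min_\delta E(\delta; m)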
+/****************************************************************************/
+/**
+ * Test for correctness of different branches of the P'(Continuous | Discrete).
+ * The values should match those of P'(Continuous) for each discrete mode.
+ */
+TEST(HybridEstimation, Probability) {
+  constexpr size_t K = 4;
+  std::vector<double> measurements = {0, 1, 2, 2};
+  double between_sigma = 1.0, measurement_sigma = 0.1;
+
+  // Switching example of robot moving in 1D with
+  // given measurements and equal mode priors.
+  Switching switching(K, between_sigma, measurement_sigma, measurements,
+                      "1/1 1/1");
+  auto graph = switching.linearizedFactorGraph;
+  Ordering ordering = getOrdering(graph, HybridGaussianFactorGraph());
+
+  HybridBayesNet::shared_ptr bayesNet = graph.eliminateSequential(ordering);
+  auto discreteConditional = bayesNet->atDiscrete(bayesNet->size() - 3);
+
+  HybridValues hybrid_values = bayesNet->optimize();
+
+  // This is the correct sequence as designed
+  DiscreteValues discrete_seq;
+  discrete_seq[M(0)] = 1;
+  discrete_seq[M(1)] = 1;
+  discrete_seq[M(2)] = 0;
+
+  EXPECT(assert_equal(discrete_seq, hybrid_values.discrete()));
+}
+
+/****************************************************************************/
+/**
+ * Test for correctness of different branches of the P'(Continuous | Discrete)
+ * in the multi-frontal setting. The values should match those of P'(Continuous)
+ * for each discrete mode.
+ */
+TEST(HybridEstimation, ProbabilityMultifrontal) {
+  constexpr size_t K = 4;
+  std::vector<double> measurements = {0, 1, 2, 2};
+
+  double between_sigma = 1.0, measurement_sigma = 0.1;
+
+  // Switching example of robot moving in 1D with given measurements and equal
+  // mode priors.
+  Switching switching(K, between_sigma, measurement_sigma, measurements,
+                      "1/1 1/1");
+  auto graph = switching.linearizedFactorGraph;
+  Ordering ordering = getOrdering(graph, HybridGaussianFactorGraph());
+
+  // Get the tree of unnormalized probabilities for each mode sequence.
+  AlgebraicDecisionTree<Key> expected_probPrimeTree = probPrimeTree(graph);
+
+  // Eliminate continuous
+  Ordering continuous_ordering(graph.continuousKeys());
+  HybridBayesTree::shared_ptr bayesTree;
+  HybridGaussianFactorGraph::shared_ptr discreteGraph;
+  std::tie(bayesTree, discreteGraph) =
+      graph.eliminatePartialMultifrontal(continuous_ordering);
+
+  // Get the last continuous conditional which will have all the discrete keys
+  Key last_continuous_key =
+      continuous_ordering.at(continuous_ordering.size() - 1);
+  auto last_conditional = (*bayesTree)[last_continuous_key]->conditional();
+  DiscreteKeys discrete_keys = last_conditional->discreteKeys();
+
+  Ordering discrete(graph.discreteKeys());
+  auto discreteBayesTree =
+      discreteGraph->BaseEliminateable::eliminateMultifrontal(discrete);
+
+  EXPECT_LONGS_EQUAL(1, discreteBayesTree->size());
+  // DiscreteBayesTree should have only 1 clique
+  auto discrete_clique = (*discreteBayesTree)[discrete.at(0)];
+
+  std::set<HybridBayesTreeClique::shared_ptr> clique_set;
+  for (auto node : bayesTree->nodes()) {
+    clique_set.insert(node.second);
+  }
+
+  // Set the root of the bayes tree as the discrete clique
+  for (auto clique : clique_set) {
+    if (clique->conditional()->parents() ==
+        discrete_clique->conditional()->frontals()) {
+      discreteBayesTree->addClique(clique, discrete_clique);
+
+    } else {
+      // Remove the clique from the children of the parents since it will get
+      // added again in addClique.
+      auto clique_it = std::find(clique->parent()->children.begin(),
+                                 clique->parent()->children.end(), clique);
+      clique->parent()->children.erase(clique_it);
+      discreteBayesTree->addClique(clique, clique->parent());
+    }
+  }
+
+  HybridValues hybrid_values = discreteBayesTree->optimize();
+
+  // This is the correct sequence as designed
+  DiscreteValues discrete_seq;
+  discrete_seq[M(0)] = 1;
+  discrete_seq[M(1)] = 1;
+  discrete_seq[M(2)] = 0;
+
+  EXPECT(assert_equal(discrete_seq, hybrid_values.discrete()));
+}
+
+/*********************************************************************************
+ * Create a hybrid nonlinear factor graph f(x0, x1, m0; z0, z1).
+ ********************************************************************************/
+static HybridNonlinearFactorGraph createHybridNonlinearFactorGraph() {
+  HybridNonlinearFactorGraph nfg;
+
+  constexpr double sigma = 0.5;  // measurement noise
+  const auto noise_model = noiseModel::Isotropic::Sigma(1, sigma);
+
+  // Add "measurement" factors:
+  nfg.emplace_nonlinear<PriorFactor<double>>(X(0), 0.0, noise_model);
+  nfg.emplace_nonlinear<PriorFactor<double>>(X(1), 1.0, noise_model);
+
+  // Add mixture factor:
+  DiscreteKey m(M(0), 2);
+  const auto zero_motion =
+      boost::make_shared<BetweenFactor<double>>(X(0), X(1), 0, noise_model);
+  const auto one_motion =
+      boost::make_shared<BetweenFactor<double>>(X(0), X(1), 1, noise_model);
+  nfg.emplace_hybrid<MixtureFactor>(
+      KeyVector{X(0), X(1)}, DiscreteKeys{m},
+      std::vector<NonlinearFactor::shared_ptr>{zero_motion, one_motion});
+
+  return nfg;
+}
+
+/*********************************************************************************
+ * Create a hybrid Gaussian factor graph by linearizing f(x0, x1, m0; z0, z1).
+ ********************************************************************************/
+static HybridGaussianFactorGraph::shared_ptr createHybridGaussianFactorGraph() {
+  HybridNonlinearFactorGraph nfg = createHybridNonlinearFactorGraph();
+
+  Values initial;
+  double z0 = 0.0, z1 = 1.0;
+  initial.insert<double>(X(0), z0);
+  initial.insert<double>(X(1), z1);
+  return nfg.linearize(initial);
+}
+
+/*********************************************************************************
+ * Do hybrid elimination and do regression test on discrete conditional.
+ ********************************************************************************/
+TEST(HybridEstimation, eliminateSequentialRegression) {
+  // 1. Create the factor graph from the nonlinear factor graph.
+  HybridGaussianFactorGraph::shared_ptr fg = createHybridGaussianFactorGraph();
+
+  // 2. Eliminate into BN
+  const Ordering ordering = fg->getHybridOrdering();
+  HybridBayesNet::shared_ptr bn = fg->eliminateSequential(ordering);
+  // GTSAM_PRINT(*bn);
+
+  // TODO(dellaert): dc should be discrete conditional on m0, but it is an
+  // unnormalized factor? DiscreteKey m(M(0), 2); DiscreteConditional expected(m
+  // % "0.51341712/1"); auto dc = bn->back()->asDiscreteConditional();
+  // EXPECT(assert_equal(expected, *dc, 1e-9));
+}
+
+/*********************************************************************************
+ * Test for correctness via sampling.
+ *
+ * Compute the conditional P(x0, m0, x1| z0, z1)
+ * with measurements z0, z1. To do so, we:
+ * 1. Start with the corresponding Factor Graph `FG`.
+ * 2. Eliminate the factor graph into a Bayes Net `BN`.
+ * 3. Sample from the Bayes Net.
+ * 4. Check that the ratio `BN(x)/FG(x) = constant` for all samples `x`.
+ ********************************************************************************/
+TEST(HybridEstimation, CorrectnessViaSampling) {
+  // 1. Create the factor graph from the nonlinear factor graph.
+  HybridGaussianFactorGraph::shared_ptr fg = createHybridGaussianFactorGraph();
+
+  // 2. Eliminate into BN
+  const Ordering ordering = fg->getHybridOrdering();
+  HybridBayesNet::shared_ptr bn = fg->eliminateSequential(ordering);
+
+  // Set up sampling
+  std::mt19937_64 rng(11);
+
+  // 3. Do sampling
+  int num_samples = 10;
+
+  // Functor to compute the ratio between the
+  // Bayes net and the factor graph.
+  auto compute_ratio =
+      [](const HybridBayesNet::shared_ptr& bayesNet,
+         const HybridGaussianFactorGraph::shared_ptr& factorGraph,
+         const HybridValues& sample) -> double {
+    const DiscreteValues assignment = sample.discrete();
+    // Compute in log form for numerical stability
+    double log_ratio = bayesNet->error(sample.continuous(), assignment) -
+                       factorGraph->error(sample.continuous(), assignment);
+    double ratio = exp(-log_ratio);
+    return ratio;
+  };
+
+  // The error evaluated by the factor graph and the Bayes net should differ by
+  // the normalizing term computed via the Bayes net determinant.
+  const HybridValues sample = bn->sample(&rng);
+  double ratio = compute_ratio(bn, fg, sample);
+  // regression
+  EXPECT_DOUBLES_EQUAL(1.0, ratio, 1e-9);
+
+  // 4. Check that all samples == constant
+  for (size_t i = 0; i < num_samples; i++) {
+    // Sample from the bayes net
+    const HybridValues sample = bn->sample(&rng);
+
+    EXPECT_DOUBLES_EQUAL(ratio, compute_ratio(bn, fg, sample), 1e-9);
+  }
+}
 
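Why the ratio is constant (editor's summary): elimination produces a Bayes net proportional to the factor graph, f_FG(x) = Z · p_BN(x), so for any sample x

    \frac{p_{BN}(x)}{f_{FG}(x)}
      = \exp\bigl(-(E_{BN}(x) - E_{FG}(x))\bigr) = \frac{1}{Z}

independent of x. The regression check above additionally pins the constant to 1.0 for this particular graph.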
 /* ************************************************************************* */
 int main() {
   TestResult tr;

gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp
@@ -182,7 +182,9 @@ TEST(HybridGaussianFactorGraph, eliminateFullMultifrontalSimple) {
        boost::make_shared<JacobianFactor>(X(1), I_3x3, Vector3::Ones())}));
 
   hfg.add(DecisionTreeFactor(m1, {2, 8}));
-  hfg.add(DecisionTreeFactor({{M(1), 2}, {M(2), 2}}, "1 2 3 4"));
+  // TODO(Varun) Adding extra discrete variable not connected to continuous
+  // variable throws segfault
+  //  hfg.add(DecisionTreeFactor({{M(1), 2}, {M(2), 2}}, "1 2 3 4"));
 
   HybridBayesTree::shared_ptr result =
       hfg.eliminateMultifrontal(hfg.getHybridOrdering());
@@ -569,6 +571,31 @@ TEST(HybridGaussianFactorGraph, ErrorAndProbPrime) {
 
   HybridGaussianFactorGraph graph = s.linearizedFactorGraph;
 
+  Ordering hybridOrdering = graph.getHybridOrdering();
+  HybridBayesNet::shared_ptr hybridBayesNet =
+      graph.eliminateSequential(hybridOrdering);
+
+  HybridValues delta = hybridBayesNet->optimize();
+  double error = graph.error(delta.continuous(), delta.discrete());
+
+  double expected_error = 0.490243199;
+  // regression
+  EXPECT(assert_equal(expected_error, error, 1e-9));
+
+  double probs = exp(-error);
+  double expected_probs = graph.probPrime(delta.continuous(), delta.discrete());
+
+  // regression
+  EXPECT(assert_equal(expected_probs, probs, 1e-7));
+}
+
+/* ****************************************************************************/
+// Test hybrid gaussian factor graph error and unnormalized probabilities
+TEST(HybridGaussianFactorGraph, ErrorAndProbPrimeTree) {
+  Switching s(3);
+
+  HybridGaussianFactorGraph graph = s.linearizedFactorGraph;
+
   Ordering hybridOrdering = graph.getHybridOrdering();
   HybridBayesNet::shared_ptr hybridBayesNet =
       graph.eliminateSequential(hybridOrdering);

gtsam/hybrid/tests/testHybridGaussianISAM.cpp
@@ -164,7 +164,8 @@ TEST(HybridGaussianElimination, IncrementalInference) {
   discrete_ordering += M(0);
   discrete_ordering += M(1);
   HybridBayesTree::shared_ptr discreteBayesTree =
-      expectedRemainingGraph->eliminateMultifrontal(discrete_ordering);
+      expectedRemainingGraph->BaseEliminateable::eliminateMultifrontal(
+          discrete_ordering);
 
   DiscreteValues m00;
   m00[M(0)] = 0, m00[M(1)] = 0;
@@ -174,12 +175,12 @@ TEST(HybridGaussianElimination, IncrementalInference) {
 
   auto discreteConditional = isam[M(1)]->conditional()->asDiscrete();
 
-  // Test if the probability values are as expected with regression tests.
+  // Test the probability values with regression tests.
   DiscreteValues assignment;
-  EXPECT(assert_equal(m00_prob, 0.0619233, 1e-5));
+  EXPECT(assert_equal(0.0619233, m00_prob, 1e-5));
   assignment[M(0)] = 0;
   assignment[M(1)] = 0;
-  EXPECT(assert_equal(m00_prob, (*discreteConditional)(assignment), 1e-5));
+  EXPECT(assert_equal(0.0619233, (*discreteConditional)(assignment), 1e-5));
   assignment[M(0)] = 1;
   assignment[M(1)] = 0;
   EXPECT(assert_equal(0.183743, (*discreteConditional)(assignment), 1e-5));
@@ -192,11 +193,15 @@ TEST(HybridGaussianElimination, IncrementalInference) {
 
   // Check if the clique conditional generated from incremental elimination
   // matches that of batch elimination.
-  auto expectedChordal = expectedRemainingGraph->eliminateMultifrontal();
-  auto expectedConditional = dynamic_pointer_cast<DecisionTreeFactor>(
-      (*expectedChordal)[M(1)]->conditional()->inner());
+  auto expectedChordal =
+      expectedRemainingGraph->BaseEliminateable::eliminateMultifrontal();
   auto actualConditional = dynamic_pointer_cast<DecisionTreeFactor>(
       isam[M(1)]->conditional()->inner());
+  // Account for the probability terms from evaluating continuous FGs
+  DiscreteKeys discrete_keys = {{M(0), 2}, {M(1), 2}};
+  vector<double> probs = {0.061923317, 0.20415914, 0.18374323, 0.2};
+  auto expectedConditional =
+      boost::make_shared<DecisionTreeFactor>(discrete_keys, probs);
   EXPECT(assert_equal(*actualConditional, *expectedConditional, 1e-6));
 }
 
gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp
@@ -385,11 +385,11 @@ TEST(HybridFactorGraph, Partial_Elimination) {
 
   auto linearizedFactorGraph = self.linearizedFactorGraph;
 
-  // Create ordering.
+  // Create ordering of only continuous variables.
   Ordering ordering;
   for (size_t k = 0; k < self.K; k++) ordering += X(k);
 
-  // Eliminate partially.
+  // Eliminate partially i.e. only continuous part.
   HybridBayesNet::shared_ptr hybridBayesNet;
   HybridGaussianFactorGraph::shared_ptr remainingFactorGraph;
   std::tie(hybridBayesNet, remainingFactorGraph) =
@@ -439,6 +439,7 @@ TEST(HybridFactorGraph, Full_Elimination) {
       auto df = dynamic_pointer_cast<HybridDiscreteFactor>(factor);
       discrete_fg.push_back(df->inner());
     }
+
     ordering.clear();
     for (size_t k = 0; k < self.K - 1; k++) ordering += M(k);
     discreteBayesNet =
@@ -586,7 +587,7 @@ factor 6: Discrete [m1 m0]
   // Expected output for hybridBayesNet.
   string expected_hybridBayesNet = R"(
 size: 3
-factor 0: Hybrid  P( x0 | x1 m0)
+conditional 0: Hybrid  P( x0 | x1 m0)
  Discrete Keys = (m0, 2), 
  Choice(m0) 
  0 Leaf  p(x0 | x1)
@@ -601,7 +602,7 @@ factor 0: Hybrid  P( x0 | x1 m0)
   d = [ -9.95037 ]
   No noise model
 
-factor 1: Hybrid  P( x1 | x2 m0 m1)
+conditional 1: Hybrid  P( x1 | x2 m0 m1)
  Discrete Keys = (m0, 2), (m1, 2), 
  Choice(m1) 
  0 Choice(m0) 
@@ -630,7 +631,7 @@ factor 1: Hybrid  P( x1 | x2 m0 m1)
   d = [ -10 ]
   No noise model
 
-factor 2: Hybrid  P( x2 | m0 m1)
+conditional 2: Hybrid  P( x2 | m0 m1)
  Discrete Keys = (m0, 2), (m1, 2), 
  Choice(m1) 
  0 Choice(m0) 

gtsam/hybrid/tests/testHybridNonlinearISAM.cpp
@@ -152,7 +152,8 @@ TEST(HybridNonlinearISAM, IncrementalInference) {
   HybridBayesTree::shared_ptr expectedHybridBayesTree;
   HybridGaussianFactorGraph::shared_ptr expectedRemainingGraph;
   std::tie(expectedHybridBayesTree, expectedRemainingGraph) =
-      switching.linearizedFactorGraph.eliminatePartialMultifrontal(ordering);
+      switching.linearizedFactorGraph
+          .BaseEliminateable::eliminatePartialMultifrontal(ordering);
 
   // The densities on X(1) should be the same
   auto x0_conditional = dynamic_pointer_cast<GaussianMixture>(
@@ -181,7 +182,8 @@ TEST(HybridNonlinearISAM, IncrementalInference) {
   discrete_ordering += M(0);
   discrete_ordering += M(1);
   HybridBayesTree::shared_ptr discreteBayesTree =
-      expectedRemainingGraph->eliminateMultifrontal(discrete_ordering);
+      expectedRemainingGraph->BaseEliminateable::eliminateMultifrontal(
+          discrete_ordering);
 
   DiscreteValues m00;
   m00[M(0)] = 0, m00[M(1)] = 0;
@@ -192,12 +194,12 @@ TEST(HybridNonlinearISAM, IncrementalInference) {
   auto discreteConditional =
       bayesTree[M(1)]->conditional()->asDiscrete();
 
-  // Test if the probability values are as expected with regression tests.
+  // Test the probability values with regression tests.
   DiscreteValues assignment;
-  EXPECT(assert_equal(m00_prob, 0.0619233, 1e-5));
+  EXPECT(assert_equal(0.0619233, m00_prob, 1e-5));
   assignment[M(0)] = 0;
   assignment[M(1)] = 0;
-  EXPECT(assert_equal(m00_prob, (*discreteConditional)(assignment), 1e-5));
+  EXPECT(assert_equal(0.0619233, (*discreteConditional)(assignment), 1e-5));
   assignment[M(0)] = 1;
   assignment[M(1)] = 0;
   EXPECT(assert_equal(0.183743, (*discreteConditional)(assignment), 1e-5));
@@ -211,10 +213,13 @@ TEST(HybridNonlinearISAM, IncrementalInference) {
   // Check if the clique conditional generated from incremental elimination
   // matches that of batch elimination.
   auto expectedChordal = expectedRemainingGraph->eliminateMultifrontal();
-  auto expectedConditional = dynamic_pointer_cast<DecisionTreeFactor>(
-      (*expectedChordal)[M(1)]->conditional()->inner());
   auto actualConditional = dynamic_pointer_cast<DecisionTreeFactor>(
       bayesTree[M(1)]->conditional()->inner());
+  // Account for the probability terms from evaluating continuous FGs
+  DiscreteKeys discrete_keys = {{M(0), 2}, {M(1), 2}};
+  vector<double> probs = {0.061923317, 0.20415914, 0.18374323, 0.2};
+  auto expectedConditional =
+      boost::make_shared<DecisionTreeFactor>(discrete_keys, probs);
   EXPECT(assert_equal(*actualConditional, *expectedConditional, 1e-6));
 }
 
@@ -249,7 +254,8 @@ TEST(HybridNonlinearISAM, Approx_inference) {
   HybridBayesTree::shared_ptr unprunedHybridBayesTree;
   HybridGaussianFactorGraph::shared_ptr unprunedRemainingGraph;
   std::tie(unprunedHybridBayesTree, unprunedRemainingGraph) =
-      switching.linearizedFactorGraph.eliminatePartialMultifrontal(ordering);
+      switching.linearizedFactorGraph
+          .BaseEliminateable::eliminatePartialMultifrontal(ordering);
 
   size_t maxNrLeaves = 5;
   incrementalHybrid.update(graph1, initial);

gtsam/inference/BayesNet-inst.h
@@ -31,7 +31,14 @@ namespace gtsam {
 template <class CONDITIONAL>
 void BayesNet<CONDITIONAL>::print(const std::string& s,
                                   const KeyFormatter& formatter) const {
-  Base::print(s, formatter);
+  std::cout << (s.empty() ? "" : s + " ") << std::endl;
+  std::cout << "size: " << this->size() << std::endl;
+  for (size_t i = 0; i < this->size(); i++) {
+    const auto& conditional = this->at(i);
+    std::stringstream ss;
+    ss << "conditional " << i << ": ";
+    if (conditional) conditional->print(ss.str(), formatter);
+  }
 }
 
 /* ************************************************************************* */

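With this change, printing a Bayes net with three conditionals produces output in the following shape, matching the expected strings updated in testHybridNonlinearFactorGraph.cpp above (abbreviated to the header lines):

    size: 3
    conditional 0: Hybrid  P( x0 | x1 m0)
    conditional 1: Hybrid  P( x1 | x2 m0 m1)
    conditional 2: Hybrid  P( x2 | m0 m1)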
gtsam/inference/Ordering.h
@@ -73,7 +73,7 @@ public:
   /**
    * @brief Append new keys to the ordering as `ordering += keys`.
    *
-   * @param key
+   * @param keys The key vector to append to this ordering.
    * @return The ordering variable with appended keys.
    */
   This& operator+=(KeyVector& keys);