From 08393314ac380ece3573c51f3c1a4fc1fb09c8a3 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Tue, 25 Oct 2022 12:35:29 -0400 Subject: [PATCH 01/51] minor typo fixes --- gtsam/hybrid/tests/Switching.h | 2 +- gtsam/hybrid/tests/testHybridBayesTree.cpp | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/gtsam/hybrid/tests/Switching.h b/gtsam/hybrid/tests/Switching.h index f9e1916d0..a28db4894 100644 --- a/gtsam/hybrid/tests/Switching.h +++ b/gtsam/hybrid/tests/Switching.h @@ -147,7 +147,7 @@ struct Switching { } // Create hybrid factor graph. - // Add a prior on X(1). + // Add a prior on X(0). auto prior = boost::make_shared>( X(0), measurements.at(0), noiseModel::Isotropic::Sigma(1, prior_sigma)); nonlinearFactorGraph.push_nonlinear(prior); diff --git a/gtsam/hybrid/tests/testHybridBayesTree.cpp b/gtsam/hybrid/tests/testHybridBayesTree.cpp index 876c550cb..5de21322c 100644 --- a/gtsam/hybrid/tests/testHybridBayesTree.cpp +++ b/gtsam/hybrid/tests/testHybridBayesTree.cpp @@ -105,7 +105,7 @@ TEST(HybridBayesTree, Optimize) { graph1.push_back(s.linearizedFactorGraph.at(i)); } - // Add the Gaussian factors, 1 prior on X(1), + // Add the Gaussian factors, 1 prior on X(0), // 3 measurements on X(2), X(3), X(4) graph1.push_back(s.linearizedFactorGraph.at(0)); for (size_t i = 4; i <= 6; i++) { From 7dec7bb00d8572f0d964f2611ed2cde1ce6d89f1 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Tue, 25 Oct 2022 12:36:15 -0400 Subject: [PATCH 02/51] remove if guards and add timing counters --- gtsam/hybrid/HybridGaussianFactorGraph.cpp | 32 ++++++++++++++++------ 1 file changed, 24 insertions(+), 8 deletions(-) diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.cpp b/gtsam/hybrid/HybridGaussianFactorGraph.cpp index 041603fbd..9264089aa 100644 --- a/gtsam/hybrid/HybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/HybridGaussianFactorGraph.cpp @@ -214,24 +214,35 @@ hybridElimination(const HybridGaussianFactorGraph &factors, if (graph.empty()) { return {nullptr, 
nullptr}; } + +#ifdef HYBRID_TIMING + gttic_(hybrid_eliminate); +#endif + std::pair, boost::shared_ptr> result = EliminatePreferCholesky(graph, frontalKeys); - if (keysOfEliminated.empty()) { - // Initialize the keysOfEliminated to be the keys of the - // eliminated GaussianConditional - keysOfEliminated = result.first->keys(); - } - if (keysOfSeparator.empty()) { - keysOfSeparator = result.second->keys(); - } + // Initialize the keysOfEliminated to be the keys of the + // eliminated GaussianConditional + keysOfEliminated = result.first->keys(); + keysOfSeparator = result.second->keys(); + +#ifdef HYBRID_TIMING + gttoc_(hybrid_eliminate); +#endif + return result; }; // Perform elimination! DecisionTree eliminationResults(sum, eliminate); +#ifdef HYBRID_TIMING + tictoc_print_(); + tictoc_reset_(); +#endif + // Separate out decision tree into conditionals and remaining factors. auto pair = unzip(eliminationResults); @@ -244,6 +255,8 @@ hybridElimination(const HybridGaussianFactorGraph &factors, // If there are no more continuous parents, then we should create here a // DiscreteFactor, with the error for each discrete choice. if (keysOfSeparator.empty()) { + // TODO(Varun) Use the math from the iMHS_Math-1-indexed document + // TODO(Varun) The prob of a leaf should be computed from the full Bayes Net VectorValues empty_values; auto factorError = [&](const GaussianFactor::shared_ptr &factor) { if (!factor) return 0.0; // TODO(fan): does this make sense? @@ -383,6 +396,9 @@ EliminateHybrid(const HybridGaussianFactorGraph &factors, return discreteElimination(factors, frontalKeys); } +#ifdef HYBRID_TIMING + tictoc_reset_(); +#endif // Case 3: We are now in the hybrid land! 
return hybridElimination(factors, frontalKeys, continuousSeparator, discreteSeparatorSet); From dcdcf30f529ade6cd825f6b156d001c7c762f873 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Tue, 25 Oct 2022 12:36:48 -0400 Subject: [PATCH 03/51] new WIP test to check the discrete probabilities after elimination --- gtsam/hybrid/tests/testHybridEstimation.cpp | 89 ++++++++++++++++++++- 1 file changed, 87 insertions(+), 2 deletions(-) diff --git a/gtsam/hybrid/tests/testHybridEstimation.cpp b/gtsam/hybrid/tests/testHybridEstimation.cpp index 6be7566ae..d786528d3 100644 --- a/gtsam/hybrid/tests/testHybridEstimation.cpp +++ b/gtsam/hybrid/tests/testHybridEstimation.cpp @@ -71,7 +71,7 @@ Ordering getOrdering(HybridGaussianFactorGraph& factors, /****************************************************************************/ // Test approximate inference with an additional pruning step. -TEST(HybridNonlinearISAM, Incremental) { +TEST(HybridEstimation, Incremental) { size_t K = 10; std::vector measurements = {0, 1, 2, 2, 2, 2, 3, 4, 5, 6, 6}; // Ground truth discrete seq @@ -90,7 +90,6 @@ TEST(HybridNonlinearISAM, Incremental) { HybridGaussianFactorGraph bayesNet; for (size_t k = 1; k < K; k++) { - std::cout << ">>>>>>>>>>>>>>>>>>> k=" << k << std::endl; // Motion Model graph.push_back(switching.nonlinearFactorGraph.at(k)); // Measurement @@ -122,6 +121,92 @@ TEST(HybridNonlinearISAM, Incremental) { EXPECT(assert_equal(expected_continuous, result)); } +/** + * @brief A function to get a specific 1D robot motion problem as a linearized + * factor graph. This is the problem P(X|Z, M), i.e. estimating the continuous + * positions given the measurements and discrete sequence. + * + * @param K The number of timesteps. + * @param measurements The vector of measurements for each timestep. + * @param discrete_seq The discrete sequence governing the motion of the robot. + * @param measurement_sigma Noise model sigma for measurements. 
+ * @param between_sigma Noise model sigma for the between factor. + * @return GaussianFactorGraph::shared_ptr + */ +GaussianFactorGraph::shared_ptr specificProblem( + size_t K, const std::vector& measurements, + const std::vector& discrete_seq, double measurement_sigma = 0.1, + double between_sigma = 1.0) { + NonlinearFactorGraph graph; + Values linearizationPoint; + + // Add measurement factors + auto measurement_noise = noiseModel::Isotropic::Sigma(1, measurement_sigma); + for (size_t k = 0; k < K; k++) { + graph.emplace_shared>(X(k), measurements.at(k), + measurement_noise); + linearizationPoint.insert(X(k), static_cast(k + 1)); + } + + using MotionModel = BetweenFactor; + + // Add "motion models". + auto motion_noise_model = noiseModel::Isotropic::Sigma(1, between_sigma); + for (size_t k = 0; k < K - 1; k++) { + auto motion_model = boost::make_shared( + X(k), X(k + 1), discrete_seq.at(k), motion_noise_model); + graph.push_back(motion_model); + } + GaussianFactorGraph::shared_ptr linear_graph = + graph.linearize(linearizationPoint); + return linear_graph; +} + +/** + * @brief Get the discrete sequence from the integer `x`. + * + * @tparam K Template parameter so we can set the correct bitset size. + * @param x The integer to convert to a discrete binary sequence. 
+ * @return std::vector + */ +template +std::vector getDiscreteSequence(size_t x) { + std::bitset seq = x; + std::vector discrete_seq(K - 1); + for (size_t i = 0; i < K - 1; i++) { + // Save to discrete vector in reverse order + discrete_seq[K - 2 - i] = seq[i]; + } + return discrete_seq; +} + +TEST(HybridEstimation, Probability) { + constexpr size_t K = 4; + std::vector measurements = {0, 1, 2, 2}; + + // This is the correct sequence + // std::vector discrete_seq = {1, 1, 0}; + + double between_sigma = 1.0, measurement_sigma = 0.1; + + for (size_t i = 0; i < pow(2, K - 1); i++) { + std::vector discrete_seq = getDiscreteSequence(i); + + GaussianFactorGraph::shared_ptr linear_graph = specificProblem( + K, measurements, discrete_seq, measurement_sigma, between_sigma); + + auto bayes_net = linear_graph->eliminateSequential(); + // graph.print(); + // bayes_net->print(); + VectorValues values = bayes_net->optimize(); + std::cout << i << " : " << linear_graph->probPrime(values) << std::endl; + } + // std::cout << linear_graph->error(values) << std::endl; + // // values.at(); + + // // linearizationPoint.retract(values).print(); +} + /* ************************************************************************* */ int main() { TestResult tr; From 1789bb74fe045b45e20e480a06053231b7e68ae7 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Tue, 25 Oct 2022 14:02:36 -0400 Subject: [PATCH 04/51] showing difference in computed probabilities --- gtsam/hybrid/tests/Switching.h | 1 + gtsam/hybrid/tests/testHybridEstimation.cpp | 10 +++++++--- 2 files changed, 8 insertions(+), 3 deletions(-) diff --git a/gtsam/hybrid/tests/Switching.h b/gtsam/hybrid/tests/Switching.h index a28db4894..76721edc4 100644 --- a/gtsam/hybrid/tests/Switching.h +++ b/gtsam/hybrid/tests/Switching.h @@ -130,6 +130,7 @@ struct Switching { * @param K The total number of timesteps. * @param between_sigma The stddev between poses. * @param prior_sigma The stddev on priors (also used for measurements). 
+ * @param measurements Vector of measurements for each timestep. */ Switching(size_t K, double between_sigma = 1.0, double prior_sigma = 0.1, std::vector measurements = {}) diff --git a/gtsam/hybrid/tests/testHybridEstimation.cpp b/gtsam/hybrid/tests/testHybridEstimation.cpp index d786528d3..9104ad584 100644 --- a/gtsam/hybrid/tests/testHybridEstimation.cpp +++ b/gtsam/hybrid/tests/testHybridEstimation.cpp @@ -201,10 +201,14 @@ TEST(HybridEstimation, Probability) { VectorValues values = bayes_net->optimize(); std::cout << i << " : " << linear_graph->probPrime(values) << std::endl; } - // std::cout << linear_graph->error(values) << std::endl; - // // values.at(); - // // linearizationPoint.retract(values).print(); + Switching switching(K, between_sigma, measurement_sigma, measurements); + auto graph = switching.linearizedFactorGraph; + Ordering ordering = getOrdering(graph, HybridGaussianFactorGraph()); + HybridBayesNet::shared_ptr bayesNet = graph.eliminateSequential(ordering); + const DecisionTreeFactor::shared_ptr decisionTree = + bayesNet->discreteConditionals(); + decisionTree->print(); } /* ************************************************************************* */ From 96afdffae86fab3fcd4af37e8f14bd75628fc4ff Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Mon, 31 Oct 2022 23:32:52 -0400 Subject: [PATCH 05/51] Increase the number of time steps for incremental test case --- gtsam/hybrid/tests/testHybridEstimation.cpp | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/gtsam/hybrid/tests/testHybridEstimation.cpp b/gtsam/hybrid/tests/testHybridEstimation.cpp index 9104ad584..7d276094e 100644 --- a/gtsam/hybrid/tests/testHybridEstimation.cpp +++ b/gtsam/hybrid/tests/testHybridEstimation.cpp @@ -72,12 +72,11 @@ Ordering getOrdering(HybridGaussianFactorGraph& factors, /****************************************************************************/ // Test approximate inference with an additional pruning step. 
TEST(HybridEstimation, Incremental) { - size_t K = 10; - std::vector measurements = {0, 1, 2, 2, 2, 2, 3, 4, 5, 6, 6}; + size_t K = 15; + std::vector measurements = {0, 1, 2, 2, 2, 2, 3, 4, 5, 6, 6, 7, 8, 9, 9, 9, 10, 11, 11, 11, 11}; // Ground truth discrete seq - std::vector discrete_seq = {1, 1, 0, 0, 0, 1, 1, 1, 1, 0}; + std::vector discrete_seq = {1, 1, 0, 0, 0, 1, 1, 1, 1, 0, 1, 1, 1, 0, 0, 1, 1, 0, 0, 0}; Switching switching(K, 1.0, 0.1, measurements); - // HybridNonlinearISAM smoother; HybridSmoother smoother; HybridNonlinearFactorGraph graph; Values initial; From a6d1a5747810f78d516ec53c026716a6398b39bb Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Mon, 7 Nov 2022 16:10:48 -0500 Subject: [PATCH 06/51] fix hybrid elimination --- gtsam/hybrid/HybridGaussianFactorGraph.cpp | 75 ++++++++++++++++++++-- gtsam/hybrid/HybridGaussianFactorGraph.h | 2 + 2 files changed, 72 insertions(+), 5 deletions(-) diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.cpp b/gtsam/hybrid/HybridGaussianFactorGraph.cpp index d0d2b8d15..86d74ca22 100644 --- a/gtsam/hybrid/HybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/HybridGaussianFactorGraph.cpp @@ -51,6 +51,8 @@ #include #include +// #define HYBRID_TIMING + namespace gtsam { template class EliminateableFactorGraph; @@ -256,13 +258,15 @@ hybridElimination(const HybridGaussianFactorGraph &factors, // DiscreteFactor, with the error for each discrete choice. if (keysOfSeparator.empty()) { // TODO(Varun) Use the math from the iMHS_Math-1-indexed document - // TODO(Varun) The prob of a leaf should be computed from the full Bayes Net VectorValues empty_values; - auto factorError = [&](const GaussianFactor::shared_ptr &factor) { - if (!factor) return 0.0; // TODO(fan): does this make sense? 
- return exp(-factor->error(empty_values)); + auto factorProb = [&](const GaussianFactor::shared_ptr &factor) { + if (!factor) { + return 0.0; // If nullptr, return 0.0 probability + } else { + return 1.0; + } }; - DecisionTree fdt(separatorFactors, factorError); + DecisionTree fdt(separatorFactors, factorProb); auto discreteFactor = boost::make_shared(discreteSeparator, fdt); @@ -488,4 +492,65 @@ AlgebraicDecisionTree HybridGaussianFactorGraph::probPrime( return prob_tree; } +/* ************************************************************************ */ +boost::shared_ptr +HybridGaussianFactorGraph::eliminateHybridSequential() const { + Ordering continuous_ordering(this->continuousKeys()), + discrete_ordering(this->discreteKeys()); + + // Eliminate continuous + HybridBayesNet::shared_ptr bayesNet; + HybridGaussianFactorGraph::shared_ptr discreteGraph; + std::tie(bayesNet, discreteGraph) = + BaseEliminateable::eliminatePartialSequential( + continuous_ordering, EliminationTraitsType::DefaultEliminate); + + // Get the last continuous conditional which will have all the discrete keys + auto last_conditional = bayesNet->at(bayesNet->size() - 1); + // Get all the discrete assignments + DiscreteKeys discrete_keys = last_conditional->discreteKeys(); + const std::vector assignments = + DiscreteValues::CartesianProduct(discrete_keys); + + // Reverse discrete keys order for correct tree construction + std::reverse(discrete_keys.begin(), discrete_keys.end()); + + // Create a decision tree of all the different VectorValues + std::vector vector_values; + for (const DiscreteValues &assignment : assignments) { + VectorValues values = bayesNet->optimize(assignment); + vector_values.push_back(boost::make_shared(values)); + } + DecisionTree delta_tree(discrete_keys, + vector_values); + + // Get the probPrime tree with the correct leaf probabilities + std::vector probPrimes; + for (const DiscreteValues &assignment : assignments) { + double error = 0.0; + VectorValues delta = 
*delta_tree(assignment); + for (auto factor : *this) { + if (factor->isHybrid()) { + auto f = boost::static_pointer_cast(factor); + error += f->error(delta, assignment); + + } else if (factor->isContinuous()) { + auto f = boost::static_pointer_cast(factor); + error += f->inner()->error(delta); + } + } + probPrimes.push_back(exp(-error)); + } + AlgebraicDecisionTree probPrimeTree(discrete_keys, probPrimes); + discreteGraph->add(DecisionTreeFactor(discrete_keys, probPrimeTree)); + + // Perform discrete elimination + HybridBayesNet::shared_ptr discreteBayesNet = + discreteGraph->eliminateSequential( + discrete_ordering, EliminationTraitsType::DefaultEliminate); + bayesNet->add(*discreteBayesNet); + + return bayesNet; +} + } // namespace gtsam diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.h b/gtsam/hybrid/HybridGaussianFactorGraph.h index c7e9aa60d..21a5740db 100644 --- a/gtsam/hybrid/HybridGaussianFactorGraph.h +++ b/gtsam/hybrid/HybridGaussianFactorGraph.h @@ -190,6 +190,8 @@ class GTSAM_EXPORT HybridGaussianFactorGraph AlgebraicDecisionTree probPrime( const VectorValues& continuousValues) const; + boost::shared_ptr eliminateHybridSequential() const; + /** * @brief Return a Colamd constrained ordering where the discrete keys are * eliminated after the continuous keys. 
From 2f2f8c9ed513bbce011b90829d87b7af16737dbc Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Mon, 7 Nov 2022 16:43:02 -0500 Subject: [PATCH 07/51] figured out how to get the correct continuous errors --- gtsam/hybrid/tests/testHybridEstimation.cpp | 159 ++++++++++++++------ 1 file changed, 111 insertions(+), 48 deletions(-) diff --git a/gtsam/hybrid/tests/testHybridEstimation.cpp b/gtsam/hybrid/tests/testHybridEstimation.cpp index 7d276094e..31325efe4 100644 --- a/gtsam/hybrid/tests/testHybridEstimation.cpp +++ b/gtsam/hybrid/tests/testHybridEstimation.cpp @@ -69,56 +69,57 @@ Ordering getOrdering(HybridGaussianFactorGraph& factors, return ordering; } -/****************************************************************************/ -// Test approximate inference with an additional pruning step. -TEST(HybridEstimation, Incremental) { - size_t K = 15; - std::vector measurements = {0, 1, 2, 2, 2, 2, 3, 4, 5, 6, 6, 7, 8, 9, 9, 9, 10, 11, 11, 11, 11}; - // Ground truth discrete seq - std::vector discrete_seq = {1, 1, 0, 0, 0, 1, 1, 1, 1, 0, 1, 1, 1, 0, 0, 1, 1, 0, 0, 0}; - Switching switching(K, 1.0, 0.1, measurements); - HybridSmoother smoother; - HybridNonlinearFactorGraph graph; - Values initial; +// /****************************************************************************/ +// // Test approximate inference with an additional pruning step. 
+// TEST(HybridEstimation, Incremental) { +// size_t K = 15; +// std::vector measurements = {0, 1, 2, 2, 2, 2, 3, 4, 5, 6, 6, 7, 8, +// 9, 9, 9, 10, 11, 11, 11, 11}; +// // Ground truth discrete seq +// std::vector discrete_seq = {1, 1, 0, 0, 0, 1, 1, 1, 1, 0, 1, 1, 1, +// 0, 0, 1, 1, 0, 0, 0}; Switching switching(K, 1.0, 0.1, measurements); +// HybridSmoother smoother; +// HybridNonlinearFactorGraph graph; +// Values initial; - // Add the X(0) prior - graph.push_back(switching.nonlinearFactorGraph.at(0)); - initial.insert(X(0), switching.linearizationPoint.at(X(0))); +// // Add the X(0) prior +// graph.push_back(switching.nonlinearFactorGraph.at(0)); +// initial.insert(X(0), switching.linearizationPoint.at(X(0))); - HybridGaussianFactorGraph linearized; - HybridGaussianFactorGraph bayesNet; +// HybridGaussianFactorGraph linearized; +// HybridGaussianFactorGraph bayesNet; - for (size_t k = 1; k < K; k++) { - // Motion Model - graph.push_back(switching.nonlinearFactorGraph.at(k)); - // Measurement - graph.push_back(switching.nonlinearFactorGraph.at(k + K - 1)); +// for (size_t k = 1; k < K; k++) { +// // Motion Model +// graph.push_back(switching.nonlinearFactorGraph.at(k)); +// // Measurement +// graph.push_back(switching.nonlinearFactorGraph.at(k + K - 1)); - initial.insert(X(k), switching.linearizationPoint.at(X(k))); +// initial.insert(X(k), switching.linearizationPoint.at(X(k))); - bayesNet = smoother.hybridBayesNet(); - linearized = *graph.linearize(initial); - Ordering ordering = getOrdering(bayesNet, linearized); +// bayesNet = smoother.hybridBayesNet(); +// linearized = *graph.linearize(initial); +// Ordering ordering = getOrdering(bayesNet, linearized); - smoother.update(linearized, ordering, 3); - graph.resize(0); - } - HybridValues delta = smoother.hybridBayesNet().optimize(); +// smoother.update(linearized, ordering, 3); +// graph.resize(0); +// } +// HybridValues delta = smoother.hybridBayesNet().optimize(); - Values result = 
initial.retract(delta.continuous()); +// Values result = initial.retract(delta.continuous()); - DiscreteValues expected_discrete; - for (size_t k = 0; k < K - 1; k++) { - expected_discrete[M(k)] = discrete_seq[k]; - } - EXPECT(assert_equal(expected_discrete, delta.discrete())); +// DiscreteValues expected_discrete; +// for (size_t k = 0; k < K - 1; k++) { +// expected_discrete[M(k)] = discrete_seq[k]; +// } +// EXPECT(assert_equal(expected_discrete, delta.discrete())); - Values expected_continuous; - for (size_t k = 0; k < K; k++) { - expected_continuous.insert(X(k), measurements[k]); - } - EXPECT(assert_equal(expected_continuous, result)); -} +// Values expected_continuous; +// for (size_t k = 0; k < K; k++) { +// expected_continuous.insert(X(k), measurements[k]); +// } +// EXPECT(assert_equal(expected_continuous, result)); +// } /** * @brief A function to get a specific 1D robot motion problem as a linearized @@ -188,6 +189,7 @@ TEST(HybridEstimation, Probability) { double between_sigma = 1.0, measurement_sigma = 0.1; + std::vector expected_errors, expected_prob_primes; for (size_t i = 0; i < pow(2, K - 1); i++) { std::vector discrete_seq = getDiscreteSequence(i); @@ -195,19 +197,80 @@ TEST(HybridEstimation, Probability) { K, measurements, discrete_seq, measurement_sigma, between_sigma); auto bayes_net = linear_graph->eliminateSequential(); - // graph.print(); - // bayes_net->print(); + VectorValues values = bayes_net->optimize(); - std::cout << i << " : " << linear_graph->probPrime(values) << std::endl; + + expected_errors.push_back(linear_graph->error(values)); + expected_prob_primes.push_back(linear_graph->probPrime(values)); + std::cout << i << " : " << expected_errors.at(i) << "\t|\t" + << expected_prob_primes.at(i) << std::endl; } + // std::vector discrete_seq = getDiscreteSequence(0); + // GaussianFactorGraph::shared_ptr linear_graph = specificProblem( + // K, measurements, discrete_seq, measurement_sigma, between_sigma); + // auto bayes_net = 
linear_graph->eliminateSequential(); + // VectorValues values = bayes_net->optimize(); + // std::cout << "Total NLFG Error: " << linear_graph->error(values) << std::endl; + // std::cout << "===============" << std::endl; + Switching switching(K, between_sigma, measurement_sigma, measurements); auto graph = switching.linearizedFactorGraph; Ordering ordering = getOrdering(graph, HybridGaussianFactorGraph()); - HybridBayesNet::shared_ptr bayesNet = graph.eliminateSequential(ordering); - const DecisionTreeFactor::shared_ptr decisionTree = - bayesNet->discreteConditionals(); - decisionTree->print(); + + HybridBayesNet::shared_ptr bayesNet; + HybridGaussianFactorGraph::shared_ptr remainingGraph; + Ordering continuous(graph.continuousKeys()); + std::tie(bayesNet, remainingGraph) = + graph.eliminatePartialSequential(continuous); + + auto last_conditional = bayesNet->at(bayesNet->size() - 1); + DiscreteKeys discrete_keys = last_conditional->discreteKeys(); + const std::vector assignments = + DiscreteValues::CartesianProduct(discrete_keys); + + vector vector_values; + for (const DiscreteValues& assignment : assignments) { + VectorValues values = bayesNet->optimize(assignment); + vector_values.push_back(boost::make_shared(values)); + } + std::reverse(discrete_keys.begin(), discrete_keys.end()); + DecisionTree delta_tree(discrete_keys, + vector_values); + + vector probPrimes; + for (const DiscreteValues& assignment : assignments) { + double error = 0.0; + VectorValues delta = *delta_tree(assignment); + for (auto factor : graph) { + if (factor->isHybrid()) { + auto f = boost::static_pointer_cast(factor); + error += f->error(delta, assignment); + + } else if (factor->isContinuous()) { + auto f = boost::static_pointer_cast(factor); + error += f->inner()->error(delta); + } + } + // std::cout << "\n" << std::endl; + // assignment.print(); + // std::cout << error << " | " << exp(-error) << std::endl; + probPrimes.push_back(exp(-error)); + } + AlgebraicDecisionTree 
expected_probPrimeTree(discrete_keys, probPrimes); + expected_probPrimeTree.print("", DefaultKeyFormatter); + + // remainingGraph->add(DecisionTreeFactor(discrete_keys, probPrimeTree)); + + // Ordering discrete(graph.discreteKeys()); + // // remainingGraph->print("remainingGraph"); + // // discrete.print(); + // auto discreteBayesNet = remainingGraph->eliminateSequential(discrete); + // bayesNet->add(*discreteBayesNet); + // // bayesNet->print(); + + // HybridValues hybrid_values = bayesNet->optimize(); + // hybrid_values.discrete().print(); } /* ************************************************************************* */ From 98febf2f0cfbaf1e8138a938f620cab1f997bf61 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Mon, 7 Nov 2022 18:24:21 -0500 Subject: [PATCH 08/51] allow optional discrete transition probability for Switching test fixture --- gtsam/hybrid/tests/Switching.h | 15 +++++++++------ 1 file changed, 9 insertions(+), 6 deletions(-) diff --git a/gtsam/hybrid/tests/Switching.h b/gtsam/hybrid/tests/Switching.h index 76721edc4..4bf1678be 100644 --- a/gtsam/hybrid/tests/Switching.h +++ b/gtsam/hybrid/tests/Switching.h @@ -133,7 +133,8 @@ struct Switching { * @param measurements Vector of measurements for each timestep. */ Switching(size_t K, double between_sigma = 1.0, double prior_sigma = 0.1, - std::vector measurements = {}) + std::vector measurements = {}, + std::string discrete_transition_prob = "1/2 3/2") : K(K) { // Create DiscreteKeys for binary K modes. for (size_t k = 0; k < K; k++) { @@ -173,7 +174,7 @@ struct Switching { } // Add "mode chain" - addModeChain(&nonlinearFactorGraph); + addModeChain(&nonlinearFactorGraph, discrete_transition_prob); // Create the linearization point. for (size_t k = 0; k < K; k++) { @@ -202,13 +203,14 @@ struct Switching { * * @param fg The nonlinear factor graph to which the mode chain is added. 
*/ - void addModeChain(HybridNonlinearFactorGraph *fg) { + void addModeChain(HybridNonlinearFactorGraph *fg, + std::string discrete_transition_prob = "1/2 3/2") { auto prior = boost::make_shared(modes[0], "1/1"); fg->push_discrete(prior); for (size_t k = 0; k < K - 2; k++) { auto parents = {modes[k]}; auto conditional = boost::make_shared( - modes[k + 1], parents, "1/2 3/2"); + modes[k + 1], parents, discrete_transition_prob); fg->push_discrete(conditional); } } @@ -219,13 +221,14 @@ struct Switching { * * @param fg The gaussian factor graph to which the mode chain is added. */ - void addModeChain(HybridGaussianFactorGraph *fg) { + void addModeChain(HybridGaussianFactorGraph *fg, + std::string discrete_transition_prob = "1/2 3/2") { auto prior = boost::make_shared(modes[0], "1/1"); fg->push_discrete(prior); for (size_t k = 0; k < K - 2; k++) { auto parents = {modes[k]}; auto conditional = boost::make_shared( - modes[k + 1], parents, "1/2 3/2"); + modes[k + 1], parents, discrete_transition_prob); fg->push_discrete(conditional); } } From 610a535b300e66df81971127bb16e0e5cbe2c925 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Mon, 7 Nov 2022 18:25:23 -0500 Subject: [PATCH 09/51] set up unit test to verify that the probPrimeTree has the same values as individual factor graphs --- gtsam/hybrid/tests/testHybridEstimation.cpp | 202 ++++++++++++-------- 1 file changed, 120 insertions(+), 82 deletions(-) diff --git a/gtsam/hybrid/tests/testHybridEstimation.cpp b/gtsam/hybrid/tests/testHybridEstimation.cpp index 31325efe4..82ce3bf9d 100644 --- a/gtsam/hybrid/tests/testHybridEstimation.cpp +++ b/gtsam/hybrid/tests/testHybridEstimation.cpp @@ -69,57 +69,63 @@ Ordering getOrdering(HybridGaussianFactorGraph& factors, return ordering; } -// /****************************************************************************/ -// // Test approximate inference with an additional pruning step. 
-// TEST(HybridEstimation, Incremental) { -// size_t K = 15; -// std::vector measurements = {0, 1, 2, 2, 2, 2, 3, 4, 5, 6, 6, 7, 8, -// 9, 9, 9, 10, 11, 11, 11, 11}; -// // Ground truth discrete seq -// std::vector discrete_seq = {1, 1, 0, 0, 0, 1, 1, 1, 1, 0, 1, 1, 1, -// 0, 0, 1, 1, 0, 0, 0}; Switching switching(K, 1.0, 0.1, measurements); -// HybridSmoother smoother; -// HybridNonlinearFactorGraph graph; -// Values initial; +/****************************************************************************/ +// Test approximate inference with an additional pruning step. +TEST(HybridEstimation, Incremental) { + // size_t K = 15; + // std::vector measurements = {0, 1, 2, 2, 2, 2, 3, 4, 5, 6, 6, + // 7, 8, 9, 9, 9, 10, 11, 11, 11, 11}; + // // Ground truth discrete seq + // std::vector discrete_seq = {1, 1, 0, 0, 0, 1, 1, 1, 1, 0, + // 1, 1, 1, 0, 0, 1, 1, 0, 0, 0}; + size_t K = 4; + std::vector measurements = {0, 1, 2, 2}; + // Ground truth discrete seq + std::vector discrete_seq = {1, 1, 0}; + Switching switching(K, 1.0, 0.1, measurements, "1/1 1/1"); + HybridSmoother smoother; + HybridNonlinearFactorGraph graph; + Values initial; -// // Add the X(0) prior -// graph.push_back(switching.nonlinearFactorGraph.at(0)); -// initial.insert(X(0), switching.linearizationPoint.at(X(0))); + // Add the X(0) prior + graph.push_back(switching.nonlinearFactorGraph.at(0)); + initial.insert(X(0), switching.linearizationPoint.at(X(0))); -// HybridGaussianFactorGraph linearized; -// HybridGaussianFactorGraph bayesNet; + HybridGaussianFactorGraph linearized; + HybridGaussianFactorGraph bayesNet; -// for (size_t k = 1; k < K; k++) { -// // Motion Model -// graph.push_back(switching.nonlinearFactorGraph.at(k)); -// // Measurement -// graph.push_back(switching.nonlinearFactorGraph.at(k + K - 1)); + for (size_t k = 1; k < K; k++) { + // Motion Model + graph.push_back(switching.nonlinearFactorGraph.at(k)); + // Measurement + graph.push_back(switching.nonlinearFactorGraph.at(k + K - 1)); -// 
initial.insert(X(k), switching.linearizationPoint.at(X(k))); + initial.insert(X(k), switching.linearizationPoint.at(X(k))); -// bayesNet = smoother.hybridBayesNet(); -// linearized = *graph.linearize(initial); -// Ordering ordering = getOrdering(bayesNet, linearized); + bayesNet = smoother.hybridBayesNet(); + linearized = *graph.linearize(initial); + Ordering ordering = getOrdering(bayesNet, linearized); -// smoother.update(linearized, ordering, 3); -// graph.resize(0); -// } -// HybridValues delta = smoother.hybridBayesNet().optimize(); + smoother.update(linearized, ordering, 3); + graph.resize(0); + } -// Values result = initial.retract(delta.continuous()); + HybridValues delta = smoother.hybridBayesNet().optimize(); -// DiscreteValues expected_discrete; -// for (size_t k = 0; k < K - 1; k++) { -// expected_discrete[M(k)] = discrete_seq[k]; -// } -// EXPECT(assert_equal(expected_discrete, delta.discrete())); + Values result = initial.retract(delta.continuous()); -// Values expected_continuous; -// for (size_t k = 0; k < K; k++) { -// expected_continuous.insert(X(k), measurements[k]); -// } -// EXPECT(assert_equal(expected_continuous, result)); -// } + DiscreteValues expected_discrete; + for (size_t k = 0; k < K - 1; k++) { + expected_discrete[M(k)] = discrete_seq[k]; + } + EXPECT(assert_equal(expected_discrete, delta.discrete())); + + Values expected_continuous; + for (size_t k = 0; k < K; k++) { + expected_continuous.insert(X(k), measurements[k]); + } + EXPECT(assert_equal(expected_continuous, result)); +} /** * @brief A function to get a specific 1D robot motion problem as a linearized @@ -180,6 +186,50 @@ std::vector getDiscreteSequence(size_t x) { return discrete_seq; } +AlgebraicDecisionTree probPrimeTree( + const HybridGaussianFactorGraph& graph) { + HybridBayesNet::shared_ptr bayesNet; + HybridGaussianFactorGraph::shared_ptr remainingGraph; + Ordering continuous(graph.continuousKeys()); + std::tie(bayesNet, remainingGraph) = + 
graph.eliminatePartialSequential(continuous); + + auto last_conditional = bayesNet->at(bayesNet->size() - 1); + DiscreteKeys discrete_keys = last_conditional->discreteKeys(); + + const std::vector assignments = + DiscreteValues::CartesianProduct(discrete_keys); + + std::reverse(discrete_keys.begin(), discrete_keys.end()); + + vector vector_values; + for (const DiscreteValues& assignment : assignments) { + VectorValues values = bayesNet->optimize(assignment); + vector_values.push_back(boost::make_shared(values)); + } + DecisionTree delta_tree(discrete_keys, + vector_values); + + std::vector probPrimes; + for (const DiscreteValues& assignment : assignments) { + double error = 0.0; + VectorValues delta = *delta_tree(assignment); + for (auto factor : graph) { + if (factor->isHybrid()) { + auto f = boost::static_pointer_cast(factor); + error += f->error(delta, assignment); + + } else if (factor->isContinuous()) { + auto f = boost::static_pointer_cast(factor); + error += f->inner()->error(delta); + } + } + probPrimes.push_back(exp(-error)); + } + AlgebraicDecisionTree probPrimeTree(discrete_keys, probPrimes); + return probPrimeTree; +} + TEST(HybridEstimation, Probability) { constexpr size_t K = 4; std::vector measurements = {0, 1, 2, 2}; @@ -202,63 +252,51 @@ TEST(HybridEstimation, Probability) { expected_errors.push_back(linear_graph->error(values)); expected_prob_primes.push_back(linear_graph->probPrime(values)); - std::cout << i << " : " << expected_errors.at(i) << "\t|\t" - << expected_prob_primes.at(i) << std::endl; } - // std::vector discrete_seq = getDiscreteSequence(0); - // GaussianFactorGraph::shared_ptr linear_graph = specificProblem( - // K, measurements, discrete_seq, measurement_sigma, between_sigma); - // auto bayes_net = linear_graph->eliminateSequential(); - // VectorValues values = bayes_net->optimize(); - // std::cout << "Total NLFG Error: " << linear_graph->error(values) << std::endl; - // std::cout << "===============" << std::endl; - Switching 
switching(K, between_sigma, measurement_sigma, measurements); auto graph = switching.linearizedFactorGraph; Ordering ordering = getOrdering(graph, HybridGaussianFactorGraph()); - HybridBayesNet::shared_ptr bayesNet; - HybridGaussianFactorGraph::shared_ptr remainingGraph; - Ordering continuous(graph.continuousKeys()); - std::tie(bayesNet, remainingGraph) = - graph.eliminatePartialSequential(continuous); + AlgebraicDecisionTree expected_probPrimeTree = probPrimeTree(graph); + // Eliminate continuous + Ordering continuous_ordering(graph.continuousKeys()); + HybridBayesNet::shared_ptr bayesNet; + HybridGaussianFactorGraph::shared_ptr discreteGraph; + std::tie(bayesNet, discreteGraph) = + graph.eliminatePartialSequential(continuous_ordering); + + // Get the last continuous conditional which will have all the discrete keys auto last_conditional = bayesNet->at(bayesNet->size() - 1); DiscreteKeys discrete_keys = last_conditional->discreteKeys(); + const std::vector assignments = DiscreteValues::CartesianProduct(discrete_keys); - vector vector_values; - for (const DiscreteValues& assignment : assignments) { - VectorValues values = bayesNet->optimize(assignment); - vector_values.push_back(boost::make_shared(values)); - } + // Reverse discrete keys order for correct tree construction std::reverse(discrete_keys.begin(), discrete_keys.end()); - DecisionTree delta_tree(discrete_keys, - vector_values); - vector probPrimes; - for (const DiscreteValues& assignment : assignments) { - double error = 0.0; - VectorValues delta = *delta_tree(assignment); - for (auto factor : graph) { - if (factor->isHybrid()) { - auto f = boost::static_pointer_cast(factor); - error += f->error(delta, assignment); + // Create a decision tree of all the different VectorValues + DecisionTree delta_tree = + graph.continuousDelta(discrete_keys, bayesNet, assignments); - } else if (factor->isContinuous()) { - auto f = boost::static_pointer_cast(factor); - error += f->inner()->error(delta); - } + 
AlgebraicDecisionTree probPrimeTree = + graph.continuousProbPrimes(discrete_keys, bayesNet, assignments); + + EXPECT(assert_equal(expected_probPrimeTree, probPrimeTree)); + + // Test if the probPrimeTree matches the probability of + // the individual factor graphs + for (size_t i = 0; i < pow(2, K - 1); i++) { + std::vector discrete_seq = getDiscreteSequence(i); + Assignment discrete_assignment; + for (size_t v = 0; v < discrete_seq.size(); v++) { + discrete_assignment[M(v)] = discrete_seq[v]; } - // std::cout << "\n" << std::endl; - // assignment.print(); - // std::cout << error << " | " << exp(-error) << std::endl; - probPrimes.push_back(exp(-error)); + EXPECT_DOUBLES_EQUAL(expected_prob_primes.at(i), + probPrimeTree(discrete_assignment), 1e-8); } - AlgebraicDecisionTree expected_probPrimeTree(discrete_keys, probPrimes); - expected_probPrimeTree.print("", DefaultKeyFormatter); // remainingGraph->add(DecisionTreeFactor(discrete_keys, probPrimeTree)); From 1815433cbbde1052237427f774a086b1eabe8430 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Mon, 7 Nov 2022 18:29:49 -0500 Subject: [PATCH 10/51] add methods to perform correct elimination --- gtsam/hybrid/HybridBayesNet.cpp | 6 ++ gtsam/hybrid/HybridGaussianFactorGraph.cpp | 110 +++++++++++++++------ gtsam/hybrid/HybridGaussianFactorGraph.h | 39 ++++++++ 3 files changed, 123 insertions(+), 32 deletions(-) diff --git a/gtsam/hybrid/HybridBayesNet.cpp b/gtsam/hybrid/HybridBayesNet.cpp index f0d53c416..7338873bc 100644 --- a/gtsam/hybrid/HybridBayesNet.cpp +++ b/gtsam/hybrid/HybridBayesNet.cpp @@ -229,6 +229,12 @@ HybridValues HybridBayesNet::optimize() const { /* ************************************************************************* */ VectorValues HybridBayesNet::optimize(const DiscreteValues &assignment) const { GaussianBayesNet gbn = this->choose(assignment); + + // Check if there exists a nullptr in the GaussianBayesNet + // If yes, return an empty VectorValues + if (std::find(gbn.begin(), gbn.end(), 
nullptr) != gbn.end()) { + return VectorValues(); + } return gbn.optimize(); } diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.cpp b/gtsam/hybrid/HybridGaussianFactorGraph.cpp index 86d74ca22..e018d1046 100644 --- a/gtsam/hybrid/HybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/HybridGaussianFactorGraph.cpp @@ -492,6 +492,75 @@ AlgebraicDecisionTree HybridGaussianFactorGraph::probPrime( return prob_tree; } +/* ************************************************************************ */ +DecisionTree +HybridGaussianFactorGraph::continuousDelta( + const DiscreteKeys &discrete_keys, + const boost::shared_ptr &continuousBayesNet, + const std::vector &assignments) const { + // Create a decision tree of all the different VectorValues + std::vector vector_values; + for (const DiscreteValues &assignment : assignments) { + VectorValues values = continuousBayesNet->optimize(assignment); + vector_values.push_back(boost::make_shared(values)); + } + DecisionTree delta_tree(discrete_keys, + vector_values); + + return delta_tree; +} + +/* ************************************************************************ */ +AlgebraicDecisionTree HybridGaussianFactorGraph::continuousProbPrimes( + const DiscreteKeys &discrete_keys, + const boost::shared_ptr &continuousBayesNet, + const std::vector &assignments) const { + // Create a decision tree of all the different VectorValues + DecisionTree delta_tree = + this->continuousDelta(discrete_keys, continuousBayesNet, assignments); + + // Get the probPrime tree with the correct leaf probabilities + std::vector probPrimes; + for (const DiscreteValues &assignment : assignments) { + VectorValues delta = *delta_tree(assignment); + + // If VectorValues is empty, it means this is a pruned branch. + // Set thr probPrime to 0.0. 
+ if (delta.size() == 0) { + probPrimes.push_back(0.0); + continue; + } + + double error = 0.0; + + for (size_t idx = 0; idx < size(); idx++) { + auto factor = factors_.at(idx); + + if (factor->isHybrid()) { + if (auto c = boost::dynamic_pointer_cast(factor)) { + error += c->asMixture()->error(delta, assignment); + } + if (auto f = + boost::dynamic_pointer_cast(factor)) { + error += f->error(delta, assignment); + } + + } else if (factor->isContinuous()) { + if (auto f = + boost::dynamic_pointer_cast(factor)) { + error += f->inner()->error(delta); + } + if (auto cg = boost::dynamic_pointer_cast(factor)) { + error += cg->asGaussian()->error(delta); + } + } + } + probPrimes.push_back(exp(-error)); + } + AlgebraicDecisionTree probPrimeTree(discrete_keys, probPrimes); + return probPrimeTree; +} + /* ************************************************************************ */ boost::shared_ptr HybridGaussianFactorGraph::eliminateHybridSequential() const { @@ -502,52 +571,29 @@ HybridGaussianFactorGraph::eliminateHybridSequential() const { HybridBayesNet::shared_ptr bayesNet; HybridGaussianFactorGraph::shared_ptr discreteGraph; std::tie(bayesNet, discreteGraph) = - BaseEliminateable::eliminatePartialSequential( - continuous_ordering, EliminationTraitsType::DefaultEliminate); + BaseEliminateable::eliminatePartialSequential(continuous_ordering); // Get the last continuous conditional which will have all the discrete keys auto last_conditional = bayesNet->at(bayesNet->size() - 1); - // Get all the discrete assignments DiscreteKeys discrete_keys = last_conditional->discreteKeys(); + const std::vector assignments = DiscreteValues::CartesianProduct(discrete_keys); + // Save a copy of the original discrete key ordering + DiscreteKeys orig_discrete_keys(discrete_keys); // Reverse discrete keys order for correct tree construction std::reverse(discrete_keys.begin(), discrete_keys.end()); - // Create a decision tree of all the different VectorValues - std::vector vector_values; - for 
(const DiscreteValues &assignment : assignments) { - VectorValues values = bayesNet->optimize(assignment); - vector_values.push_back(boost::make_shared(values)); - } - DecisionTree delta_tree(discrete_keys, - vector_values); + AlgebraicDecisionTree probPrimeTree = + continuousProbPrimes(discrete_keys, bayesNet, assignments); - // Get the probPrime tree with the correct leaf probabilities - std::vector probPrimes; - for (const DiscreteValues &assignment : assignments) { - double error = 0.0; - VectorValues delta = *delta_tree(assignment); - for (auto factor : *this) { - if (factor->isHybrid()) { - auto f = boost::static_pointer_cast(factor); - error += f->error(delta, assignment); - - } else if (factor->isContinuous()) { - auto f = boost::static_pointer_cast(factor); - error += f->inner()->error(delta); - } - } - probPrimes.push_back(exp(-error)); - } - AlgebraicDecisionTree probPrimeTree(discrete_keys, probPrimes); - discreteGraph->add(DecisionTreeFactor(discrete_keys, probPrimeTree)); + discreteGraph->add(DecisionTreeFactor(orig_discrete_keys, probPrimeTree)); // Perform discrete elimination HybridBayesNet::shared_ptr discreteBayesNet = - discreteGraph->eliminateSequential( - discrete_ordering, EliminationTraitsType::DefaultEliminate); + discreteGraph->eliminateSequential(discrete_ordering); + bayesNet->add(*discreteBayesNet); return bayesNet; diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.h b/gtsam/hybrid/HybridGaussianFactorGraph.h index 21a5740db..8c387ec9b 100644 --- a/gtsam/hybrid/HybridGaussianFactorGraph.h +++ b/gtsam/hybrid/HybridGaussianFactorGraph.h @@ -25,6 +25,7 @@ #include #include #include +#include namespace gtsam { @@ -190,6 +191,44 @@ class GTSAM_EXPORT HybridGaussianFactorGraph AlgebraicDecisionTree probPrime( const VectorValues& continuousValues) const; + /** + * @brief Compute the VectorValues solution for the continuous variables for + * each mode. + * + * @param discrete_keys The discrete keys which form all the modes. 
+ * @param continuousBayesNet The Bayes Net representing the continuous + * eliminated variables. + * @param assignments List of all discrete assignments to create the final + * decision tree. + * @return DecisionTree + */ + DecisionTree continuousDelta( + const DiscreteKeys& discrete_keys, + const boost::shared_ptr& continuousBayesNet, + const std::vector& assignments) const; + + /** + * @brief Compute the unnormalized probabilities of the continuous variables + * for each of the modes. + * + * @param discrete_keys The discrete keys which form all the modes. + * @param continuousBayesNet The Bayes Net representing the continuous + * eliminated variables. + * @param assignments List of all discrete assignments to create the final + * decision tree. + * @return AlgebraicDecisionTree + */ + AlgebraicDecisionTree continuousProbPrimes( + const DiscreteKeys& discrete_keys, + const boost::shared_ptr& continuousBayesNet, + const std::vector& assignments) const; + + /** + * @brief Custom elimination function which computes the correct + * continuous probabilities. + * + * @return boost::shared_ptr + */ boost::shared_ptr eliminateHybridSequential() const; /** From 090cc4256bcb9c2442b0d4848ead379a346f3be6 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Mon, 7 Nov 2022 18:30:03 -0500 Subject: [PATCH 11/51] update HybridSmoother to use the new method --- gtsam/hybrid/HybridSmoother.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/gtsam/hybrid/HybridSmoother.cpp b/gtsam/hybrid/HybridSmoother.cpp index 07a7a4e77..12f6949ab 100644 --- a/gtsam/hybrid/HybridSmoother.cpp +++ b/gtsam/hybrid/HybridSmoother.cpp @@ -32,7 +32,7 @@ void HybridSmoother::update(HybridGaussianFactorGraph graph, addConditionals(graph, hybridBayesNet_, ordering); // Eliminate. 
- auto bayesNetFragment = graph.eliminateSequential(ordering); + auto bayesNetFragment = graph.eliminateHybridSequential(); /// Prune if (maxNrLeaves) { From 083fd21d7a88ea0e5b687f825ff6e5bf90d6b714 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Mon, 7 Nov 2022 18:38:45 -0500 Subject: [PATCH 12/51] use long sequence in HybridEstimation test --- gtsam/hybrid/tests/testHybridEstimation.cpp | 14 +++++--------- 1 file changed, 5 insertions(+), 9 deletions(-) diff --git a/gtsam/hybrid/tests/testHybridEstimation.cpp b/gtsam/hybrid/tests/testHybridEstimation.cpp index 82ce3bf9d..b5a77fa3c 100644 --- a/gtsam/hybrid/tests/testHybridEstimation.cpp +++ b/gtsam/hybrid/tests/testHybridEstimation.cpp @@ -72,16 +72,12 @@ Ordering getOrdering(HybridGaussianFactorGraph& factors, /****************************************************************************/ // Test approximate inference with an additional pruning step. TEST(HybridEstimation, Incremental) { - // size_t K = 15; - // std::vector measurements = {0, 1, 2, 2, 2, 2, 3, 4, 5, 6, 6, - // 7, 8, 9, 9, 9, 10, 11, 11, 11, 11}; - // // Ground truth discrete seq - // std::vector discrete_seq = {1, 1, 0, 0, 0, 1, 1, 1, 1, 0, - // 1, 1, 1, 0, 0, 1, 1, 0, 0, 0}; - size_t K = 4; - std::vector measurements = {0, 1, 2, 2}; + size_t K = 15; + std::vector measurements = {0, 1, 2, 2, 2, 2, 3, 4, 5, 6, 6, + 7, 8, 9, 9, 9, 10, 11, 11, 11, 11}; // Ground truth discrete seq - std::vector discrete_seq = {1, 1, 0}; + std::vector discrete_seq = {1, 1, 0, 0, 0, 1, 1, 1, 1, 0, + 1, 1, 1, 0, 0, 1, 1, 0, 0, 0}; Switching switching(K, 1.0, 0.1, measurements, "1/1 1/1"); HybridSmoother smoother; HybridNonlinearFactorGraph graph; From 64cd7c938ffc0e24d4420e2a56cbc516ea99801f Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Mon, 7 Nov 2022 18:42:51 -0500 Subject: [PATCH 13/51] add docs --- gtsam/hybrid/tests/testHybridEstimation.cpp | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/gtsam/hybrid/tests/testHybridEstimation.cpp 
b/gtsam/hybrid/tests/testHybridEstimation.cpp index b5a77fa3c..1bcd3ad27 100644 --- a/gtsam/hybrid/tests/testHybridEstimation.cpp +++ b/gtsam/hybrid/tests/testHybridEstimation.cpp @@ -182,6 +182,13 @@ std::vector getDiscreteSequence(size_t x) { return discrete_seq; } +/** + * @brief Helper method to get the probPrimeTree + * as per the new elimination scheme. + * + * @param graph The HybridGaussianFactorGraph to eliminate. + * @return AlgebraicDecisionTree + */ AlgebraicDecisionTree probPrimeTree( const HybridGaussianFactorGraph& graph) { HybridBayesNet::shared_ptr bayesNet; @@ -226,6 +233,11 @@ AlgebraicDecisionTree probPrimeTree( return probPrimeTree; } +/****************************************************************************/ +/** + * Test for correctness of different branches of the P'(Continuous | Discrete). + * The values should match those of P'(Continuous) for each discrete mode. + */ TEST(HybridEstimation, Probability) { constexpr size_t K = 4; std::vector measurements = {0, 1, 2, 2}; From 3987b036a7a4b2d0ffe19cd2118d174016264121 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Tue, 8 Nov 2022 13:36:18 -0500 Subject: [PATCH 14/51] add optional ordering and fix bug --- gtsam/hybrid/HybridGaussianFactorGraph.cpp | 7 ++++++- gtsam/hybrid/HybridGaussianFactorGraph.h | 6 +++++- 2 files changed, 11 insertions(+), 2 deletions(-) diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.cpp b/gtsam/hybrid/HybridGaussianFactorGraph.cpp index e018d1046..6024a59bc 100644 --- a/gtsam/hybrid/HybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/HybridGaussianFactorGraph.cpp @@ -563,7 +563,7 @@ AlgebraicDecisionTree HybridGaussianFactorGraph::continuousProbPrimes( /* ************************************************************************ */ boost::shared_ptr -HybridGaussianFactorGraph::eliminateHybridSequential() const { +HybridGaussianFactorGraph::eliminateHybridSequential(const boost::optional continuous, const boost::optional discrete) const { Ordering 
continuous_ordering(this->continuousKeys()), discrete_ordering(this->discreteKeys()); @@ -577,6 +577,11 @@ HybridGaussianFactorGraph::eliminateHybridSequential() const { auto last_conditional = bayesNet->at(bayesNet->size() - 1); DiscreteKeys discrete_keys = last_conditional->discreteKeys(); + // If not discrete variables, return the eliminated bayes net. + if (discrete_keys.size() == 0) { + return bayesNet; + } + const std::vector assignments = DiscreteValues::CartesianProduct(discrete_keys); diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.h b/gtsam/hybrid/HybridGaussianFactorGraph.h index 8c387ec9b..31a707579 100644 --- a/gtsam/hybrid/HybridGaussianFactorGraph.h +++ b/gtsam/hybrid/HybridGaussianFactorGraph.h @@ -227,9 +227,13 @@ class GTSAM_EXPORT HybridGaussianFactorGraph * @brief Custom elimination function which computes the correct * continuous probabilities. * + * @param continuous Optional ordering for all continuous variables. + * @param discrete Optional ordering for all discrete variables. 
* @return boost::shared_ptr */ - boost::shared_ptr eliminateHybridSequential() const; + boost::shared_ptr eliminateHybridSequential( + const boost::optional continuous, + const boost::optional discrete) const; /** * @brief Return a Colamd constrained ordering where the discrete keys are From 1a3b343537c8d0858ca2cfeee9859a4633563973 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Tue, 8 Nov 2022 14:00:44 -0500 Subject: [PATCH 15/51] minor clean up and get tests to pass --- gtsam/hybrid/HybridGaussianFactorGraph.cpp | 80 ++++++++++++++----- gtsam/hybrid/HybridGaussianFactorGraph.h | 21 +++-- .../tests/testHybridNonlinearFactorGraph.cpp | 12 ++- 3 files changed, 87 insertions(+), 26 deletions(-) diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.cpp b/gtsam/hybrid/HybridGaussianFactorGraph.cpp index 6024a59bc..62d681665 100644 --- a/gtsam/hybrid/HybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/HybridGaussianFactorGraph.cpp @@ -512,9 +512,17 @@ HybridGaussianFactorGraph::continuousDelta( /* ************************************************************************ */ AlgebraicDecisionTree HybridGaussianFactorGraph::continuousProbPrimes( - const DiscreteKeys &discrete_keys, - const boost::shared_ptr &continuousBayesNet, - const std::vector &assignments) const { + const DiscreteKeys &orig_discrete_keys, + const boost::shared_ptr &continuousBayesNet) const { + // Generate all possible assignments. 
+ const std::vector assignments = + DiscreteValues::CartesianProduct(orig_discrete_keys); + + // Save a copy of the original discrete key ordering + DiscreteKeys discrete_keys(orig_discrete_keys); + // Reverse discrete keys order for correct tree construction + std::reverse(discrete_keys.begin(), discrete_keys.end()); + // Create a decision tree of all the different VectorValues DecisionTree delta_tree = this->continuousDelta(discrete_keys, continuousBayesNet, assignments); @@ -532,7 +540,7 @@ AlgebraicDecisionTree HybridGaussianFactorGraph::continuousProbPrimes( } double error = 0.0; - + // Compute the error given the delta and the assignment. for (size_t idx = 0; idx < size(); idx++) { auto factor = factors_.at(idx); @@ -563,15 +571,21 @@ AlgebraicDecisionTree HybridGaussianFactorGraph::continuousProbPrimes( /* ************************************************************************ */ boost::shared_ptr -HybridGaussianFactorGraph::eliminateHybridSequential(const boost::optional continuous, const boost::optional discrete) const { - Ordering continuous_ordering(this->continuousKeys()), - discrete_ordering(this->discreteKeys()); +HybridGaussianFactorGraph::eliminateHybridSequential( + const boost::optional continuous, + const boost::optional discrete, const Eliminate &function, + OptionalVariableIndex variableIndex) const { + Ordering continuous_ordering = + continuous ? *continuous : Ordering(this->continuousKeys()); + Ordering discrete_ordering = + discrete ? 
*discrete : Ordering(this->discreteKeys()); // Eliminate continuous HybridBayesNet::shared_ptr bayesNet; HybridGaussianFactorGraph::shared_ptr discreteGraph; std::tie(bayesNet, discreteGraph) = - BaseEliminateable::eliminatePartialSequential(continuous_ordering); + BaseEliminateable::eliminatePartialSequential(continuous_ordering, + function, variableIndex); // Get the last continuous conditional which will have all the discrete keys auto last_conditional = bayesNet->at(bayesNet->size() - 1); @@ -582,26 +596,54 @@ HybridGaussianFactorGraph::eliminateHybridSequential(const boost::optional assignments = - DiscreteValues::CartesianProduct(discrete_keys); - - // Save a copy of the original discrete key ordering - DiscreteKeys orig_discrete_keys(discrete_keys); - // Reverse discrete keys order for correct tree construction - std::reverse(discrete_keys.begin(), discrete_keys.end()); - AlgebraicDecisionTree probPrimeTree = - continuousProbPrimes(discrete_keys, bayesNet, assignments); + this->continuousProbPrimes(discrete_keys, bayesNet); - discreteGraph->add(DecisionTreeFactor(orig_discrete_keys, probPrimeTree)); + discreteGraph->add(DecisionTreeFactor(discrete_keys, probPrimeTree)); // Perform discrete elimination HybridBayesNet::shared_ptr discreteBayesNet = - discreteGraph->eliminateSequential(discrete_ordering); + discreteGraph->BaseEliminateable::eliminateSequential( + discrete_ordering, function, variableIndex); bayesNet->add(*discreteBayesNet); return bayesNet; } +/* ************************************************************************ */ +boost::shared_ptr +HybridGaussianFactorGraph::eliminateSequential( + OptionalOrderingType orderingType, const Eliminate &function, + OptionalVariableIndex variableIndex) const { + return BaseEliminateable::eliminateSequential(orderingType, function, + variableIndex); +} + +/* ************************************************************************ */ +boost::shared_ptr +HybridGaussianFactorGraph::eliminateSequential( + const 
Ordering &ordering, const Eliminate &function, + OptionalVariableIndex variableIndex) const { + KeySet all_continuous_keys = this->continuousKeys(); + KeySet all_discrete_keys = this->discreteKeys(); + Ordering continuous_ordering, discrete_ordering; + + // Segregate the continuous and the discrete keys + for (auto &&key : ordering) { + if (std::find(all_continuous_keys.begin(), all_continuous_keys.end(), + key) != all_continuous_keys.end()) { + continuous_ordering.push_back(key); + } else if (std::find(all_discrete_keys.begin(), all_discrete_keys.end(), + key) != all_discrete_keys.end()) { + discrete_ordering.push_back(key); + } else { + throw std::runtime_error("Key in ordering not present in factors."); + } + } + + return this->eliminateHybridSequential(continuous_ordering, + discrete_ordering); +} + } // namespace gtsam diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.h b/gtsam/hybrid/HybridGaussianFactorGraph.h index 31a707579..1198cc8bc 100644 --- a/gtsam/hybrid/HybridGaussianFactorGraph.h +++ b/gtsam/hybrid/HybridGaussianFactorGraph.h @@ -214,14 +214,11 @@ class GTSAM_EXPORT HybridGaussianFactorGraph * @param discrete_keys The discrete keys which form all the modes. * @param continuousBayesNet The Bayes Net representing the continuous * eliminated variables. - * @param assignments List of all discrete assignments to create the final - * decision tree. 
* @return AlgebraicDecisionTree */ AlgebraicDecisionTree continuousProbPrimes( const DiscreteKeys& discrete_keys, - const boost::shared_ptr& continuousBayesNet, - const std::vector& assignments) const; + const boost::shared_ptr& continuousBayesNet) const; /** * @brief Custom elimination function which computes the correct @@ -232,8 +229,20 @@ class GTSAM_EXPORT HybridGaussianFactorGraph * @return boost::shared_ptr */ boost::shared_ptr eliminateHybridSequential( - const boost::optional continuous, - const boost::optional discrete) const; + const boost::optional continuous = boost::none, + const boost::optional discrete = boost::none, + const Eliminate& function = EliminationTraitsType::DefaultEliminate, + OptionalVariableIndex variableIndex = boost::none) const; + + boost::shared_ptr eliminateSequential( + OptionalOrderingType orderingType = boost::none, + const Eliminate& function = EliminationTraitsType::DefaultEliminate, + OptionalVariableIndex variableIndex = boost::none) const; + + boost::shared_ptr eliminateSequential( + const Ordering& ordering, + const Eliminate& function = EliminationTraitsType::DefaultEliminate, + OptionalVariableIndex variableIndex = boost::none) const; /** * @brief Return a Colamd constrained ordering where the discrete keys are diff --git a/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp b/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp index f6889f132..f8c61baf7 100644 --- a/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp +++ b/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp @@ -372,7 +372,8 @@ TEST(HybridGaussianElimination, EliminateHybrid_2_Variable) { dynamic_pointer_cast(hybridDiscreteFactor->inner()); CHECK(discreteFactor); EXPECT_LONGS_EQUAL(1, discreteFactor->discreteKeys().size()); - EXPECT(discreteFactor->root_->isLeaf() == false); + // All leaves should be probability 1 since this is not P*(X|M,Z) + EXPECT(discreteFactor->root_->isLeaf()); // TODO(Varun) Test emplace_discrete } @@ -439,6 +440,15 @@ 
TEST(HybridFactorGraph, Full_Elimination) { auto df = dynamic_pointer_cast(factor); discrete_fg.push_back(df->inner()); } + + // Get the probabilit P*(X | M, Z) + DiscreteKeys discrete_keys = + remainingFactorGraph_partial->at(2)->discreteKeys(); + AlgebraicDecisionTree probPrimeTree = + linearizedFactorGraph.continuousProbPrimes(discrete_keys, + hybridBayesNet_partial); + discrete_fg.add(DecisionTreeFactor(discrete_keys, probPrimeTree)); + ordering.clear(); for (size_t k = 0; k < self.K - 1; k++) ordering += M(k); discreteBayesNet = From 1b168cefba27521005dca6dd6c3112f8240910d4 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Tue, 8 Nov 2022 14:12:32 -0500 Subject: [PATCH 16/51] update test in testHybridEstimation --- gtsam/hybrid/tests/testHybridEstimation.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/gtsam/hybrid/tests/testHybridEstimation.cpp b/gtsam/hybrid/tests/testHybridEstimation.cpp index 1bcd3ad27..492621228 100644 --- a/gtsam/hybrid/tests/testHybridEstimation.cpp +++ b/gtsam/hybrid/tests/testHybridEstimation.cpp @@ -290,7 +290,7 @@ TEST(HybridEstimation, Probability) { graph.continuousDelta(discrete_keys, bayesNet, assignments); AlgebraicDecisionTree probPrimeTree = - graph.continuousProbPrimes(discrete_keys, bayesNet, assignments); + graph.continuousProbPrimes(discrete_keys, bayesNet); EXPECT(assert_equal(expected_probPrimeTree, probPrimeTree)); From cb55af3a81093c05a4b41b7abec52d804050b86d Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Tue, 8 Nov 2022 14:20:51 -0500 Subject: [PATCH 17/51] separate HybridGaussianFactorGraph::error() using both continuous and discrete values --- gtsam/hybrid/HybridGaussianFactorGraph.cpp | 53 +++++++++++-------- gtsam/hybrid/HybridGaussianFactorGraph.h | 13 +++++ .../tests/testHybridGaussianFactorGraph.cpp | 25 +++++++++ 3 files changed, 68 insertions(+), 23 deletions(-) diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.cpp b/gtsam/hybrid/HybridGaussianFactorGraph.cpp index 
62d681665..425b92ff3 100644 --- a/gtsam/hybrid/HybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/HybridGaussianFactorGraph.cpp @@ -483,6 +483,34 @@ AlgebraicDecisionTree HybridGaussianFactorGraph::error( return error_tree; } +/* ************************************************************************ */ +double HybridGaussianFactorGraph::error( + const VectorValues &continuousValues, + const DiscreteValues &discreteValues) const { + double error = 0.0; + for (size_t idx = 0; idx < size(); idx++) { + auto factor = factors_.at(idx); + + if (factor->isHybrid()) { + if (auto c = boost::dynamic_pointer_cast(factor)) { + error += c->asMixture()->error(continuousValues, discreteValues); + } + if (auto f = boost::dynamic_pointer_cast(factor)) { + error += f->error(continuousValues, discreteValues); + } + + } else if (factor->isContinuous()) { + if (auto f = boost::dynamic_pointer_cast(factor)) { + error += f->inner()->error(continuousValues); + } + if (auto cg = boost::dynamic_pointer_cast(factor)) { + error += cg->asGaussian()->error(continuousValues); + } + } + } + return error; +} + /* ************************************************************************ */ AlgebraicDecisionTree HybridGaussianFactorGraph::probPrime( const VectorValues &continuousValues) const { @@ -539,32 +567,11 @@ AlgebraicDecisionTree HybridGaussianFactorGraph::continuousProbPrimes( continue; } - double error = 0.0; // Compute the error given the delta and the assignment. 
- for (size_t idx = 0; idx < size(); idx++) { - auto factor = factors_.at(idx); - - if (factor->isHybrid()) { - if (auto c = boost::dynamic_pointer_cast(factor)) { - error += c->asMixture()->error(delta, assignment); - } - if (auto f = - boost::dynamic_pointer_cast(factor)) { - error += f->error(delta, assignment); - } - - } else if (factor->isContinuous()) { - if (auto f = - boost::dynamic_pointer_cast(factor)) { - error += f->inner()->error(delta); - } - if (auto cg = boost::dynamic_pointer_cast(factor)) { - error += cg->asGaussian()->error(delta); - } - } - } + double error = this->error(delta, assignment); probPrimes.push_back(exp(-error)); } + AlgebraicDecisionTree probPrimeTree(discrete_keys, probPrimes); return probPrimeTree; } diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.h b/gtsam/hybrid/HybridGaussianFactorGraph.h index 1198cc8bc..e2c8863ea 100644 --- a/gtsam/hybrid/HybridGaussianFactorGraph.h +++ b/gtsam/hybrid/HybridGaussianFactorGraph.h @@ -180,6 +180,19 @@ class GTSAM_EXPORT HybridGaussianFactorGraph */ AlgebraicDecisionTree error(const VectorValues& continuousValues) const; + /** + * @brief Compute error given a continuous vector values + * and a discrete assignment. + * + * @param continuousValues The continuous VectorValues + * for computing the error. + * @param discreteValues The specific discrete assignment + * whose error we wish to compute. + * @return double + */ + double error(const VectorValues& continuousValues, + const DiscreteValues& discreteValues) const; + /** * @brief Compute unnormalized probability for each discrete assignment, * and return as a tree. 
diff --git a/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp b/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp index 7877461b6..98d6dc870 100644 --- a/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp @@ -569,6 +569,31 @@ TEST(HybridGaussianFactorGraph, ErrorAndProbPrime) { HybridGaussianFactorGraph graph = s.linearizedFactorGraph; + Ordering hybridOrdering = graph.getHybridOrdering(); + HybridBayesNet::shared_ptr hybridBayesNet = + graph.eliminateSequential(hybridOrdering); + + HybridValues delta = hybridBayesNet->optimize(); + double error = graph.error(delta.continuous(), delta.discrete()); + + double expected_error = 0.490243199; + // regression + EXPECT(assert_equal(expected_error, error, 1e-9)); + + double probs = exp(-error); + double expected_probs = exp(-expected_error); + + // regression + EXPECT(assert_equal(expected_probs, probs, 1e-7)); +} + +/* ****************************************************************************/ +// Test hybrid gaussian factor graph error and unnormalized probabilities +TEST(HybridGaussianFactorGraph, ErrorAndProbPrimeTree) { + Switching s(3); + + HybridGaussianFactorGraph graph = s.linearizedFactorGraph; + Ordering hybridOrdering = graph.getHybridOrdering(); HybridBayesNet::shared_ptr hybridBayesNet = graph.eliminateSequential(hybridOrdering); From eb94ad90d29326f9230ef4d53916ee88b6740b69 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Tue, 8 Nov 2022 15:49:37 -0500 Subject: [PATCH 18/51] add HybridGaussianFactorGraph::probPrime method --- gtsam/hybrid/HybridGaussianFactorGraph.cpp | 8 ++++++++ gtsam/hybrid/HybridGaussianFactorGraph.h | 12 ++++++++++++ gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp | 2 +- 3 files changed, 21 insertions(+), 1 deletion(-) diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.cpp b/gtsam/hybrid/HybridGaussianFactorGraph.cpp index 425b92ff3..983817f03 100644 --- a/gtsam/hybrid/HybridGaussianFactorGraph.cpp +++ 
b/gtsam/hybrid/HybridGaussianFactorGraph.cpp @@ -511,6 +511,14 @@ double HybridGaussianFactorGraph::error( return error; } +/* ************************************************************************ */ +double HybridGaussianFactorGraph::probPrime( + const VectorValues &continuousValues, + const DiscreteValues &discreteValues) const { + double error = this->error(continuousValues, discreteValues); + return std::exp(-error); +} + /* ************************************************************************ */ AlgebraicDecisionTree HybridGaussianFactorGraph::probPrime( const VectorValues &continuousValues) const { diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.h b/gtsam/hybrid/HybridGaussianFactorGraph.h index e2c8863ea..88728b6bb 100644 --- a/gtsam/hybrid/HybridGaussianFactorGraph.h +++ b/gtsam/hybrid/HybridGaussianFactorGraph.h @@ -204,6 +204,18 @@ class GTSAM_EXPORT HybridGaussianFactorGraph AlgebraicDecisionTree probPrime( const VectorValues& continuousValues) const; + /** + * @brief Compute the unnormalized posterior probability for a continuous + * vector values given a specific assignment. + * + * @param continuousValues The vector values for which to compute the + * posterior probability. + * @param discreteValues The specific assignment to use for the computation. + * @return double + */ + double probPrime(const VectorValues& continuousValues, + const DiscreteValues& discreteValues) const; + /** * @brief Compute the VectorValues solution for the continuous variables for * each mode. 
diff --git a/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp b/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp index 98d6dc870..b56b6b62a 100644 --- a/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp @@ -581,7 +581,7 @@ TEST(HybridGaussianFactorGraph, ErrorAndProbPrime) { EXPECT(assert_equal(expected_error, error, 1e-9)); double probs = exp(-error); - double expected_probs = exp(-expected_error); + double expected_probs = graph.probPrime(delta.continuous(), delta.discrete()); // regression EXPECT(assert_equal(expected_probs, probs, 1e-7)); From 0938159706f1d28e089c936f6a73834baab59367 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Wed, 9 Nov 2022 18:38:42 -0500 Subject: [PATCH 19/51] overload multifrontal elimination --- gtsam/hybrid/HybridGaussianFactorGraph.cpp | 182 +++++++++++++++++++-- gtsam/hybrid/HybridGaussianFactorGraph.h | 26 +++ 2 files changed, 192 insertions(+), 16 deletions(-) diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.cpp b/gtsam/hybrid/HybridGaussianFactorGraph.cpp index 983817f03..a0c1b67da 100644 --- a/gtsam/hybrid/HybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/HybridGaussianFactorGraph.cpp @@ -546,6 +546,24 @@ HybridGaussianFactorGraph::continuousDelta( return delta_tree; } +/* ************************************************************************ */ +DecisionTree +HybridGaussianFactorGraph::continuousDelta( + const DiscreteKeys &discrete_keys, + const boost::shared_ptr &continuousBayesTree, + const std::vector &assignments) const { + // Create a decision tree of all the different VectorValues + std::vector vector_values; + for (const DiscreteValues &assignment : assignments) { + VectorValues values = continuousBayesTree->optimize(assignment); + vector_values.push_back(boost::make_shared(values)); + } + DecisionTree delta_tree(discrete_keys, + vector_values); + + return delta_tree; +} + /* 
************************************************************************ */ AlgebraicDecisionTree HybridGaussianFactorGraph::continuousProbPrimes( const DiscreteKeys &orig_discrete_keys, @@ -584,6 +602,67 @@ AlgebraicDecisionTree HybridGaussianFactorGraph::continuousProbPrimes( return probPrimeTree; } +/* ************************************************************************ */ +AlgebraicDecisionTree HybridGaussianFactorGraph::continuousProbPrimes( + const DiscreteKeys &orig_discrete_keys, + const boost::shared_ptr &continuousBayesTree) const { + // Generate all possible assignments. + const std::vector assignments = + DiscreteValues::CartesianProduct(orig_discrete_keys); + + // Save a copy of the original discrete key ordering + DiscreteKeys discrete_keys(orig_discrete_keys); + // Reverse discrete keys order for correct tree construction + std::reverse(discrete_keys.begin(), discrete_keys.end()); + + // Create a decision tree of all the different VectorValues + DecisionTree delta_tree = + this->continuousDelta(discrete_keys, continuousBayesTree, assignments); + + // Get the probPrime tree with the correct leaf probabilities + std::vector probPrimes; + for (const DiscreteValues &assignment : assignments) { + VectorValues delta = *delta_tree(assignment); + + // If VectorValues is empty, it means this is a pruned branch. + // Set thr probPrime to 0.0. + if (delta.size() == 0) { + probPrimes.push_back(0.0); + continue; + } + + // Compute the error given the delta and the assignment. 
+ double error = this->error(delta, assignment); + probPrimes.push_back(exp(-error)); + } + + AlgebraicDecisionTree probPrimeTree(discrete_keys, probPrimes); + return probPrimeTree; +} + +/* ************************************************************************ */ +std::pair +HybridGaussianFactorGraph::separateContinuousDiscreteOrdering( + const Ordering &ordering) const { + KeySet all_continuous_keys = this->continuousKeys(); + KeySet all_discrete_keys = this->discreteKeys(); + Ordering continuous_ordering, discrete_ordering; + + for (auto &&key : ordering) { + if (std::find(all_continuous_keys.begin(), all_continuous_keys.end(), + key) != all_continuous_keys.end()) { + continuous_ordering.push_back(key); + } else if (std::find(all_discrete_keys.begin(), all_discrete_keys.end(), + key) != all_discrete_keys.end()) { + discrete_ordering.push_back(key); + } else { + throw std::runtime_error("Key in ordering not present in factors."); + } + } + + return std::make_pair(continuous_ordering, discrete_ordering); +} + /* ************************************************************************ */ boost::shared_ptr HybridGaussianFactorGraph::eliminateHybridSequential( @@ -640,25 +719,96 @@ boost::shared_ptr HybridGaussianFactorGraph::eliminateSequential( const Ordering &ordering, const Eliminate &function, OptionalVariableIndex variableIndex) const { - KeySet all_continuous_keys = this->continuousKeys(); - KeySet all_discrete_keys = this->discreteKeys(); - Ordering continuous_ordering, discrete_ordering; - // Segregate the continuous and the discrete keys - for (auto &&key : ordering) { - if (std::find(all_continuous_keys.begin(), all_continuous_keys.end(), - key) != all_continuous_keys.end()) { - continuous_ordering.push_back(key); - } else if (std::find(all_discrete_keys.begin(), all_discrete_keys.end(), - key) != all_discrete_keys.end()) { - discrete_ordering.push_back(key); - } else { - throw std::runtime_error("Key in ordering not present in factors."); - } + Ordering 
continuous_ordering, discrete_ordering; + std::tie(continuous_ordering, discrete_ordering) = + this->separateContinuousDiscreteOrdering(ordering); + + return this->eliminateHybridSequential(continuous_ordering, discrete_ordering, + function, variableIndex); +} + +/* ************************************************************************ */ +boost::shared_ptr +HybridGaussianFactorGraph::eliminateHybridMultifrontal( + const boost::optional continuous, + const boost::optional discrete, const Eliminate &function, + OptionalVariableIndex variableIndex) const { + Ordering continuous_ordering = + continuous ? *continuous : Ordering(this->continuousKeys()); + Ordering discrete_ordering = + discrete ? *discrete : Ordering(this->discreteKeys()); + + // Eliminate continuous + HybridBayesTree::shared_ptr bayesTree; + HybridGaussianFactorGraph::shared_ptr discreteGraph; + std::tie(bayesTree, discreteGraph) = + BaseEliminateable::eliminatePartialMultifrontal(continuous_ordering, + function, variableIndex); + + // Get the last continuous conditional which will have all the discrete + Key last_continuous_key = + continuous_ordering.at(continuous_ordering.size() - 1); + auto last_conditional = (*bayesTree)[last_continuous_key]->conditional(); + DiscreteKeys discrete_keys = last_conditional->discreteKeys(); + + // If not discrete variables, return the eliminated bayes net. 
+ if (discrete_keys.size() == 0) { + return bayesTree; } - return this->eliminateHybridSequential(continuous_ordering, - discrete_ordering); + AlgebraicDecisionTree probPrimeTree = + this->continuousProbPrimes(discrete_keys, bayesTree); + + discreteGraph->add(DecisionTreeFactor(discrete_keys, probPrimeTree)); + + auto updatedBayesTree = + discreteGraph->BaseEliminateable::eliminateMultifrontal(discrete_ordering, + function); + + auto discrete_clique = (*updatedBayesTree)[discrete_ordering.at(0)]; + + // Set the root of the bayes tree as the discrete clique + for (auto node : bayesTree->nodes()) { + auto clique = node.second; + + if (clique->conditional()->parents() == + discrete_clique->conditional()->frontals()) { + updatedBayesTree->addClique(clique, discrete_clique); + + } else { + // Remove the clique from the children of the parents since it will get + // added again in addClique. + auto clique_it = std::find(clique->parent()->children.begin(), + clique->parent()->children.end(), clique); + clique->parent()->children.erase(clique_it); + updatedBayesTree->addClique(clique, clique->parent()); + } + } + return updatedBayesTree; +} + +/* ************************************************************************ */ +boost::shared_ptr +HybridGaussianFactorGraph::eliminateMultifrontal( + OptionalOrderingType orderingType, const Eliminate &function, + OptionalVariableIndex variableIndex) const { + return BaseEliminateable::eliminateMultifrontal(orderingType, function, + variableIndex); +} + +/* ************************************************************************ */ +boost::shared_ptr +HybridGaussianFactorGraph::eliminateMultifrontal( + const Ordering &ordering, const Eliminate &function, + OptionalVariableIndex variableIndex) const { + // Segregate the continuous and the discrete keys + Ordering continuous_ordering, discrete_ordering; + std::tie(continuous_ordering, discrete_ordering) = + this->separateContinuousDiscreteOrdering(ordering); + + return 
this->eliminateHybridMultifrontal( + continuous_ordering, discrete_ordering, function, variableIndex); } } // namespace gtsam diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.h b/gtsam/hybrid/HybridGaussianFactorGraph.h index 88728b6bb..fb8ebbdc4 100644 --- a/gtsam/hybrid/HybridGaussianFactorGraph.h +++ b/gtsam/hybrid/HybridGaussianFactorGraph.h @@ -231,6 +231,10 @@ class GTSAM_EXPORT HybridGaussianFactorGraph const DiscreteKeys& discrete_keys, const boost::shared_ptr& continuousBayesNet, const std::vector& assignments) const; + DecisionTree continuousDelta( + const DiscreteKeys& discrete_keys, + const boost::shared_ptr& continuousBayesTree, + const std::vector& assignments) const; /** * @brief Compute the unnormalized probabilities of the continuous variables @@ -244,6 +248,12 @@ class GTSAM_EXPORT HybridGaussianFactorGraph AlgebraicDecisionTree continuousProbPrimes( const DiscreteKeys& discrete_keys, const boost::shared_ptr& continuousBayesNet) const; + AlgebraicDecisionTree continuousProbPrimes( + const DiscreteKeys& discrete_keys, + const boost::shared_ptr& continuousBayesTree) const; + + std::pair separateContinuousDiscreteOrdering( + const Ordering& ordering) const; /** * @brief Custom elimination function which computes the correct @@ -269,6 +279,22 @@ class GTSAM_EXPORT HybridGaussianFactorGraph const Eliminate& function = EliminationTraitsType::DefaultEliminate, OptionalVariableIndex variableIndex = boost::none) const; + boost::shared_ptr eliminateHybridMultifrontal( + const boost::optional continuous = boost::none, + const boost::optional discrete = boost::none, + const Eliminate& function = EliminationTraitsType::DefaultEliminate, + OptionalVariableIndex variableIndex = boost::none) const; + + boost::shared_ptr eliminateMultifrontal( + OptionalOrderingType orderingType = boost::none, + const Eliminate& function = EliminationTraitsType::DefaultEliminate, + OptionalVariableIndex variableIndex = boost::none) const; + + boost::shared_ptr 
eliminateMultifrontal( + const Ordering& ordering, + const Eliminate& function = EliminationTraitsType::DefaultEliminate, + OptionalVariableIndex variableIndex = boost::none) const; + /** * @brief Return a Colamd constrained ordering where the discrete keys are * eliminated after the continuous keys. From 98d31866156d6cd5c5e448734cd760d0c5d1d492 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Wed, 9 Nov 2022 20:03:37 -0500 Subject: [PATCH 20/51] add copy constructor for HybridBayesTreeClique --- gtsam/hybrid/HybridBayesTree.h | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/gtsam/hybrid/HybridBayesTree.h b/gtsam/hybrid/HybridBayesTree.h index 8af0af968..2d01aab76 100644 --- a/gtsam/hybrid/HybridBayesTree.h +++ b/gtsam/hybrid/HybridBayesTree.h @@ -50,9 +50,12 @@ class GTSAM_EXPORT HybridBayesTreeClique typedef boost::shared_ptr shared_ptr; typedef boost::weak_ptr weak_ptr; HybridBayesTreeClique() {} - virtual ~HybridBayesTreeClique() {} HybridBayesTreeClique(const boost::shared_ptr& conditional) : Base(conditional) {} + ///< Copy constructor + HybridBayesTreeClique(const HybridBayesTreeClique& clique) : Base(clique) {} + + virtual ~HybridBayesTreeClique() {} }; /* ************************************************************************* */ From 7ae4e57d6678a1d8fcff3e74ee39c24b9b156819 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Wed, 9 Nov 2022 20:04:21 -0500 Subject: [PATCH 21/51] fix HybridBayesTree::optimize to account for pruned nodes --- gtsam/hybrid/HybridBayesTree.cpp | 31 ++++++++++++++++++++++++------- 1 file changed, 24 insertions(+), 7 deletions(-) diff --git a/gtsam/hybrid/HybridBayesTree.cpp b/gtsam/hybrid/HybridBayesTree.cpp index 8fdedab44..c9c6afa79 100644 --- a/gtsam/hybrid/HybridBayesTree.cpp +++ b/gtsam/hybrid/HybridBayesTree.cpp @@ -73,6 +73,8 @@ struct HybridAssignmentData { GaussianBayesTree::sharedNode parentClique_; // The gaussian bayes tree that will be recursively created. 
GaussianBayesTree* gaussianbayesTree_; + // Flag indicating if all the nodes are valid. Used in optimize(). + bool valid_; /** * @brief Construct a new Hybrid Assignment Data object. @@ -83,10 +85,13 @@ struct HybridAssignmentData { */ HybridAssignmentData(const DiscreteValues& assignment, const GaussianBayesTree::sharedNode& parentClique, - GaussianBayesTree* gbt) + GaussianBayesTree* gbt, bool valid = true) : assignment_(assignment), parentClique_(parentClique), - gaussianbayesTree_(gbt) {} + gaussianbayesTree_(gbt), + valid_(valid) {} + + bool isValid() const { return valid_; } /** * @brief A function used during tree traversal that operates on each node @@ -101,6 +106,7 @@ struct HybridAssignmentData { HybridAssignmentData& parentData) { // Extract the gaussian conditional from the Hybrid clique HybridConditional::shared_ptr hybrid_conditional = node->conditional(); + GaussianConditional::shared_ptr conditional; if (hybrid_conditional->isHybrid()) { conditional = (*hybrid_conditional->asMixture())(parentData.assignment_); @@ -111,15 +117,21 @@ struct HybridAssignmentData { conditional = boost::make_shared(); } - // Create the GaussianClique for the current node - auto clique = boost::make_shared(conditional); - // Add the current clique to the GaussianBayesTree. - parentData.gaussianbayesTree_->addClique(clique, parentData.parentClique_); + GaussianBayesTree::sharedNode clique; + if (conditional) { + // Create the GaussianClique for the current node + clique = boost::make_shared(conditional); + // Add the current clique to the GaussianBayesTree. 
+ parentData.gaussianbayesTree_->addClique(clique, + parentData.parentClique_); + } else { + parentData.valid_ = false; + } // Create new HybridAssignmentData where the current node is the parent // This will be passed down to the children nodes HybridAssignmentData data(parentData.assignment_, clique, - parentData.gaussianbayesTree_); + parentData.gaussianbayesTree_, parentData.valid_); return data; } }; @@ -138,6 +150,9 @@ VectorValues HybridBayesTree::optimize(const DiscreteValues& assignment) const { visitorPost); } + if (!rootData.isValid()) { + return VectorValues(); + } VectorValues result = gbt.optimize(); // Return the optimized bayes net result. @@ -151,6 +166,8 @@ void HybridBayesTree::prune(const size_t maxNrLeaves) { DecisionTreeFactor prunedDecisionTree = decisionTree->prune(maxNrLeaves); decisionTree->root_ = prunedDecisionTree.root_; + // this->print(); + // decisionTree->print("", DefaultKeyFormatter); /// Helper struct for pruning the hybrid bayes tree. struct HybridPrunerData { From d54cf484de9ed9c86b387d4776faba13e367627e Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Wed, 9 Nov 2022 20:04:51 -0500 Subject: [PATCH 22/51] fix creation of updatedBayesTree --- gtsam/hybrid/HybridGaussianFactorGraph.cpp | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.cpp b/gtsam/hybrid/HybridGaussianFactorGraph.cpp index a0c1b67da..d07b728b5 100644 --- a/gtsam/hybrid/HybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/HybridGaussianFactorGraph.cpp @@ -768,10 +768,13 @@ HybridGaussianFactorGraph::eliminateHybridMultifrontal( auto discrete_clique = (*updatedBayesTree)[discrete_ordering.at(0)]; - // Set the root of the bayes tree as the discrete clique + std::set clique_set; for (auto node : bayesTree->nodes()) { - auto clique = node.second; + clique_set.insert(node.second); + } + // Set the root of the bayes tree as the discrete clique + for (auto clique : clique_set) { if 
(clique->conditional()->parents() == discrete_clique->conditional()->frontals()) { updatedBayesTree->addClique(clique, discrete_clique); From 318f7384b5b3f61c50b037d311a8285f345a5ba9 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Thu, 10 Nov 2022 10:53:04 -0500 Subject: [PATCH 23/51] fixup the final tests --- gtsam/hybrid/tests/testHybridBayesTree.cpp | 7 ++ gtsam/hybrid/tests/testHybridEstimation.cpp | 80 +++++++++++++++++++ .../tests/testHybridGaussianFactorGraph.cpp | 3 +- gtsam/hybrid/tests/testHybridGaussianISAM.cpp | 17 ++-- .../hybrid/tests/testHybridNonlinearISAM.cpp | 20 +++-- 5 files changed, 113 insertions(+), 14 deletions(-) diff --git a/gtsam/hybrid/tests/testHybridBayesTree.cpp b/gtsam/hybrid/tests/testHybridBayesTree.cpp index 5de21322c..ab0f3c2d9 100644 --- a/gtsam/hybrid/tests/testHybridBayesTree.cpp +++ b/gtsam/hybrid/tests/testHybridBayesTree.cpp @@ -136,6 +136,13 @@ TEST(HybridBayesTree, Optimize) { dfg.push_back( boost::dynamic_pointer_cast(factor->inner())); } + + // Add the probabilities for each branch + DiscreteKeys discrete_keys = {{M(0), 2}, {M(1), 2}, {M(2), 2}}; + vector probs = {0.012519475, 0.041280228, 0.075018647, 0.081663656, + 0.037152205, 0.12248971, 0.07349729, 0.08}; + AlgebraicDecisionTree potentials(discrete_keys, probs); + dfg.emplace_shared(discrete_keys, probs); DiscreteValues expectedMPE = dfg.optimize(); VectorValues expectedValues = hybridBayesNet->optimize(expectedMPE); diff --git a/gtsam/hybrid/tests/testHybridEstimation.cpp b/gtsam/hybrid/tests/testHybridEstimation.cpp index 492621228..ec5d6fb7d 100644 --- a/gtsam/hybrid/tests/testHybridEstimation.cpp +++ b/gtsam/hybrid/tests/testHybridEstimation.cpp @@ -23,6 +23,7 @@ #include #include #include +#include #include #include #include @@ -319,6 +320,85 @@ TEST(HybridEstimation, Probability) { // hybrid_values.discrete().print(); } +/****************************************************************************/ +/** + * Test for correctness of different branches of the 
P'(Continuous | Discrete) + * in the multi-frontal setting. The values should match those of P'(Continuous) + * for each discrete mode. + */ +TEST(HybridEstimation, ProbabilityMultifrontal) { + constexpr size_t K = 4; + std::vector measurements = {0, 1, 2, 2}; + + // This is the correct sequence + // std::vector discrete_seq = {1, 1, 0}; + + double between_sigma = 1.0, measurement_sigma = 0.1; + + std::vector expected_errors, expected_prob_primes; + for (size_t i = 0; i < pow(2, K - 1); i++) { + std::vector discrete_seq = getDiscreteSequence(i); + + GaussianFactorGraph::shared_ptr linear_graph = specificProblem( + K, measurements, discrete_seq, measurement_sigma, between_sigma); + + auto bayes_tree = linear_graph->eliminateMultifrontal(); + + VectorValues values = bayes_tree->optimize(); + + std::cout << i << " " << linear_graph->error(values) << std::endl; + expected_errors.push_back(linear_graph->error(values)); + expected_prob_primes.push_back(linear_graph->probPrime(values)); + } + + Switching switching(K, between_sigma, measurement_sigma, measurements); + auto graph = switching.linearizedFactorGraph; + Ordering ordering = getOrdering(graph, HybridGaussianFactorGraph()); + + AlgebraicDecisionTree expected_probPrimeTree = probPrimeTree(graph); + + // Eliminate continuous + Ordering continuous_ordering(graph.continuousKeys()); + HybridBayesTree::shared_ptr bayesTree; + HybridGaussianFactorGraph::shared_ptr discreteGraph; + std::tie(bayesTree, discreteGraph) = + graph.eliminatePartialMultifrontal(continuous_ordering); + + // Get the last continuous conditional which will have all the discrete keys + Key last_continuous_key = + continuous_ordering.at(continuous_ordering.size() - 1); + auto last_conditional = (*bayesTree)[last_continuous_key]->conditional(); + DiscreteKeys discrete_keys = last_conditional->discreteKeys(); + + // Create a decision tree of all the different VectorValues + AlgebraicDecisionTree probPrimeTree = + 
graph.continuousProbPrimes(discrete_keys, bayesTree); + + EXPECT(assert_equal(expected_probPrimeTree, probPrimeTree)); + + // Test if the probPrimeTree matches the probability of + // the individual factor graphs + for (size_t i = 0; i < pow(2, K - 1); i++) { + std::vector discrete_seq = getDiscreteSequence(i); + Assignment discrete_assignment; + for (size_t v = 0; v < discrete_seq.size(); v++) { + discrete_assignment[M(v)] = discrete_seq[v]; + } + EXPECT_DOUBLES_EQUAL(expected_prob_primes.at(i), + probPrimeTree(discrete_assignment), 1e-8); + } + + discreteGraph->add(DecisionTreeFactor(discrete_keys, probPrimeTree)); + + // Ordering discrete(graph.discreteKeys()); + // auto discreteBayesTree = discreteGraph->eliminateMultifrontal(discrete); + // // DiscreteBayesTree should have only 1 clique + // bayesTree->addClique((*discreteBayesTree)[discrete.at(0)]); + + // // HybridValues hybrid_values = bayesNet->optimize(); + // // hybrid_values.discrete().print(); +} + /* ************************************************************************* */ int main() { TestResult tr; diff --git a/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp b/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp index b56b6b62a..248d71d5f 100644 --- a/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp @@ -182,7 +182,8 @@ TEST(HybridGaussianFactorGraph, eliminateFullMultifrontalSimple) { boost::make_shared(X(1), I_3x3, Vector3::Ones())})); hfg.add(DecisionTreeFactor(m1, {2, 8})); - hfg.add(DecisionTreeFactor({{M(1), 2}, {M(2), 2}}, "1 2 3 4")); + //TODO(Varun) Adding extra discrete variable not connected to continuous variable throws segfault + // hfg.add(DecisionTreeFactor({{M(1), 2}, {M(2), 2}}, "1 2 3 4")); HybridBayesTree::shared_ptr result = hfg.eliminateMultifrontal(hfg.getHybridOrdering()); diff --git a/gtsam/hybrid/tests/testHybridGaussianISAM.cpp b/gtsam/hybrid/tests/testHybridGaussianISAM.cpp index 18ce7f10e..09403853f 
100644 --- a/gtsam/hybrid/tests/testHybridGaussianISAM.cpp +++ b/gtsam/hybrid/tests/testHybridGaussianISAM.cpp @@ -165,7 +165,8 @@ TEST(HybridGaussianElimination, IncrementalInference) { discrete_ordering += M(0); discrete_ordering += M(1); HybridBayesTree::shared_ptr discreteBayesTree = - expectedRemainingGraph->eliminateMultifrontal(discrete_ordering); + expectedRemainingGraph->BaseEliminateable::eliminateMultifrontal( + discrete_ordering); DiscreteValues m00; m00[M(0)] = 0, m00[M(1)] = 0; @@ -177,10 +178,10 @@ TEST(HybridGaussianElimination, IncrementalInference) { // Test if the probability values are as expected with regression tests. DiscreteValues assignment; - EXPECT(assert_equal(m00_prob, 0.0619233, 1e-5)); + EXPECT(assert_equal(0.166667, m00_prob, 1e-5)); assignment[M(0)] = 0; assignment[M(1)] = 0; - EXPECT(assert_equal(m00_prob, (*discreteConditional)(assignment), 1e-5)); + EXPECT(assert_equal(0.0619233, (*discreteConditional)(assignment), 1e-5)); assignment[M(0)] = 1; assignment[M(1)] = 0; EXPECT(assert_equal(0.183743, (*discreteConditional)(assignment), 1e-5)); @@ -193,11 +194,15 @@ TEST(HybridGaussianElimination, IncrementalInference) { // Check if the clique conditional generated from incremental elimination // matches that of batch elimination. 
- auto expectedChordal = expectedRemainingGraph->eliminateMultifrontal(); - auto expectedConditional = dynamic_pointer_cast( - (*expectedChordal)[M(1)]->conditional()->inner()); + auto expectedChordal = + expectedRemainingGraph->BaseEliminateable::eliminateMultifrontal(); auto actualConditional = dynamic_pointer_cast( isam[M(1)]->conditional()->inner()); + // Account for the probability terms from evaluating continuous FGs + DiscreteKeys discrete_keys = {{M(0), 2}, {M(1), 2}}; + vector probs = {0.061923317, 0.20415914, 0.18374323, 0.2}; + auto expectedConditional = + boost::make_shared(discrete_keys, probs); EXPECT(assert_equal(*actualConditional, *expectedConditional, 1e-6)); } diff --git a/gtsam/hybrid/tests/testHybridNonlinearISAM.cpp b/gtsam/hybrid/tests/testHybridNonlinearISAM.cpp index 3bdb5ed1e..e7e65b123 100644 --- a/gtsam/hybrid/tests/testHybridNonlinearISAM.cpp +++ b/gtsam/hybrid/tests/testHybridNonlinearISAM.cpp @@ -153,7 +153,8 @@ TEST(HybridNonlinearISAM, IncrementalInference) { HybridBayesTree::shared_ptr expectedHybridBayesTree; HybridGaussianFactorGraph::shared_ptr expectedRemainingGraph; std::tie(expectedHybridBayesTree, expectedRemainingGraph) = - switching.linearizedFactorGraph.eliminatePartialMultifrontal(ordering); + switching.linearizedFactorGraph + .BaseEliminateable::eliminatePartialMultifrontal(ordering); // The densities on X(1) should be the same auto x0_conditional = dynamic_pointer_cast( @@ -182,7 +183,8 @@ TEST(HybridNonlinearISAM, IncrementalInference) { discrete_ordering += M(0); discrete_ordering += M(1); HybridBayesTree::shared_ptr discreteBayesTree = - expectedRemainingGraph->eliminateMultifrontal(discrete_ordering); + expectedRemainingGraph->BaseEliminateable::eliminateMultifrontal( + discrete_ordering); DiscreteValues m00; m00[M(0)] = 0, m00[M(1)] = 0; @@ -195,10 +197,10 @@ TEST(HybridNonlinearISAM, IncrementalInference) { // Test if the probability values are as expected with regression tests. 
DiscreteValues assignment; - EXPECT(assert_equal(m00_prob, 0.0619233, 1e-5)); + EXPECT(assert_equal(0.166667, m00_prob, 1e-5)); assignment[M(0)] = 0; assignment[M(1)] = 0; - EXPECT(assert_equal(m00_prob, (*discreteConditional)(assignment), 1e-5)); + EXPECT(assert_equal(0.0619233, (*discreteConditional)(assignment), 1e-5)); assignment[M(0)] = 1; assignment[M(1)] = 0; EXPECT(assert_equal(0.183743, (*discreteConditional)(assignment), 1e-5)); @@ -212,10 +214,13 @@ TEST(HybridNonlinearISAM, IncrementalInference) { // Check if the clique conditional generated from incremental elimination // matches that of batch elimination. auto expectedChordal = expectedRemainingGraph->eliminateMultifrontal(); - auto expectedConditional = dynamic_pointer_cast( - (*expectedChordal)[M(1)]->conditional()->inner()); auto actualConditional = dynamic_pointer_cast( bayesTree[M(1)]->conditional()->inner()); + // Account for the probability terms from evaluating continuous FGs + DiscreteKeys discrete_keys = {{M(0), 2}, {M(1), 2}}; + vector probs = {0.061923317, 0.20415914, 0.18374323, 0.2}; + auto expectedConditional = + boost::make_shared(discrete_keys, probs); EXPECT(assert_equal(*actualConditional, *expectedConditional, 1e-6)); } @@ -250,7 +255,8 @@ TEST(HybridNonlinearISAM, Approx_inference) { HybridBayesTree::shared_ptr unprunedHybridBayesTree; HybridGaussianFactorGraph::shared_ptr unprunedRemainingGraph; std::tie(unprunedHybridBayesTree, unprunedRemainingGraph) = - switching.linearizedFactorGraph.eliminatePartialMultifrontal(ordering); + switching.linearizedFactorGraph + .BaseEliminateable::eliminatePartialMultifrontal(ordering); size_t maxNrLeaves = 5; incrementalHybrid.update(graph1, initial); From 6e6bbfff4ce6c748c982badcd136fa7e39f231c6 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Thu, 10 Nov 2022 10:53:17 -0500 Subject: [PATCH 24/51] update docstring for Ordering::+= --- gtsam/inference/Ordering.h | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git 
a/gtsam/inference/Ordering.h b/gtsam/inference/Ordering.h index 97d5bf110..c9c6a6176 100644 --- a/gtsam/inference/Ordering.h +++ b/gtsam/inference/Ordering.h @@ -73,7 +73,7 @@ public: /** * @brief Append new keys to the ordering as `ordering += keys`. * - * @param key + * @param keys The key vector to append to this ordering. * @return The ordering variable with appended keys. */ This& operator+=(KeyVector& keys); From 5e2cdfdd3bd0eb16e79cedd2fc39cebf064368a2 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Sat, 12 Nov 2022 23:07:34 -0500 Subject: [PATCH 25/51] make continuousProbPrimes and continuousDeltas as templates --- gtsam/hybrid/HybridGaussianFactorGraph.cpp | 112 --------------------- gtsam/hybrid/HybridGaussianFactorGraph.h | 81 ++++++++++++--- 2 files changed, 69 insertions(+), 124 deletions(-) diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.cpp b/gtsam/hybrid/HybridGaussianFactorGraph.cpp index d07b728b5..4dc309e7b 100644 --- a/gtsam/hybrid/HybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/HybridGaussianFactorGraph.cpp @@ -528,118 +528,6 @@ AlgebraicDecisionTree HybridGaussianFactorGraph::probPrime( return prob_tree; } -/* ************************************************************************ */ -DecisionTree -HybridGaussianFactorGraph::continuousDelta( - const DiscreteKeys &discrete_keys, - const boost::shared_ptr &continuousBayesNet, - const std::vector &assignments) const { - // Create a decision tree of all the different VectorValues - std::vector vector_values; - for (const DiscreteValues &assignment : assignments) { - VectorValues values = continuousBayesNet->optimize(assignment); - vector_values.push_back(boost::make_shared(values)); - } - DecisionTree delta_tree(discrete_keys, - vector_values); - - return delta_tree; -} - -/* ************************************************************************ */ -DecisionTree -HybridGaussianFactorGraph::continuousDelta( - const DiscreteKeys &discrete_keys, - const boost::shared_ptr 
&continuousBayesTree, - const std::vector &assignments) const { - // Create a decision tree of all the different VectorValues - std::vector vector_values; - for (const DiscreteValues &assignment : assignments) { - VectorValues values = continuousBayesTree->optimize(assignment); - vector_values.push_back(boost::make_shared(values)); - } - DecisionTree delta_tree(discrete_keys, - vector_values); - - return delta_tree; -} - -/* ************************************************************************ */ -AlgebraicDecisionTree HybridGaussianFactorGraph::continuousProbPrimes( - const DiscreteKeys &orig_discrete_keys, - const boost::shared_ptr &continuousBayesNet) const { - // Generate all possible assignments. - const std::vector assignments = - DiscreteValues::CartesianProduct(orig_discrete_keys); - - // Save a copy of the original discrete key ordering - DiscreteKeys discrete_keys(orig_discrete_keys); - // Reverse discrete keys order for correct tree construction - std::reverse(discrete_keys.begin(), discrete_keys.end()); - - // Create a decision tree of all the different VectorValues - DecisionTree delta_tree = - this->continuousDelta(discrete_keys, continuousBayesNet, assignments); - - // Get the probPrime tree with the correct leaf probabilities - std::vector probPrimes; - for (const DiscreteValues &assignment : assignments) { - VectorValues delta = *delta_tree(assignment); - - // If VectorValues is empty, it means this is a pruned branch. - // Set thr probPrime to 0.0. - if (delta.size() == 0) { - probPrimes.push_back(0.0); - continue; - } - - // Compute the error given the delta and the assignment. 
- double error = this->error(delta, assignment); - probPrimes.push_back(exp(-error)); - } - - AlgebraicDecisionTree probPrimeTree(discrete_keys, probPrimes); - return probPrimeTree; -} - -/* ************************************************************************ */ -AlgebraicDecisionTree HybridGaussianFactorGraph::continuousProbPrimes( - const DiscreteKeys &orig_discrete_keys, - const boost::shared_ptr &continuousBayesTree) const { - // Generate all possible assignments. - const std::vector assignments = - DiscreteValues::CartesianProduct(orig_discrete_keys); - - // Save a copy of the original discrete key ordering - DiscreteKeys discrete_keys(orig_discrete_keys); - // Reverse discrete keys order for correct tree construction - std::reverse(discrete_keys.begin(), discrete_keys.end()); - - // Create a decision tree of all the different VectorValues - DecisionTree delta_tree = - this->continuousDelta(discrete_keys, continuousBayesTree, assignments); - - // Get the probPrime tree with the correct leaf probabilities - std::vector probPrimes; - for (const DiscreteValues &assignment : assignments) { - VectorValues delta = *delta_tree(assignment); - - // If VectorValues is empty, it means this is a pruned branch. - // Set thr probPrime to 0.0. - if (delta.size() == 0) { - probPrimes.push_back(0.0); - continue; - } - - // Compute the error given the delta and the assignment. 
- double error = this->error(delta, assignment); - probPrimes.push_back(exp(-error)); - } - - AlgebraicDecisionTree probPrimeTree(discrete_keys, probPrimes); - return probPrimeTree; -} - /* ************************************************************************ */ std::pair HybridGaussianFactorGraph::separateContinuousDiscreteOrdering( diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.h b/gtsam/hybrid/HybridGaussianFactorGraph.h index fb8ebbdc4..d4da66400 100644 --- a/gtsam/hybrid/HybridGaussianFactorGraph.h +++ b/gtsam/hybrid/HybridGaussianFactorGraph.h @@ -220,44 +220,89 @@ class GTSAM_EXPORT HybridGaussianFactorGraph * @brief Compute the VectorValues solution for the continuous variables for * each mode. * + * @tparam BAYES Template on the type of Bayes graph, either a bayes net or a + * bayes tree. * @param discrete_keys The discrete keys which form all the modes. - * @param continuousBayesNet The Bayes Net representing the continuous + * @param continuousBayesNet The Bayes Net/Tree representing the continuous * eliminated variables. * @param assignments List of all discrete assignments to create the final * decision tree. 
* @return DecisionTree */ + template DecisionTree continuousDelta( const DiscreteKeys& discrete_keys, - const boost::shared_ptr& continuousBayesNet, - const std::vector& assignments) const; - DecisionTree continuousDelta( - const DiscreteKeys& discrete_keys, - const boost::shared_ptr& continuousBayesTree, - const std::vector& assignments) const; + const boost::shared_ptr& continuousBayesNet, + const std::vector& assignments) const { + // Create a decision tree of all the different VectorValues + std::vector vector_values; + for (const DiscreteValues& assignment : assignments) { + VectorValues values = continuousBayesNet->optimize(assignment); + vector_values.push_back(boost::make_shared(values)); + } + DecisionTree delta_tree(discrete_keys, + vector_values); + + return delta_tree; + } /** * @brief Compute the unnormalized probabilities of the continuous variables * for each of the modes. * + * @tparam BAYES Template on the type of Bayes graph, either a bayes net or a + * bayes tree. * @param discrete_keys The discrete keys which form all the modes. * @param continuousBayesNet The Bayes Net representing the continuous * eliminated variables. * @return AlgebraicDecisionTree */ + template AlgebraicDecisionTree continuousProbPrimes( const DiscreteKeys& discrete_keys, - const boost::shared_ptr& continuousBayesNet) const; - AlgebraicDecisionTree continuousProbPrimes( - const DiscreteKeys& discrete_keys, - const boost::shared_ptr& continuousBayesTree) const; + const boost::shared_ptr& continuousBayesNet) const { + // Generate all possible assignments. 
+ const std::vector assignments = + DiscreteValues::CartesianProduct(discrete_keys); + + // Save a copy of the original discrete key ordering + DiscreteKeys reversed_discrete_keys(discrete_keys); + // Reverse discrete keys order for correct tree construction + std::reverse(reversed_discrete_keys.begin(), reversed_discrete_keys.end()); + + // Create a decision tree of all the different VectorValues + DecisionTree delta_tree = + this->continuousDelta(reversed_discrete_keys, continuousBayesNet, + assignments); + + // Get the probPrime tree with the correct leaf probabilities + std::vector probPrimes; + for (const DiscreteValues& assignment : assignments) { + VectorValues delta = *delta_tree(assignment); + + // If VectorValues is empty, it means this is a pruned branch. + // Set the probPrime to 0.0. + if (delta.size() == 0) { + probPrimes.push_back(0.0); + continue; + } + + // Compute the error given the delta and the assignment. + double error = this->error(delta, assignment); + probPrimes.push_back(exp(-error)); + } + + AlgebraicDecisionTree probPrimeTree(reversed_discrete_keys, + probPrimes); + return probPrimeTree; + } std::pair separateContinuousDiscreteOrdering( const Ordering& ordering) const; /** * @brief Custom elimination function which computes the correct - * continuous probabilities. + * continuous probabilities. Returns a bayes net. * * @param continuous Optional ordering for all continuous variables. * @param discrete Optional ordering for all discrete variables. 
@@ -269,27 +314,39 @@ class GTSAM_EXPORT HybridGaussianFactorGraph const Eliminate& function = EliminationTraitsType::DefaultEliminate, OptionalVariableIndex variableIndex = boost::none) const; + /// Sequential elimination overload for hybrid boost::shared_ptr eliminateSequential( OptionalOrderingType orderingType = boost::none, const Eliminate& function = EliminationTraitsType::DefaultEliminate, OptionalVariableIndex variableIndex = boost::none) const; + /// Sequential elimination overload for hybrid boost::shared_ptr eliminateSequential( const Ordering& ordering, const Eliminate& function = EliminationTraitsType::DefaultEliminate, OptionalVariableIndex variableIndex = boost::none) const; + /** + * @brief Custom elimination function which computes the correct + * continuous probabilities. Returns a bayes tree. + * + * @param continuous Optional ordering for all continuous variables. + * @param discrete Optional ordering for all discrete variables. + * @return boost::shared_ptr + */ boost::shared_ptr eliminateHybridMultifrontal( const boost::optional continuous = boost::none, const boost::optional discrete = boost::none, const Eliminate& function = EliminationTraitsType::DefaultEliminate, OptionalVariableIndex variableIndex = boost::none) const; + /// Multifrontal elimination overload for hybrid boost::shared_ptr eliminateMultifrontal( OptionalOrderingType orderingType = boost::none, const Eliminate& function = EliminationTraitsType::DefaultEliminate, OptionalVariableIndex variableIndex = boost::none) const; + /// Multifrontal elimination overload for hybrid boost::shared_ptr eliminateMultifrontal( const Ordering& ordering, const Eliminate& function = EliminationTraitsType::DefaultEliminate, From 239412956c3632f9da4985cae7583dd3a9c1f31e Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Tue, 15 Nov 2022 00:50:03 -0500 Subject: [PATCH 26/51] address review comments --- gtsam/hybrid/HybridGaussianFactorGraph.cpp | 35 ++++-- gtsam/hybrid/HybridGaussianFactorGraph.h | 
6 +- gtsam/hybrid/tests/testHybridBayesTree.cpp | 1 - gtsam/hybrid/tests/testHybridEstimation.cpp | 117 ++++++++++++------ gtsam/hybrid/tests/testHybridGaussianISAM.cpp | 2 +- .../hybrid/tests/testHybridNonlinearISAM.cpp | 2 +- 6 files changed, 107 insertions(+), 56 deletions(-) diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.cpp b/gtsam/hybrid/HybridGaussianFactorGraph.cpp index 4dc309e7b..621830338 100644 --- a/gtsam/hybrid/HybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/HybridGaussianFactorGraph.cpp @@ -557,9 +557,9 @@ HybridGaussianFactorGraph::eliminateHybridSequential( const boost::optional continuous, const boost::optional discrete, const Eliminate &function, OptionalVariableIndex variableIndex) const { - Ordering continuous_ordering = + const Ordering continuous_ordering = continuous ? *continuous : Ordering(this->continuousKeys()); - Ordering discrete_ordering = + const Ordering discrete_ordering = discrete ? *discrete : Ordering(this->discreteKeys()); // Eliminate continuous @@ -570,7 +570,8 @@ HybridGaussianFactorGraph::eliminateHybridSequential( function, variableIndex); // Get the last continuous conditional which will have all the discrete keys - auto last_conditional = bayesNet->at(bayesNet->size() - 1); + HybridConditional::shared_ptr last_conditional = + bayesNet->at(bayesNet->size() - 1); DiscreteKeys discrete_keys = last_conditional->discreteKeys(); // If not discrete variables, return the eliminated bayes net. 
@@ -578,9 +579,11 @@ HybridGaussianFactorGraph::eliminateHybridSequential( return bayesNet; } - AlgebraicDecisionTree probPrimeTree = + // DecisionTree for P'(X|M, Z) for all mode sequences M + const AlgebraicDecisionTree probPrimeTree = this->continuousProbPrimes(discrete_keys, bayesNet); + // Add the model selection factor P(M|Z) discreteGraph->add(DecisionTreeFactor(discrete_keys, probPrimeTree)); // Perform discrete elimination @@ -622,9 +625,9 @@ HybridGaussianFactorGraph::eliminateHybridMultifrontal( const boost::optional continuous, const boost::optional discrete, const Eliminate &function, OptionalVariableIndex variableIndex) const { - Ordering continuous_ordering = + const Ordering continuous_ordering = continuous ? *continuous : Ordering(this->continuousKeys()); - Ordering discrete_ordering = + const Ordering discrete_ordering = discrete ? *discrete : Ordering(this->discreteKeys()); // Eliminate continuous @@ -635,9 +638,9 @@ HybridGaussianFactorGraph::eliminateHybridMultifrontal( function, variableIndex); // Get the last continuous conditional which will have all the discrete - Key last_continuous_key = - continuous_ordering.at(continuous_ordering.size() - 1); - auto last_conditional = (*bayesTree)[last_continuous_key]->conditional(); + const Key last_continuous_key = continuous_ordering.back(); + HybridConditional::shared_ptr last_conditional = + (*bayesTree)[last_continuous_key]->conditional(); DiscreteKeys discrete_keys = last_conditional->discreteKeys(); // If not discrete variables, return the eliminated bayes net. 
@@ -645,16 +648,24 @@ HybridGaussianFactorGraph::eliminateHybridMultifrontal( return bayesTree; } - AlgebraicDecisionTree probPrimeTree = + // DecisionTree for P'(X|M, Z) for all mode sequences M + const AlgebraicDecisionTree probPrimeTree = this->continuousProbPrimes(discrete_keys, bayesTree); + // Add the model selection factor P(M|Z) discreteGraph->add(DecisionTreeFactor(discrete_keys, probPrimeTree)); - auto updatedBayesTree = + // Eliminate discrete variables to get the discrete bayes tree. + // This bayes tree will be updated with the + // continuous variables as the child nodes. + HybridBayesTree::shared_ptr updatedBayesTree = discreteGraph->BaseEliminateable::eliminateMultifrontal(discrete_ordering, function); - auto discrete_clique = (*updatedBayesTree)[discrete_ordering.at(0)]; + // Get the clique with all the discrete keys. + // There should only be 1 clique. + const HybridBayesTree::sharedClique discrete_clique = + (*updatedBayesTree)[discrete_ordering.at(0)]; std::set clique_set; for (auto node : bayesTree->nodes()) { diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.h b/gtsam/hybrid/HybridGaussianFactorGraph.h index d4da66400..47b94070f 100644 --- a/gtsam/hybrid/HybridGaussianFactorGraph.h +++ b/gtsam/hybrid/HybridGaussianFactorGraph.h @@ -217,8 +217,10 @@ class GTSAM_EXPORT HybridGaussianFactorGraph const DiscreteValues& discreteValues) const; /** - * @brief Compute the VectorValues solution for the continuous variables for - * each mode. + * @brief Helper method to compute the VectorValues solution for the + * continuous variables for each discrete mode. + * Used as a helper to compute q(\mu | M, Z) which is used by + * both P(X | M, Z) and P(M | Z). * * @tparam BAYES Template on the type of Bayes graph, either a bayes net or a * bayes tree. 
diff --git a/gtsam/hybrid/tests/testHybridBayesTree.cpp b/gtsam/hybrid/tests/testHybridBayesTree.cpp index ab0f3c2d9..9bc12c31d 100644 --- a/gtsam/hybrid/tests/testHybridBayesTree.cpp +++ b/gtsam/hybrid/tests/testHybridBayesTree.cpp @@ -141,7 +141,6 @@ TEST(HybridBayesTree, Optimize) { DiscreteKeys discrete_keys = {{M(0), 2}, {M(1), 2}, {M(2), 2}}; vector probs = {0.012519475, 0.041280228, 0.075018647, 0.081663656, 0.037152205, 0.12248971, 0.07349729, 0.08}; - AlgebraicDecisionTree potentials(discrete_keys, probs); dfg.emplace_shared(discrete_keys, probs); DiscreteValues expectedMPE = dfg.optimize(); diff --git a/gtsam/hybrid/tests/testHybridEstimation.cpp b/gtsam/hybrid/tests/testHybridEstimation.cpp index ec5d6fb7d..431e5769f 100644 --- a/gtsam/hybrid/tests/testHybridEstimation.cpp +++ b/gtsam/hybrid/tests/testHybridEstimation.cpp @@ -79,6 +79,8 @@ TEST(HybridEstimation, Incremental) { // Ground truth discrete seq std::vector discrete_seq = {1, 1, 0, 0, 0, 1, 1, 1, 1, 0, 1, 1, 1, 0, 0, 1, 1, 0, 0, 0}; + // Switching example of robot moving in 1D with given measurements and equal + // mode priors. Switching switching(K, 1.0, 0.1, measurements, "1/1 1/1"); HybridSmoother smoother; HybridNonlinearFactorGraph graph; @@ -136,7 +138,7 @@ TEST(HybridEstimation, Incremental) { * @param between_sigma Noise model sigma for the between factor. * @return GaussianFactorGraph::shared_ptr */ -GaussianFactorGraph::shared_ptr specificProblem( +GaussianFactorGraph::shared_ptr specificModesFactorGraph( size_t K, const std::vector& measurements, const std::vector& discrete_seq, double measurement_sigma = 0.1, double between_sigma = 1.0) { @@ -184,7 +186,7 @@ std::vector getDiscreteSequence(size_t x) { } /** - * @brief Helper method to get the probPrimeTree + * @brief Helper method to get the tree of unnormalized probabilities * as per the new elimination scheme. * * @param graph The HybridGaussianFactorGraph to eliminate. 
@@ -242,18 +244,15 @@ AlgebraicDecisionTree probPrimeTree( TEST(HybridEstimation, Probability) { constexpr size_t K = 4; std::vector measurements = {0, 1, 2, 2}; - - // This is the correct sequence - // std::vector discrete_seq = {1, 1, 0}; - double between_sigma = 1.0, measurement_sigma = 0.1; std::vector expected_errors, expected_prob_primes; + std::map> discrete_seq_map; for (size_t i = 0; i < pow(2, K - 1); i++) { - std::vector discrete_seq = getDiscreteSequence(i); + discrete_seq_map[i] = getDiscreteSequence(i); - GaussianFactorGraph::shared_ptr linear_graph = specificProblem( - K, measurements, discrete_seq, measurement_sigma, between_sigma); + GaussianFactorGraph::shared_ptr linear_graph = specificModesFactorGraph( + K, measurements, discrete_seq_map[i], measurement_sigma, between_sigma); auto bayes_net = linear_graph->eliminateSequential(); @@ -263,7 +262,10 @@ TEST(HybridEstimation, Probability) { expected_prob_primes.push_back(linear_graph->probPrime(values)); } - Switching switching(K, between_sigma, measurement_sigma, measurements); + // Switching example of robot moving in 1D with given measurements and equal + // mode priors. 
+ Switching switching(K, between_sigma, measurement_sigma, measurements, + "1/1 1/1"); auto graph = switching.linearizedFactorGraph; Ordering ordering = getOrdering(graph, HybridGaussianFactorGraph()); @@ -298,26 +300,30 @@ TEST(HybridEstimation, Probability) { // Test if the probPrimeTree matches the probability of // the individual factor graphs for (size_t i = 0; i < pow(2, K - 1); i++) { - std::vector discrete_seq = getDiscreteSequence(i); Assignment discrete_assignment; - for (size_t v = 0; v < discrete_seq.size(); v++) { - discrete_assignment[M(v)] = discrete_seq[v]; + for (size_t v = 0; v < discrete_seq_map[i].size(); v++) { + discrete_assignment[M(v)] = discrete_seq_map[i][v]; } EXPECT_DOUBLES_EQUAL(expected_prob_primes.at(i), probPrimeTree(discrete_assignment), 1e-8); } - // remainingGraph->add(DecisionTreeFactor(discrete_keys, probPrimeTree)); + discreteGraph->add(DecisionTreeFactor(discrete_keys, probPrimeTree)); - // Ordering discrete(graph.discreteKeys()); - // // remainingGraph->print("remainingGraph"); - // // discrete.print(); - // auto discreteBayesNet = remainingGraph->eliminateSequential(discrete); - // bayesNet->add(*discreteBayesNet); - // // bayesNet->print(); + Ordering discrete(graph.discreteKeys()); + auto discreteBayesNet = + discreteGraph->BaseEliminateable::eliminateSequential(discrete); + bayesNet->add(*discreteBayesNet); - // HybridValues hybrid_values = bayesNet->optimize(); - // hybrid_values.discrete().print(); + HybridValues hybrid_values = bayesNet->optimize(); + + // This is the correct sequence as designed + DiscreteValues discrete_seq; + discrete_seq[M(0)] = 1; + discrete_seq[M(1)] = 1; + discrete_seq[M(2)] = 0; + + EXPECT(assert_equal(discrete_seq, hybrid_values.discrete())); } /****************************************************************************/ @@ -330,31 +336,34 @@ TEST(HybridEstimation, ProbabilityMultifrontal) { constexpr size_t K = 4; std::vector measurements = {0, 1, 2, 2}; - // This is the correct sequence - 
// std::vector discrete_seq = {1, 1, 0}; - double between_sigma = 1.0, measurement_sigma = 0.1; + // For each discrete mode sequence, create the individual factor graphs and + // optimize each. std::vector expected_errors, expected_prob_primes; + std::map> discrete_seq_map; for (size_t i = 0; i < pow(2, K - 1); i++) { - std::vector discrete_seq = getDiscreteSequence(i); + discrete_seq_map[i] = getDiscreteSequence(i); - GaussianFactorGraph::shared_ptr linear_graph = specificProblem( - K, measurements, discrete_seq, measurement_sigma, between_sigma); + GaussianFactorGraph::shared_ptr linear_graph = specificModesFactorGraph( + K, measurements, discrete_seq_map[i], measurement_sigma, between_sigma); auto bayes_tree = linear_graph->eliminateMultifrontal(); VectorValues values = bayes_tree->optimize(); - std::cout << i << " " << linear_graph->error(values) << std::endl; expected_errors.push_back(linear_graph->error(values)); expected_prob_primes.push_back(linear_graph->probPrime(values)); } - Switching switching(K, between_sigma, measurement_sigma, measurements); + // Switching example of robot moving in 1D with given measurements and equal + // mode priors. + Switching switching(K, between_sigma, measurement_sigma, measurements, + "1/1 1/1"); auto graph = switching.linearizedFactorGraph; Ordering ordering = getOrdering(graph, HybridGaussianFactorGraph()); + // Get the tree of unnormalized probabilities for each mode sequence. 
AlgebraicDecisionTree expected_probPrimeTree = probPrimeTree(graph); // Eliminate continuous @@ -379,10 +388,9 @@ TEST(HybridEstimation, ProbabilityMultifrontal) { // Test if the probPrimeTree matches the probability of // the individual factor graphs for (size_t i = 0; i < pow(2, K - 1); i++) { - std::vector discrete_seq = getDiscreteSequence(i); Assignment discrete_assignment; - for (size_t v = 0; v < discrete_seq.size(); v++) { - discrete_assignment[M(v)] = discrete_seq[v]; + for (size_t v = 0; v < discrete_seq_map[i].size(); v++) { + discrete_assignment[M(v)] = discrete_seq_map[i][v]; } EXPECT_DOUBLES_EQUAL(expected_prob_primes.at(i), probPrimeTree(discrete_assignment), 1e-8); @@ -390,13 +398,44 @@ TEST(HybridEstimation, ProbabilityMultifrontal) { discreteGraph->add(DecisionTreeFactor(discrete_keys, probPrimeTree)); - // Ordering discrete(graph.discreteKeys()); - // auto discreteBayesTree = discreteGraph->eliminateMultifrontal(discrete); - // // DiscreteBayesTree should have only 1 clique - // bayesTree->addClique((*discreteBayesTree)[discrete.at(0)]); + Ordering discrete(graph.discreteKeys()); + auto discreteBayesTree = + discreteGraph->BaseEliminateable::eliminateMultifrontal(discrete); - // // HybridValues hybrid_values = bayesNet->optimize(); - // // hybrid_values.discrete().print(); + EXPECT_LONGS_EQUAL(1, discreteBayesTree->size()); + // DiscreteBayesTree should have only 1 clique + auto discrete_clique = (*discreteBayesTree)[discrete.at(0)]; + + std::set clique_set; + for (auto node : bayesTree->nodes()) { + clique_set.insert(node.second); + } + + // Set the root of the bayes tree as the discrete clique + for (auto clique : clique_set) { + if (clique->conditional()->parents() == + discrete_clique->conditional()->frontals()) { + discreteBayesTree->addClique(clique, discrete_clique); + + } else { + // Remove the clique from the children of the parents since it will get + // added again in addClique. 
+ auto clique_it = std::find(clique->parent()->children.begin(), + clique->parent()->children.end(), clique); + clique->parent()->children.erase(clique_it); + discreteBayesTree->addClique(clique, clique->parent()); + } + } + + HybridValues hybrid_values = discreteBayesTree->optimize(); + + // This is the correct sequence as designed + DiscreteValues discrete_seq; + discrete_seq[M(0)] = 1; + discrete_seq[M(1)] = 1; + discrete_seq[M(2)] = 0; + + EXPECT(assert_equal(discrete_seq, hybrid_values.discrete())); } /* ************************************************************************* */ diff --git a/gtsam/hybrid/tests/testHybridGaussianISAM.cpp b/gtsam/hybrid/tests/testHybridGaussianISAM.cpp index 09403853f..4ba6f88a5 100644 --- a/gtsam/hybrid/tests/testHybridGaussianISAM.cpp +++ b/gtsam/hybrid/tests/testHybridGaussianISAM.cpp @@ -176,7 +176,7 @@ TEST(HybridGaussianElimination, IncrementalInference) { auto discreteConditional = isam[M(1)]->conditional()->asDiscreteConditional(); - // Test if the probability values are as expected with regression tests. + // Test the probability values with regression tests. DiscreteValues assignment; EXPECT(assert_equal(0.166667, m00_prob, 1e-5)); assignment[M(0)] = 0; diff --git a/gtsam/hybrid/tests/testHybridNonlinearISAM.cpp b/gtsam/hybrid/tests/testHybridNonlinearISAM.cpp index e7e65b123..46d5c4324 100644 --- a/gtsam/hybrid/tests/testHybridNonlinearISAM.cpp +++ b/gtsam/hybrid/tests/testHybridNonlinearISAM.cpp @@ -195,7 +195,7 @@ TEST(HybridNonlinearISAM, IncrementalInference) { auto discreteConditional = bayesTree[M(1)]->conditional()->asDiscreteConditional(); - // Test if the probability values are as expected with regression tests. + // Test the probability values with regression tests. 
DiscreteValues assignment; EXPECT(assert_equal(0.166667, m00_prob, 1e-5)); assignment[M(0)] = 0; From 05b2d3169f0a9e72e750c679b4dd92c75b7e0cda Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Sat, 3 Dec 2022 06:43:46 +0530 Subject: [PATCH 27/51] better printing --- gtsam/hybrid/GaussianMixture.cpp | 2 +- gtsam/hybrid/HybridFactor.cpp | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/gtsam/hybrid/GaussianMixture.cpp b/gtsam/hybrid/GaussianMixture.cpp index 4819eda65..0b6fcdff7 100644 --- a/gtsam/hybrid/GaussianMixture.cpp +++ b/gtsam/hybrid/GaussianMixture.cpp @@ -105,7 +105,7 @@ bool GaussianMixture::equals(const HybridFactor &lf, double tol) const { /* *******************************************************************************/ void GaussianMixture::print(const std::string &s, const KeyFormatter &formatter) const { - std::cout << s; + std::cout << (s.empty() ? "" : s + "\n"); if (isContinuous()) std::cout << "Continuous "; if (isDiscrete()) std::cout << "Discrete "; if (isHybrid()) std::cout << "Hybrid "; diff --git a/gtsam/hybrid/HybridFactor.cpp b/gtsam/hybrid/HybridFactor.cpp index 1216fd922..b25e97f05 100644 --- a/gtsam/hybrid/HybridFactor.cpp +++ b/gtsam/hybrid/HybridFactor.cpp @@ -81,7 +81,7 @@ bool HybridFactor::equals(const HybridFactor &lf, double tol) const { /* ************************************************************************ */ void HybridFactor::print(const std::string &s, const KeyFormatter &formatter) const { - std::cout << s; + std::cout << (s.empty() ? 
"" : s + "\n"); if (isContinuous_) std::cout << "Continuous "; if (isDiscrete_) std::cout << "Discrete "; if (isHybrid_) std::cout << "Hybrid "; From 3eaf4cc910fa0cdf34023c4ebd0bf4b37354499b Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Sat, 3 Dec 2022 17:00:51 +0530 Subject: [PATCH 28/51] move multifrontal optimize test to testHybridBayesTree and update docstrings --- gtsam/hybrid/HybridGaussianFactorGraph.cpp | 3 +-- gtsam/hybrid/tests/testHybridBayesNet.cpp | 19 ------------------- gtsam/hybrid/tests/testHybridBayesTree.cpp | 19 +++++++++++++++++++ .../tests/testHybridNonlinearFactorGraph.cpp | 4 ++-- 4 files changed, 22 insertions(+), 23 deletions(-) diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.cpp b/gtsam/hybrid/HybridGaussianFactorGraph.cpp index 621830338..1d52a24af 100644 --- a/gtsam/hybrid/HybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/HybridGaussianFactorGraph.cpp @@ -257,7 +257,6 @@ hybridElimination(const HybridGaussianFactorGraph &factors, // If there are no more continuous parents, then we should create here a // DiscreteFactor, with the error for each discrete choice. if (keysOfSeparator.empty()) { - // TODO(Varun) Use the math from the iMHS_Math-1-indexed document VectorValues empty_values; auto factorProb = [&](const GaussianFactor::shared_ptr &factor) { if (!factor) { @@ -574,7 +573,7 @@ HybridGaussianFactorGraph::eliminateHybridSequential( bayesNet->at(bayesNet->size() - 1); DiscreteKeys discrete_keys = last_conditional->discreteKeys(); - // If not discrete variables, return the eliminated bayes net. + // If no discrete variables, return the eliminated bayes net. 
if (discrete_keys.size() == 0) { return bayesNet; } diff --git a/gtsam/hybrid/tests/testHybridBayesNet.cpp b/gtsam/hybrid/tests/testHybridBayesNet.cpp index 8b8ca976b..d2951ddaa 100644 --- a/gtsam/hybrid/tests/testHybridBayesNet.cpp +++ b/gtsam/hybrid/tests/testHybridBayesNet.cpp @@ -164,25 +164,6 @@ TEST(HybridBayesNet, Optimize) { EXPECT(assert_equal(expectedValues, delta.continuous(), 1e-5)); } -/* ****************************************************************************/ -// Test bayes net multifrontal optimize -TEST(HybridBayesNet, OptimizeMultifrontal) { - Switching s(4); - - Ordering hybridOrdering = s.linearizedFactorGraph.getHybridOrdering(); - HybridBayesTree::shared_ptr hybridBayesTree = - s.linearizedFactorGraph.eliminateMultifrontal(hybridOrdering); - HybridValues delta = hybridBayesTree->optimize(); - - VectorValues expectedValues; - expectedValues.insert(X(0), -0.999904 * Vector1::Ones()); - expectedValues.insert(X(1), -0.99029 * Vector1::Ones()); - expectedValues.insert(X(2), -1.00971 * Vector1::Ones()); - expectedValues.insert(X(3), -1.0001 * Vector1::Ones()); - - EXPECT(assert_equal(expectedValues, delta.continuous(), 1e-5)); -} - /* ****************************************************************************/ // Test bayes net error TEST(HybridBayesNet, Error) { diff --git a/gtsam/hybrid/tests/testHybridBayesTree.cpp b/gtsam/hybrid/tests/testHybridBayesTree.cpp index 9bc12c31d..3992aa023 100644 --- a/gtsam/hybrid/tests/testHybridBayesTree.cpp +++ b/gtsam/hybrid/tests/testHybridBayesTree.cpp @@ -32,6 +32,25 @@ using noiseModel::Isotropic; using symbol_shorthand::M; using symbol_shorthand::X; +/* ****************************************************************************/ +// Test multifrontal optimize +TEST(HybridBayesTree, OptimizeMultifrontal) { + Switching s(4); + + Ordering hybridOrdering = s.linearizedFactorGraph.getHybridOrdering(); + HybridBayesTree::shared_ptr hybridBayesTree = + 
s.linearizedFactorGraph.eliminateMultifrontal(hybridOrdering); + HybridValues delta = hybridBayesTree->optimize(); + + VectorValues expectedValues; + expectedValues.insert(X(0), -0.999904 * Vector1::Ones()); + expectedValues.insert(X(1), -0.99029 * Vector1::Ones()); + expectedValues.insert(X(2), -1.00971 * Vector1::Ones()); + expectedValues.insert(X(3), -1.0001 * Vector1::Ones()); + + EXPECT(assert_equal(expectedValues, delta.continuous(), 1e-5)); +} + /* ****************************************************************************/ // Test for optimizing a HybridBayesTree with a given assignment. TEST(HybridBayesTree, OptimizeAssignment) { diff --git a/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp b/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp index f8c61baf7..e3b7f761a 100644 --- a/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp +++ b/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp @@ -386,11 +386,11 @@ TEST(HybridFactorGraph, Partial_Elimination) { auto linearizedFactorGraph = self.linearizedFactorGraph; - // Create ordering. + // Create ordering of only continuous variables. Ordering ordering; for (size_t k = 0; k < self.K; k++) ordering += X(k); - // Eliminate partially. + // Eliminate partially i.e. only continuous part. 
HybridBayesNet::shared_ptr hybridBayesNet; HybridGaussianFactorGraph::shared_ptr remainingFactorGraph; std::tie(hybridBayesNet, remainingFactorGraph) = From cd3cfa0faa5ebcacc05d7ccbdfc21bcdf505d0f9 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Sat, 3 Dec 2022 17:14:11 +0530 Subject: [PATCH 29/51] moved sequential elimination code to HybridEliminationTree --- gtsam/hybrid/HybridEliminationTree.cpp | 12 ++- gtsam/hybrid/HybridEliminationTree.h | 107 +++++++++++++++++++++ gtsam/hybrid/HybridFactor.cpp | 2 +- gtsam/hybrid/HybridGaussianFactorGraph.cpp | 68 ------------- gtsam/hybrid/HybridGaussianFactorGraph.h | 26 ----- gtsam/hybrid/HybridSmoother.cpp | 2 +- 6 files changed, 119 insertions(+), 98 deletions(-) diff --git a/gtsam/hybrid/HybridEliminationTree.cpp b/gtsam/hybrid/HybridEliminationTree.cpp index c2df2dd60..fe9190571 100644 --- a/gtsam/hybrid/HybridEliminationTree.cpp +++ b/gtsam/hybrid/HybridEliminationTree.cpp @@ -27,12 +27,20 @@ template class EliminationTree; HybridEliminationTree::HybridEliminationTree( const HybridGaussianFactorGraph& factorGraph, const VariableIndex& structure, const Ordering& order) - : Base(factorGraph, structure, order) {} + : Base(factorGraph, structure, order), + graph_(factorGraph), + variable_index_(structure) { + // Segregate the continuous and the discrete keys + std::tie(continuous_ordering_, discrete_ordering_) = + graph_.separateContinuousDiscreteOrdering(order); +} /* ************************************************************************* */ HybridEliminationTree::HybridEliminationTree( const HybridGaussianFactorGraph& factorGraph, const Ordering& order) - : Base(factorGraph, order) {} + : Base(factorGraph, order), + graph_(factorGraph), + variable_index_(VariableIndex(factorGraph)) {} /* ************************************************************************* */ bool HybridEliminationTree::equals(const This& other, double tol) const { diff --git a/gtsam/hybrid/HybridEliminationTree.h 
b/gtsam/hybrid/HybridEliminationTree.h index b2dd1bd9c..dfa88bf4e 100644 --- a/gtsam/hybrid/HybridEliminationTree.h +++ b/gtsam/hybrid/HybridEliminationTree.h @@ -33,6 +33,12 @@ class GTSAM_EXPORT HybridEliminationTree private: friend class ::EliminationTreeTester; + Ordering continuous_ordering_, discrete_ordering_; + /// Used to store the original factor graph to eliminate + HybridGaussianFactorGraph graph_; + /// Store the provided variable index. + VariableIndex variable_index_; + public: typedef EliminationTree Base; ///< Base class @@ -66,6 +72,107 @@ class GTSAM_EXPORT HybridEliminationTree /** Test whether the tree is equal to another */ bool equals(const This& other, double tol = 1e-9) const; + + /** + * @brief Helper method to eliminate continuous variables. + * + * If no continuous variables exist, return an empty bayes net + * and the original graph. + * + * @param function Elimination function for hybrid elimination. + * @return std::pair, + * boost::shared_ptr > + */ + std::pair, boost::shared_ptr> + eliminateContinuous(Eliminate function) const { + if (continuous_ordering_.size() > 0) { + This continuous_etree(graph_, variable_index_, continuous_ordering_); + return continuous_etree.Base::eliminate(function); + + } else { + BayesNetType::shared_ptr bayesNet = boost::make_shared(); + FactorGraphType::shared_ptr discreteGraph = + boost::make_shared(graph_); + return std::make_pair(bayesNet, discreteGraph); + } + } + + /** + * @brief Helper method to eliminate the discrete variables after the + * continuous variables have been eliminated. + * + * If there are no discrete variables, return an empty bayes net and the + * discreteGraph which is passed in. + * + * @param function Elimination function + * @param discreteGraph The factor graph with the factor ϕ(X | M, Z). 
+ * @return std::pair, + * boost::shared_ptr > + */ + std::pair, boost::shared_ptr> + eliminateDiscrete(Eliminate function, + const FactorGraphType::shared_ptr& discreteGraph) const { + BayesNetType::shared_ptr discreteBayesNet; + FactorGraphType::shared_ptr finalGraph; + if (discrete_ordering_.size() > 0) { + This discrete_etree(*discreteGraph, VariableIndex(*discreteGraph), + discrete_ordering_); + + std::tie(discreteBayesNet, finalGraph) = + discrete_etree.Base::eliminate(function); + + } else { + discreteBayesNet = boost::make_shared(); + finalGraph = discreteGraph; + } + + return std::make_pair(discreteBayesNet, finalGraph); + } + + /** + * @brief Override the EliminationTree eliminate method + * so we can perform hybrid elimination correctly. + * + * @param function + * @return std::pair, + * boost::shared_ptr > + */ + std::pair, boost::shared_ptr> + eliminate(Eliminate function) const { + // Perform continuous elimination + BayesNetType::shared_ptr bayesNet; + FactorGraphType::shared_ptr discreteGraph; + std::tie(bayesNet, discreteGraph) = this->eliminateContinuous(function); + + // If we have eliminated continuous variables + // and have discrete variables to eliminate, + // then compute P(X | M, Z) + if (continuous_ordering_.size() > 0 && discrete_ordering_.size() > 0) { + // Get the last continuous conditional + // which will have all the discrete keys + HybridConditional::shared_ptr last_conditional = + bayesNet->at(bayesNet->size() - 1); + DiscreteKeys discrete_keys = last_conditional->discreteKeys(); + + // DecisionTree for P'(X|M, Z) for all mode sequences M + const AlgebraicDecisionTree probPrimeTree = + graph_.continuousProbPrimes(discrete_keys, bayesNet); + + // Add the model selection factor P(M|Z) + discreteGraph->add(DecisionTreeFactor(discrete_keys, probPrimeTree)); + } + + // Perform discrete elimination + BayesNetType::shared_ptr discreteBayesNet; + FactorGraphType::shared_ptr finalGraph; + std::tie(discreteBayesNet, finalGraph) = + 
eliminateDiscrete(function, discreteGraph); + + // Add the discrete conditionals to the hybrid conditionals + bayesNet->add(*discreteBayesNet); + + return std::make_pair(bayesNet, finalGraph); + } }; } // namespace gtsam diff --git a/gtsam/hybrid/HybridFactor.cpp b/gtsam/hybrid/HybridFactor.cpp index b25e97f05..1216fd922 100644 --- a/gtsam/hybrid/HybridFactor.cpp +++ b/gtsam/hybrid/HybridFactor.cpp @@ -81,7 +81,7 @@ bool HybridFactor::equals(const HybridFactor &lf, double tol) const { /* ************************************************************************ */ void HybridFactor::print(const std::string &s, const KeyFormatter &formatter) const { - std::cout << (s.empty() ? "" : s + "\n"); + std::cout << s; if (isContinuous_) std::cout << "Continuous "; if (isDiscrete_) std::cout << "Discrete "; if (isHybrid_) std::cout << "Hybrid "; diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.cpp b/gtsam/hybrid/HybridGaussianFactorGraph.cpp index 1d52a24af..1afe4f38a 100644 --- a/gtsam/hybrid/HybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/HybridGaussianFactorGraph.cpp @@ -550,74 +550,6 @@ HybridGaussianFactorGraph::separateContinuousDiscreteOrdering( return std::make_pair(continuous_ordering, discrete_ordering); } -/* ************************************************************************ */ -boost::shared_ptr -HybridGaussianFactorGraph::eliminateHybridSequential( - const boost::optional continuous, - const boost::optional discrete, const Eliminate &function, - OptionalVariableIndex variableIndex) const { - const Ordering continuous_ordering = - continuous ? *continuous : Ordering(this->continuousKeys()); - const Ordering discrete_ordering = - discrete ? 
*discrete : Ordering(this->discreteKeys()); - - // Eliminate continuous - HybridBayesNet::shared_ptr bayesNet; - HybridGaussianFactorGraph::shared_ptr discreteGraph; - std::tie(bayesNet, discreteGraph) = - BaseEliminateable::eliminatePartialSequential(continuous_ordering, - function, variableIndex); - - // Get the last continuous conditional which will have all the discrete keys - HybridConditional::shared_ptr last_conditional = - bayesNet->at(bayesNet->size() - 1); - DiscreteKeys discrete_keys = last_conditional->discreteKeys(); - - // If no discrete variables, return the eliminated bayes net. - if (discrete_keys.size() == 0) { - return bayesNet; - } - - // DecisionTree for P'(X|M, Z) for all mode sequences M - const AlgebraicDecisionTree probPrimeTree = - this->continuousProbPrimes(discrete_keys, bayesNet); - - // Add the model selection factor P(M|Z) - discreteGraph->add(DecisionTreeFactor(discrete_keys, probPrimeTree)); - - // Perform discrete elimination - HybridBayesNet::shared_ptr discreteBayesNet = - discreteGraph->BaseEliminateable::eliminateSequential( - discrete_ordering, function, variableIndex); - - bayesNet->add(*discreteBayesNet); - - return bayesNet; -} - -/* ************************************************************************ */ -boost::shared_ptr -HybridGaussianFactorGraph::eliminateSequential( - OptionalOrderingType orderingType, const Eliminate &function, - OptionalVariableIndex variableIndex) const { - return BaseEliminateable::eliminateSequential(orderingType, function, - variableIndex); -} - -/* ************************************************************************ */ -boost::shared_ptr -HybridGaussianFactorGraph::eliminateSequential( - const Ordering &ordering, const Eliminate &function, - OptionalVariableIndex variableIndex) const { - // Segregate the continuous and the discrete keys - Ordering continuous_ordering, discrete_ordering; - std::tie(continuous_ordering, discrete_ordering) = - 
this->separateContinuousDiscreteOrdering(ordering); - - return this->eliminateHybridSequential(continuous_ordering, discrete_ordering, - function, variableIndex); -} - /* ************************************************************************ */ boost::shared_ptr HybridGaussianFactorGraph::eliminateHybridMultifrontal( diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.h b/gtsam/hybrid/HybridGaussianFactorGraph.h index 47b94070f..a0d80b154 100644 --- a/gtsam/hybrid/HybridGaussianFactorGraph.h +++ b/gtsam/hybrid/HybridGaussianFactorGraph.h @@ -302,32 +302,6 @@ class GTSAM_EXPORT HybridGaussianFactorGraph std::pair separateContinuousDiscreteOrdering( const Ordering& ordering) const; - /** - * @brief Custom elimination function which computes the correct - * continuous probabilities. Returns a bayes net. - * - * @param continuous Optional ordering for all continuous variables. - * @param discrete Optional ordering for all discrete variables. - * @return boost::shared_ptr - */ - boost::shared_ptr eliminateHybridSequential( - const boost::optional continuous = boost::none, - const boost::optional discrete = boost::none, - const Eliminate& function = EliminationTraitsType::DefaultEliminate, - OptionalVariableIndex variableIndex = boost::none) const; - - /// Sequential elimination overload for hybrid - boost::shared_ptr eliminateSequential( - OptionalOrderingType orderingType = boost::none, - const Eliminate& function = EliminationTraitsType::DefaultEliminate, - OptionalVariableIndex variableIndex = boost::none) const; - - /// Sequential elimination overload for hybrid - boost::shared_ptr eliminateSequential( - const Ordering& ordering, - const Eliminate& function = EliminationTraitsType::DefaultEliminate, - OptionalVariableIndex variableIndex = boost::none) const; - /** * @brief Custom elimination function which computes the correct * continuous probabilities. Returns a bayes tree. 
diff --git a/gtsam/hybrid/HybridSmoother.cpp b/gtsam/hybrid/HybridSmoother.cpp index 12f6949ab..7400053bf 100644 --- a/gtsam/hybrid/HybridSmoother.cpp +++ b/gtsam/hybrid/HybridSmoother.cpp @@ -32,7 +32,7 @@ void HybridSmoother::update(HybridGaussianFactorGraph graph, addConditionals(graph, hybridBayesNet_, ordering); // Eliminate. - auto bayesNetFragment = graph.eliminateHybridSequential(); + auto bayesNetFragment = graph.eliminateSequential(); /// Prune if (maxNrLeaves) { From 15fffeb18adf952424789cf6835123bc97b2eb1b Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Sun, 4 Dec 2022 14:30:01 +0530 Subject: [PATCH 30/51] add getters to HybridEliminationTree --- gtsam/hybrid/HybridEliminationTree.h | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/gtsam/hybrid/HybridEliminationTree.h b/gtsam/hybrid/HybridEliminationTree.h index dfa88bf4e..65d614ca3 100644 --- a/gtsam/hybrid/HybridEliminationTree.h +++ b/gtsam/hybrid/HybridEliminationTree.h @@ -173,6 +173,13 @@ class GTSAM_EXPORT HybridEliminationTree return std::make_pair(bayesNet, finalGraph); } + + Ordering continuousOrdering() const { return continuous_ordering_; } + Ordering discreteOrdering() const { return discrete_ordering_; } + + /// Store the provided variable index. 
+ VariableIndex variableIndex() const { return variable_index_; } + HybridGaussianFactorGraph graph() const { return graph_; } }; } // namespace gtsam From addbe2a5a57bfab3851a51a7193d92f3e2110bfa Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Sun, 4 Dec 2022 14:55:17 +0530 Subject: [PATCH 31/51] override eliminate in HybridJunctionTree --- gtsam/hybrid/HybridGaussianFactorGraph.cpp | 93 ------------ gtsam/hybrid/HybridGaussianFactorGraph.h | 26 +--- gtsam/hybrid/HybridJunctionTree.cpp | 139 +++++++++++++++++- gtsam/hybrid/HybridJunctionTree.h | 67 ++++++++- .../tests/testHybridGaussianFactorGraph.cpp | 7 +- 5 files changed, 209 insertions(+), 123 deletions(-) diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.cpp b/gtsam/hybrid/HybridGaussianFactorGraph.cpp index 1afe4f38a..c430fac2c 100644 --- a/gtsam/hybrid/HybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/HybridGaussianFactorGraph.cpp @@ -550,98 +550,5 @@ HybridGaussianFactorGraph::separateContinuousDiscreteOrdering( return std::make_pair(continuous_ordering, discrete_ordering); } -/* ************************************************************************ */ -boost::shared_ptr -HybridGaussianFactorGraph::eliminateHybridMultifrontal( - const boost::optional continuous, - const boost::optional discrete, const Eliminate &function, - OptionalVariableIndex variableIndex) const { - const Ordering continuous_ordering = - continuous ? *continuous : Ordering(this->continuousKeys()); - const Ordering discrete_ordering = - discrete ? 
*discrete : Ordering(this->discreteKeys()); - - // Eliminate continuous - HybridBayesTree::shared_ptr bayesTree; - HybridGaussianFactorGraph::shared_ptr discreteGraph; - std::tie(bayesTree, discreteGraph) = - BaseEliminateable::eliminatePartialMultifrontal(continuous_ordering, - function, variableIndex); - - // Get the last continuous conditional which will have all the discrete - const Key last_continuous_key = continuous_ordering.back(); - HybridConditional::shared_ptr last_conditional = - (*bayesTree)[last_continuous_key]->conditional(); - DiscreteKeys discrete_keys = last_conditional->discreteKeys(); - - // If not discrete variables, return the eliminated bayes net. - if (discrete_keys.size() == 0) { - return bayesTree; - } - - // DecisionTree for P'(X|M, Z) for all mode sequences M - const AlgebraicDecisionTree probPrimeTree = - this->continuousProbPrimes(discrete_keys, bayesTree); - - // Add the model selection factor P(M|Z) - discreteGraph->add(DecisionTreeFactor(discrete_keys, probPrimeTree)); - - // Eliminate discrete variables to get the discrete bayes tree. - // This bayes tree will be updated with the - // continuous variables as the child nodes. - HybridBayesTree::shared_ptr updatedBayesTree = - discreteGraph->BaseEliminateable::eliminateMultifrontal(discrete_ordering, - function); - - // Get the clique with all the discrete keys. - // There should only be 1 clique. - const HybridBayesTree::sharedClique discrete_clique = - (*updatedBayesTree)[discrete_ordering.at(0)]; - - std::set clique_set; - for (auto node : bayesTree->nodes()) { - clique_set.insert(node.second); - } - - // Set the root of the bayes tree as the discrete clique - for (auto clique : clique_set) { - if (clique->conditional()->parents() == - discrete_clique->conditional()->frontals()) { - updatedBayesTree->addClique(clique, discrete_clique); - - } else { - // Remove the clique from the children of the parents since it will get - // added again in addClique. 
- auto clique_it = std::find(clique->parent()->children.begin(), - clique->parent()->children.end(), clique); - clique->parent()->children.erase(clique_it); - updatedBayesTree->addClique(clique, clique->parent()); - } - } - return updatedBayesTree; -} - -/* ************************************************************************ */ -boost::shared_ptr -HybridGaussianFactorGraph::eliminateMultifrontal( - OptionalOrderingType orderingType, const Eliminate &function, - OptionalVariableIndex variableIndex) const { - return BaseEliminateable::eliminateMultifrontal(orderingType, function, - variableIndex); -} - -/* ************************************************************************ */ -boost::shared_ptr -HybridGaussianFactorGraph::eliminateMultifrontal( - const Ordering &ordering, const Eliminate &function, - OptionalVariableIndex variableIndex) const { - // Segregate the continuous and the discrete keys - Ordering continuous_ordering, discrete_ordering; - std::tie(continuous_ordering, discrete_ordering) = - this->separateContinuousDiscreteOrdering(ordering); - - return this->eliminateHybridMultifrontal( - continuous_ordering, discrete_ordering, function, variableIndex); -} } // namespace gtsam diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.h b/gtsam/hybrid/HybridGaussianFactorGraph.h index a0d80b154..210ce50e9 100644 --- a/gtsam/hybrid/HybridGaussianFactorGraph.h +++ b/gtsam/hybrid/HybridGaussianFactorGraph.h @@ -302,31 +302,7 @@ class GTSAM_EXPORT HybridGaussianFactorGraph std::pair separateContinuousDiscreteOrdering( const Ordering& ordering) const; - /** - * @brief Custom elimination function which computes the correct - * continuous probabilities. Returns a bayes tree. - * - * @param continuous Optional ordering for all continuous variables. - * @param discrete Optional ordering for all discrete variables. 
- * @return boost::shared_ptr - */ - boost::shared_ptr eliminateHybridMultifrontal( - const boost::optional continuous = boost::none, - const boost::optional discrete = boost::none, - const Eliminate& function = EliminationTraitsType::DefaultEliminate, - OptionalVariableIndex variableIndex = boost::none) const; - - /// Multifrontal elimination overload for hybrid - boost::shared_ptr eliminateMultifrontal( - OptionalOrderingType orderingType = boost::none, - const Eliminate& function = EliminationTraitsType::DefaultEliminate, - OptionalVariableIndex variableIndex = boost::none) const; - - /// Multifrontal elimination overload for hybrid - boost::shared_ptr eliminateMultifrontal( - const Ordering& ordering, - const Eliminate& function = EliminationTraitsType::DefaultEliminate, - OptionalVariableIndex variableIndex = boost::none) const; + /** * @brief Return a Colamd constrained ordering where the discrete keys are diff --git a/gtsam/hybrid/HybridJunctionTree.cpp b/gtsam/hybrid/HybridJunctionTree.cpp index 422c200a4..f233d4bef 100644 --- a/gtsam/hybrid/HybridJunctionTree.cpp +++ b/gtsam/hybrid/HybridJunctionTree.cpp @@ -142,7 +142,8 @@ struct HybridConstructorTraversalData { /* ************************************************************************* */ HybridJunctionTree::HybridJunctionTree( - const HybridEliminationTree& eliminationTree) { + const HybridEliminationTree& eliminationTree) + : etree_(eliminationTree) { gttic(JunctionTree_FromEliminationTree); // Here we rely on the BayesNet having been produced by this elimination tree, // such that the conditionals are arranged in DFS post-order. 
We traverse the @@ -169,4 +170,140 @@ HybridJunctionTree::HybridJunctionTree( Base::remainingFactors_ = eliminationTree.remainingFactors(); } +/* ************************************************************************* */ +std::pair, + boost::shared_ptr> +HybridJunctionTree::eliminateContinuous( + const Eliminate& function, const HybridGaussianFactorGraph& graph, + const Ordering& continuous_ordering) const { + HybridBayesTree::shared_ptr continuousBayesTree; + HybridGaussianFactorGraph::shared_ptr discreteGraph; + + if (continuous_ordering.size() > 0) { + HybridEliminationTree continuous_etree(graph, etree_.variableIndex(), + continuous_ordering); + + This continuous_junction_tree(continuous_etree); + std::tie(continuousBayesTree, discreteGraph) = + continuous_junction_tree.Base::eliminate(function); + + } else { + continuousBayesTree = boost::make_shared(); + discreteGraph = boost::make_shared(graph); + } + + return std::make_pair(continuousBayesTree, discreteGraph); +} +/* ************************************************************************* */ +std::pair, + boost::shared_ptr> +HybridJunctionTree::eliminateDiscrete( + const Eliminate& function, + const HybridBayesTree::shared_ptr& continuousBayesTree, + const HybridGaussianFactorGraph::shared_ptr& discreteGraph, + const Ordering& discrete_ordering) const { + HybridBayesTree::shared_ptr updatedBayesTree; + HybridGaussianFactorGraph::shared_ptr finalGraph; + if (discrete_ordering.size() > 0) { + HybridEliminationTree discrete_etree( + *discreteGraph, VariableIndex(*discreteGraph), discrete_ordering); + + This discrete_junction_tree(discrete_etree); + + std::tie(updatedBayesTree, finalGraph) = + discrete_junction_tree.Base::eliminate(function); + + // Get the clique with all the discrete keys. + // There should only be 1 clique. 
+ const HybridBayesTree::sharedClique discrete_clique = + (*updatedBayesTree)[discrete_ordering.at(0)]; + + std::set clique_set; + for (auto node : continuousBayesTree->nodes()) { + clique_set.insert(node.second); + } + + // Set the root of the bayes tree as the discrete clique + for (auto clique : clique_set) { + if (clique->conditional()->parents() == + discrete_clique->conditional()->frontals()) { + updatedBayesTree->addClique(clique, discrete_clique); + + } else { + if (clique->parent()) { + // Remove the clique from the children of the parents since it will + // get added again in addClique. + auto clique_it = std::find(clique->parent()->children.begin(), + clique->parent()->children.end(), clique); + clique->parent()->children.erase(clique_it); + updatedBayesTree->addClique(clique, clique->parent()); + } else { + updatedBayesTree->addClique(clique); + } + } + } + } else { + updatedBayesTree = continuousBayesTree; + finalGraph = discreteGraph; + } + + return std::make_pair(updatedBayesTree, finalGraph); +} + +/* ************************************************************************* */ +boost::shared_ptr HybridJunctionTree::addProbPrimes( + const HybridGaussianFactorGraph& graph, + const HybridBayesTree::shared_ptr& continuousBayesTree, + const HybridGaussianFactorGraph::shared_ptr& discreteGraph, + const Ordering& continuous_ordering, + const Ordering& discrete_ordering) const { + // If we have eliminated continuous variables + // and have discrete variables to eliminate, + // then compute P(X | M, Z) + if (continuous_ordering.size() > 0 && discrete_ordering.size() > 0) { + // Collect all the discrete keys + DiscreteKeys discrete_keys; + for (auto node : continuousBayesTree->nodes()) { + auto node_dkeys = node.second->conditional()->discreteKeys(); + discrete_keys.insert(discrete_keys.end(), node_dkeys.begin(), + node_dkeys.end()); + } + // Remove duplicates and convert back to DiscreteKeys + std::set dkeys_set(discrete_keys.begin(), discrete_keys.end()); + 
discrete_keys = DiscreteKeys(dkeys_set.begin(), dkeys_set.end()); + + // DecisionTree for P'(X|M, Z) for all mode sequences M + const AlgebraicDecisionTree probPrimeTree = + graph.continuousProbPrimes(discrete_keys, continuousBayesTree); + + // Add the model selection factor P(M|Z) + discreteGraph->add(DecisionTreeFactor(discrete_keys, probPrimeTree)); + } + + return discreteGraph; +} + +/* ************************************************************************* */ +std::pair +HybridJunctionTree::eliminate(const Eliminate& function) const { + Ordering continuous_ordering = etree_.continuousOrdering(); + Ordering discrete_ordering = etree_.discreteOrdering(); + + FactorGraphType graph = etree_.graph(); + + // Eliminate continuous + BayesTreeType::shared_ptr continuousBayesTree; + FactorGraphType::shared_ptr discreteGraph; + std::tie(continuousBayesTree, discreteGraph) = + this->eliminateContinuous(function, graph, continuous_ordering); + + FactorGraphType::shared_ptr updatedDiscreteGraph = + this->addProbPrimes(graph, continuousBayesTree, discreteGraph, + continuous_ordering, discrete_ordering); + + // Eliminate discrete variables to get the discrete bayes tree. + return this->eliminateDiscrete(function, continuousBayesTree, + updatedDiscreteGraph, discrete_ordering); +} + } // namespace gtsam diff --git a/gtsam/hybrid/HybridJunctionTree.h b/gtsam/hybrid/HybridJunctionTree.h index 4b0c369a8..2dc13d5e3 100644 --- a/gtsam/hybrid/HybridJunctionTree.h +++ b/gtsam/hybrid/HybridJunctionTree.h @@ -51,10 +51,15 @@ class HybridEliminationTree; */ class GTSAM_EXPORT HybridJunctionTree : public JunctionTree { + /// Record the elimination tree for use in hybrid elimination. + HybridEliminationTree etree_; + /// Store the provided variable index. 
+ VariableIndex variable_index_; + public: typedef JunctionTree Base; ///< Base class - typedef HybridJunctionTree This; ///< This class + typedef HybridJunctionTree This; ///< This class typedef boost::shared_ptr shared_ptr; ///< Shared pointer to this class /** @@ -67,6 +72,66 @@ class GTSAM_EXPORT HybridJunctionTree * @return The elimination tree */ HybridJunctionTree(const HybridEliminationTree& eliminationTree); + + /** + * @brief + * + * @param function + * @param graph + * @param continuous_ordering + * @return std::pair, + * boost::shared_ptr> + */ + std::pair, + boost::shared_ptr> + eliminateContinuous(const Eliminate& function, + const HybridGaussianFactorGraph& graph, + const Ordering& continuous_ordering) const; + + /** + * @brief + * + * @param function + * @param continuousBayesTree + * @param discreteGraph + * @param discrete_ordering + * @return std::pair, + * boost::shared_ptr> + */ + std::pair, + boost::shared_ptr> + eliminateDiscrete(const Eliminate& function, + const HybridBayesTree::shared_ptr& continuousBayesTree, + const HybridGaussianFactorGraph::shared_ptr& discreteGraph, + const Ordering& discrete_ordering) const; + + /** + * @brief + * + * @param graph + * @param continuousBayesTree + * @param discreteGraph + * @param continuous_ordering + * @param discrete_ordering + * @return boost::shared_ptr + */ + boost::shared_ptr addProbPrimes( + const HybridGaussianFactorGraph& graph, + const HybridBayesTree::shared_ptr& continuousBayesTree, + const HybridGaussianFactorGraph::shared_ptr& discreteGraph, + const Ordering& continuous_ordering, + const Ordering& discrete_ordering) const; + + /** + * @brief + * + * @param function + * @return std::pair, + * boost::shared_ptr> + */ + std::pair, + boost::shared_ptr> + eliminate(const Eliminate& function) const; }; } // namespace gtsam diff --git a/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp b/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp index 248d71d5f..6288bcd93 100644 --- 
a/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp @@ -182,8 +182,9 @@ TEST(HybridGaussianFactorGraph, eliminateFullMultifrontalSimple) { boost::make_shared(X(1), I_3x3, Vector3::Ones())})); hfg.add(DecisionTreeFactor(m1, {2, 8})); - //TODO(Varun) Adding extra discrete variable not connected to continuous variable throws segfault - // hfg.add(DecisionTreeFactor({{M(1), 2}, {M(2), 2}}, "1 2 3 4")); + // TODO(Varun) Adding extra discrete variable not connected to continuous + // variable throws segfault + // hfg.add(DecisionTreeFactor({{M(1), 2}, {M(2), 2}}, "1 2 3 4")); HybridBayesTree::shared_ptr result = hfg.eliminateMultifrontal(hfg.getHybridOrdering()); @@ -276,7 +277,7 @@ TEST(HybridGaussianFactorGraph, eliminateFullMultifrontalTwoClique) { std::tie(hbt, remaining) = hfg.eliminatePartialMultifrontal(ordering_full); // 9 cliques in the bayes tree and 0 remaining variables to eliminate. - EXPECT_LONGS_EQUAL(9, hbt->size()); + EXPECT_LONGS_EQUAL(7, hbt->size()); EXPECT_LONGS_EQUAL(0, remaining->size()); /* From ae0b3e3902d929bd47bec4579634c2825986d5ce Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Sun, 4 Dec 2022 18:21:22 +0530 Subject: [PATCH 32/51] split up the eliminate method to constituent parts --- gtsam/hybrid/HybridEliminationTree.cpp | 88 ++++++++++++++++++++++++ gtsam/hybrid/HybridEliminationTree.h | 95 ++++++-------------------- 2 files changed, 107 insertions(+), 76 deletions(-) diff --git a/gtsam/hybrid/HybridEliminationTree.cpp b/gtsam/hybrid/HybridEliminationTree.cpp index fe9190571..e54105919 100644 --- a/gtsam/hybrid/HybridEliminationTree.cpp +++ b/gtsam/hybrid/HybridEliminationTree.cpp @@ -47,4 +47,92 @@ bool HybridEliminationTree::equals(const This& other, double tol) const { return Base::equals(other, tol); } +/* ************************************************************************* */ +std::pair, + boost::shared_ptr> +HybridEliminationTree::eliminateContinuous(Eliminate 
function) const { + if (continuous_ordering_.size() > 0) { + This continuous_etree(graph_, variable_index_, continuous_ordering_); + return continuous_etree.Base::eliminate(function); + + } else { + HybridBayesNet::shared_ptr bayesNet = boost::make_shared(); + HybridGaussianFactorGraph::shared_ptr discreteGraph = + boost::make_shared(graph_); + return std::make_pair(bayesNet, discreteGraph); + } +} + +/* ************************************************************************* */ +boost::shared_ptr +HybridEliminationTree::addProbPrimes( + const HybridBayesNet::shared_ptr& continuousBayesNet, + const HybridGaussianFactorGraph::shared_ptr& discreteGraph) const { + if (continuous_ordering_.size() > 0 && discrete_ordering_.size() > 0) { + // Get the last continuous conditional + // which will have all the discrete keys + HybridConditional::shared_ptr last_conditional = + continuousBayesNet->at(continuousBayesNet->size() - 1); + DiscreteKeys discrete_keys = last_conditional->discreteKeys(); + + // DecisionTree for P'(X|M, Z) for all mode sequences M + const AlgebraicDecisionTree probPrimeTree = + graph_.continuousProbPrimes(discrete_keys, continuousBayesNet); + + // Add the model selection factor P(M|Z) + discreteGraph->add(DecisionTreeFactor(discrete_keys, probPrimeTree)); + } + return discreteGraph; +} + +/* ************************************************************************* */ +std::pair, + boost::shared_ptr> +HybridEliminationTree::eliminateDiscrete( + Eliminate function, + const HybridGaussianFactorGraph::shared_ptr& discreteGraph) const { + HybridBayesNet::shared_ptr discreteBayesNet; + HybridGaussianFactorGraph::shared_ptr finalGraph; + if (discrete_ordering_.size() > 0) { + This discrete_etree(*discreteGraph, VariableIndex(*discreteGraph), + discrete_ordering_); + + std::tie(discreteBayesNet, finalGraph) = + discrete_etree.Base::eliminate(function); + + } else { + discreteBayesNet = boost::make_shared(); + finalGraph = discreteGraph; + } + + return 
std::make_pair(discreteBayesNet, finalGraph); +} + +/* ************************************************************************* */ +std::pair, + boost::shared_ptr> +HybridEliminationTree::eliminate(Eliminate function) const { + // Perform continuous elimination + HybridBayesNet::shared_ptr bayesNet; + HybridGaussianFactorGraph::shared_ptr discreteGraph; + std::tie(bayesNet, discreteGraph) = this->eliminateContinuous(function); + + // If we have eliminated continuous variables + // and have discrete variables to eliminate, + // then compute P(X | M, Z) + HybridGaussianFactorGraph::shared_ptr updatedDiscreteGraph = + addProbPrimes(bayesNet, discreteGraph); + + // Perform discrete elimination + HybridBayesNet::shared_ptr discreteBayesNet; + HybridGaussianFactorGraph::shared_ptr finalGraph; + std::tie(discreteBayesNet, finalGraph) = + eliminateDiscrete(function, updatedDiscreteGraph); + + // Add the discrete conditionals to the hybrid conditionals + bayesNet->add(*discreteBayesNet); + + return std::make_pair(bayesNet, finalGraph); +} + } // namespace gtsam diff --git a/gtsam/hybrid/HybridEliminationTree.h b/gtsam/hybrid/HybridEliminationTree.h index 65d614ca3..9186e04a8 100644 --- a/gtsam/hybrid/HybridEliminationTree.h +++ b/gtsam/hybrid/HybridEliminationTree.h @@ -80,22 +80,12 @@ class GTSAM_EXPORT HybridEliminationTree * and the original graph. * * @param function Elimination function for hybrid elimination. 
- * @return std::pair, - * boost::shared_ptr > + * @return std::pair, + * boost::shared_ptr > */ - std::pair, boost::shared_ptr> - eliminateContinuous(Eliminate function) const { - if (continuous_ordering_.size() > 0) { - This continuous_etree(graph_, variable_index_, continuous_ordering_); - return continuous_etree.Base::eliminate(function); - - } else { - BayesNetType::shared_ptr bayesNet = boost::make_shared(); - FactorGraphType::shared_ptr discreteGraph = - boost::make_shared(graph_); - return std::make_pair(bayesNet, discreteGraph); - } - } + std::pair, + boost::shared_ptr> + eliminateContinuous(Eliminate function) const; /** * @brief Helper method to eliminate the discrete variables after the @@ -104,75 +94,28 @@ class GTSAM_EXPORT HybridEliminationTree * If there are no discrete variables, return an empty bayes net and the * discreteGraph which is passed in. * - * @param function Elimination function + * @param function Hybrid elimination function * @param discreteGraph The factor graph with the factor ϕ(X | M, Z). - * @return std::pair, - * boost::shared_ptr > + * @return std::pair, + * boost::shared_ptr > */ - std::pair, boost::shared_ptr> - eliminateDiscrete(Eliminate function, - const FactorGraphType::shared_ptr& discreteGraph) const { - BayesNetType::shared_ptr discreteBayesNet; - FactorGraphType::shared_ptr finalGraph; - if (discrete_ordering_.size() > 0) { - This discrete_etree(*discreteGraph, VariableIndex(*discreteGraph), - discrete_ordering_); - - std::tie(discreteBayesNet, finalGraph) = - discrete_etree.Base::eliminate(function); - - } else { - discreteBayesNet = boost::make_shared(); - finalGraph = discreteGraph; - } - - return std::make_pair(discreteBayesNet, finalGraph); - } + std::pair, + boost::shared_ptr> + eliminateDiscrete( + Eliminate function, + const HybridGaussianFactorGraph::shared_ptr& discreteGraph) const; /** * @brief Override the EliminationTree eliminate method * so we can perform hybrid elimination correctly. 
* - * @param function - * @return std::pair, - * boost::shared_ptr > + * @param function Hybrid elimination function + * @return std::pair, + * boost::shared_ptr > */ - std::pair, boost::shared_ptr> - eliminate(Eliminate function) const { - // Perform continuous elimination - BayesNetType::shared_ptr bayesNet; - FactorGraphType::shared_ptr discreteGraph; - std::tie(bayesNet, discreteGraph) = this->eliminateContinuous(function); - - // If we have eliminated continuous variables - // and have discrete variables to eliminate, - // then compute P(X | M, Z) - if (continuous_ordering_.size() > 0 && discrete_ordering_.size() > 0) { - // Get the last continuous conditional - // which will have all the discrete keys - HybridConditional::shared_ptr last_conditional = - bayesNet->at(bayesNet->size() - 1); - DiscreteKeys discrete_keys = last_conditional->discreteKeys(); - - // DecisionTree for P'(X|M, Z) for all mode sequences M - const AlgebraicDecisionTree probPrimeTree = - graph_.continuousProbPrimes(discrete_keys, bayesNet); - - // Add the model selection factor P(M|Z) - discreteGraph->add(DecisionTreeFactor(discrete_keys, probPrimeTree)); - } - - // Perform discrete elimination - BayesNetType::shared_ptr discreteBayesNet; - FactorGraphType::shared_ptr finalGraph; - std::tie(discreteBayesNet, finalGraph) = - eliminateDiscrete(function, discreteGraph); - - // Add the discrete conditionals to the hybrid conditionals - bayesNet->add(*discreteBayesNet); - - return std::make_pair(bayesNet, finalGraph); - } + std::pair, + boost::shared_ptr> + eliminate(Eliminate function) const; Ordering continuousOrdering() const { return continuous_ordering_; } Ordering discreteOrdering() const { return discrete_ordering_; } From bed56e06932c8ab6b5875e6ae6b9026befdfe785 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Sun, 4 Dec 2022 18:21:57 +0530 Subject: [PATCH 33/51] mark helper methods as protected and add docstrings --- gtsam/hybrid/HybridEliminationTree.h | 18 ++++++++ 
gtsam/hybrid/HybridJunctionTree.cpp | 13 +++--- gtsam/hybrid/HybridJunctionTree.h | 63 +++++++++++++++------------- 3 files changed, 59 insertions(+), 35 deletions(-) diff --git a/gtsam/hybrid/HybridEliminationTree.h b/gtsam/hybrid/HybridEliminationTree.h index 9186e04a8..9b6854026 100644 --- a/gtsam/hybrid/HybridEliminationTree.h +++ b/gtsam/hybrid/HybridEliminationTree.h @@ -73,6 +73,7 @@ class GTSAM_EXPORT HybridEliminationTree /** Test whether the tree is equal to another */ bool equals(const This& other, double tol = 1e-9) const; + protected: /** * @brief Helper method to eliminate continuous variables. * @@ -87,6 +88,22 @@ class GTSAM_EXPORT HybridEliminationTree boost::shared_ptr> eliminateContinuous(Eliminate function) const; + /** + * @brief Compute the unnormalized probability P'(X | M, Z) + * for the factor graph in each leaf of the discrete tree. + * The discrete decision tree formed as a result is added to the + * `discreteGraph` for discrete elimination. + * + * @param continuousBayesNet The bayes nets corresponding to + * the eliminated continuous variables. + * @param discreteGraph Factor graph consisting of factors + * on discrete variables only. + * @return boost::shared_ptr + */ + boost::shared_ptr addProbPrimes( + const HybridBayesNet::shared_ptr& continuousBayesNet, + const HybridGaussianFactorGraph::shared_ptr& discreteGraph) const; + /** * @brief Helper method to eliminate the discrete variables after the * continuous variables have been eliminated. @@ -105,6 +122,7 @@ class GTSAM_EXPORT HybridEliminationTree Eliminate function, const HybridGaussianFactorGraph::shared_ptr& discreteGraph) const; + public: /** * @brief Override the EliminationTree eliminate method * so we can perform hybrid elimination correctly. 
diff --git a/gtsam/hybrid/HybridJunctionTree.cpp b/gtsam/hybrid/HybridJunctionTree.cpp index f233d4bef..43b043e30 100644 --- a/gtsam/hybrid/HybridJunctionTree.cpp +++ b/gtsam/hybrid/HybridJunctionTree.cpp @@ -252,11 +252,11 @@ HybridJunctionTree::eliminateDiscrete( /* ************************************************************************* */ boost::shared_ptr HybridJunctionTree::addProbPrimes( - const HybridGaussianFactorGraph& graph, const HybridBayesTree::shared_ptr& continuousBayesTree, - const HybridGaussianFactorGraph::shared_ptr& discreteGraph, - const Ordering& continuous_ordering, - const Ordering& discrete_ordering) const { + const HybridGaussianFactorGraph::shared_ptr& discreteGraph) const { + Ordering continuous_ordering = etree_.continuousOrdering(); + Ordering discrete_ordering = etree_.discreteOrdering(); + // If we have eliminated continuous variables // and have discrete variables to eliminate, // then compute P(X | M, Z) @@ -272,6 +272,8 @@ boost::shared_ptr HybridJunctionTree::addProbPrimes( std::set dkeys_set(discrete_keys.begin(), discrete_keys.end()); discrete_keys = DiscreteKeys(dkeys_set.begin(), dkeys_set.end()); + FactorGraphType graph = etree_.graph(); + // DecisionTree for P'(X|M, Z) for all mode sequences M const AlgebraicDecisionTree probPrimeTree = graph.continuousProbPrimes(discrete_keys, continuousBayesTree); @@ -298,8 +300,7 @@ HybridJunctionTree::eliminate(const Eliminate& function) const { this->eliminateContinuous(function, graph, continuous_ordering); FactorGraphType::shared_ptr updatedDiscreteGraph = - this->addProbPrimes(graph, continuousBayesTree, discreteGraph, - continuous_ordering, discrete_ordering); + this->addProbPrimes(continuousBayesTree, discreteGraph); // Eliminate discrete variables to get the discrete bayes tree. 
return this->eliminateDiscrete(function, continuousBayesTree, diff --git a/gtsam/hybrid/HybridJunctionTree.h b/gtsam/hybrid/HybridJunctionTree.h index 2dc13d5e3..d0473c33d 100644 --- a/gtsam/hybrid/HybridJunctionTree.h +++ b/gtsam/hybrid/HybridJunctionTree.h @@ -73,14 +73,15 @@ class GTSAM_EXPORT HybridJunctionTree */ HybridJunctionTree(const HybridEliminationTree& eliminationTree); + protected: /** - * @brief - * - * @param function - * @param graph - * @param continuous_ordering + * @brief Eliminate all the continuous variables from the factor graph. + * + * @param function The hybrid elimination function. + * @param graph The factor graph to eliminate. + * @param continuous_ordering The ordering of continuous variables. * @return std::pair, - * boost::shared_ptr> + * boost::shared_ptr> */ std::pair, boost::shared_ptr> @@ -89,14 +90,17 @@ class GTSAM_EXPORT HybridJunctionTree const Ordering& continuous_ordering) const; /** - * @brief - * - * @param function - * @param continuousBayesTree - * @param discreteGraph - * @param discrete_ordering + * @brief Eliminate all the discrete variables in the hybrid factor graph. + * + * @param function The hybrid elimination function. + * @param continuousBayesTree The bayes tree corresponding to + * the eliminated continuous variables. + * @param discreteGraph Factor graph of factors containing + * only discrete variables. + * @param discrete_ordering The elimination ordering for + * the discrete variables. * @return std::pair, - * boost::shared_ptr> + * boost::shared_ptr> */ std::pair, boost::shared_ptr> @@ -106,28 +110,29 @@ class GTSAM_EXPORT HybridJunctionTree const Ordering& discrete_ordering) const; /** - * @brief - * - * @param graph - * @param continuousBayesTree - * @param discreteGraph - * @param continuous_ordering - * @param discrete_ordering - * @return boost::shared_ptr + * @brief Compute the unnormalized probability P'(X | M, Z) + * for the factor graph in each leaf of the discrete tree. 
+ * The discrete decision tree formed as a result is added to the + * `discreteGraph` for discrete elimination. + * + * @param continuousBayesTree The bayes tree corresponding to + * the eliminated continuous variables. + * @param discreteGraph Factor graph consisting of factors + * on discrete variables only. + * @return boost::shared_ptr */ boost::shared_ptr addProbPrimes( - const HybridGaussianFactorGraph& graph, const HybridBayesTree::shared_ptr& continuousBayesTree, - const HybridGaussianFactorGraph::shared_ptr& discreteGraph, - const Ordering& continuous_ordering, - const Ordering& discrete_ordering) const; + const HybridGaussianFactorGraph::shared_ptr& discreteGraph) const; + public: /** - * @brief - * - * @param function + * @brief Override the eliminate method so we can + * perform hybrid elimination correctly. + * + * @param function The hybrid elimination function. * @return std::pair, - * boost::shared_ptr> + * boost::shared_ptr> */ std::pair, boost::shared_ptr> From 5fc114fad27ba308e5819e6c59dd4402200dd9bb Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Sun, 4 Dec 2022 18:24:16 +0530 Subject: [PATCH 34/51] more unit tests --- gtsam/hybrid/tests/testHybridEstimation.cpp | 25 +++++++++++++++++++-- 1 file changed, 23 insertions(+), 2 deletions(-) diff --git a/gtsam/hybrid/tests/testHybridEstimation.cpp b/gtsam/hybrid/tests/testHybridEstimation.cpp index 431e5769f..85be0ab31 100644 --- a/gtsam/hybrid/tests/testHybridEstimation.cpp +++ b/gtsam/hybrid/tests/testHybridEstimation.cpp @@ -70,6 +70,28 @@ Ordering getOrdering(HybridGaussianFactorGraph& factors, return ordering; } +TEST(HybridEstimation, Full) { + size_t K = 3; + std::vector measurements = {0, 1, 2}; + // Ground truth discrete seq + std::vector discrete_seq = {1, 1, 0}; + // Switching example of robot moving in 1D + // with given measurements and equal mode priors. 
+ Switching switching(K, 1.0, 0.1, measurements, "1/1 1/1"); + HybridGaussianFactorGraph graph = switching.linearizedFactorGraph; + + Ordering hybridOrdering; + hybridOrdering += X(0); + hybridOrdering += X(1); + hybridOrdering += X(2); + hybridOrdering += M(0); + hybridOrdering += M(1); + HybridBayesNet::shared_ptr bayesNet = + graph.eliminateSequential(hybridOrdering); + + EXPECT_LONGS_EQUAL(5, bayesNet->size()); +} + /****************************************************************************/ // Test approximate inference with an additional pruning step. TEST(HybridEstimation, Incremental) { @@ -311,8 +333,7 @@ TEST(HybridEstimation, Probability) { discreteGraph->add(DecisionTreeFactor(discrete_keys, probPrimeTree)); Ordering discrete(graph.discreteKeys()); - auto discreteBayesNet = - discreteGraph->BaseEliminateable::eliminateSequential(discrete); + auto discreteBayesNet = discreteGraph->eliminateSequential(); bayesNet->add(*discreteBayesNet); HybridValues hybrid_values = bayesNet->optimize(); From 22e4a733e0bf6dfa2b27947b679da27472753747 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Sun, 4 Dec 2022 18:36:36 +0530 Subject: [PATCH 35/51] Add details about the role of the HybridEliminationTree in hybrid elimination --- gtsam/hybrid/HybridEliminationTree.h | 17 ++++++++++++++++- 1 file changed, 16 insertions(+), 1 deletion(-) diff --git a/gtsam/hybrid/HybridEliminationTree.h b/gtsam/hybrid/HybridEliminationTree.h index 9b6854026..4802c2f89 100644 --- a/gtsam/hybrid/HybridEliminationTree.h +++ b/gtsam/hybrid/HybridEliminationTree.h @@ -24,7 +24,22 @@ namespace gtsam { /** - * Elimination Tree type for Hybrid + * Elimination Tree type for Hybrid Factor Graphs. + * + * The elimination tree helps in elimination by specifying the parents to which + * the "combined factor" after elimination should be assigned to. + * + * In the case of hybrid elimination, we use the elimination tree to store the + * intermediate results. 
+ * Concretely, we wish to estimate the unnormalized probability P'(X | M, Z) for + * each mode assignment M. + * P'(X | M, Z) can be computed by estimating X* which maximizes the continuous + * factor graph given the discrete modes, and then computing the unnormalized + * probability as P(X=X* | M, Z). + * + * This is done by eliminating the (continuous) factor graph present at each + * leaf of the discrete tree formed for the discrete sequence and using the + * inferred X* to compute the `probPrime`. * * @ingroup hybrid */ From 0596b2f543a7327d4e89d2999c578883756956ae Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Sat, 10 Dec 2022 09:46:26 +0530 Subject: [PATCH 36/51] remove unrequired code --- gtsam/hybrid/HybridEliminationTree.cpp | 100 +----------------- gtsam/hybrid/HybridEliminationTree.h | 90 ---------------- gtsam/hybrid/HybridJunctionTree.cpp | 140 +------------------------ gtsam/hybrid/HybridJunctionTree.h | 69 ------------ 4 files changed, 3 insertions(+), 396 deletions(-) diff --git a/gtsam/hybrid/HybridEliminationTree.cpp b/gtsam/hybrid/HybridEliminationTree.cpp index e54105919..c2df2dd60 100644 --- a/gtsam/hybrid/HybridEliminationTree.cpp +++ b/gtsam/hybrid/HybridEliminationTree.cpp @@ -27,112 +27,16 @@ template class EliminationTree; HybridEliminationTree::HybridEliminationTree( const HybridGaussianFactorGraph& factorGraph, const VariableIndex& structure, const Ordering& order) - : Base(factorGraph, structure, order), - graph_(factorGraph), - variable_index_(structure) { - // Segregate the continuous and the discrete keys - std::tie(continuous_ordering_, discrete_ordering_) = - graph_.separateContinuousDiscreteOrdering(order); -} + : Base(factorGraph, structure, order) {} /* ************************************************************************* */ HybridEliminationTree::HybridEliminationTree( const HybridGaussianFactorGraph& factorGraph, const Ordering& order) - : Base(factorGraph, order), - graph_(factorGraph), - 
variable_index_(VariableIndex(factorGraph)) {} + : Base(factorGraph, order) {} /* ************************************************************************* */ bool HybridEliminationTree::equals(const This& other, double tol) const { return Base::equals(other, tol); } -/* ************************************************************************* */ -std::pair, - boost::shared_ptr> -HybridEliminationTree::eliminateContinuous(Eliminate function) const { - if (continuous_ordering_.size() > 0) { - This continuous_etree(graph_, variable_index_, continuous_ordering_); - return continuous_etree.Base::eliminate(function); - - } else { - HybridBayesNet::shared_ptr bayesNet = boost::make_shared(); - HybridGaussianFactorGraph::shared_ptr discreteGraph = - boost::make_shared(graph_); - return std::make_pair(bayesNet, discreteGraph); - } -} - -/* ************************************************************************* */ -boost::shared_ptr -HybridEliminationTree::addProbPrimes( - const HybridBayesNet::shared_ptr& continuousBayesNet, - const HybridGaussianFactorGraph::shared_ptr& discreteGraph) const { - if (continuous_ordering_.size() > 0 && discrete_ordering_.size() > 0) { - // Get the last continuous conditional - // which will have all the discrete keys - HybridConditional::shared_ptr last_conditional = - continuousBayesNet->at(continuousBayesNet->size() - 1); - DiscreteKeys discrete_keys = last_conditional->discreteKeys(); - - // DecisionTree for P'(X|M, Z) for all mode sequences M - const AlgebraicDecisionTree probPrimeTree = - graph_.continuousProbPrimes(discrete_keys, continuousBayesNet); - - // Add the model selection factor P(M|Z) - discreteGraph->add(DecisionTreeFactor(discrete_keys, probPrimeTree)); - } - return discreteGraph; -} - -/* ************************************************************************* */ -std::pair, - boost::shared_ptr> -HybridEliminationTree::eliminateDiscrete( - Eliminate function, - const HybridGaussianFactorGraph::shared_ptr& discreteGraph) 
const { - HybridBayesNet::shared_ptr discreteBayesNet; - HybridGaussianFactorGraph::shared_ptr finalGraph; - if (discrete_ordering_.size() > 0) { - This discrete_etree(*discreteGraph, VariableIndex(*discreteGraph), - discrete_ordering_); - - std::tie(discreteBayesNet, finalGraph) = - discrete_etree.Base::eliminate(function); - - } else { - discreteBayesNet = boost::make_shared(); - finalGraph = discreteGraph; - } - - return std::make_pair(discreteBayesNet, finalGraph); -} - -/* ************************************************************************* */ -std::pair, - boost::shared_ptr> -HybridEliminationTree::eliminate(Eliminate function) const { - // Perform continuous elimination - HybridBayesNet::shared_ptr bayesNet; - HybridGaussianFactorGraph::shared_ptr discreteGraph; - std::tie(bayesNet, discreteGraph) = this->eliminateContinuous(function); - - // If we have eliminated continuous variables - // and have discrete variables to eliminate, - // then compute P(X | M, Z) - HybridGaussianFactorGraph::shared_ptr updatedDiscreteGraph = - addProbPrimes(bayesNet, discreteGraph); - - // Perform discrete elimination - HybridBayesNet::shared_ptr discreteBayesNet; - HybridGaussianFactorGraph::shared_ptr finalGraph; - std::tie(discreteBayesNet, finalGraph) = - eliminateDiscrete(function, updatedDiscreteGraph); - - // Add the discrete conditionals to the hybrid conditionals - bayesNet->add(*discreteBayesNet); - - return std::make_pair(bayesNet, finalGraph); -} - } // namespace gtsam diff --git a/gtsam/hybrid/HybridEliminationTree.h b/gtsam/hybrid/HybridEliminationTree.h index 4802c2f89..941fefa5a 100644 --- a/gtsam/hybrid/HybridEliminationTree.h +++ b/gtsam/hybrid/HybridEliminationTree.h @@ -26,21 +26,6 @@ namespace gtsam { /** * Elimination Tree type for Hybrid Factor Graphs. * - * The elimination tree helps in elimination by specifying the parents to which - * the "combined factor" after elimination should be assigned to. 
- * - * In the case of hybrid elimination, we use the elimination tree to store the - * intermediate results. - * Concretely, we wish to estimate the unnormalized probability P'(X | M, Z) for - * each mode assignment M. - * P'(X | M, Z) can be computed by estimating X* which maximizes the continuous - * factor graph given the discrete modes, and then computing the unnormalized - * probability as P(X=X* | M, Z). - * - * This is done by eliminating the (continuous) factor graph present at each - * leaf of the discrete tree formed for the discrete sequence and using the - * inferred X* to compute the `probPrime`. - * * @ingroup hybrid */ class GTSAM_EXPORT HybridEliminationTree @@ -48,12 +33,6 @@ class GTSAM_EXPORT HybridEliminationTree private: friend class ::EliminationTreeTester; - Ordering continuous_ordering_, discrete_ordering_; - /// Used to store the original factor graph to eliminate - HybridGaussianFactorGraph graph_; - /// Store the provided variable index. - VariableIndex variable_index_; - public: typedef EliminationTree Base; ///< Base class @@ -87,75 +66,6 @@ class GTSAM_EXPORT HybridEliminationTree /** Test whether the tree is equal to another */ bool equals(const This& other, double tol = 1e-9) const; - - protected: - /** - * @brief Helper method to eliminate continuous variables. - * - * If no continuous variables exist, return an empty bayes net - * and the original graph. - * - * @param function Elimination function for hybrid elimination. - * @return std::pair, - * boost::shared_ptr > - */ - std::pair, - boost::shared_ptr> - eliminateContinuous(Eliminate function) const; - - /** - * @brief Compute the unnormalized probability P'(X | M, Z) - * for the factor graph in each leaf of the discrete tree. - * The discrete decision tree formed as a result is added to the - * `discreteGraph` for discrete elimination. - * - * @param continuousBayesNet The bayes nets corresponding to - * the eliminated continuous variables. 
- * @param discreteGraph Factor graph consisting of factors - * on discrete variables only. - * @return boost::shared_ptr - */ - boost::shared_ptr addProbPrimes( - const HybridBayesNet::shared_ptr& continuousBayesNet, - const HybridGaussianFactorGraph::shared_ptr& discreteGraph) const; - - /** - * @brief Helper method to eliminate the discrete variables after the - * continuous variables have been eliminated. - * - * If there are no discrete variables, return an empty bayes net and the - * discreteGraph which is passed in. - * - * @param function Hybrid elimination function - * @param discreteGraph The factor graph with the factor ϕ(X | M, Z). - * @return std::pair, - * boost::shared_ptr > - */ - std::pair, - boost::shared_ptr> - eliminateDiscrete( - Eliminate function, - const HybridGaussianFactorGraph::shared_ptr& discreteGraph) const; - - public: - /** - * @brief Override the EliminationTree eliminate method - * so we can perform hybrid elimination correctly. - * - * @param function Hybrid elimination function - * @return std::pair, - * boost::shared_ptr > - */ - std::pair, - boost::shared_ptr> - eliminate(Eliminate function) const; - - Ordering continuousOrdering() const { return continuous_ordering_; } - Ordering discreteOrdering() const { return discrete_ordering_; } - - /// Store the provided variable index. 
- VariableIndex variableIndex() const { return variable_index_; } - HybridGaussianFactorGraph graph() const { return graph_; } }; } // namespace gtsam diff --git a/gtsam/hybrid/HybridJunctionTree.cpp b/gtsam/hybrid/HybridJunctionTree.cpp index 43b043e30..422c200a4 100644 --- a/gtsam/hybrid/HybridJunctionTree.cpp +++ b/gtsam/hybrid/HybridJunctionTree.cpp @@ -142,8 +142,7 @@ struct HybridConstructorTraversalData { /* ************************************************************************* */ HybridJunctionTree::HybridJunctionTree( - const HybridEliminationTree& eliminationTree) - : etree_(eliminationTree) { + const HybridEliminationTree& eliminationTree) { gttic(JunctionTree_FromEliminationTree); // Here we rely on the BayesNet having been produced by this elimination tree, // such that the conditionals are arranged in DFS post-order. We traverse the @@ -170,141 +169,4 @@ HybridJunctionTree::HybridJunctionTree( Base::remainingFactors_ = eliminationTree.remainingFactors(); } -/* ************************************************************************* */ -std::pair, - boost::shared_ptr> -HybridJunctionTree::eliminateContinuous( - const Eliminate& function, const HybridGaussianFactorGraph& graph, - const Ordering& continuous_ordering) const { - HybridBayesTree::shared_ptr continuousBayesTree; - HybridGaussianFactorGraph::shared_ptr discreteGraph; - - if (continuous_ordering.size() > 0) { - HybridEliminationTree continuous_etree(graph, etree_.variableIndex(), - continuous_ordering); - - This continuous_junction_tree(continuous_etree); - std::tie(continuousBayesTree, discreteGraph) = - continuous_junction_tree.Base::eliminate(function); - - } else { - continuousBayesTree = boost::make_shared(); - discreteGraph = boost::make_shared(graph); - } - - return std::make_pair(continuousBayesTree, discreteGraph); -} -/* ************************************************************************* */ -std::pair, - boost::shared_ptr> -HybridJunctionTree::eliminateDiscrete( - const 
Eliminate& function, - const HybridBayesTree::shared_ptr& continuousBayesTree, - const HybridGaussianFactorGraph::shared_ptr& discreteGraph, - const Ordering& discrete_ordering) const { - HybridBayesTree::shared_ptr updatedBayesTree; - HybridGaussianFactorGraph::shared_ptr finalGraph; - if (discrete_ordering.size() > 0) { - HybridEliminationTree discrete_etree( - *discreteGraph, VariableIndex(*discreteGraph), discrete_ordering); - - This discrete_junction_tree(discrete_etree); - - std::tie(updatedBayesTree, finalGraph) = - discrete_junction_tree.Base::eliminate(function); - - // Get the clique with all the discrete keys. - // There should only be 1 clique. - const HybridBayesTree::sharedClique discrete_clique = - (*updatedBayesTree)[discrete_ordering.at(0)]; - - std::set clique_set; - for (auto node : continuousBayesTree->nodes()) { - clique_set.insert(node.second); - } - - // Set the root of the bayes tree as the discrete clique - for (auto clique : clique_set) { - if (clique->conditional()->parents() == - discrete_clique->conditional()->frontals()) { - updatedBayesTree->addClique(clique, discrete_clique); - - } else { - if (clique->parent()) { - // Remove the clique from the children of the parents since it will - // get added again in addClique. 
- auto clique_it = std::find(clique->parent()->children.begin(), - clique->parent()->children.end(), clique); - clique->parent()->children.erase(clique_it); - updatedBayesTree->addClique(clique, clique->parent()); - } else { - updatedBayesTree->addClique(clique); - } - } - } - } else { - updatedBayesTree = continuousBayesTree; - finalGraph = discreteGraph; - } - - return std::make_pair(updatedBayesTree, finalGraph); -} - -/* ************************************************************************* */ -boost::shared_ptr HybridJunctionTree::addProbPrimes( - const HybridBayesTree::shared_ptr& continuousBayesTree, - const HybridGaussianFactorGraph::shared_ptr& discreteGraph) const { - Ordering continuous_ordering = etree_.continuousOrdering(); - Ordering discrete_ordering = etree_.discreteOrdering(); - - // If we have eliminated continuous variables - // and have discrete variables to eliminate, - // then compute P(X | M, Z) - if (continuous_ordering.size() > 0 && discrete_ordering.size() > 0) { - // Collect all the discrete keys - DiscreteKeys discrete_keys; - for (auto node : continuousBayesTree->nodes()) { - auto node_dkeys = node.second->conditional()->discreteKeys(); - discrete_keys.insert(discrete_keys.end(), node_dkeys.begin(), - node_dkeys.end()); - } - // Remove duplicates and convert back to DiscreteKeys - std::set dkeys_set(discrete_keys.begin(), discrete_keys.end()); - discrete_keys = DiscreteKeys(dkeys_set.begin(), dkeys_set.end()); - - FactorGraphType graph = etree_.graph(); - - // DecisionTree for P'(X|M, Z) for all mode sequences M - const AlgebraicDecisionTree probPrimeTree = - graph.continuousProbPrimes(discrete_keys, continuousBayesTree); - - // Add the model selection factor P(M|Z) - discreteGraph->add(DecisionTreeFactor(discrete_keys, probPrimeTree)); - } - - return discreteGraph; -} - -/* ************************************************************************* */ -std::pair -HybridJunctionTree::eliminate(const Eliminate& function) const { - 
Ordering continuous_ordering = etree_.continuousOrdering(); - Ordering discrete_ordering = etree_.discreteOrdering(); - - FactorGraphType graph = etree_.graph(); - - // Eliminate continuous - BayesTreeType::shared_ptr continuousBayesTree; - FactorGraphType::shared_ptr discreteGraph; - std::tie(continuousBayesTree, discreteGraph) = - this->eliminateContinuous(function, graph, continuous_ordering); - - FactorGraphType::shared_ptr updatedDiscreteGraph = - this->addProbPrimes(continuousBayesTree, discreteGraph); - - // Eliminate discrete variables to get the discrete bayes tree. - return this->eliminateDiscrete(function, continuousBayesTree, - updatedDiscreteGraph, discrete_ordering); -} - } // namespace gtsam diff --git a/gtsam/hybrid/HybridJunctionTree.h b/gtsam/hybrid/HybridJunctionTree.h index d0473c33d..29fa24809 100644 --- a/gtsam/hybrid/HybridJunctionTree.h +++ b/gtsam/hybrid/HybridJunctionTree.h @@ -51,10 +51,6 @@ class HybridEliminationTree; */ class GTSAM_EXPORT HybridJunctionTree : public JunctionTree { - /// Record the elimination tree for use in hybrid elimination. - HybridEliminationTree etree_; - /// Store the provided variable index. - VariableIndex variable_index_; public: typedef JunctionTree @@ -72,71 +68,6 @@ class GTSAM_EXPORT HybridJunctionTree * @return The elimination tree */ HybridJunctionTree(const HybridEliminationTree& eliminationTree); - - protected: - /** - * @brief Eliminate all the continuous variables from the factor graph. - * - * @param function The hybrid elimination function. - * @param graph The factor graph to eliminate. - * @param continuous_ordering The ordering of continuous variables. - * @return std::pair, - * boost::shared_ptr> - */ - std::pair, - boost::shared_ptr> - eliminateContinuous(const Eliminate& function, - const HybridGaussianFactorGraph& graph, - const Ordering& continuous_ordering) const; - - /** - * @brief Eliminate all the discrete variables in the hybrid factor graph. 
- * - * @param function The hybrid elimination function. - * @param continuousBayesTree The bayes tree corresponding to - * the eliminated continuous variables. - * @param discreteGraph Factor graph of factors containing - * only discrete variables. - * @param discrete_ordering The elimination ordering for - * the discrete variables. - * @return std::pair, - * boost::shared_ptr> - */ - std::pair, - boost::shared_ptr> - eliminateDiscrete(const Eliminate& function, - const HybridBayesTree::shared_ptr& continuousBayesTree, - const HybridGaussianFactorGraph::shared_ptr& discreteGraph, - const Ordering& discrete_ordering) const; - - /** - * @brief Compute the unnormalized probability P'(X | M, Z) - * for the factor graph in each leaf of the discrete tree. - * The discrete decision tree formed as a result is added to the - * `discreteGraph` for discrete elimination. - * - * @param continuousBayesTree The bayes tree corresponding to - * the eliminated continuous variables. - * @param discreteGraph Factor graph consisting of factors - * on discrete variables only. - * @return boost::shared_ptr - */ - boost::shared_ptr addProbPrimes( - const HybridBayesTree::shared_ptr& continuousBayesTree, - const HybridGaussianFactorGraph::shared_ptr& discreteGraph) const; - - public: - /** - * @brief Override the eliminate method so we can - * perform hybrid elimination correctly. - * - * @param function The hybrid elimination function. 
- * @return std::pair, - * boost::shared_ptr> - */ - std::pair, - boost::shared_ptr> - eliminate(const Eliminate& function) const; }; } // namespace gtsam From 62bc9f20a3b36fbcb1840b9d600d68239f549063 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Sat, 10 Dec 2022 10:35:46 +0530 Subject: [PATCH 37/51] update hybrid elimination and corresponding tests --- gtsam/hybrid/HybridGaussianFactorGraph.cpp | 12 +++-- gtsam/hybrid/HybridSmoother.cpp | 2 +- gtsam/hybrid/tests/testHybridEstimation.cpp | 49 +++++-------------- .../tests/testHybridGaussianFactorGraph.cpp | 2 +- gtsam/hybrid/tests/testHybridGaussianISAM.cpp | 2 +- .../tests/testHybridNonlinearFactorGraph.cpp | 11 +---- .../hybrid/tests/testHybridNonlinearISAM.cpp | 2 +- 7 files changed, 25 insertions(+), 55 deletions(-) diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.cpp b/gtsam/hybrid/HybridGaussianFactorGraph.cpp index c430fac2c..de237b049 100644 --- a/gtsam/hybrid/HybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/HybridGaussianFactorGraph.cpp @@ -172,8 +172,13 @@ discreteElimination(const HybridGaussianFactorGraph &factors, } } + // std::cout << "Eliminate For MPE" << std::endl; auto result = EliminateForMPE(dfg, frontalKeys); - + // std::cout << "discrete elimination done!" 
<< std::endl; + // dfg.print(); + // std::cout << "\n\n\n" << std::endl; + // result.first->print(); + // result.second->print(); return {boost::make_shared(result.first), boost::make_shared(result.second)}; } @@ -262,7 +267,9 @@ hybridElimination(const HybridGaussianFactorGraph &factors, if (!factor) { return 0.0; // If nullptr, return 0.0 probability } else { - return 1.0; + double error = + 0.5 * std::abs(factor->augmentedInformation().determinant()); + return std::exp(-error); } }; DecisionTree fdt(separatorFactors, factorProb); @@ -550,5 +557,4 @@ HybridGaussianFactorGraph::separateContinuousDiscreteOrdering( return std::make_pair(continuous_ordering, discrete_ordering); } - } // namespace gtsam diff --git a/gtsam/hybrid/HybridSmoother.cpp b/gtsam/hybrid/HybridSmoother.cpp index 7400053bf..07a7a4e77 100644 --- a/gtsam/hybrid/HybridSmoother.cpp +++ b/gtsam/hybrid/HybridSmoother.cpp @@ -32,7 +32,7 @@ void HybridSmoother::update(HybridGaussianFactorGraph graph, addConditionals(graph, hybridBayesNet_, ordering); // Eliminate. 
- auto bayesNetFragment = graph.eliminateSequential(); + auto bayesNetFragment = graph.eliminateSequential(ordering); /// Prune if (maxNrLeaves) { diff --git a/gtsam/hybrid/tests/testHybridEstimation.cpp b/gtsam/hybrid/tests/testHybridEstimation.cpp index 85be0ab31..39c5eea15 100644 --- a/gtsam/hybrid/tests/testHybridEstimation.cpp +++ b/gtsam/hybrid/tests/testHybridEstimation.cpp @@ -15,6 +15,7 @@ * @author Varun Agrawal */ +#include #include #include #include @@ -280,8 +281,10 @@ TEST(HybridEstimation, Probability) { VectorValues values = bayes_net->optimize(); - expected_errors.push_back(linear_graph->error(values)); - expected_prob_primes.push_back(linear_graph->probPrime(values)); + double error = linear_graph->error(values); + expected_errors.push_back(error); + double prob_prime = linear_graph->probPrime(values); + expected_prob_primes.push_back(prob_prime); } // Switching example of robot moving in 1D with given measurements and equal @@ -291,51 +294,21 @@ TEST(HybridEstimation, Probability) { auto graph = switching.linearizedFactorGraph; Ordering ordering = getOrdering(graph, HybridGaussianFactorGraph()); - AlgebraicDecisionTree expected_probPrimeTree = probPrimeTree(graph); - - // Eliminate continuous - Ordering continuous_ordering(graph.continuousKeys()); - HybridBayesNet::shared_ptr bayesNet; - HybridGaussianFactorGraph::shared_ptr discreteGraph; - std::tie(bayesNet, discreteGraph) = - graph.eliminatePartialSequential(continuous_ordering); - - // Get the last continuous conditional which will have all the discrete keys - auto last_conditional = bayesNet->at(bayesNet->size() - 1); - DiscreteKeys discrete_keys = last_conditional->discreteKeys(); - - const std::vector assignments = - DiscreteValues::CartesianProduct(discrete_keys); - - // Reverse discrete keys order for correct tree construction - std::reverse(discrete_keys.begin(), discrete_keys.end()); - - // Create a decision tree of all the different VectorValues - DecisionTree delta_tree = - 
graph.continuousDelta(discrete_keys, bayesNet, assignments); - - AlgebraicDecisionTree probPrimeTree = - graph.continuousProbPrimes(discrete_keys, bayesNet); - - EXPECT(assert_equal(expected_probPrimeTree, probPrimeTree)); + HybridBayesNet::shared_ptr bayesNet = graph.eliminateSequential(ordering); + auto discreteConditional = bayesNet->atDiscrete(bayesNet->size() - 3); // Test if the probPrimeTree matches the probability of // the individual factor graphs for (size_t i = 0; i < pow(2, K - 1); i++) { - Assignment discrete_assignment; + DiscreteValues discrete_assignment; for (size_t v = 0; v < discrete_seq_map[i].size(); v++) { discrete_assignment[M(v)] = discrete_seq_map[i][v]; } - EXPECT_DOUBLES_EQUAL(expected_prob_primes.at(i), - probPrimeTree(discrete_assignment), 1e-8); + double discrete_transition_prob = 0.25; + EXPECT_DOUBLES_EQUAL(expected_prob_primes.at(i) * discrete_transition_prob, + (*discreteConditional)(discrete_assignment), 1e-8); } - discreteGraph->add(DecisionTreeFactor(discrete_keys, probPrimeTree)); - - Ordering discrete(graph.discreteKeys()); - auto discreteBayesNet = discreteGraph->eliminateSequential(); - bayesNet->add(*discreteBayesNet); - HybridValues hybrid_values = bayesNet->optimize(); // This is the correct sequence as designed diff --git a/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp b/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp index 6288bcd93..8954f2503 100644 --- a/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp @@ -277,7 +277,7 @@ TEST(HybridGaussianFactorGraph, eliminateFullMultifrontalTwoClique) { std::tie(hbt, remaining) = hfg.eliminatePartialMultifrontal(ordering_full); // 9 cliques in the bayes tree and 0 remaining variables to eliminate. 
- EXPECT_LONGS_EQUAL(7, hbt->size()); + EXPECT_LONGS_EQUAL(9, hbt->size()); EXPECT_LONGS_EQUAL(0, remaining->size()); /* diff --git a/gtsam/hybrid/tests/testHybridGaussianISAM.cpp b/gtsam/hybrid/tests/testHybridGaussianISAM.cpp index 4ba6f88a5..8e7902132 100644 --- a/gtsam/hybrid/tests/testHybridGaussianISAM.cpp +++ b/gtsam/hybrid/tests/testHybridGaussianISAM.cpp @@ -178,7 +178,7 @@ TEST(HybridGaussianElimination, IncrementalInference) { // Test the probability values with regression tests. DiscreteValues assignment; - EXPECT(assert_equal(0.166667, m00_prob, 1e-5)); + EXPECT(assert_equal(0.0619233, m00_prob, 1e-5)); assignment[M(0)] = 0; assignment[M(1)] = 0; EXPECT(assert_equal(0.0619233, (*discreteConditional)(assignment), 1e-5)); diff --git a/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp b/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp index e3b7f761a..a0a6f7d95 100644 --- a/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp +++ b/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp @@ -372,8 +372,7 @@ TEST(HybridGaussianElimination, EliminateHybrid_2_Variable) { dynamic_pointer_cast(hybridDiscreteFactor->inner()); CHECK(discreteFactor); EXPECT_LONGS_EQUAL(1, discreteFactor->discreteKeys().size()); - // All leaves should be probability 1 since this is not P*(X|M,Z) - EXPECT(discreteFactor->root_->isLeaf()); + EXPECT(discreteFactor->root_->isLeaf() == false); // TODO(Varun) Test emplace_discrete } @@ -441,14 +440,6 @@ TEST(HybridFactorGraph, Full_Elimination) { discrete_fg.push_back(df->inner()); } - // Get the probabilit P*(X | M, Z) - DiscreteKeys discrete_keys = - remainingFactorGraph_partial->at(2)->discreteKeys(); - AlgebraicDecisionTree probPrimeTree = - linearizedFactorGraph.continuousProbPrimes(discrete_keys, - hybridBayesNet_partial); - discrete_fg.add(DecisionTreeFactor(discrete_keys, probPrimeTree)); - ordering.clear(); for (size_t k = 0; k < self.K - 1; k++) ordering += M(k); discreteBayesNet = diff --git 
a/gtsam/hybrid/tests/testHybridNonlinearISAM.cpp b/gtsam/hybrid/tests/testHybridNonlinearISAM.cpp index 46d5c4324..2a1932ac7 100644 --- a/gtsam/hybrid/tests/testHybridNonlinearISAM.cpp +++ b/gtsam/hybrid/tests/testHybridNonlinearISAM.cpp @@ -197,7 +197,7 @@ TEST(HybridNonlinearISAM, IncrementalInference) { // Test the probability values with regression tests. DiscreteValues assignment; - EXPECT(assert_equal(0.166667, m00_prob, 1e-5)); + EXPECT(assert_equal(0.0619233, m00_prob, 1e-5)); assignment[M(0)] = 0; assignment[M(1)] = 0; EXPECT(assert_equal(0.0619233, (*discreteConditional)(assignment), 1e-5)); From 6beffeb0c1d0eb4d098aef7ac88d198e9b3bd9c6 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Sat, 10 Dec 2022 12:46:48 +0530 Subject: [PATCH 38/51] remove commented out code --- gtsam/hybrid/HybridGaussianFactorGraph.cpp | 7 +------ 1 file changed, 1 insertion(+), 6 deletions(-) diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.cpp b/gtsam/hybrid/HybridGaussianFactorGraph.cpp index de237b049..5c18a94b5 100644 --- a/gtsam/hybrid/HybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/HybridGaussianFactorGraph.cpp @@ -172,13 +172,8 @@ discreteElimination(const HybridGaussianFactorGraph &factors, } } - // std::cout << "Eliminate For MPE" << std::endl; auto result = EliminateForMPE(dfg, frontalKeys); - // std::cout << "discrete elimination done!" 
<< std::endl; - // dfg.print(); - // std::cout << "\n\n\n" << std::endl; - // result.first->print(); - // result.second->print(); + return {boost::make_shared(result.first), boost::make_shared(result.second)}; } From 812bf52c11ed39597ceb4fa21e2ae1fab20b2938 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Wed, 21 Dec 2022 15:08:24 +0530 Subject: [PATCH 39/51] minor cleanup --- gtsam/hybrid/HybridBayesTree.cpp | 4 +--- gtsam/hybrid/HybridGaussianFactorGraph.cpp | 3 +-- 2 files changed, 2 insertions(+), 5 deletions(-) diff --git a/gtsam/hybrid/HybridBayesTree.cpp b/gtsam/hybrid/HybridBayesTree.cpp index c9c6afa79..b706fb745 100644 --- a/gtsam/hybrid/HybridBayesTree.cpp +++ b/gtsam/hybrid/HybridBayesTree.cpp @@ -14,7 +14,7 @@ * @brief Hybrid Bayes Tree, the result of eliminating a * HybridJunctionTree * @date Mar 11, 2022 - * @author Fan Jiang + * @author Fan Jiang, Varun Agrawal */ #include @@ -166,8 +166,6 @@ void HybridBayesTree::prune(const size_t maxNrLeaves) { DecisionTreeFactor prunedDecisionTree = decisionTree->prune(maxNrLeaves); decisionTree->root_ = prunedDecisionTree.root_; - // this->print(); - // decisionTree->print("", DefaultKeyFormatter); /// Helper struct for pruning the hybrid bayes tree. 
struct HybridPrunerData { diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.cpp b/gtsam/hybrid/HybridGaussianFactorGraph.cpp index 5c18a94b5..c8707a586 100644 --- a/gtsam/hybrid/HybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/HybridGaussianFactorGraph.cpp @@ -92,7 +92,6 @@ GaussianMixtureFactor::Sum sumFrontals( if (auto cgmf = boost::dynamic_pointer_cast(f)) { sum = cgmf->add(sum); } - if (auto gm = boost::dynamic_pointer_cast(f)) { sum = gm->asMixture()->add(sum); } @@ -189,7 +188,7 @@ hybridElimination(const HybridGaussianFactorGraph &factors, DiscreteKeys discreteSeparator(discreteSeparatorSet.begin(), discreteSeparatorSet.end()); - // sum out frontals, this is the factor on the separator + // sum out frontals, this is the factor 𝜏 on the separator GaussianMixtureFactor::Sum sum = sumFrontals(factors); // If a tree leaf contains nullptr, From ffd1802ceaafd574517335bd34ddd525c8e1227b Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Fri, 23 Dec 2022 20:19:23 +0530 Subject: [PATCH 40/51] add optional model parameter to sample method --- gtsam/linear/GaussianBayesNet.cpp | 10 ++++++---- gtsam/linear/GaussianBayesNet.h | 6 ++++-- gtsam/linear/GaussianConditional.cpp | 22 +++++++++++++++------- gtsam/linear/GaussianConditional.h | 7 ++++--- 4 files changed, 29 insertions(+), 16 deletions(-) diff --git a/gtsam/linear/GaussianBayesNet.cpp b/gtsam/linear/GaussianBayesNet.cpp index 6dcf662a9..8db301aa5 100644 --- a/gtsam/linear/GaussianBayesNet.cpp +++ b/gtsam/linear/GaussianBayesNet.cpp @@ -59,16 +59,18 @@ namespace gtsam { } /* ************************************************************************ */ - VectorValues GaussianBayesNet::sample(std::mt19937_64* rng) const { + VectorValues GaussianBayesNet::sample(std::mt19937_64* rng, + const SharedDiagonal& model) const { VectorValues result; // no missing variables -> create an empty vector - return sample(result, rng); + return sample(result, rng, model); } VectorValues GaussianBayesNet::sample(VectorValues 
result, - std::mt19937_64* rng) const { + std::mt19937_64* rng, + const SharedDiagonal& model) const { // sample each node in reverse topological sort order (parents first) for (auto cg : boost::adaptors::reverse(*this)) { - const VectorValues sampled = cg->sample(result, rng); + const VectorValues sampled = cg->sample(result, rng, model); result.insert(sampled); } return result; diff --git a/gtsam/linear/GaussianBayesNet.h b/gtsam/linear/GaussianBayesNet.h index 83328576f..e6dae6126 100644 --- a/gtsam/linear/GaussianBayesNet.h +++ b/gtsam/linear/GaussianBayesNet.h @@ -101,7 +101,8 @@ namespace gtsam { * std::mt19937_64 rng(42); * auto sample = gbn.sample(&rng); */ - VectorValues sample(std::mt19937_64* rng) const; + VectorValues sample(std::mt19937_64* rng, + const SharedDiagonal& model = nullptr) const; /** * Sample from an incomplete BayesNet, given missing variables @@ -110,7 +111,8 @@ namespace gtsam { * VectorValues given = ...; * auto sample = gbn.sample(given, &rng); */ - VectorValues sample(VectorValues given, std::mt19937_64* rng) const; + VectorValues sample(VectorValues given, std::mt19937_64* rng, + const SharedDiagonal& model = nullptr) const; /// Sample using ancestral sampling, use default rng VectorValues sample() const; diff --git a/gtsam/linear/GaussianConditional.cpp b/gtsam/linear/GaussianConditional.cpp index 951577641..1a6620d62 100644 --- a/gtsam/linear/GaussianConditional.cpp +++ b/gtsam/linear/GaussianConditional.cpp @@ -279,30 +279,38 @@ namespace gtsam { /* ************************************************************************ */ VectorValues GaussianConditional::sample(const VectorValues& parentsValues, - std::mt19937_64* rng) const { + std::mt19937_64* rng, + const SharedDiagonal& model) const { if (nrFrontals() != 1) { throw std::invalid_argument( "GaussianConditional::sample can only be called on single variable " "conditionals"); } - if (!model_) { + + VectorValues solution = solve(parentsValues); + Key key = firstFrontalKey(); + 
+ Vector sigmas; + if (model_) { + sigmas = model_->sigmas(); + } else if (model) { + sigmas = model->sigmas(); + } else { throw std::invalid_argument( "GaussianConditional::sample can only be called if a diagonal noise " "model was specified at construction."); } - VectorValues solution = solve(parentsValues); - Key key = firstFrontalKey(); - const Vector& sigmas = model_->sigmas(); solution[key] += Sampler::sampleDiagonal(sigmas, rng); return solution; } - VectorValues GaussianConditional::sample(std::mt19937_64* rng) const { + VectorValues GaussianConditional::sample(std::mt19937_64* rng, + const SharedDiagonal& model) const { if (nrParents() != 0) throw std::invalid_argument( "sample() can only be invoked on no-parent prior"); VectorValues values; - return sample(values); + return sample(values, rng, model); } /* ************************************************************************ */ diff --git a/gtsam/linear/GaussianConditional.h b/gtsam/linear/GaussianConditional.h index 4822e3eaf..ccf916cd7 100644 --- a/gtsam/linear/GaussianConditional.h +++ b/gtsam/linear/GaussianConditional.h @@ -166,7 +166,8 @@ namespace gtsam { * std::mt19937_64 rng(42); * auto sample = gbn.sample(&rng); */ - VectorValues sample(std::mt19937_64* rng) const; + VectorValues sample(std::mt19937_64* rng, + const SharedDiagonal& model = nullptr) const; /** * Sample from conditional, given missing variables @@ -175,8 +176,8 @@ namespace gtsam { * VectorValues given = ...; * auto sample = gbn.sample(given, &rng); */ - VectorValues sample(const VectorValues& parentsValues, - std::mt19937_64* rng) const; + VectorValues sample(const VectorValues& parentsValues, std::mt19937_64* rng, + const SharedDiagonal& model = nullptr) const; /// Sample, use default rng VectorValues sample() const; From bdb7836d0e496cacd942405323e55dc0711f51b1 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Fri, 23 Dec 2022 20:19:41 +0530 Subject: [PATCH 41/51] sampling test --- gtsam/hybrid/HybridBayesNet.cpp | 1 + 
gtsam/hybrid/tests/testHybridEstimation.cpp | 68 +++++++++++++++++++++ 2 files changed, 69 insertions(+) diff --git a/gtsam/hybrid/HybridBayesNet.cpp b/gtsam/hybrid/HybridBayesNet.cpp index 7338873bc..fea87d3e5 100644 --- a/gtsam/hybrid/HybridBayesNet.cpp +++ b/gtsam/hybrid/HybridBayesNet.cpp @@ -279,6 +279,7 @@ AlgebraicDecisionTree HybridBayesNet::error( return error_tree; } +/* ************************************************************************* */ AlgebraicDecisionTree HybridBayesNet::probPrime( const VectorValues &continuousValues) const { AlgebraicDecisionTree error_tree = this->error(continuousValues); diff --git a/gtsam/hybrid/tests/testHybridEstimation.cpp b/gtsam/hybrid/tests/testHybridEstimation.cpp index 39c5eea15..ef5e882bb 100644 --- a/gtsam/hybrid/tests/testHybridEstimation.cpp +++ b/gtsam/hybrid/tests/testHybridEstimation.cpp @@ -432,6 +432,74 @@ TEST(HybridEstimation, ProbabilityMultifrontal) { EXPECT(assert_equal(discrete_seq, hybrid_values.discrete())); } +/****************************************************************************/ +/** + * Test for correctness via sampling. + * + * Given the conditional P(x0, m0, x1| z0, z1) + * with meaasurements z0, z1, we: + * 1. Start with the corresponding Factor Graph `FG`. + * 2. Eliminate the factor graph into a Bayes Net `BN`. + * 3. Sample from the Bayes Net. + * 4. Check that the ratio `BN(x)/FG(x) = constant` for all samples `x`. 
+ */ +TEST(HybridEstimation, CorrectnessViaSampling) { + HybridNonlinearFactorGraph nfg; + + auto noise_model = noiseModel::Diagonal::Sigmas(Vector1(1.0)); + auto zero_motion = + boost::make_shared>(X(0), X(1), 0, noise_model); + auto one_motion = + boost::make_shared>(X(0), X(1), 1, noise_model); + std::vector factors = {zero_motion, one_motion}; + nfg.emplace_nonlinear>(X(0), 0.0, noise_model); + nfg.emplace_hybrid( + KeyVector{X(0), X(1)}, DiscreteKeys{DiscreteKey(M(0), 2)}, factors); + + Values initial; + double z0 = 0.0, z1 = 1.0; + initial.insert(X(0), z0); + initial.insert(X(1), z1); + + // 1. Create the factor graph from the nonlinear factor graph. + HybridGaussianFactorGraph::shared_ptr fg = nfg.linearize(initial); + // 2. Eliminate into BN + Ordering ordering = fg->getHybridOrdering(); + HybridBayesNet::shared_ptr bn = fg->eliminateSequential(ordering); + + // Set up sampling + std::random_device rd; + std::mt19937_64 gen(rd()); + // Discrete distribution with 50/50 weightage on both discrete variables. + std::discrete_distribution<> ddist({50, 50}); + + // 3. Do sampling + std::vector ratios; + int num_samples = 1000; + + for (size_t i = 0; i < num_samples; i++) { + // Sample a discrete value + DiscreteValues assignment; + assignment[M(0)] = ddist(gen); + + // Using the discrete sample, get the corresponding bayes net. + GaussianBayesNet gbn = bn->choose(assignment); + // Sample from the bayes net + VectorValues sample = gbn.sample(&gen, noise_model); + // Compute the ratio in log form and canonical form + double log_ratio = bn->error(sample, assignment) - fg->error(sample, assignment); + double ratio = exp(-log_ratio); + + // Store the ratio for post-processing + ratios.push_back(ratio); + } + + // 4. 
Check that all samples == 1.0 (constant) + double ratio_sum = std::accumulate(ratios.begin(), ratios.end(), + decltype(ratios)::value_type(0)); + EXPECT_DOUBLES_EQUAL(1.0, ratio_sum / num_samples, 1e-9); +} + /* ************************************************************************* */ int main() { TestResult tr; From aa86af2d778953a1bbf4d0123aa208f6b0c3ea00 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Sat, 24 Dec 2022 20:11:19 +0530 Subject: [PATCH 42/51] Revert "add optional model parameter to sample method" This reverts commit ffd1802ceaafd574517335bd34ddd525c8e1227b. --- gtsam/linear/GaussianBayesNet.cpp | 10 ++++------ gtsam/linear/GaussianBayesNet.h | 6 ++---- gtsam/linear/GaussianConditional.cpp | 22 +++++++--------------- gtsam/linear/GaussianConditional.h | 7 +++---- 4 files changed, 16 insertions(+), 29 deletions(-) diff --git a/gtsam/linear/GaussianBayesNet.cpp b/gtsam/linear/GaussianBayesNet.cpp index 8db301aa5..6dcf662a9 100644 --- a/gtsam/linear/GaussianBayesNet.cpp +++ b/gtsam/linear/GaussianBayesNet.cpp @@ -59,18 +59,16 @@ namespace gtsam { } /* ************************************************************************ */ - VectorValues GaussianBayesNet::sample(std::mt19937_64* rng, - const SharedDiagonal& model) const { + VectorValues GaussianBayesNet::sample(std::mt19937_64* rng) const { VectorValues result; // no missing variables -> create an empty vector - return sample(result, rng, model); + return sample(result, rng); } VectorValues GaussianBayesNet::sample(VectorValues result, - std::mt19937_64* rng, - const SharedDiagonal& model) const { + std::mt19937_64* rng) const { // sample each node in reverse topological sort order (parents first) for (auto cg : boost::adaptors::reverse(*this)) { - const VectorValues sampled = cg->sample(result, rng, model); + const VectorValues sampled = cg->sample(result, rng); result.insert(sampled); } return result; diff --git a/gtsam/linear/GaussianBayesNet.h b/gtsam/linear/GaussianBayesNet.h index 
e6dae6126..83328576f 100644 --- a/gtsam/linear/GaussianBayesNet.h +++ b/gtsam/linear/GaussianBayesNet.h @@ -101,8 +101,7 @@ namespace gtsam { * std::mt19937_64 rng(42); * auto sample = gbn.sample(&rng); */ - VectorValues sample(std::mt19937_64* rng, - const SharedDiagonal& model = nullptr) const; + VectorValues sample(std::mt19937_64* rng) const; /** * Sample from an incomplete BayesNet, given missing variables @@ -111,8 +110,7 @@ namespace gtsam { * VectorValues given = ...; * auto sample = gbn.sample(given, &rng); */ - VectorValues sample(VectorValues given, std::mt19937_64* rng, - const SharedDiagonal& model = nullptr) const; + VectorValues sample(VectorValues given, std::mt19937_64* rng) const; /// Sample using ancestral sampling, use default rng VectorValues sample() const; diff --git a/gtsam/linear/GaussianConditional.cpp b/gtsam/linear/GaussianConditional.cpp index 1a6620d62..951577641 100644 --- a/gtsam/linear/GaussianConditional.cpp +++ b/gtsam/linear/GaussianConditional.cpp @@ -279,38 +279,30 @@ namespace gtsam { /* ************************************************************************ */ VectorValues GaussianConditional::sample(const VectorValues& parentsValues, - std::mt19937_64* rng, - const SharedDiagonal& model) const { + std::mt19937_64* rng) const { if (nrFrontals() != 1) { throw std::invalid_argument( "GaussianConditional::sample can only be called on single variable " "conditionals"); } - - VectorValues solution = solve(parentsValues); - Key key = firstFrontalKey(); - - Vector sigmas; - if (model_) { - sigmas = model_->sigmas(); - } else if (model) { - sigmas = model->sigmas(); - } else { + if (!model_) { throw std::invalid_argument( "GaussianConditional::sample can only be called if a diagonal noise " "model was specified at construction."); } + VectorValues solution = solve(parentsValues); + Key key = firstFrontalKey(); + const Vector& sigmas = model_->sigmas(); solution[key] += Sampler::sampleDiagonal(sigmas, rng); return solution; } - 
VectorValues GaussianConditional::sample(std::mt19937_64* rng, - const SharedDiagonal& model) const { + VectorValues GaussianConditional::sample(std::mt19937_64* rng) const { if (nrParents() != 0) throw std::invalid_argument( "sample() can only be invoked on no-parent prior"); VectorValues values; - return sample(values, rng, model); + return sample(values); } /* ************************************************************************ */ diff --git a/gtsam/linear/GaussianConditional.h b/gtsam/linear/GaussianConditional.h index ccf916cd7..4822e3eaf 100644 --- a/gtsam/linear/GaussianConditional.h +++ b/gtsam/linear/GaussianConditional.h @@ -166,8 +166,7 @@ namespace gtsam { * std::mt19937_64 rng(42); * auto sample = gbn.sample(&rng); */ - VectorValues sample(std::mt19937_64* rng, - const SharedDiagonal& model = nullptr) const; + VectorValues sample(std::mt19937_64* rng) const; /** * Sample from conditional, given missing variables @@ -176,8 +175,8 @@ namespace gtsam { * VectorValues given = ...; * auto sample = gbn.sample(given, &rng); */ - VectorValues sample(const VectorValues& parentsValues, std::mt19937_64* rng, - const SharedDiagonal& model = nullptr) const; + VectorValues sample(const VectorValues& parentsValues, + std::mt19937_64* rng) const; /// Sample, use default rng VectorValues sample() const; From 798c51aec99ecf313a27304c82d2de0b2a522e9a Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Sat, 24 Dec 2022 20:32:08 +0530 Subject: [PATCH 43/51] update sampling test to use new sample method --- gtsam/hybrid/tests/testHybridEstimation.cpp | 17 ++++++----------- 1 file changed, 6 insertions(+), 11 deletions(-) diff --git a/gtsam/hybrid/tests/testHybridEstimation.cpp b/gtsam/hybrid/tests/testHybridEstimation.cpp index ef5e882bb..be32a97c7 100644 --- a/gtsam/hybrid/tests/testHybridEstimation.cpp +++ b/gtsam/hybrid/tests/testHybridEstimation.cpp @@ -469,25 +469,20 @@ TEST(HybridEstimation, CorrectnessViaSampling) { // Set up sampling std::random_device rd; - 
std::mt19937_64 gen(rd()); - // Discrete distribution with 50/50 weightage on both discrete variables. - std::discrete_distribution<> ddist({50, 50}); + std::mt19937_64 gen(11); // 3. Do sampling std::vector ratios; int num_samples = 1000; for (size_t i = 0; i < num_samples; i++) { - // Sample a discrete value - DiscreteValues assignment; - assignment[M(0)] = ddist(gen); - - // Using the discrete sample, get the corresponding bayes net. - GaussianBayesNet gbn = bn->choose(assignment); // Sample from the bayes net - VectorValues sample = gbn.sample(&gen, noise_model); + HybridValues sample = bn->sample(&gen); + // Compute the ratio in log form and canonical form - double log_ratio = bn->error(sample, assignment) - fg->error(sample, assignment); + DiscreteValues assignment = sample.discrete(); + double log_ratio = bn->error(sample.continuous(), assignment) - + fg->error(sample.continuous(), assignment); double ratio = exp(-log_ratio); // Store the ratio for post-processing From 13d22b123a1eae92551fa196561040dd5eb11279 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Sat, 24 Dec 2022 21:10:18 +0530 Subject: [PATCH 44/51] address review comments --- gtsam/hybrid/tests/testHybridEstimation.cpp | 32 +++++++++++++-------- 1 file changed, 20 insertions(+), 12 deletions(-) diff --git a/gtsam/hybrid/tests/testHybridEstimation.cpp b/gtsam/hybrid/tests/testHybridEstimation.cpp index be32a97c7..11d298ef3 100644 --- a/gtsam/hybrid/tests/testHybridEstimation.cpp +++ b/gtsam/hybrid/tests/testHybridEstimation.cpp @@ -436,8 +436,8 @@ TEST(HybridEstimation, ProbabilityMultifrontal) { /** * Test for correctness via sampling. * - * Given the conditional P(x0, m0, x1| z0, z1) - * with meaasurements z0, z1, we: + * Compute the conditional P(x0, m0, x1| z0, z1) + * with measurements z0, z1. To do so, we: * 1. Start with the corresponding Factor Graph `FG`. * 2. Eliminate the factor graph into a Bayes Net `BN`. * 3. Sample from the Bayes Net. 
@@ -446,15 +446,20 @@ TEST(HybridEstimation, ProbabilityMultifrontal) { TEST(HybridEstimation, CorrectnessViaSampling) { HybridNonlinearFactorGraph nfg; - auto noise_model = noiseModel::Diagonal::Sigmas(Vector1(1.0)); - auto zero_motion = + // First we create a hybrid nonlinear factor graph + // which represents f(x0, x1, m0; z0, z1). + // We linearize and eliminate this to get + // the required Factor Graph FG and Bayes Net BN. + const auto noise_model = noiseModel::Isotropic::Sigma(1, 1.0); + const auto zero_motion = boost::make_shared>(X(0), X(1), 0, noise_model); - auto one_motion = + const auto one_motion = boost::make_shared>(X(0), X(1), 1, noise_model); - std::vector factors = {zero_motion, one_motion}; + nfg.emplace_nonlinear>(X(0), 0.0, noise_model); nfg.emplace_hybrid( - KeyVector{X(0), X(1)}, DiscreteKeys{DiscreteKey(M(0), 2)}, factors); + KeyVector{X(0), X(1)}, DiscreteKeys{DiscreteKey(M(0), 2)}, + std::vector{zero_motion, one_motion}); Values initial; double z0 = 0.0, z1 = 1.0; @@ -463,13 +468,13 @@ TEST(HybridEstimation, CorrectnessViaSampling) { // 1. Create the factor graph from the nonlinear factor graph. HybridGaussianFactorGraph::shared_ptr fg = nfg.linearize(initial); + // 2. Eliminate into BN - Ordering ordering = fg->getHybridOrdering(); + const Ordering ordering = fg->getHybridOrdering(); HybridBayesNet::shared_ptr bn = fg->eliminateSequential(ordering); // Set up sampling - std::random_device rd; - std::mt19937_64 gen(11); + std::mt19937_64 rng(11); // 3. 
Do sampling std::vector ratios; @@ -477,10 +482,10 @@ TEST(HybridEstimation, CorrectnessViaSampling) { for (size_t i = 0; i < num_samples; i++) { // Sample from the bayes net - HybridValues sample = bn->sample(&gen); + const HybridValues sample = bn->sample(&rng); // Compute the ratio in log form and canonical form - DiscreteValues assignment = sample.discrete(); + const DiscreteValues assignment = sample.discrete(); double log_ratio = bn->error(sample.continuous(), assignment) - fg->error(sample.continuous(), assignment); double ratio = exp(-log_ratio); @@ -490,6 +495,9 @@ TEST(HybridEstimation, CorrectnessViaSampling) { } // 4. Check that all samples == 1.0 (constant) + // The error evaluated by the factor graph and the bayes net should be the + // same since the FG represents the unnormalized joint distribution and the BN + // is the unnormalized conditional, hence giving the ratio value as 1. double ratio_sum = std::accumulate(ratios.begin(), ratios.end(), decltype(ratios)::value_type(0)); EXPECT_DOUBLES_EQUAL(1.0, ratio_sum / num_samples, 1e-9); From 1e17dd3655695f078c04757b55d57c310f50c542 Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Sun, 25 Dec 2022 01:05:32 +0530 Subject: [PATCH 45/51] compute sampling ratio for one sample and then for multiple samples --- gtsam/hybrid/tests/testHybridEstimation.cpp | 42 ++++++++++++--------- 1 file changed, 24 insertions(+), 18 deletions(-) diff --git a/gtsam/hybrid/tests/testHybridEstimation.cpp b/gtsam/hybrid/tests/testHybridEstimation.cpp index 11d298ef3..e4a45bf4d 100644 --- a/gtsam/hybrid/tests/testHybridEstimation.cpp +++ b/gtsam/hybrid/tests/testHybridEstimation.cpp @@ -477,30 +477,36 @@ TEST(HybridEstimation, CorrectnessViaSampling) { std::mt19937_64 rng(11); // 3. Do sampling - std::vector ratios; - int num_samples = 1000; + int num_samples = 10; + // Functor to compute the ratio between the + // Bayes net and the factor graph. 
+ auto compute_ratio = + [](const HybridBayesNet::shared_ptr& bayesNet, + const HybridGaussianFactorGraph::shared_ptr& factorGraph, + const HybridValues& sample) -> double { + const DiscreteValues assignment = sample.discrete(); + // Compute in log form for numerical stability + double log_ratio = bayesNet->error(sample.continuous(), assignment) - + factorGraph->error(sample.continuous(), assignment); + double ratio = exp(-log_ratio); + return ratio; + }; + + // The error evaluated by the factor graph and the Bayes net should differ by + // the normalizing term computed via the Bayes net determinant. + const HybridValues sample = bn->sample(&rng); + double ratio = compute_ratio(bn, fg, sample); + // regression + EXPECT_DOUBLES_EQUAL(1.0, ratio, 1e-9); + + // 4. Check that all samples == constant for (size_t i = 0; i < num_samples; i++) { // Sample from the bayes net const HybridValues sample = bn->sample(&rng); - // Compute the ratio in log form and canonical form - const DiscreteValues assignment = sample.discrete(); - double log_ratio = bn->error(sample.continuous(), assignment) - - fg->error(sample.continuous(), assignment); - double ratio = exp(-log_ratio); - - // Store the ratio for post-processing - ratios.push_back(ratio); + EXPECT_DOUBLES_EQUAL(ratio, compute_ratio(bn, fg, sample), 1e-9); } - - // 4. Check that all samples == 1.0 (constant) - // The error evaluated by the factor graph and the bayes net should be the - // same since the FG represents the unnormalized joint distribution and the BN - // is the unnormalized conditional, hence giving the ratio value as 1. 
- double ratio_sum = std::accumulate(ratios.begin(), ratios.end(), - decltype(ratios)::value_type(0)); - EXPECT_DOUBLES_EQUAL(1.0, ratio_sum / num_samples, 1e-9); } /* ************************************************************************* */ From 76e838b8d05349cdbae42cc40fdc1878c4c5743c Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Sun, 25 Dec 2022 18:19:02 -0500 Subject: [PATCH 46/51] Implement printing rather than calling factor graph version --- gtsam/inference/BayesNet-inst.h | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/gtsam/inference/BayesNet-inst.h b/gtsam/inference/BayesNet-inst.h index e792b5c03..f43b4025e 100644 --- a/gtsam/inference/BayesNet-inst.h +++ b/gtsam/inference/BayesNet-inst.h @@ -31,7 +31,14 @@ namespace gtsam { template void BayesNet::print(const std::string& s, const KeyFormatter& formatter) const { - Base::print(s, formatter); + std::cout << (s.empty() ? "" : s + " ") << std::endl; + std::cout << "size: " << this->size() << std::endl; + for (size_t i = 0; i < this->size(); i++) { + const auto& conditional = this->at(i); + std::stringstream ss; + ss << "conditional " << i << ": "; + if (conditional) conditional->print(ss.str(), formatter); + } } /* ************************************************************************* */ From 4ad482aa70913719b71a706a36de2579946763c3 Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Sun, 25 Dec 2022 19:20:55 -0500 Subject: [PATCH 47/51] Small comments --- gtsam/hybrid/HybridBayesNet.cpp | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/gtsam/hybrid/HybridBayesNet.cpp b/gtsam/hybrid/HybridBayesNet.cpp index f5c11c6e1..2cb60475c 100644 --- a/gtsam/hybrid/HybridBayesNet.cpp +++ b/gtsam/hybrid/HybridBayesNet.cpp @@ -99,6 +99,7 @@ std::function &, double)> prunerFunc( } /* ************************************************************************* */ +// TODO(dellaert): what is this non-const method used for? Abolish it? 
void HybridBayesNet::updateDiscreteConditionals( const DecisionTreeFactor::shared_ptr &prunedDecisionTree) { KeyVector prunedTreeKeys = prunedDecisionTree->keys(); @@ -150,9 +151,7 @@ HybridBayesNet HybridBayesNet::prune(size_t maxNrLeaves) { // Go through all the conditionals in the // Bayes Net and prune them as per decisionTree. - for (size_t i = 0; i < this->size(); i++) { - HybridConditional::shared_ptr conditional = this->at(i); - + for (auto &&conditional : *this) { if (conditional->isHybrid()) { GaussianMixture::shared_ptr gaussianMixture = conditional->asMixture(); From a7573e8e6f9f75aa1f49e2d7cfed18282a30fe67 Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Sun, 25 Dec 2022 19:21:34 -0500 Subject: [PATCH 48/51] Refactor elimination setup to not use C declaration style --- gtsam/hybrid/HybridGaussianFactorGraph.cpp | 63 +++++++++++----------- 1 file changed, 33 insertions(+), 30 deletions(-) diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.cpp b/gtsam/hybrid/HybridGaussianFactorGraph.cpp index 39c3c2965..a2777bfc0 100644 --- a/gtsam/hybrid/HybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/HybridGaussianFactorGraph.cpp @@ -344,18 +344,20 @@ EliminateHybrid(const HybridGaussianFactorGraph &factors, // However this is also the case with iSAM2, so no pressure :) // PREPROCESS: Identify the nature of the current elimination - std::unordered_map mapFromKeyToDiscreteKey; - std::set discreteSeparatorSet; - std::set discreteFrontals; + // First, identify the separator keys, i.e. all keys that are not frontal. 
KeySet separatorKeys; - KeySet allContinuousKeys; - KeySet continuousFrontals; - KeySet continuousSeparator; - - // This initializes separatorKeys and mapFromKeyToDiscreteKey for (auto &&factor : factors) { separatorKeys.insert(factor->begin(), factor->end()); + } + // remove frontals from separator + for (auto &k : frontalKeys) { + separatorKeys.erase(k); + } + + // Build a map from keys to DiscreteKeys + std::unordered_map mapFromKeyToDiscreteKey; + for (auto &&factor : factors) { if (!factor->isContinuous()) { for (auto &k : factor->discreteKeys()) { mapFromKeyToDiscreteKey[k.first] = k; @@ -363,49 +365,50 @@ EliminateHybrid(const HybridGaussianFactorGraph &factors, } } - // remove frontals from separator - for (auto &k : frontalKeys) { - separatorKeys.erase(k); - } - - // Fill in discrete frontals and continuous frontals for the end result + // Fill in discrete frontals and continuous frontals. + std::set discreteFrontals; + KeySet continuousFrontals; for (auto &k : frontalKeys) { if (mapFromKeyToDiscreteKey.find(k) != mapFromKeyToDiscreteKey.end()) { discreteFrontals.insert(mapFromKeyToDiscreteKey.at(k)); } else { continuousFrontals.insert(k); - allContinuousKeys.insert(k); } } - // Fill in discrete frontals and continuous frontals for the end result + // Fill in discrete discrete separator keys and continuous separator keys. 
+ std::set discreteSeparatorSet; + KeySet continuousSeparator; for (auto &k : separatorKeys) { if (mapFromKeyToDiscreteKey.find(k) != mapFromKeyToDiscreteKey.end()) { discreteSeparatorSet.insert(mapFromKeyToDiscreteKey.at(k)); } else { continuousSeparator.insert(k); - allContinuousKeys.insert(k); } } + // Check if we have any continuous keys: + const bool discrete_only = + continuousFrontals.empty() && continuousSeparator.empty(); + // NOTE: We should really defer the product here because of pruning - // Case 1: we are only dealing with continuous - if (mapFromKeyToDiscreteKey.empty() && !allContinuousKeys.empty()) { - return continuousElimination(factors, frontalKeys); - } - - // Case 2: we are only dealing with discrete - if (allContinuousKeys.empty()) { + if (discrete_only) { + // Case 1: we are only dealing with discrete return discreteElimination(factors, frontalKeys); - } - + } else { + // Case 2: we are only dealing with continuous + if (mapFromKeyToDiscreteKey.empty()) { + return continuousElimination(factors, frontalKeys); + } else { + // Case 3: We are now in the hybrid land! #ifdef HYBRID_TIMING - tictoc_reset_(); + tictoc_reset_(); #endif - // Case 3: We are now in the hybrid land! - return hybridElimination(factors, frontalKeys, continuousSeparator, - discreteSeparatorSet); + return hybridElimination(factors, frontalKeys, continuousSeparator, + discreteSeparatorSet); + } + } } /* ************************************************************************ */ From 8a319c5a49e4df071eaa4ae76491e4996d0a0f02 Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Sun, 25 Dec 2022 19:31:54 -0500 Subject: [PATCH 49/51] Separated out NFG setup and added test. 
--- gtsam/hybrid/tests/testHybridEstimation.cpp | 87 +++++++++++++++------ 1 file changed, 61 insertions(+), 26 deletions(-) diff --git a/gtsam/hybrid/tests/testHybridEstimation.cpp b/gtsam/hybrid/tests/testHybridEstimation.cpp index e4a45bf4d..9b3d192ee 100644 --- a/gtsam/hybrid/tests/testHybridEstimation.cpp +++ b/gtsam/hybrid/tests/testHybridEstimation.cpp @@ -432,8 +432,65 @@ TEST(HybridEstimation, ProbabilityMultifrontal) { EXPECT(assert_equal(discrete_seq, hybrid_values.discrete())); } -/****************************************************************************/ -/** +/********************************************************************************* + // Create a hybrid nonlinear factor graph f(x0, x1, m0; z0, z1) + ********************************************************************************/ +static HybridNonlinearFactorGraph createHybridNonlinearFactorGraph() { + HybridNonlinearFactorGraph nfg; + + constexpr double sigma = 0.5; // measurement noise + const auto noise_model = noiseModel::Isotropic::Sigma(1, sigma); + + // Add "measurement" factors: + nfg.emplace_nonlinear>(X(0), 0.0, noise_model); + nfg.emplace_nonlinear>(X(1), 1.0, noise_model); + + // Add mixture factor: + DiscreteKey m(M(0), 2); + const auto zero_motion = + boost::make_shared>(X(0), X(1), 0, noise_model); + const auto one_motion = + boost::make_shared>(X(0), X(1), 1, noise_model); + nfg.emplace_hybrid( + KeyVector{X(0), X(1)}, DiscreteKeys{m}, + std::vector{zero_motion, one_motion}); + + return nfg; +} + +/********************************************************************************* + // Create a hybrid nonlinear factor graph f(x0, x1, m0; z0, z1) + ********************************************************************************/ +static HybridGaussianFactorGraph::shared_ptr createHybridGaussianFactorGraph() { + HybridNonlinearFactorGraph nfg = createHybridNonlinearFactorGraph(); + + Values initial; + double z0 = 0.0, z1 = 1.0; + initial.insert(X(0), z0); + initial.insert(X(1), 
z1); + return nfg.linearize(initial); +} + +/********************************************************************************* + * Do hybrid elimination and do regression test on discrete conditional. + ********************************************************************************/ +TEST(HybridEstimation, eliminateSequentialRegression) { + // 1. Create the factor graph from the nonlinear factor graph. + HybridGaussianFactorGraph::shared_ptr fg = createHybridGaussianFactorGraph(); + + // 2. Eliminate into BN + const Ordering ordering = fg->getHybridOrdering(); + HybridBayesNet::shared_ptr bn = fg->eliminateSequential(ordering); + // GTSAM_PRINT(*bn); + + // TODO(dellaert): dc should be discrete conditional on m0, but it is an unnormalized factor? + // DiscreteKey m(M(0), 2); + // DiscreteConditional expected(m % "0.51341712/1"); + // auto dc = bn->back()->asDiscreteConditional(); + // EXPECT(assert_equal(expected, *dc, 1e-9)); +} + +/********************************************************************************* * Test for correctness via sampling. * * Compute the conditional P(x0, m0, x1| z0, z1) @@ -442,32 +499,10 @@ TEST(HybridEstimation, ProbabilityMultifrontal) { * 2. Eliminate the factor graph into a Bayes Net `BN`. * 3. Sample from the Bayes Net. * 4. Check that the ratio `BN(x)/FG(x) = constant` for all samples `x`. - */ + ********************************************************************************/ TEST(HybridEstimation, CorrectnessViaSampling) { - HybridNonlinearFactorGraph nfg; - - // First we create a hybrid nonlinear factor graph - // which represents f(x0, x1, m0; z0, z1). - // We linearize and eliminate this to get - // the required Factor Graph FG and Bayes Net BN. 
- const auto noise_model = noiseModel::Isotropic::Sigma(1, 1.0); - const auto zero_motion = - boost::make_shared>(X(0), X(1), 0, noise_model); - const auto one_motion = - boost::make_shared>(X(0), X(1), 1, noise_model); - - nfg.emplace_nonlinear>(X(0), 0.0, noise_model); - nfg.emplace_hybrid( - KeyVector{X(0), X(1)}, DiscreteKeys{DiscreteKey(M(0), 2)}, - std::vector{zero_motion, one_motion}); - - Values initial; - double z0 = 0.0, z1 = 1.0; - initial.insert(X(0), z0); - initial.insert(X(1), z1); - // 1. Create the factor graph from the nonlinear factor graph. - HybridGaussianFactorGraph::shared_ptr fg = nfg.linearize(initial); + HybridGaussianFactorGraph::shared_ptr fg = createHybridGaussianFactorGraph(); // 2. Eliminate into BN const Ordering ordering = fg->getHybridOrdering(); From db17a04c59d041cf86e1ad40d6c65823f50fdeac Mon Sep 17 00:00:00 2001 From: Frank Dellaert Date: Sun, 25 Dec 2022 22:30:12 -0500 Subject: [PATCH 50/51] Fix print test --- gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp b/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp index a0a6f7d95..a4de4a1ae 100644 --- a/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp +++ b/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp @@ -587,7 +587,7 @@ factor 6: Discrete [m1 m0] // Expected output for hybridBayesNet. 
string expected_hybridBayesNet = R"( size: 3 -factor 0: Hybrid P( x0 | x1 m0) +conditional 0: Hybrid P( x0 | x1 m0) Discrete Keys = (m0, 2), Choice(m0) 0 Leaf p(x0 | x1) @@ -602,7 +602,7 @@ factor 0: Hybrid P( x0 | x1 m0) d = [ -9.95037 ] No noise model -factor 1: Hybrid P( x1 | x2 m0 m1) +conditional 1: Hybrid P( x1 | x2 m0 m1) Discrete Keys = (m0, 2), (m1, 2), Choice(m1) 0 Choice(m0) @@ -631,7 +631,7 @@ factor 1: Hybrid P( x1 | x2 m0 m1) d = [ -10 ] No noise model -factor 2: Hybrid P( x2 | m0 m1) +conditional 2: Hybrid P( x2 | m0 m1) Discrete Keys = (m0, 2), (m1, 2), Choice(m1) 0 Choice(m0) From 28f349c57d7bd9241877fcbf7f0d61f022f99d8e Mon Sep 17 00:00:00 2001 From: Varun Agrawal Date: Fri, 30 Dec 2022 10:47:06 +0530 Subject: [PATCH 51/51] minor fixes --- gtsam/hybrid/HybridBayesNet.cpp | 4 +- gtsam/hybrid/HybridGaussianFactorGraph.cpp | 23 ------ gtsam/hybrid/tests/testHybridEstimation.cpp | 78 ++------------------- 3 files changed, 6 insertions(+), 99 deletions(-) diff --git a/gtsam/hybrid/HybridBayesNet.cpp b/gtsam/hybrid/HybridBayesNet.cpp index 112cf0747..485abbc37 100644 --- a/gtsam/hybrid/HybridBayesNet.cpp +++ b/gtsam/hybrid/HybridBayesNet.cpp @@ -108,7 +108,6 @@ void HybridBayesNet::updateDiscreteConditionals( for (size_t i = 0; i < this->size(); i++) { HybridConditional::shared_ptr conditional = this->at(i); if (conditional->isDiscrete()) { - // std::cout << demangle(typeid(conditional).name()) << std::endl; auto discrete = conditional->asDiscrete(); KeyVector frontals(discrete->frontals().begin(), discrete->frontals().end()); @@ -218,8 +217,7 @@ HybridValues HybridBayesNet::optimize() const { DiscreteValues mpe = DiscreteFactorGraph(discrete_bn).optimize(); // Given the MPE, compute the optimal continuous values. 
- GaussianBayesNet gbn = choose(mpe); - return HybridValues(gbn.optimize(), mpe); + return HybridValues(optimize(mpe), mpe); } /* ************************************************************************* */ diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.cpp b/gtsam/hybrid/HybridGaussianFactorGraph.cpp index 9427eb582..a28f9dea0 100644 --- a/gtsam/hybrid/HybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/HybridGaussianFactorGraph.cpp @@ -539,27 +539,4 @@ AlgebraicDecisionTree HybridGaussianFactorGraph::probPrime( return prob_tree; } -/* ************************************************************************ */ -std::pair -HybridGaussianFactorGraph::separateContinuousDiscreteOrdering( - const Ordering &ordering) const { - KeySet all_continuous_keys = this->continuousKeys(); - KeySet all_discrete_keys = this->discreteKeys(); - Ordering continuous_ordering, discrete_ordering; - - for (auto &&key : ordering) { - if (std::find(all_continuous_keys.begin(), all_continuous_keys.end(), - key) != all_continuous_keys.end()) { - continuous_ordering.push_back(key); - } else if (std::find(all_discrete_keys.begin(), all_discrete_keys.end(), - key) != all_discrete_keys.end()) { - discrete_ordering.push_back(key); - } else { - throw std::runtime_error("Key in ordering not present in factors."); - } - } - - return std::make_pair(continuous_ordering, discrete_ordering); -} - } // namespace gtsam diff --git a/gtsam/hybrid/tests/testHybridEstimation.cpp b/gtsam/hybrid/tests/testHybridEstimation.cpp index 9b3d192ee..dac1f9f54 100644 --- a/gtsam/hybrid/tests/testHybridEstimation.cpp +++ b/gtsam/hybrid/tests/testHybridEstimation.cpp @@ -269,26 +269,8 @@ TEST(HybridEstimation, Probability) { std::vector measurements = {0, 1, 2, 2}; double between_sigma = 1.0, measurement_sigma = 0.1; - std::vector expected_errors, expected_prob_primes; - std::map> discrete_seq_map; - for (size_t i = 0; i < pow(2, K - 1); i++) { - discrete_seq_map[i] = getDiscreteSequence(i); - - 
GaussianFactorGraph::shared_ptr linear_graph = specificModesFactorGraph( - K, measurements, discrete_seq_map[i], measurement_sigma, between_sigma); - - auto bayes_net = linear_graph->eliminateSequential(); - - VectorValues values = bayes_net->optimize(); - - double error = linear_graph->error(values); - expected_errors.push_back(error); - double prob_prime = linear_graph->probPrime(values); - expected_prob_primes.push_back(prob_prime); - } - - // Switching example of robot moving in 1D with given measurements and equal - // mode priors. + // Switching example of robot moving in 1D with + // given measurements and equal mode priors. Switching switching(K, between_sigma, measurement_sigma, measurements, "1/1 1/1"); auto graph = switching.linearizedFactorGraph; @@ -297,18 +279,6 @@ TEST(HybridEstimation, Probability) { HybridBayesNet::shared_ptr bayesNet = graph.eliminateSequential(ordering); auto discreteConditional = bayesNet->atDiscrete(bayesNet->size() - 3); - // Test if the probPrimeTree matches the probability of - // the individual factor graphs - for (size_t i = 0; i < pow(2, K - 1); i++) { - DiscreteValues discrete_assignment; - for (size_t v = 0; v < discrete_seq_map[i].size(); v++) { - discrete_assignment[M(v)] = discrete_seq_map[i][v]; - } - double discrete_transition_prob = 0.25; - EXPECT_DOUBLES_EQUAL(expected_prob_primes.at(i) * discrete_transition_prob, - (*discreteConditional)(discrete_assignment), 1e-8); - } - HybridValues hybrid_values = bayesNet->optimize(); // This is the correct sequence as designed @@ -332,24 +302,6 @@ TEST(HybridEstimation, ProbabilityMultifrontal) { double between_sigma = 1.0, measurement_sigma = 0.1; - // For each discrete mode sequence, create the individual factor graphs and - // optimize each. 
- std::vector expected_errors, expected_prob_primes; - std::map> discrete_seq_map; - for (size_t i = 0; i < pow(2, K - 1); i++) { - discrete_seq_map[i] = getDiscreteSequence(i); - - GaussianFactorGraph::shared_ptr linear_graph = specificModesFactorGraph( - K, measurements, discrete_seq_map[i], measurement_sigma, between_sigma); - - auto bayes_tree = linear_graph->eliminateMultifrontal(); - - VectorValues values = bayes_tree->optimize(); - - expected_errors.push_back(linear_graph->error(values)); - expected_prob_primes.push_back(linear_graph->probPrime(values)); - } - // Switching example of robot moving in 1D with given measurements and equal // mode priors. Switching switching(K, between_sigma, measurement_sigma, measurements, @@ -373,25 +325,6 @@ TEST(HybridEstimation, ProbabilityMultifrontal) { auto last_conditional = (*bayesTree)[last_continuous_key]->conditional(); DiscreteKeys discrete_keys = last_conditional->discreteKeys(); - // Create a decision tree of all the different VectorValues - AlgebraicDecisionTree probPrimeTree = - graph.continuousProbPrimes(discrete_keys, bayesTree); - - EXPECT(assert_equal(expected_probPrimeTree, probPrimeTree)); - - // Test if the probPrimeTree matches the probability of - // the individual factor graphs - for (size_t i = 0; i < pow(2, K - 1); i++) { - Assignment discrete_assignment; - for (size_t v = 0; v < discrete_seq_map[i].size(); v++) { - discrete_assignment[M(v)] = discrete_seq_map[i][v]; - } - EXPECT_DOUBLES_EQUAL(expected_prob_primes.at(i), - probPrimeTree(discrete_assignment), 1e-8); - } - - discreteGraph->add(DecisionTreeFactor(discrete_keys, probPrimeTree)); - Ordering discrete(graph.discreteKeys()); auto discreteBayesTree = discreteGraph->BaseEliminateable::eliminateMultifrontal(discrete); @@ -483,10 +416,9 @@ TEST(HybridEstimation, eliminateSequentialRegression) { HybridBayesNet::shared_ptr bn = fg->eliminateSequential(ordering); // GTSAM_PRINT(*bn); - // TODO(dellaert): dc should be discrete conditional on m0, 
but it is an unnormalized factor? - // DiscreteKey m(M(0), 2); - // DiscreteConditional expected(m % "0.51341712/1"); - // auto dc = bn->back()->asDiscreteConditional(); + // TODO(dellaert): dc should be discrete conditional on m0, but it is an + // unnormalized factor? DiscreteKey m(M(0), 2); DiscreteConditional expected(m + // % "0.51341712/1"); auto dc = bn->back()->asDiscreteConditional(); // EXPECT(assert_equal(expected, *dc, 1e-9)); }