diff --git a/gtsam/hybrid/HybridSmoother.cpp b/gtsam/hybrid/HybridSmoother.cpp index 831a92d2f..65e9d9bd6 100644 --- a/gtsam/hybrid/HybridSmoother.cpp +++ b/gtsam/hybrid/HybridSmoother.cpp @@ -57,7 +57,8 @@ Ordering HybridSmoother::getOrdering(const HybridGaussianFactorGraph &factors, void HybridSmoother::update(const HybridGaussianFactorGraph &graph, std::optional maxNrLeaves, const std::optional given_ordering) { - std::cout << "hybridBayesNet_ size before: " << hybridBayesNet_.size() << std::endl; + std::cout << "hybridBayesNet_ size before: " << hybridBayesNet_.size() + << std::endl; std::cout << "newFactors size: " << graph.size() << std::endl; HybridGaussianFactorGraph updatedGraph; // Add the necessary conditionals from the previous timestep(s). @@ -65,8 +66,10 @@ void HybridSmoother::update(const HybridGaussianFactorGraph &graph, addConditionals(graph, hybridBayesNet_); // print size of graph, updatedGraph, hybridBayesNet_ std::cout << "updatedGraph size: " << updatedGraph.size() << std::endl; - std::cout << "hybridBayesNet_ size after: " << hybridBayesNet_.size() << std::endl; - std::cout << "total size: " << updatedGraph.size() + hybridBayesNet_.size() << std::endl; + std::cout << "hybridBayesNet_ size after: " << hybridBayesNet_.size() + << std::endl; + std::cout << "total size: " << updatedGraph.size() + hybridBayesNet_.size() + << std::endl; Ordering ordering; // If no ordering provided, then we compute one @@ -85,8 +88,9 @@ void HybridSmoother::update(const HybridGaussianFactorGraph &graph, HybridBayesNet bayesNetFragment = *updatedGraph.eliminateSequential(ordering); #ifdef DEBUG_SMOOTHER - for (auto conditional: bayesNetFragment) { - auto e =std::dynamic_pointer_cast(conditional); + for (auto conditional : bayesNetFragment) { + auto e = std::dynamic_pointer_cast( + conditional); GTSAM_PRINT(*e); } #endif @@ -101,7 +105,10 @@ void HybridSmoother::update(const HybridGaussianFactorGraph &graph, if (maxNrLeaves) { // `pruneBayesNet` sets the leaves with 
0 in discreteFactor to nullptr in // all the conditionals with the same keys in bayesNetFragment. - bayesNetFragment = bayesNetFragment.prune(*maxNrLeaves, marginalThreshold_); + DiscreteValues newlyFixedValues; + bayesNetFragment = bayesNetFragment.prune(*maxNrLeaves, marginalThreshold_, + &newlyFixedValues); + fixedValues_.insert(newlyFixedValues); } // Print discrete keys in the bayesNetFragment: @@ -112,8 +119,9 @@ void HybridSmoother::update(const HybridGaussianFactorGraph &graph, std::cout << std::endl << std::endl; #ifdef DEBUG_SMOOTHER - for (auto conditional: bayesNetFragment) { - auto c =std::dynamic_pointer_cast(conditional); + for (auto conditional : bayesNetFragment) { + auto c = std::dynamic_pointer_cast( + conditional); GTSAM_PRINT(*c); } #endif diff --git a/gtsam/hybrid/HybridSmoother.h b/gtsam/hybrid/HybridSmoother.h index 2f7bfcebb..653df3957 100644 --- a/gtsam/hybrid/HybridSmoother.h +++ b/gtsam/hybrid/HybridSmoother.h @@ -106,6 +106,18 @@ class GTSAM_EXPORT HybridSmoother { /// Return the Bayes Net posterior. const HybridBayesNet& hybridBayesNet() const; + + /// Optimize the hybrid Bayes Net, taking into account fixed values. + HybridValues optimize() const { + // Solve for the MPE + DiscreteValues mpe = hybridBayesNet_.mpe(); + + // Add fixed values to the MPE. + mpe.insert(fixedValues_); + + // Given the MPE, compute the optimal continuous values. + return HybridValues(hybridBayesNet_.optimize(mpe), mpe); + } }; } // namespace gtsam