diff --git a/gtsam/hybrid/HybridSmoother.cpp b/gtsam/hybrid/HybridSmoother.cpp
index de26bad7e..56c62cf19 100644
--- a/gtsam/hybrid/HybridSmoother.cpp
+++ b/gtsam/hybrid/HybridSmoother.cpp
@@ -57,8 +57,16 @@ Ordering HybridSmoother::getOrdering(
 
 /* ************************************************************************* */
 void HybridSmoother::update(HybridGaussianFactorGraph graph,
-                            const Ordering &ordering,
-                            std::optional<size_t> maxNrLeaves) {
+                            std::optional<size_t> maxNrLeaves,
+                            const std::optional<Ordering> given_ordering) {
+  Ordering ordering;
+  // If no ordering provided, then we compute one
+  if (!given_ordering.has_value()) {
+    ordering = this->getOrdering(graph);
+  } else {
+    ordering = *given_ordering;
+  }
+
   // Add the necessary conditionals from the previous timestep(s).
   std::tie(graph, hybridBayesNet_) =
       addConditionals(graph, hybridBayesNet_, ordering);
diff --git a/gtsam/hybrid/HybridSmoother.h b/gtsam/hybrid/HybridSmoother.h
index 0494834cd..0767da12f 100644
--- a/gtsam/hybrid/HybridSmoother.h
+++ b/gtsam/hybrid/HybridSmoother.h
@@ -44,13 +44,14 @@ class HybridSmoother {
    * corresponding to the pruned choices.
    *
    * @param graph The new factors, should be linear only
-   * @param ordering The ordering for elimination, only continuous vars are
-   * allowed
    * @param maxNrLeaves The maximum number of leaves in the new discrete factor,
    * if applicable
+   * @param given_ordering The (optional) ordering for elimination, only
+   * continuous variables are allowed
    */
-  void update(HybridGaussianFactorGraph graph, const Ordering& ordering,
-              std::optional<size_t> maxNrLeaves = {});
+  void update(HybridGaussianFactorGraph graph,
+              std::optional<size_t> maxNrLeaves = {},
+              const std::optional<Ordering> given_ordering = {});
 
   Ordering getOrdering(const HybridGaussianFactorGraph& newFactors);
 
@@ -74,4 +75,4 @@ class HybridSmoother {
   const HybridBayesNet& hybridBayesNet() const;
 };
 
-};  // namespace gtsam
+}  // namespace gtsam
diff --git a/gtsam/hybrid/tests/testHybridEstimation.cpp b/gtsam/hybrid/tests/testHybridEstimation.cpp
index e74990fe6..b5f5244fa 100644
--- a/gtsam/hybrid/tests/testHybridEstimation.cpp
+++ b/gtsam/hybrid/tests/testHybridEstimation.cpp
@@ -46,35 +46,6 @@ using namespace gtsam;
 using symbol_shorthand::X;
 using symbol_shorthand::Z;
 
-Ordering getOrdering(HybridGaussianFactorGraph& factors,
-                     const HybridGaussianFactorGraph& newFactors) {
-  factors.push_back(newFactors);
-  // Get all the discrete keys from the factors
-  KeySet allDiscrete = factors.discreteKeySet();
-
-  // Create KeyVector with continuous keys followed by discrete keys.
-  KeyVector newKeysDiscreteLast;
-  const KeySet newFactorKeys = newFactors.keys();
-  // Insert continuous keys first.
-  for (auto& k : newFactorKeys) {
-    if (!allDiscrete.exists(k)) {
-      newKeysDiscreteLast.push_back(k);
-    }
-  }
-
-  // Insert discrete keys at the end
-  std::copy(allDiscrete.begin(), allDiscrete.end(),
-            std::back_inserter(newKeysDiscreteLast));
-
-  const VariableIndex index(factors);
-
-  // Get an ordering where the new keys are eliminated last
-  Ordering ordering = Ordering::ColamdConstrainedLast(
-      index, KeyVector(newKeysDiscreteLast.begin(), newKeysDiscreteLast.end()),
-      true);
-  return ordering;
-}
-
 TEST(HybridEstimation, Full) {
   size_t K = 6;
   std::vector<double> measurements = {0, 1, 2, 2, 2, 3};
@@ -117,7 +88,7 @@ TEST(HybridEstimation, Full) {
 
 /****************************************************************************/
 // Test approximate inference with an additional pruning step.
-TEST(HybridEstimation, Incremental) {
+TEST(HybridEstimation, IncrementalSmoother) {
   size_t K = 15;
   std::vector<double> measurements = {0, 1, 2, 2, 2, 2, 3,  4,  5,  6,  6,
                                       7, 8, 9, 9, 9, 10, 11, 11, 11, 11};
@@ -136,7 +107,6 @@ TEST(HybridEstimation, Incremental) {
   initial.insert(X(0), switching.linearizationPoint.at(X(0)));
 
   HybridGaussianFactorGraph linearized;
-  HybridGaussianFactorGraph bayesNet;
 
   for (size_t k = 1; k < K; k++) {
     // Motion Model
@@ -146,11 +116,10 @@ TEST(HybridEstimation, Incremental) {
 
     initial.insert(X(k), switching.linearizationPoint.at(X(k)));
 
-    bayesNet = smoother.hybridBayesNet();
    linearized = *graph.linearize(initial);
-    Ordering ordering = getOrdering(bayesNet, linearized);
+    Ordering ordering = smoother.getOrdering(linearized);
 
-    smoother.update(linearized, ordering, 3);
+    smoother.update(linearized, 3, ordering);
     graph.resize(0);
   }
 
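For reference, a minimal usage sketch of the revised update() API. It is not part of the patch: the two helper functions are hypothetical, and only the GTSAM names that appear in the diff (HybridSmoother, HybridGaussianFactorGraph, Ordering, update, getOrdering) are assumed.

#include <gtsam/hybrid/HybridGaussianFactorGraph.h>
#include <gtsam/hybrid/HybridSmoother.h>
#include <gtsam/inference/Ordering.h>

using namespace gtsam;

// Hypothetical helper: run one smoother step, letting update() compute the
// elimination ordering itself and pruning to at most 3 discrete leaves.
void updateWithDefaultOrdering(HybridSmoother& smoother,
                               const HybridGaussianFactorGraph& linearized) {
  smoother.update(linearized, 3);
}

// Hypothetical helper: supply an explicit ordering (continuous variables
// only), as the updated test does. Note the new argument order:
// (graph, maxNrLeaves, ordering).
void updateWithGivenOrdering(HybridSmoother& smoother,
                             const HybridGaussianFactorGraph& linearized) {
  Ordering ordering = smoother.getOrdering(linearized);
  smoother.update(linearized, 3, ordering);
}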