diff --git a/gtsam/hybrid/HybridSmoother.cpp b/gtsam/hybrid/HybridSmoother.cpp
index 5a070db16..0c09b6e21 100644
--- a/gtsam/hybrid/HybridSmoother.cpp
+++ b/gtsam/hybrid/HybridSmoother.cpp
@@ -120,6 +120,28 @@ HybridSmoother::addConditionals(const HybridGaussianFactorGraph &originalGraph,
   // NOTE(Varun) Using a for-range loop doesn't work since some of the
   // conditionals are invalid pointers
+
+  // First get all the keys involved.
+  // We do this by iterating over all conditionals, and checking if their
+  // frontals are involved in the factor graph. If yes, then also make the
+  // parent keys involved in the factor graph.
+  for (size_t i = 0; i < hybridBayesNet.size(); i++) {
+    auto conditional = hybridBayesNet.at(i);
+
+    for (auto &key : conditional->frontals()) {
+      if (std::find(factorKeys.begin(), factorKeys.end(), key) !=
+          factorKeys.end()) {
+        // Add the conditional parents to factorKeys
+        // so we add those conditionals too.
+        for (auto &&parentKey : conditional->parents()) {
+          factorKeys.insert(parentKey);
+        }
+        // Break so we don't add parents twice.
+        break;
+      }
+    }
+  }
+
   for (size_t i = 0; i < hybridBayesNet.size(); i++) {
     auto conditional = hybridBayesNet.at(i);
@@ -128,14 +150,6 @@ HybridSmoother::addConditionals(const HybridGaussianFactorGraph &originalGraph,
         factorKeys.end()) {
       newConditionals.push_back(conditional);
 
-      // Add the conditional parents to factorKeys
-      // so we add those conditionals too.
-      // NOTE: This assumes we have a structure where
-      // variables depend on those in the future.
-      for (auto &&parentKey : conditional->parents()) {
-        factorKeys.insert(parentKey);
-      }
-
       // Remove the conditional from the updated Bayes net
       auto it = find(updatedHybridBayesNet.begin(),
                      updatedHybridBayesNet.end(), conditional);
diff --git a/gtsam/hybrid/tests/testHybridSmoother.cpp b/gtsam/hybrid/tests/testHybridSmoother.cpp
index 3a0f376cc..893740b7c 100644
--- a/gtsam/hybrid/tests/testHybridSmoother.cpp
+++ b/gtsam/hybrid/tests/testHybridSmoother.cpp
@@ -39,6 +39,7 @@
 using namespace std;
 using namespace gtsam;
 
+using symbol_shorthand::L;
 using symbol_shorthand::X;
 using symbol_shorthand::Z;
 
@@ -69,6 +70,56 @@ Switching InitializeEstimationProblem(
 }  // namespace estimation_fixture
 
+/****************************************************************************/
+// Test HybridSmoother::addConditionals
+TEST(HybridSmoother, AddConditionals) {
+  using namespace estimation_fixture;
+
+  size_t K = 7;
+
+  // Switching example of robot moving in 1D
+  // with given measurements and equal mode priors.
+  HybridNonlinearFactorGraph graph;
+  Values initial;
+  Switching switching = InitializeEstimationProblem(
+      K, 1.0, 0.1, measurements, "1/1 1/1", &graph, &initial);
+
+  HybridSmoother smoother(0.5);
+  constexpr size_t maxNrLeaves = 5;
+
+  // Loop over timesteps from 1...K-1
+  for (size_t k = 1; k < K; k++) {
+    if (k > 1) graph.push_back(switching.modeChain.at(k - 1));  // Mode chain
+    graph.push_back(switching.binaryFactors.at(k - 1));         // Motion Model
+    graph.push_back(switching.unaryFactors.at(k));              // Measurement
+
+    // Add a loop closure factor every 3rd step between `k-3` and `k`.
+    if (k % 3 == 0) {
+      size_t l = int(k / 3);
+      auto open = std::make_shared<BetweenFactor<double>>(
+               X(k - 3), X(k), 0.0, noiseModel::Isotropic::Sigma(1, 10.0)),
+           close = std::make_shared<BetweenFactor<double>>(
+               X(k - 3), X(k), 0.0, noiseModel::Isotropic::Sigma(1, 0.1));
+
+      HybridNonlinearFactor loopFactor(
+          DiscreteKey(L(l), 2),
+          std::vector<NoiseModelFactor::shared_ptr>{open, close});
+      graph.push_back(loopFactor);
+    }
+
+    initial.insert(X(k), switching.linearizationPoint.at<double>(X(k)));
+
+    HybridGaussianFactorGraph linearized = *graph.linearize(initial);
+
+    smoother.update(linearized, maxNrLeaves);
+
+    // Clear all the factors from the graph
+    graph.resize(0);
+  }
+
+  // TODO(Varun) Recreate scenario which would fail before commit b627fc422.
+}
+
 /****************************************************************************/
 // Test approximate inference with an additional pruning step.
 TEST(HybridSmoother, IncrementalSmoother) {
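
Reviewer note: the patch hoists the parent-key gathering into its own first
pass, so the second pass can select conditionals without growing factorKeys
mid-selection. Below is a minimal standalone sketch of that first pass, using
plain STL containers in place of the GTSAM types; Conditional,
gatherInvolvedKeys, and the integer keys are illustrative stand-ins, not
GTSAM API.

#include <iostream>
#include <set>
#include <vector>

// Illustrative stand-in for a Bayes-net conditional P(frontals | parents).
struct Conditional {
  std::vector<int> frontals;  // variables this conditional is over
  std::vector<int> parents;   // variables it is conditioned on
};

// Pass 1 of the patch, in miniature: for every conditional whose frontals
// intersect factorKeys, add its parents to factorKeys (once per conditional).
void gatherInvolvedKeys(const std::vector<Conditional>& bayesNet,
                        std::set<int>* factorKeys) {
  for (const auto& c : bayesNet) {
    for (int key : c.frontals) {
      if (factorKeys->count(key)) {
        factorKeys->insert(c.parents.begin(), c.parents.end());
        break;  // parents already added; avoid inserting them twice
      }
    }
  }
}

int main() {
  // Two conditionals, P(0 | 1) and P(1 | 2); the new factors touch key 0.
  std::vector<Conditional> bayesNet = {{{0}, {1}}, {{1}, {2}}};
  std::set<int> factorKeys = {0};
  gatherInvolvedKeys(bayesNet, &factorKeys);
  for (int k : factorKeys) std::cout << k << " ";  // prints: 0 1 2
  std::cout << "\n";
}

A single forward pass suffices only because the conditionals are ordered so
that a variable's parents appear as frontals of later conditionals, the same
ordering assumption the NOTE removed by this patch described: key 2 above is
only picked up because P(1 | 2) is visited after key 1 has been inserted.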