Merge branch 'fix-addConditionals' into city10000

release/4.3a0
Varun Agrawal 2025-01-29 18:44:41 -05:00
commit 9b81f0caa9
2 changed files with 73 additions and 8 deletions


@@ -120,6 +120,28 @@ HybridSmoother::addConditionals(const HybridGaussianFactorGraph &originalGraph,
  // NOTE(Varun) Using a for-range loop doesn't work since some of the
  // conditionals are invalid pointers

  // First get all the keys involved.
  // We do this by iterating over all conditionals, and checking if their
  // frontals are involved in the factor graph. If yes, then also make the
  // parent keys involved in the factor graph.
  for (size_t i = 0; i < hybridBayesNet.size(); i++) {
    auto conditional = hybridBayesNet.at(i);

    for (auto &key : conditional->frontals()) {
      if (std::find(factorKeys.begin(), factorKeys.end(), key) !=
          factorKeys.end()) {
        // Add the conditional parents to factorKeys
        // so we add those conditionals too.
        for (auto &&parentKey : conditional->parents()) {
          factorKeys.insert(parentKey);
        }

        // Break so we don't add parents twice.
        break;
      }
    }
  }

  for (size_t i = 0; i < hybridBayesNet.size(); i++) {
    auto conditional = hybridBayesNet.at(i);
@@ -128,14 +150,6 @@ HybridSmoother::addConditionals(const HybridGaussianFactorGraph &originalGraph,
        factorKeys.end()) {
      newConditionals.push_back(conditional);
      // Add the conditional parents to factorKeys
      // so we add those conditionals too.
      // NOTE: This assumes we have a structure where
      // variables depend on those in the future.
      for (auto &&parentKey : conditional->parents()) {
        factorKeys.insert(parentKey);
      }

      // Remove the conditional from the updated Bayes net
      auto it = find(updatedHybridBayesNet.begin(),
                     updatedHybridBayesNet.end(), conditional);

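In short, the added first loop grows the set of keys touched by the new factors with the parents of any conditional whose frontal is already involved, and the later loop then re-adds every conditional whose frontal appears in the expanded set. The snippet below is a minimal standalone sketch of that two-pass idea, using a toy Conditional struct and plain std containers rather than GTSAM's hybrid types; as the NOTE in the second hunk points out, a single expansion pass like this is only sufficient when the Bayes net is ordered so that variables depend on those in the future.

// Standalone sketch of the two-pass selection (toy types, not GTSAM's).
#include <cstddef>
#include <iostream>
#include <set>
#include <vector>

struct Conditional {
  size_t frontal;               // variable this conditional is on
  std::vector<size_t> parents;  // variables it is conditioned on
};

int main() {
  // Toy Bayes net ordered newest-first: P(2|1) P(1|0) P(0).
  std::vector<Conditional> bayesNet = {{2, {1}}, {1, {0}}, {0, {}}};

  // Keys touched by the incoming factors; suppose only key 2 is involved.
  std::set<size_t> factorKeys = {2};

  // Pass 1: if a conditional's frontal is involved, involve its parents too.
  for (const auto &c : bayesNet) {
    if (factorKeys.count(c.frontal)) {
      factorKeys.insert(c.parents.begin(), c.parents.end());
    }
  }

  // Pass 2: keep every conditional whose frontal is now involved.
  std::vector<Conditional> newConditionals;
  for (const auto &c : bayesNet) {
    if (factorKeys.count(c.frontal)) newConditionals.push_back(c);
  }

  std::cout << "kept " << newConditionals.size() << " conditionals\n";  // kept 3
  return 0;
}

In the actual smoother the same walk runs over hybridBayesNet, with conditional->frontals() and conditional->parents() supplying the keys and std::find over factorKeys playing the role of the count() lookup here.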

@@ -39,6 +39,7 @@
using namespace std;
using namespace gtsam;
using symbol_shorthand::L;
using symbol_shorthand::X;
using symbol_shorthand::Z;
@@ -69,6 +70,56 @@ Switching InitializeEstimationProblem(
} // namespace estimation_fixture
/****************************************************************************/
// Test HybridSmoother::addConditionals
TEST(HybridSmoother, AddConditionals) {
  using namespace estimation_fixture;

  size_t K = 7;

  // Switching example of robot moving in 1D
  // with given measurements and equal mode priors.
  HybridNonlinearFactorGraph graph;
  Values initial;
  Switching switching = InitializeEstimationProblem(
      K, 1.0, 0.1, measurements, "1/1 1/1", &graph, &initial);

  HybridSmoother smoother(0.5);

  constexpr size_t maxNrLeaves = 5;

  // Loop over timesteps from 1...K-1
  for (size_t k = 1; k < K; k++) {
    if (k > 1) graph.push_back(switching.modeChain.at(k - 1));  // Mode chain
    graph.push_back(switching.binaryFactors.at(k - 1));         // Motion Model
    graph.push_back(switching.unaryFactors.at(k));              // Measurement

    // Add a loop closure factor every 3rd step between `k-3` and `k`.
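    // With K = 7 this yields two closures: L(1) between X(0) and X(3) at
    // k = 3, and L(2) between X(3) and X(6) at k = 6.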
    if (k % 3 == 0) {
      size_t l = int(k / 3);

      auto open = std::make_shared<MotionModel>(
               X(k - 3), X(k), 0.0, noiseModel::Isotropic::Sigma(1, 10.0)),
           close = std::make_shared<MotionModel>(
               X(k - 3), X(k), 0.0, noiseModel::Isotropic::Sigma(1, 0.1));

      HybridNonlinearFactor loopFactor(
          DiscreteKey(L(l), 2),
          std::vector<NoiseModelFactor::shared_ptr>{open, close});
      graph.push_back(loopFactor);
    }

    initial.insert(X(k), switching.linearizationPoint.at<double>(X(k)));

    HybridGaussianFactorGraph linearized = *graph.linearize(initial);
    smoother.update(linearized, maxNrLeaves);

    // Clear all the factors from the graph
    graph.resize(0);
  }

  // TODO(Varun) recreate scenario which would fail before commit b627fc422.
}
/****************************************************************************/
// Test approximate inference with an additional pruning step.
TEST(HybridSmoother, IncrementalSmoother) {