diff --git a/gtsam/hybrid/GaussianMixtureFactor.h b/gtsam/hybrid/GaussianMixtureFactor.h
index 2088a7837..1325cfe93 100644
--- a/gtsam/hybrid/GaussianMixtureFactor.h
+++ b/gtsam/hybrid/GaussianMixtureFactor.h
@@ -143,6 +143,9 @@ class GTSAM_EXPORT GaussianMixtureFactor : public HybridFactor {
    */
   double error(const HybridValues &values) const override;
 
+  /// Getter for GaussianFactor decision tree
+  const Factors &factors() const { return factors_; }
+
   /// Add MixtureFactor to a Sum, syntactic sugar.
   friend GaussianFactorGraphTree &operator+=(
       GaussianFactorGraphTree &sum, const GaussianMixtureFactor &factor) {
diff --git a/gtsam/hybrid/HybridGaussianFactorGraph.cpp b/gtsam/hybrid/HybridGaussianFactorGraph.cpp
index aa29c924e..897d56272 100644
--- a/gtsam/hybrid/HybridGaussianFactorGraph.cpp
+++ b/gtsam/hybrid/HybridGaussianFactorGraph.cpp
@@ -190,8 +190,7 @@ discreteElimination(const HybridGaussianFactorGraph &factors,
 /* ************************************************************************ */
 // If any GaussianFactorGraph in the decision tree contains a nullptr, convert
 // that leaf to an empty GaussianFactorGraph. Needed since the DecisionTree will
-// otherwise create a GFG with a single (null) factor.
-// TODO(dellaert): still a mystery to me why this is needed.
+// otherwise create a GFG with a single (null) factor, which doesn't register as null.
 GaussianFactorGraphTree removeEmpty(const GaussianFactorGraphTree &sum) {
   auto emptyGaussian = [](const GaussianFactorGraph &graph) {
     bool hasNull =
@@ -275,9 +274,9 @@ hybridElimination(const HybridGaussianFactorGraph &factors,
   };
   DecisionTree<Key, double> probabilities(eliminationResults, probability);
 
-  return {std::make_shared<HybridConditional>(gaussianMixture),
-          std::make_shared<DecisionTreeFactor>(discreteSeparator,
-                                               probabilities)};
+  return {
+      std::make_shared<HybridConditional>(gaussianMixture),
+      std::make_shared<DecisionTreeFactor>(discreteSeparator, probabilities)};
 } else {
   // Otherwise, we create a resulting GaussianMixtureFactor on the separator,
   // taking care to correct for conditional constant.
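The new `factors()` accessor exposes the per-mode decision tree of Gaussian factors, which is what lets the `mixedVarianceFactor` helper in the test diff below rewrite each mixture component. A minimal sketch of that usage pattern, assuming only the getter above plus GTSAM's existing `DecisionTree::apply` and the `GaussianMixtureFactor` constructor taking a component tree; the `mapComponents` helper itself is hypothetical, not part of the patch:

#include <functional>
#include <gtsam/hybrid/GaussianMixtureFactor.h>
#include <gtsam/linear/GaussianFactor.h>

using namespace gtsam;

// Rebuild a GaussianMixtureFactor by mapping `op` over every component leaf.
GaussianMixtureFactor::shared_ptr mapComponents(
    const GaussianMixtureFactor::shared_ptr& gmf,
    const std::function<GaussianFactor::shared_ptr(
        const GaussianFactor::shared_ptr&)>& op) {
  // factors() is the getter added above; apply() visits each leaf of the tree.
  auto components = gmf->factors().apply(op);
  return std::make_shared<GaussianMixtureFactor>(
      gmf->continuousKeys(), gmf->discreteKeys(), components);
}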
diff --git a/gtsam/hybrid/HybridSmoother.cpp b/gtsam/hybrid/HybridSmoother.cpp
index 549c71714..43e75770d 100644
--- a/gtsam/hybrid/HybridSmoother.cpp
+++ b/gtsam/hybrid/HybridSmoother.cpp
@@ -45,7 +45,7 @@ Ordering HybridSmoother::getOrdering(
   std::copy(allDiscrete.begin(), allDiscrete.end(),
             std::back_inserter(newKeysDiscreteLast));
 
-  const VariableIndex index(newFactors);
+  const VariableIndex index(factors);
 
   // Get an ordering where the new keys are eliminated last
   Ordering ordering = Ordering::ColamdConstrainedLast(
diff --git a/gtsam/hybrid/tests/testHybridEstimation.cpp b/gtsam/hybrid/tests/testHybridEstimation.cpp
index 28f47232b..e74990fe6 100644
--- a/gtsam/hybrid/tests/testHybridEstimation.cpp
+++ b/gtsam/hybrid/tests/testHybridEstimation.cpp
@@ -17,6 +17,7 @@
 #include
 #include
+#include
 #include
 #include
 #include
 
@@ -35,14 +36,15 @@
 // Include for test suite
 #include <CppUnitLite/TestHarness.h>
 
-#include "Switching.h"
-
 #include
 
+#include "Switching.h"
+
 using namespace std;
 using namespace gtsam;
 
 using symbol_shorthand::X;
+using symbol_shorthand::Z;
 
 Ordering getOrdering(HybridGaussianFactorGraph& factors,
                      const HybridGaussianFactorGraph& newFactors) {
@@ -91,8 +93,7 @@ TEST(HybridEstimation, Full) {
     hybridOrdering.push_back(M(k));
   }
 
-  HybridBayesNet::shared_ptr bayesNet =
-      graph.eliminateSequential();
+  HybridBayesNet::shared_ptr bayesNet = graph.eliminateSequential();
 
   EXPECT_LONGS_EQUAL(2 * K - 1, bayesNet->size());
 
@@ -338,7 +339,6 @@ TEST(HybridEstimation, ProbabilityMultifrontal) {
   Switching switching(K, between_sigma, measurement_sigma, measurements,
                       "1/1 1/1");
   auto graph = switching.linearizedFactorGraph;
-  Ordering ordering = getOrdering(graph, HybridGaussianFactorGraph());
 
   // Get the tree of unnormalized probabilities for each mode sequence.
   AlgebraicDecisionTree<Key> expected_probPrimeTree = getProbPrimeTree(graph);
@@ -503,6 +503,179 @@ TEST(HybridEstimation, CorrectnessViaSampling) {
   }
 }
 
+/****************************************************************************/
+/**
+ * Helper function to add the constant term corresponding to
+ * the difference in noise models.
+ */
+std::shared_ptr<GaussianMixtureFactor> mixedVarianceFactor(
+    const MixtureFactor& mf, const Values& initial, const Key& mode,
+    double noise_tight, double noise_loose, size_t d, size_t tight_index) {
+  GaussianMixtureFactor::shared_ptr gmf = mf.linearize(initial);
+
+  constexpr double log2pi = 1.8378770664093454835606594728112;
+  // logConstant will be of the tighter model
+  double logNormalizationConstant = log(1.0 / noise_tight);
+  double logConstant = -0.5 * d * log2pi + logNormalizationConstant;
+
+  auto func = [&](const Assignment<Key>& assignment,
+                  const GaussianFactor::shared_ptr& gf) {
+    if (assignment.at(mode) != tight_index) {
+      double factor_log_constant = -0.5 * d * log2pi + log(1.0 / noise_loose);
+
+      GaussianFactorGraph _gfg;
+      _gfg.push_back(gf);
+      Vector c(d);
+      for (size_t i = 0; i < d; i++) {
+        c(i) = std::sqrt(2.0 * (logConstant - factor_log_constant));
+      }
+
+      _gfg.emplace_shared<JacobianFactor>(c);
+      return std::make_shared<JacobianFactor>(_gfg);
+    } else {
+      return dynamic_pointer_cast<JacobianFactor>(gf);
+    }
+  };
+  auto updated_components = gmf->factors().apply(func);
+  auto updated_gmf = std::make_shared<GaussianMixtureFactor>(
+      gmf->continuousKeys(), gmf->discreteKeys(), updated_components);
+
+  return updated_gmf;
+}
+
+/****************************************************************************/
+TEST(HybridEstimation, ModeSelection) {
+  HybridNonlinearFactorGraph graph;
+  Values initial;
+
+  auto measurement_model = noiseModel::Isotropic::Sigma(1, 0.1);
+  auto motion_model = noiseModel::Isotropic::Sigma(1, 1.0);
+
+  graph.emplace_shared<PriorFactor<double>>(X(0), 0.0, measurement_model);
+  graph.emplace_shared<PriorFactor<double>>(X(1), 0.0, measurement_model);
+
+  DiscreteKeys modes;
+  modes.emplace_back(M(0), 2);
+
+  // The size of the noise model
+  size_t d = 1;
+  double noise_tight = 0.5, noise_loose = 5.0;
+
+  auto model0 = std::make_shared<MotionModel>(
+           X(0), X(1), 0.0, noiseModel::Isotropic::Sigma(d, noise_loose)),
+       model1 = std::make_shared<MotionModel>(
+           X(0), X(1), 0.0, noiseModel::Isotropic::Sigma(d, noise_tight));
+
+  std::vector<NonlinearFactor::shared_ptr> components = {model0, model1};
+
+  KeyVector keys = {X(0), X(1)};
+  MixtureFactor mf(keys, modes, components);
+
+  initial.insert(X(0), 0.0);
+  initial.insert(X(1), 0.0);
+
+  auto gmf =
+      mixedVarianceFactor(mf, initial, M(0), noise_tight, noise_loose, d, 1);
+  graph.add(gmf);
+
+  auto gfg = graph.linearize(initial);
+
+  HybridBayesNet::shared_ptr bayesNet = gfg->eliminateSequential();
+
+  HybridValues delta = bayesNet->optimize();
+  EXPECT_LONGS_EQUAL(1, delta.discrete().at(M(0)));
+
+  /**************************************************************/
+  HybridBayesNet bn;
+  const DiscreteKey mode{M(0), 2};
+
+  bn.push_back(
+      GaussianConditional::sharedMeanAndStddev(Z(0), -I_1x1, X(0), Z_1x1, 0.1));
+  bn.push_back(
+      GaussianConditional::sharedMeanAndStddev(Z(0), -I_1x1, X(1), Z_1x1, 0.1));
+  bn.emplace_back(new GaussianMixture(
+      {Z(0)}, {X(0), X(1)}, {mode},
+      {GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), -I_1x1, X(1),
+                                                Z_1x1, noise_loose),
+       GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), -I_1x1, X(1),
+                                                Z_1x1, noise_tight)}));
+
+  VectorValues vv;
+  vv.insert(Z(0), Z_1x1);
+
+  auto fg = bn.toFactorGraph(vv);
+  auto expected_posterior = fg.eliminateSequential();
+
+  EXPECT(assert_equal(*expected_posterior, *bayesNet, 1e-6));
+}
+
+/****************************************************************************/
+TEST(HybridEstimation, ModeSelection2) {
+  using symbol_shorthand::Z;
+
+  // The size of the noise model
+  size_t d = 3;
+  double noise_tight = 0.5, noise_loose = 5.0;
+
+  HybridBayesNet bn;
+  const DiscreteKey mode{M(0), 2};
+
+  bn.push_back(
+      GaussianConditional::sharedMeanAndStddev(Z(0), -I_3x3, X(0), Z_3x1, 0.1));
+  bn.push_back(
+      GaussianConditional::sharedMeanAndStddev(Z(0), -I_3x3, X(1), Z_3x1, 0.1));
+  bn.emplace_back(new GaussianMixture(
+      {Z(0)}, {X(0), X(1)}, {mode},
+      {GaussianConditional::sharedMeanAndStddev(Z(0), I_3x3, X(0), -I_3x3, X(1),
+                                                Z_3x1, noise_loose),
+       GaussianConditional::sharedMeanAndStddev(Z(0), I_3x3, X(0), -I_3x3, X(1),
+                                                Z_3x1, noise_tight)}));
+
+  VectorValues vv;
+  vv.insert(Z(0), Z_3x1);
+
+  auto fg = bn.toFactorGraph(vv);
+
+  auto expected_posterior = fg.eliminateSequential();
+
+  // =====================================
+
+  HybridNonlinearFactorGraph graph;
+  Values initial;
+
+  auto measurement_model = noiseModel::Isotropic::Sigma(d, 0.1);
+  auto motion_model = noiseModel::Isotropic::Sigma(d, 1.0);
+
+  graph.emplace_shared<PriorFactor<Vector3>>(X(0), Z_3x1, measurement_model);
+  graph.emplace_shared<PriorFactor<Vector3>>(X(1), Z_3x1, measurement_model);
+
+  DiscreteKeys modes;
+  modes.emplace_back(M(0), 2);
+
+  auto model0 = std::make_shared<BetweenFactor<Vector3>>(
+           X(0), X(1), Z_3x1, noiseModel::Isotropic::Sigma(d, noise_loose)),
+       model1 = std::make_shared<BetweenFactor<Vector3>>(
+           X(0), X(1), Z_3x1, noiseModel::Isotropic::Sigma(d, noise_tight));
+
+  std::vector<NonlinearFactor::shared_ptr> components = {model0, model1};
+
+  KeyVector keys = {X(0), X(1)};
+  MixtureFactor mf(keys, modes, components);
+
+  initial.insert(X(0), Z_3x1);
+  initial.insert(X(1), Z_3x1);
+
+  auto gmf =
+      mixedVarianceFactor(mf, initial, M(0), noise_tight, noise_loose, d, 1);
+  graph.add(gmf);
+
+  auto gfg = graph.linearize(initial);
+
+  HybridBayesNet::shared_ptr bayesNet = gfg->eliminateSequential();
+
+  EXPECT(assert_equal(*expected_posterior, *bayesNet, 1e-6));
+}
+
 /* ************************************************************************* */
 int main() {
   TestResult tr;
diff --git a/gtsam/linear/HessianFactor.h b/gtsam/linear/HessianFactor.h
index eb1fe2de2..8cbabcd5d 100644
--- a/gtsam/linear/HessianFactor.h
+++ b/gtsam/linear/HessianFactor.h
@@ -131,7 +131,7 @@ namespace gtsam {
    * term, and f the constant term.
    * JacobianFactor error is \f[ 0.5* (Ax-b)' M (Ax-b) = 0.5*x'A'MAx - x'A'Mb + 0.5*b'Mb \f]
    * HessianFactor error is \f[ 0.5*(x'Gx - 2x'g + f) = 0.5*x'Gx - x'*g + 0.5*f \f]
-   * So, with \f$ A = [A1 A2] \f$ and \f$ G=A*'M*A = [A1';A2']*M*[A1 A2] \f$ we have
+   * So, with \f$ A = [A1 A2] \f$ and \f$ G=A'*M*A = [A1';A2']*M*[A1 A2] \f$ we have
   \code
     n1*n1 G11 = A1'*M*A1
     n1*n2 G12 = A1'*M*A2
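Stepping back from the diffs: the constant padding in `mixedVarianceFactor` above works because a `JacobianFactor` holding only a right-hand-side vector c contributes error 0.5*||c||^2. A sketch of the algebra (my summary, using the helper's own definition log K = -d/2 * log(2*pi) + log(1/sigma), not text from the patch):

\[
\frac{1}{2}\lVert c \rVert^2
  = \frac{1}{2}\sum_{i=1}^{d} 2\,(\log K_{\text{tight}} - \log K_{\text{loose}})
  = d \, \log \frac{\sigma_{\text{loose}}}{\sigma_{\text{tight}}},
\]

which is exactly the gap between the log-normalization constants of the two d-dimensional isotropic noise models. Padding the looser component by this amount puts both hypotheses on an even footing, so the discrete posterior computed in the ModeSelection tests matches the GaussianMixture ground truth.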