diff --git a/gtsam/hybrid/HybridGaussianConditional.cpp b/gtsam/hybrid/HybridGaussianConditional.cpp index 2b2062e6f..8fce251cc 100644 --- a/gtsam/hybrid/HybridGaussianConditional.cpp +++ b/gtsam/hybrid/HybridGaussianConditional.cpp @@ -55,24 +55,6 @@ HybridGaussianConditional::conditionals() const { return conditionals_; } -/* *******************************************************************************/ -HybridGaussianConditional::HybridGaussianConditional( - KeyVector &&continuousFrontals, KeyVector &&continuousParents, - DiscreteKeys &&discreteParents, - std::vector &&conditionals) - : HybridGaussianConditional(continuousFrontals, continuousParents, - discreteParents, - Conditionals(discreteParents, conditionals)) {} - -/* *******************************************************************************/ -HybridGaussianConditional::HybridGaussianConditional( - const KeyVector &continuousFrontals, const KeyVector &continuousParents, - const DiscreteKeys &discreteParents, - const std::vector &conditionals) - : HybridGaussianConditional(continuousFrontals, continuousParents, - discreteParents, - Conditionals(discreteParents, conditionals)) {} - /* *******************************************************************************/ // TODO(dellaert): This is copy/paste: HybridGaussianConditional should be // derived from HybridGaussianFactor, no? 
diff --git a/gtsam/hybrid/HybridGaussianConditional.h b/gtsam/hybrid/HybridGaussianConditional.h index fc315c6f9..2c132d3bf 100644 --- a/gtsam/hybrid/HybridGaussianConditional.h +++ b/gtsam/hybrid/HybridGaussianConditional.h @@ -106,32 +106,6 @@ class GTSAM_EXPORT HybridGaussianConditional const DiscreteKeys &discreteParents, const Conditionals &conditionals); - /** - * @brief Make a Gaussian Mixture from a list of Gaussian conditionals - * - * @param continuousFrontals The continuous frontal variables - * @param continuousParents The continuous parent variables - * @param discreteParents Discrete parents variables - * @param conditionals List of conditionals - */ - HybridGaussianConditional( - KeyVector &&continuousFrontals, KeyVector &&continuousParents, - DiscreteKeys &&discreteParents, - std::vector &&conditionals); - - /** - * @brief Make a Gaussian Mixture from a list of Gaussian conditionals - * - * @param continuousFrontals The continuous frontal variables - * @param continuousParents The continuous parent variables - * @param discreteParents Discrete parents variables - * @param conditionals List of conditionals - */ - HybridGaussianConditional( - const KeyVector &continuousFrontals, const KeyVector &continuousParents, - const DiscreteKeys &discreteParents, - const std::vector &conditionals); - /// @} /// @name Testable /// @{ @@ -273,7 +247,7 @@ class GTSAM_EXPORT HybridGaussianConditional #endif }; -/// Return the DiscreteKey vector as a set. +/// Return the DiscreteKeys vector as a set. std::set DiscreteKeysAsSet(const DiscreteKeys &discreteKeys); // traits diff --git a/gtsam/hybrid/HybridGaussianFactor.cpp b/gtsam/hybrid/HybridGaussianFactor.cpp index bfebb064e..cabe6defc 100644 --- a/gtsam/hybrid/HybridGaussianFactor.cpp +++ b/gtsam/hybrid/HybridGaussianFactor.cpp @@ -42,9 +42,11 @@ HybridGaussianFactor::Factors augment( const HybridGaussianFactor::FactorValuePairs &factors) { // Find the minimum value so we can "proselytize" to positive values. 
// Done because we can't have sqrt of negative numbers. - auto unzipped_pair = unzip(factors); - const HybridGaussianFactor::Factors gaussianFactors = unzipped_pair.first; - const AlgebraicDecisionTree valueTree = unzipped_pair.second; + HybridGaussianFactor::Factors gaussianFactors; + AlgebraicDecisionTree valueTree; + std::tie(gaussianFactors, valueTree) = unzip(factors); + + // Normalize double min_value = valueTree.min(); AlgebraicDecisionTree values = valueTree.apply([&min_value](double n) { return n - min_value; }); diff --git a/gtsam/hybrid/HybridGaussianFactor.h b/gtsam/hybrid/HybridGaussianFactor.h index 333c5d25b..5f9cd925e 100644 --- a/gtsam/hybrid/HybridGaussianFactor.h +++ b/gtsam/hybrid/HybridGaussianFactor.h @@ -96,15 +96,15 @@ class GTSAM_EXPORT HybridGaussianFactor : public HybridFactor { * GaussianFactor shared pointers. * * @param continuousKeys Vector of keys for continuous factors. - * @param discreteKeys Vector of discrete keys. + * @param discreteKey The discrete key to index each component. * @param factors Vector of gaussian factor shared pointers - * and arbitrary scalars. + * and arbitrary scalars. Same size as the cardinality of discreteKey. */ HybridGaussianFactor(const KeyVector &continuousKeys, - const DiscreteKeys &discreteKeys, + const DiscreteKey &discreteKey, const std::vector &factors) - : HybridGaussianFactor(continuousKeys, discreteKeys, - FactorValuePairs(discreteKeys, factors)) {} + : HybridGaussianFactor(continuousKeys, {discreteKey}, + FactorValuePairs({discreteKey}, factors)) {} /// @} /// @name Testable diff --git a/gtsam/hybrid/HybridNonlinearFactor.h b/gtsam/hybrid/HybridNonlinearFactor.h index 742b4c162..1120fc948 100644 --- a/gtsam/hybrid/HybridNonlinearFactor.h +++ b/gtsam/hybrid/HybridNonlinearFactor.h @@ -92,17 +92,18 @@ class HybridNonlinearFactor : public HybridFactor { * @tparam FACTOR The type of the factor shared pointers being passed in. * Will be typecast to NonlinearFactor shared pointers. 
* @param keys Vector of keys for continuous factors. - * @param discreteKeys Vector of discrete keys. + * @param discreteKey The discrete key indexing each component factor. * @param factors Vector of nonlinear factor and scalar pairs. + * Same size as the cardinality of discreteKey. * @param normalized Flag indicating if the factor error is already * normalized. */ template HybridNonlinearFactor( - const KeyVector& keys, const DiscreteKeys& discreteKeys, + const KeyVector& keys, const DiscreteKey& discreteKey, const std::vector, double>>& factors, bool normalized = false) - : Base(keys, discreteKeys), normalized_(normalized) { + : Base(keys, {discreteKey}), normalized_(normalized) { std::vector nonlinear_factors; KeySet continuous_keys_set(keys.begin(), keys.end()); KeySet factor_keys_set; @@ -118,7 +119,7 @@ class HybridNonlinearFactor : public HybridFactor { "Factors passed into HybridNonlinearFactor need to be nonlinear!"); } } - factors_ = Factors(discreteKeys, nonlinear_factors); + factors_ = Factors({discreteKey}, nonlinear_factors); if (continuous_keys_set != factor_keys_set) { throw std::runtime_error( @@ -140,7 +141,7 @@ class HybridNonlinearFactor : public HybridFactor { auto errorFunc = [continuousValues](const std::pair& f) { auto [factor, val] = f; - return factor->error(continuousValues) + (0.5 * val * val); + return factor->error(continuousValues) + (0.5 * val); }; DecisionTree result(factors_, errorFunc); return result; @@ -159,7 +160,7 @@ class HybridNonlinearFactor : public HybridFactor { auto [factor, val] = factors_(discreteValues); // Compute the error for the selected factor const double factorError = factor->error(continuousValues); - return factorError + (0.5 * val * val); + return factorError + (0.5 * val); } /** diff --git a/gtsam/hybrid/tests/Switching.h b/gtsam/hybrid/tests/Switching.h index 9b7c44d02..f18c19559 100644 --- a/gtsam/hybrid/tests/Switching.h +++ b/gtsam/hybrid/tests/Switching.h @@ -57,15 +57,16 @@ inline 
HybridGaussianFactorGraph::shared_ptr makeSwitchingChain( // keyFunc(1) to keyFunc(n+1) for (size_t t = 1; t < n; t++) { - std::vector components = { - {std::make_shared(keyFunc(t), I_3x3, keyFunc(t + 1), - I_3x3, Z_3x1), - 0.0}, - {std::make_shared(keyFunc(t), I_3x3, keyFunc(t + 1), - I_3x3, Vector3::Ones()), - 0.0}}; - hfg.add(HybridGaussianFactor({keyFunc(t), keyFunc(t + 1)}, - {{dKeyFunc(t), 2}}, components)); + DiscreteKeys dKeys{{dKeyFunc(t), 2}}; + HybridGaussianFactor::FactorValuePairs components( + dKeys, {{std::make_shared(keyFunc(t), I_3x3, + keyFunc(t + 1), I_3x3, Z_3x1), + 0.0}, + {std::make_shared( + keyFunc(t), I_3x3, keyFunc(t + 1), I_3x3, Vector3::Ones()), + 0.0}}); + hfg.add( + HybridGaussianFactor({keyFunc(t), keyFunc(t + 1)}, dKeys, components)); if (t > 1) { hfg.add(DecisionTreeFactor({{dKeyFunc(t - 1), 2}, {dKeyFunc(t), 2}}, @@ -167,8 +168,8 @@ struct Switching { components.push_back( {std::dynamic_pointer_cast(f), 0.0}); } - nonlinearFactorGraph.emplace_shared( - keys, DiscreteKeys{modes[k]}, components); + nonlinearFactorGraph.emplace_shared(keys, modes[k], + components); } // Add measurement factors diff --git a/gtsam/hybrid/tests/TinyHybridExample.h b/gtsam/hybrid/tests/TinyHybridExample.h index 00af28308..b2b944f91 100644 --- a/gtsam/hybrid/tests/TinyHybridExample.h +++ b/gtsam/hybrid/tests/TinyHybridExample.h @@ -43,12 +43,13 @@ inline HybridBayesNet createHybridBayesNet(size_t num_measurements = 1, // Create Gaussian mixture z_i = x0 + noise for each measurement. for (size_t i = 0; i < num_measurements; i++) { const auto mode_i = manyModes ? 
DiscreteKey{M(i), 2} : mode; + DiscreteKeys modes{mode_i}; + std::vector conditionals{ + GaussianConditional::sharedMeanAndStddev(Z(i), I_1x1, X(0), Z_1x1, 0.5), + GaussianConditional::sharedMeanAndStddev(Z(i), I_1x1, X(0), Z_1x1, 3)}; bayesNet.emplace_shared( KeyVector{Z(i)}, KeyVector{X(0)}, DiscreteKeys{mode_i}, - std::vector{GaussianConditional::sharedMeanAndStddev(Z(i), I_1x1, X(0), - Z_1x1, 0.5), - GaussianConditional::sharedMeanAndStddev(Z(i), I_1x1, X(0), - Z_1x1, 3)}); + HybridGaussianConditional::Conditionals(modes, conditionals)); } // Create prior on X(0). diff --git a/gtsam/hybrid/tests/testHybridBayesNet.cpp b/gtsam/hybrid/tests/testHybridBayesNet.cpp index 99487a84a..462ce8625 100644 --- a/gtsam/hybrid/tests/testHybridBayesNet.cpp +++ b/gtsam/hybrid/tests/testHybridBayesNet.cpp @@ -107,9 +107,11 @@ TEST(HybridBayesNet, evaluateHybrid) { // Create hybrid Bayes net. HybridBayesNet bayesNet; bayesNet.push_back(continuousConditional); + DiscreteKeys discreteParents{Asia}; bayesNet.emplace_shared( - KeyVector{X(1)}, KeyVector{}, DiscreteKeys{Asia}, - std::vector{conditional0, conditional1}); + KeyVector{X(1)}, KeyVector{}, discreteParents, + HybridGaussianConditional::Conditionals( + discreteParents, std::vector{conditional0, conditional1})); bayesNet.emplace_shared(Asia, "99/1"); // Create values at which to evaluate. @@ -168,9 +170,11 @@ TEST(HybridBayesNet, Error) { conditional1 = std::make_shared( X(1), Vector1::Constant(2), I_1x1, model1); + DiscreteKeys discreteParents{Asia}; auto gm = std::make_shared( - KeyVector{X(1)}, KeyVector{}, DiscreteKeys{Asia}, - std::vector{conditional0, conditional1}); + KeyVector{X(1)}, KeyVector{}, discreteParents, + HybridGaussianConditional::Conditionals( + discreteParents, std::vector{conditional0, conditional1})); // Create hybrid Bayes net. 
HybridBayesNet bayesNet; bayesNet.push_back(continuousConditional); @@ -383,15 +387,15 @@ TEST(HybridBayesNet, Sampling) { HybridNonlinearFactorGraph nfg; auto noise_model = noiseModel::Diagonal::Sigmas(Vector1(1.0)); + nfg.emplace_shared>(X(0), 0.0, noise_model); + auto zero_motion = std::make_shared>(X(0), X(1), 0, noise_model); auto one_motion = std::make_shared>(X(0), X(1), 1, noise_model); - DiscreteKeys discreteKeys{DiscreteKey(M(0), 2)}; HybridNonlinearFactor::Factors factors( discreteKeys, {{zero_motion, 0.0}, {one_motion, 0.0}}); - nfg.emplace_shared>(X(0), 0.0, noise_model); nfg.emplace_shared(KeyVector{X(0), X(1)}, discreteKeys, factors); diff --git a/gtsam/hybrid/tests/testHybridEstimation.cpp b/gtsam/hybrid/tests/testHybridEstimation.cpp index 7204d819d..ee48ad5d8 100644 --- a/gtsam/hybrid/tests/testHybridEstimation.cpp +++ b/gtsam/hybrid/tests/testHybridEstimation.cpp @@ -437,8 +437,8 @@ static HybridNonlinearFactorGraph createHybridNonlinearFactorGraph() { std::make_shared>(X(0), X(1), 1, noise_model); std::vector components = {{zero_motion, 0.0}, {one_motion, 0.0}}; - nfg.emplace_shared(KeyVector{X(0), X(1)}, - DiscreteKeys{m}, components); + nfg.emplace_shared(KeyVector{X(0), X(1)}, m, + components); return nfg; } @@ -583,9 +583,6 @@ TEST(HybridEstimation, ModeSelection) { graph.emplace_shared>(X(0), 0.0, measurement_model); graph.emplace_shared>(X(1), 0.0, measurement_model); - DiscreteKeys modes; - modes.emplace_back(M(0), 2); - // The size of the noise model size_t d = 1; double noise_tight = 0.5, noise_loose = 5.0; @@ -594,11 +591,11 @@ TEST(HybridEstimation, ModeSelection) { X(0), X(1), 0.0, noiseModel::Isotropic::Sigma(d, noise_loose)), model1 = std::make_shared( X(0), X(1), 0.0, noiseModel::Isotropic::Sigma(d, noise_tight)); - std::vector components = {{model0, 0.0}, {model1, 0.0}}; KeyVector keys = {X(0), X(1)}; + DiscreteKey modes(M(0), 2); HybridNonlinearFactor mf(keys, modes, components); initial.insert(X(0), 0.0); @@ -617,18 +614,22 @@ 
TEST(HybridEstimation, ModeSelection) { /**************************************************************/ HybridBayesNet bn; - const DiscreteKey mode{M(0), 2}; + const DiscreteKey mode(M(0), 2); bn.push_back( GaussianConditional::sharedMeanAndStddev(Z(0), -I_1x1, X(0), Z_1x1, 0.1)); bn.push_back( GaussianConditional::sharedMeanAndStddev(Z(0), -I_1x1, X(1), Z_1x1, 0.1)); + + std::vector conditionals{ + GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), -I_1x1, X(1), + Z_1x1, noise_loose), + GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), -I_1x1, X(1), + Z_1x1, noise_tight)}; bn.emplace_shared( KeyVector{Z(0)}, KeyVector{X(0), X(1)}, DiscreteKeys{mode}, - std::vector{GaussianConditional::sharedMeanAndStddev( - Z(0), I_1x1, X(0), -I_1x1, X(1), Z_1x1, noise_loose), - GaussianConditional::sharedMeanAndStddev( - Z(0), I_1x1, X(0), -I_1x1, X(1), Z_1x1, noise_tight)}); + HybridGaussianConditional::Conditionals(DiscreteKeys{mode}, + conditionals)); VectorValues vv; vv.insert(Z(0), Z_1x1); @@ -648,18 +649,22 @@ TEST(HybridEstimation, ModeSelection2) { double noise_tight = 0.5, noise_loose = 5.0; HybridBayesNet bn; - const DiscreteKey mode{M(0), 2}; + const DiscreteKey mode(M(0), 2); bn.push_back( GaussianConditional::sharedMeanAndStddev(Z(0), -I_3x3, X(0), Z_3x1, 0.1)); bn.push_back( GaussianConditional::sharedMeanAndStddev(Z(0), -I_3x3, X(1), Z_3x1, 0.1)); + + std::vector conditionals{ + GaussianConditional::sharedMeanAndStddev(Z(0), I_3x3, X(0), -I_3x3, X(1), + Z_3x1, noise_loose), + GaussianConditional::sharedMeanAndStddev(Z(0), I_3x3, X(0), -I_3x3, X(1), + Z_3x1, noise_tight)}; bn.emplace_shared( KeyVector{Z(0)}, KeyVector{X(0), X(1)}, DiscreteKeys{mode}, - std::vector{GaussianConditional::sharedMeanAndStddev( - Z(0), I_3x3, X(0), -I_3x3, X(1), Z_3x1, noise_loose), - GaussianConditional::sharedMeanAndStddev( - Z(0), I_3x3, X(0), -I_3x3, X(1), Z_3x1, noise_tight)}); + HybridGaussianConditional::Conditionals(DiscreteKeys{mode}, + conditionals)); 
VectorValues vv; vv.insert(Z(0), Z_3x1); @@ -679,18 +684,15 @@ TEST(HybridEstimation, ModeSelection2) { graph.emplace_shared>(X(0), Z_3x1, measurement_model); graph.emplace_shared>(X(1), Z_3x1, measurement_model); - DiscreteKeys modes; - modes.emplace_back(M(0), 2); - auto model0 = std::make_shared>( X(0), X(1), Z_3x1, noiseModel::Isotropic::Sigma(d, noise_loose)), model1 = std::make_shared>( X(0), X(1), Z_3x1, noiseModel::Isotropic::Sigma(d, noise_tight)); - std::vector components = {{model0, 0.0}, {model1, 0.0}}; KeyVector keys = {X(0), X(1)}; + DiscreteKey modes(M(0), 2); HybridNonlinearFactor mf(keys, modes, components); initial.insert(X(0), Z_3x1); diff --git a/gtsam/hybrid/tests/testHybridGaussianConditional.cpp b/gtsam/hybrid/tests/testHybridGaussianConditional.cpp index 6156fee96..d01158413 100644 --- a/gtsam/hybrid/tests/testHybridGaussianConditional.cpp +++ b/gtsam/hybrid/tests/testHybridGaussianConditional.cpp @@ -52,7 +52,9 @@ const std::vector conditionals{ commonSigma), GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), Vector1(0.0), commonSigma)}; -const HybridGaussianConditional mixture({Z(0)}, {X(0)}, {mode}, conditionals); +const HybridGaussianConditional mixture( + {Z(0)}, {X(0)}, {mode}, + HybridGaussianConditional::Conditionals({mode}, conditionals)); } // namespace equal_constants /* ************************************************************************* */ @@ -153,7 +155,9 @@ const std::vector conditionals{ 0.5), GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), Vector1(0.0), 3.0)}; -const HybridGaussianConditional mixture({Z(0)}, {X(0)}, {mode}, conditionals); +const HybridGaussianConditional mixture( + {Z(0)}, {X(0)}, {mode}, + HybridGaussianConditional::Conditionals({mode}, conditionals)); } // namespace mode_dependent_constants /* ************************************************************************* */ diff --git a/gtsam/hybrid/tests/testHybridGaussianFactor.cpp b/gtsam/hybrid/tests/testHybridGaussianFactor.cpp 
index 11226e19d..850a1e4d1 100644 --- a/gtsam/hybrid/tests/testHybridGaussianFactor.cpp +++ b/gtsam/hybrid/tests/testHybridGaussianFactor.cpp @@ -233,8 +233,11 @@ static HybridBayesNet GetGaussianMixtureModel(double mu0, double mu1, c1 = make_shared(z, Vector1(mu1), I_1x1, model1); HybridBayesNet hbn; + DiscreteKeys discreteParents{m}; hbn.emplace_shared( - KeyVector{z}, KeyVector{}, DiscreteKeys{m}, std::vector{c0, c1}); + KeyVector{z}, KeyVector{}, discreteParents, + HybridGaussianConditional::Conditionals(discreteParents, + std::vector{c0, c1})); auto mixing = make_shared(m, "50/50"); hbn.push_back(mixing); @@ -408,8 +411,11 @@ static HybridGaussianConditional::shared_ptr CreateHybridMotionModel( -I_1x1, model0), c1 = make_shared(X(1), Vector1(mu1), I_1x1, X(0), -I_1x1, model1); + DiscreteKeys discreteParents{m1}; return std::make_shared( - KeyVector{X(1)}, KeyVector{X(0)}, DiscreteKeys{m1}, std::vector{c0, c1}); + KeyVector{X(1)}, KeyVector{X(0)}, discreteParents, + HybridGaussianConditional::Conditionals(discreteParents, + std::vector{c0, c1})); } /// Create two state Bayes network with 1 or two measurement models diff --git a/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp b/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp index 122f905ff..dc88c8b7e 100644 --- a/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp +++ b/gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp @@ -181,7 +181,7 @@ TEST(HybridGaussianFactorGraph, eliminateFullMultifrontalSimple) { std::vector factors = { {std::make_shared(X(1), I_3x3, Z_3x1), 0.0}, {std::make_shared(X(1), I_3x3, Vector3::Ones()), 0.0}}; - hfg.add(HybridGaussianFactor({X(1)}, {{M(1), 2}}, factors)); + hfg.add(HybridGaussianFactor({X(1)}, {M(1), 2}, factors)); hfg.add(DecisionTreeFactor(m1, {2, 8})); // TODO(Varun) Adding extra discrete variable not connected to continuous @@ -241,7 +241,7 @@ TEST(HybridGaussianFactorGraph, eliminateFullMultifrontalTwoClique) { std::vector factors = { {std::make_shared(X(0), 
I_3x3, Z_3x1), 0.0}, {std::make_shared(X(0), I_3x3, Vector3::Ones()), 0.0}}; - hfg.add(HybridGaussianFactor({X(0)}, {{M(0), 2}}, factors)); + hfg.add(HybridGaussianFactor({X(0)}, {M(0), 2}, factors)); DecisionTree dt1( M(1), {std::make_shared(X(2), I_3x3, Z_3x1), 0.0}, @@ -682,8 +682,11 @@ TEST(HybridGaussianFactorGraph, ErrorTreeWithConditional) { x0, -I_1x1, model0), c1 = make_shared(f01, Vector1(mu), I_1x1, x1, I_1x1, x0, -I_1x1, model1); + DiscreteKeys discreteParents{m1}; hbn.emplace_shared( - KeyVector{f01}, KeyVector{x0, x1}, DiscreteKeys{m1}, std::vector{c0, c1}); + KeyVector{f01}, KeyVector{x0, x1}, discreteParents, + HybridGaussianConditional::Conditionals(discreteParents, + std::vector{c0, c1})); // Discrete uniform prior. hbn.emplace_shared(m1, "0.5/0.5"); @@ -806,9 +809,11 @@ TEST(HybridGaussianFactorGraph, EliminateTiny1) { X(0), Vector1(14.1421), I_1x1 * 2.82843), conditional1 = std::make_shared( X(0), Vector1(10.1379), I_1x1 * 2.02759); + DiscreteKeys discreteParents{mode}; expectedBayesNet.emplace_shared( - KeyVector{X(0)}, KeyVector{}, DiscreteKeys{mode}, - std::vector{conditional0, conditional1}); + KeyVector{X(0)}, KeyVector{}, discreteParents, + HybridGaussianConditional::Conditionals( + discreteParents, std::vector{conditional0, conditional1})); // Add prior on mode. expectedBayesNet.emplace_shared(mode, "74/26"); @@ -831,12 +836,13 @@ TEST(HybridGaussianFactorGraph, EliminateTiny1Swapped) { HybridBayesNet bn; // Create Gaussian mixture z_0 = x0 + noise for each measurement. 
+ std::vector conditionals{ + GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), Z_1x1, 3), + GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), Z_1x1, 0.5)}; auto gm = std::make_shared( KeyVector{Z(0)}, KeyVector{X(0)}, DiscreteKeys{mode}, - std::vector{ - GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), Z_1x1, 3), - GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), Z_1x1, - 0.5)}); + HybridGaussianConditional::Conditionals(DiscreteKeys{mode}, + conditionals)); bn.push_back(gm); // Create prior on X(0). @@ -865,7 +871,8 @@ TEST(HybridGaussianFactorGraph, EliminateTiny1Swapped) { X(0), Vector1(14.1421), I_1x1 * 2.82843); expectedBayesNet.emplace_shared( KeyVector{X(0)}, KeyVector{}, DiscreteKeys{mode}, - std::vector{conditional0, conditional1}); + HybridGaussianConditional::Conditionals( + DiscreteKeys{mode}, std::vector{conditional0, conditional1})); // Add prior on mode. expectedBayesNet.emplace_shared(mode, "1/1"); @@ -902,7 +909,8 @@ TEST(HybridGaussianFactorGraph, EliminateTiny2) { X(0), Vector1(10.274), I_1x1 * 2.0548); expectedBayesNet.emplace_shared( KeyVector{X(0)}, KeyVector{}, DiscreteKeys{mode}, - std::vector{conditional0, conditional1}); + HybridGaussianConditional::Conditionals( + DiscreteKeys{mode}, std::vector{conditional0, conditional1})); // Add prior on mode. 
expectedBayesNet.emplace_shared(mode, "23/77"); @@ -947,12 +955,14 @@ TEST(HybridGaussianFactorGraph, EliminateSwitchingNetwork) { for (size_t t : {0, 1, 2}) { // Create Gaussian mixture on Z(t) conditioned on X(t) and mode N(t): const auto noise_mode_t = DiscreteKey{N(t), 2}; + std::vector conditionals{ + GaussianConditional::sharedMeanAndStddev(Z(t), I_1x1, X(t), Z_1x1, 0.5), + GaussianConditional::sharedMeanAndStddev(Z(t), I_1x1, X(t), Z_1x1, + 3.0)}; bn.emplace_shared( KeyVector{Z(t)}, KeyVector{X(t)}, DiscreteKeys{noise_mode_t}, - std::vector{GaussianConditional::sharedMeanAndStddev(Z(t), I_1x1, X(t), - Z_1x1, 0.5), - GaussianConditional::sharedMeanAndStddev(Z(t), I_1x1, X(t), - Z_1x1, 3.0)}); + HybridGaussianConditional::Conditionals(DiscreteKeys{noise_mode_t}, + conditionals)); // Create prior on discrete mode N(t): bn.emplace_shared(noise_mode_t, "20/80"); @@ -962,12 +972,15 @@ TEST(HybridGaussianFactorGraph, EliminateSwitchingNetwork) { for (size_t t : {2, 1}) { // Create Gaussian mixture on X(t) conditioned on X(t-1) and mode M(t-1): const auto motion_model_t = DiscreteKey{M(t), 2}; + std::vector conditionals{ + GaussianConditional::sharedMeanAndStddev(X(t), I_1x1, X(t - 1), Z_1x1, + 0.2), + GaussianConditional::sharedMeanAndStddev(X(t), I_1x1, X(t - 1), I_1x1, + 0.2)}; auto gm = std::make_shared( KeyVector{X(t)}, KeyVector{X(t - 1)}, DiscreteKeys{motion_model_t}, - std::vector{GaussianConditional::sharedMeanAndStddev( - X(t), I_1x1, X(t - 1), Z_1x1, 0.2), - GaussianConditional::sharedMeanAndStddev( - X(t), I_1x1, X(t - 1), I_1x1, 0.2)}); + HybridGaussianConditional::Conditionals(DiscreteKeys{motion_model_t}, + conditionals)); bn.push_back(gm); // Create prior on motion model M(t): diff --git a/gtsam/hybrid/tests/testHybridGaussianISAM.cpp b/gtsam/hybrid/tests/testHybridGaussianISAM.cpp index cf983254a..e2a10a783 100644 --- a/gtsam/hybrid/tests/testHybridGaussianISAM.cpp +++ b/gtsam/hybrid/tests/testHybridGaussianISAM.cpp @@ -423,7 +423,7 @@ 
TEST(HybridGaussianISAM, NonTrivial) { std::vector> components = { {moving, 0.0}, {still, 0.0}}; auto mixtureFactor = std::make_shared( - contKeys, DiscreteKeys{gtsam::DiscreteKey(M(1), 2)}, components); + contKeys, gtsam::DiscreteKey(M(1), 2), components); fg.push_back(mixtureFactor); // Add equivalent of ImuFactor @@ -463,7 +463,7 @@ TEST(HybridGaussianISAM, NonTrivial) { std::make_shared(W(1), W(2), odometry, noise_model); components = {{moving, 0.0}, {still, 0.0}}; mixtureFactor = std::make_shared( - contKeys, DiscreteKeys{gtsam::DiscreteKey(M(2), 2)}, components); + contKeys, gtsam::DiscreteKey(M(2), 2), components); fg.push_back(mixtureFactor); // Add equivalent of ImuFactor @@ -506,7 +506,7 @@ TEST(HybridGaussianISAM, NonTrivial) { std::make_shared(W(2), W(3), odometry, noise_model); components = {{moving, 0.0}, {still, 0.0}}; mixtureFactor = std::make_shared( - contKeys, DiscreteKeys{gtsam::DiscreteKey(M(3), 2)}, components); + contKeys, gtsam::DiscreteKey(M(3), 2), components); fg.push_back(mixtureFactor); // Add equivalent of ImuFactor diff --git a/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp b/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp index 00d0d3a39..3631ac44e 100644 --- a/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp +++ b/gtsam/hybrid/tests/testHybridNonlinearFactorGraph.cpp @@ -135,7 +135,7 @@ TEST(HybridGaussianFactorGraph, Resize) { std::vector> components = { {still, 0.0}, {moving, 0.0}}; auto dcFactor = std::make_shared( - contKeys, DiscreteKeys{gtsam::DiscreteKey(M(1), 2)}, components); + contKeys, gtsam::DiscreteKey(M(1), 2), components); nhfg.push_back(dcFactor); Values linearizationPoint; @@ -170,12 +170,12 @@ TEST(HybridGaussianFactorGraph, HybridNonlinearFactor) { // Check for exception when number of continuous keys are under-specified. 
KeyVector contKeys = {X(0)}; THROWS_EXCEPTION(std::make_shared( - contKeys, DiscreteKeys{gtsam::DiscreteKey(M(1), 2)}, components)); + contKeys, gtsam::DiscreteKey(M(1), 2), components)); // Check for exception when number of continuous keys are too many. contKeys = {X(0), X(1), X(2)}; THROWS_EXCEPTION(std::make_shared( - contKeys, DiscreteKeys{gtsam::DiscreteKey(M(1), 2)}, components)); + contKeys, gtsam::DiscreteKey(M(1), 2), components)); } /***************************************************************************** @@ -807,7 +807,7 @@ TEST(HybridFactorGraph, DefaultDecisionTree) { std::vector> motion_models = {{still, 0.0}, {moving, 0.0}}; fg.emplace_shared( - contKeys, DiscreteKeys{gtsam::DiscreteKey(M(1), 2)}, motion_models); + contKeys, gtsam::DiscreteKey(M(1), 2), motion_models); // Add Range-Bearing measurements to from X0 to L0 and X1 to L1. // create a noise model for the landmark measurements diff --git a/gtsam/hybrid/tests/testHybridNonlinearISAM.cpp b/gtsam/hybrid/tests/testHybridNonlinearISAM.cpp index 7519162eb..6932508df 100644 --- a/gtsam/hybrid/tests/testHybridNonlinearISAM.cpp +++ b/gtsam/hybrid/tests/testHybridNonlinearISAM.cpp @@ -442,7 +442,7 @@ TEST(HybridNonlinearISAM, NonTrivial) { std::vector> components = { {moving, 0.0}, {still, 0.0}}; auto mixtureFactor = std::make_shared( - contKeys, DiscreteKeys{gtsam::DiscreteKey(M(1), 2)}, components); + contKeys, gtsam::DiscreteKey(M(1), 2), components); fg.push_back(mixtureFactor); // Add equivalent of ImuFactor @@ -482,7 +482,7 @@ TEST(HybridNonlinearISAM, NonTrivial) { std::make_shared(W(1), W(2), odometry, noise_model); components = {{moving, 0.0}, {still, 0.0}}; mixtureFactor = std::make_shared( - contKeys, DiscreteKeys{gtsam::DiscreteKey(M(2), 2)}, components); + contKeys, gtsam::DiscreteKey(M(2), 2), components); fg.push_back(mixtureFactor); // Add equivalent of ImuFactor @@ -525,7 +525,7 @@ TEST(HybridNonlinearISAM, NonTrivial) { std::make_shared(W(2), W(3), odometry, noise_model); 
components = {{moving, 0.0}, {still, 0.0}}; mixtureFactor = std::make_shared( - contKeys, DiscreteKeys{gtsam::DiscreteKey(M(3), 2)}, components); + contKeys, gtsam::DiscreteKey(M(3), 2), components); fg.push_back(mixtureFactor); // Add equivalent of ImuFactor diff --git a/gtsam/hybrid/tests/testSerializationHybrid.cpp b/gtsam/hybrid/tests/testSerializationHybrid.cpp index 74cfa72e6..701b870f1 100644 --- a/gtsam/hybrid/tests/testSerializationHybrid.cpp +++ b/gtsam/hybrid/tests/testSerializationHybrid.cpp @@ -76,7 +76,7 @@ BOOST_CLASS_EXPORT_GUID(HybridBayesNet, "gtsam_HybridBayesNet"); // Test HybridGaussianFactor serialization. TEST(HybridSerialization, HybridGaussianFactor) { KeyVector continuousKeys{X(0)}; - DiscreteKeys discreteKeys{{M(0), 2}}; + DiscreteKey discreteKey{M(0), 2}; auto A = Matrix::Zero(2, 1); auto b0 = Matrix::Zero(2, 1); @@ -85,7 +85,7 @@ TEST(HybridSerialization, HybridGaussianFactor) { auto f1 = std::make_shared(X(0), A, b1); std::vector factors{{f0, 0.0}, {f1, 0.0}}; - const HybridGaussianFactor factor(continuousKeys, discreteKeys, factors); + const HybridGaussianFactor factor(continuousKeys, discreteKey, factors); EXPECT(equalsObj(factor)); EXPECT(equalsXML(factor)); @@ -116,7 +116,8 @@ TEST(HybridSerialization, HybridGaussianConditional) { const auto conditional1 = std::make_shared( GaussianConditional::FromMeanAndStddev(Z(0), I, X(0), Vector1(0), 3)); const HybridGaussianConditional gm({Z(0)}, {X(0)}, {mode}, - {conditional0, conditional1}); + HybridGaussianConditional::Conditionals( + {mode}, {conditional0, conditional1})); EXPECT(equalsObj(gm)); EXPECT(equalsXML(gm)); diff --git a/gtsam/linear/NoiseModel.cpp b/gtsam/linear/NoiseModel.cpp index 29222a106..a586d119b 100644 --- a/gtsam/linear/NoiseModel.cpp +++ b/gtsam/linear/NoiseModel.cpp @@ -714,6 +714,9 @@ double ComputeLogNormalizer( const noiseModel::Gaussian::shared_ptr& noise_model) { // Since noise models are Gaussian, we can get the logDeterminant using // the same trick as in 
GaussianConditional + // Sigma = (R'R)^{-1}, det(Sigma) = det((R'R)^{-1}) = det(R'R)^{-1} + // log det(Sigma) = -log(det(R'R)) = -2*log(det(R)) + // Hence, log det(Sigma) = -2.0 * logDetR() double logDetR = noise_model->R() .diagonal() .unaryExpr([](double x) { return log(x); }) diff --git a/gtsam/linear/NoiseModel.h b/gtsam/linear/NoiseModel.h index 831cfd7dd..ffc1a3ebc 100644 --- a/gtsam/linear/NoiseModel.h +++ b/gtsam/linear/NoiseModel.h @@ -752,7 +752,7 @@ namespace gtsam { template<> struct traits : public Testable {}; /** - * @brief Helper function to compute the sqrt(|2πΣ|) normalizer values + * @brief Helper function to compute the log(|2πΣ|) normalizer values * for a Gaussian noise model. * We compute this in the log-space for numerical accuracy. * diff --git a/gtsam/linear/tests/testNoiseModel.cpp b/gtsam/linear/tests/testNoiseModel.cpp index 725680501..49874f901 100644 --- a/gtsam/linear/tests/testNoiseModel.cpp +++ b/gtsam/linear/tests/testNoiseModel.cpp @@ -807,6 +807,26 @@ TEST(NoiseModel, NonDiagonalGaussian) } } +TEST(NoiseModel, ComputeLogNormalizer) { + // Very simple 1D noise model, which we can compute by hand. + double sigma = 0.1; + auto noise_model = Isotropic::Sigma(1, sigma); + double actual_value = ComputeLogNormalizer(noise_model); + // Compute log(|2πΣ|) by hand. 
+ // = log(2π) + log(Σ) (since it is 1D) + constexpr double log2pi = 1.8378770664093454835606594728112; + double expected_value = log2pi + log(sigma * sigma); + EXPECT_DOUBLES_EQUAL(expected_value, actual_value, 1e-9); + + // Similar situation in the 3D case + size_t n = 3; + auto noise_model2 = Isotropic::Sigma(n, sigma); + double actual_value2 = ComputeLogNormalizer(noise_model2); + // We multiply by 3 due to the determinant + double expected_value2 = n * (log2pi + log(sigma * sigma)); + EXPECT_DOUBLES_EQUAL(expected_value2, actual_value2, 1e-9); +} + /* ************************************************************************* */ int main() { TestResult tr; return TestRegistry::runAllTests(tr); } /* ************************************************************************* */