Merge branch 'working-hybrid' into model-selection-bayestree
commit f06777fe7a
				|  | @ -24,6 +24,7 @@ | |||
| #include <gtsam/hybrid/GaussianMixtureFactor.h> | ||||
| #include <gtsam/hybrid/HybridValues.h> | ||||
| #include <gtsam/inference/Conditional-inst.h> | ||||
| #include <gtsam/linear/GaussianBayesNet.h> | ||||
| #include <gtsam/linear/GaussianFactorGraph.h> | ||||
| 
 | ||||
| namespace gtsam { | ||||
|  | @ -318,8 +319,15 @@ AlgebraicDecisionTree<Key> GaussianMixture::logProbability( | |||
| AlgebraicDecisionTree<Key> GaussianMixture::errorTree( | ||||
|     const VectorValues &continuousValues) const { | ||||
|   auto errorFunc = [&](const GaussianConditional::shared_ptr &conditional) { | ||||
|     return conditional->error(continuousValues) +  // | ||||
|            logConstant_ - conditional->logNormalizationConstant(); | ||||
|     // Check if we have a valid pointer | ||||
|     if (conditional) { | ||||
|       return conditional->error(continuousValues) +  // | ||||
|              logConstant_ - conditional->logNormalizationConstant(); | ||||
|     } else { | ||||
|       // If the pointer is not valid, it means this conditional was pruned, | ||||
|       // so we return the maximum error. | ||||
|       return std::numeric_limits<double>::max(); | ||||
|     } | ||||
|   }; | ||||
|   DecisionTree<Key, double> error_tree(conditionals_, errorFunc); | ||||
|   return error_tree; | ||||
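A minimal sketch of how the pruned-leaf convention surfaces to callers (hypothetical keys; `gm` is a two-component GaussianMixture over mode M(0) whose m=1 leaf was pruned; assumes the usual gtsam headers and symbol shorthands):

    VectorValues x;
    x.insert(X(0), Vector1(0.0));
    AlgebraicDecisionTree<Key> errors = gm.errorTree(x);
    double e0 = errors(DiscreteValues{{M(0), 0}});  // finite error from the live leaf
    double e1 = errors(DiscreteValues{{M(0), 1}});  // std::numeric_limits<double>::max()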
|  | @ -327,10 +335,33 @@ AlgebraicDecisionTree<Key> GaussianMixture::errorTree( | |||
| 
 | ||||
| /* *******************************************************************************/ | ||||
| double GaussianMixture::error(const HybridValues &values) const { | ||||
|   // Check if discrete keys in the discrete assignment | ||||
|   // are present in the GaussianMixture | ||||
|   KeyVector dKeys = this->discreteKeys_.indices(); | ||||
|   bool valid_assignment = false; | ||||
|   for (auto &&kv : values.discrete()) { | ||||
|     if (std::find(dKeys.begin(), dKeys.end(), kv.first) != dKeys.end()) { | ||||
|       valid_assignment = true; | ||||
|       break; | ||||
|     } | ||||
|   } | ||||
| 
 | ||||
|   // The discrete assignment is not valid so we throw an error. | ||||
|   if (!valid_assignment) { | ||||
|     throw std::runtime_error( | ||||
|         "Invalid discrete values in values. Not all discrete keys specified."); | ||||
|   } | ||||
| 
 | ||||
|   // Directly index to get the conditional, no need to build the whole tree. | ||||
|   auto conditional = conditionals_(values.discrete()); | ||||
|   return conditional->error(values.continuous()) +  // | ||||
|          logConstant_ - conditional->logNormalizationConstant(); | ||||
|   if (conditional) { | ||||
|     return conditional->error(values.continuous()) +  // | ||||
|            logConstant_ - conditional->logNormalizationConstant(); | ||||
|   } else { | ||||
|     // If the pointer is not valid, it means this conditional was pruned, | ||||
|     // so we return the maximum error. | ||||
|     return std::numeric_limits<double>::max(); | ||||
|   } | ||||
| } | ||||
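A sketch of the resulting error() contract (hypothetical keys; `gm` as above):

    VectorValues x;
    x.insert(X(0), Vector1(0.0));
    // A valid assignment indexes straight to one leaf conditional:
    double e = gm.error(HybridValues(x, DiscreteValues{{M(0), 0}}));
    // An assignment mentioning none of the mixture's discrete keys throws
    // std::runtime_error:
    // gm.error(HybridValues(x, DiscreteValues{}));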
| 
 | ||||
| /* *******************************************************************************/ | ||||
|  |  | |||
|  | @ -67,7 +67,7 @@ class GTSAM_EXPORT GaussianMixture | |||
|   double logConstant_;         ///< log of the normalization constant. | ||||
|  | ||||
|   /** | ||||
|    * @brief Convert a DecisionTree of factors into | ||||
|    * @brief Convert a GaussianMixture of conditionals into | ||||
|    * a DecisionTree of Gaussian factor graphs. | ||||
|    */ | ||||
|   GaussianFactorGraphTree asGaussianFactorGraphTree() const; | ||||
|  |  | |||
|  | @ -54,7 +54,9 @@ bool GaussianMixtureFactor::equals(const HybridFactor &lf, double tol) const { | |||
| /* *******************************************************************************/ | ||||
| void GaussianMixtureFactor::print(const std::string &s, | ||||
|                                   const KeyFormatter &formatter) const { | ||||
|   HybridFactor::print(s, formatter); | ||||
|   std::cout << (s.empty() ? "" : s + "\n"); | ||||
|   std::cout << "GaussianMixtureFactor" << std::endl; | ||||
|   HybridFactor::print("", formatter); | ||||
|   std::cout << "{\n"; | ||||
|   if (factors_.empty()) { | ||||
|     std::cout << "  empty" << std::endl; | ||||
|  | @ -117,6 +119,5 @@ double GaussianMixtureFactor::error(const HybridValues &values) const { | |||
|   const sharedFactor gf = factors_(values.discrete()); | ||||
|   return gf->error(values.continuous()); | ||||
| } | ||||
| /* *******************************************************************************/ | ||||
| 
 | ||||
| }  // namespace gtsam | ||||
|  |  | |||
|  | @ -80,8 +80,8 @@ class GTSAM_EXPORT GaussianMixtureFactor : public HybridFactor { | |||
|    * @param continuousKeys A vector of keys representing continuous variables. | ||||
|    * @param discreteKeys A vector of keys representing discrete variables and | ||||
|    * their cardinalities. | ||||
|    * @param factors The decision tree of Gaussian factors stored as the mixture | ||||
|    * density. | ||||
|    * @param factors The decision tree of Gaussian factors stored | ||||
|    * as the mixture density. | ||||
|    */ | ||||
|   GaussianMixtureFactor(const KeyVector &continuousKeys, | ||||
|                         const DiscreteKeys &discreteKeys, | ||||
|  | @ -107,9 +107,8 @@ class GTSAM_EXPORT GaussianMixtureFactor : public HybridFactor { | |||
| 
 | ||||
|   bool equals(const HybridFactor &lf, double tol = 1e-9) const override; | ||||
| 
 | ||||
|   void print( | ||||
|       const std::string &s = "GaussianMixtureFactor\n", | ||||
|       const KeyFormatter &formatter = DefaultKeyFormatter) const override; | ||||
|   void print(const std::string &s = "", const KeyFormatter &formatter = | ||||
|                                             DefaultKeyFormatter) const override; | ||||
| 
 | ||||
|   /// @} | ||||
|   /// @name Standard API | ||||
|  |  | |||
|  | @ -220,15 +220,16 @@ GaussianBayesNet HybridBayesNet::choose( | |||
| /* ************************************************************************* */ | ||||
| HybridValues HybridBayesNet::optimize() const { | ||||
|   // Collect all the discrete factors to compute MPE | ||||
|   DiscreteBayesNet discrete_bn; | ||||
|   DiscreteFactorGraph discrete_fg; | ||||
| 
 | ||||
|   for (auto &&conditional : *this) { | ||||
|     if (conditional->isDiscrete()) { | ||||
|       discrete_bn.push_back(conditional->asDiscrete()); | ||||
|       discrete_fg.push_back(conditional->asDiscrete()); | ||||
|     } | ||||
|   } | ||||
| 
 | ||||
|   // Solve for the MPE | ||||
|   DiscreteValues mpe = DiscreteFactorGraph(discrete_bn).optimize(); | ||||
|   DiscreteValues mpe = discrete_fg.optimize(); | ||||
| 
 | ||||
|   // Given the MPE, compute the optimal continuous values. | ||||
|   return HybridValues(optimize(mpe), mpe); | ||||
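The two-stage scheme in a minimal usage sketch, mirroring the Bayes nets built in the tests further down (assumed headers and symbol shorthands):

    DiscreteKey m(M(0), 2);
    auto model = noiseModel::Isotropic::Sigma(1, 2.0);
    HybridBayesNet hbn;
    // P(z|m): one mean per mode.
    hbn.emplace_back(new GaussianMixture(
        {Z(0)}, {}, {m},
        {std::make_shared<GaussianConditional>(Z(0), Vector1(1.0), I_1x1, model),
         std::make_shared<GaussianConditional>(Z(0), Vector1(3.0), I_1x1, model)}));
    hbn.emplace_back(new DiscreteConditional(m, "0.5/0.5"));
    // Solves the discrete MPE first, then the continuous system given it.
    HybridValues best = hbn.optimize();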
|  |  | |||
|  | @ -13,6 +13,7 @@ | |||
|  *  @file HybridFactor.h | ||||
|  *  @date Mar 11, 2022 | ||||
|  *  @author Fan Jiang | ||||
|  *  @author Varun Agrawal | ||||
|  */ | ||||
| 
 | ||||
| #pragma once | ||||
|  |  | |||
|  | @ -242,6 +242,18 @@ discreteElimination(const HybridGaussianFactorGraph &factors, | |||
|   for (auto &f : factors) { | ||||
|     if (auto df = dynamic_pointer_cast<DiscreteFactor>(f)) { | ||||
|       dfg.push_back(df); | ||||
|     } else if (auto gmf = dynamic_pointer_cast<GaussianMixtureFactor>(f)) { | ||||
|       // Case where we have a GaussianMixtureFactor with no continuous keys. | ||||
|       // In this case, compute discrete probabilities. | ||||
|       auto probability = | ||||
|           [&](const GaussianFactor::shared_ptr &factor) -> double { | ||||
|         if (!factor) return 0.0; | ||||
|         return exp(-factor->error(VectorValues())); | ||||
|       }; | ||||
|       dfg.emplace_shared<DecisionTreeFactor>( | ||||
|           gmf->discreteKeys(), | ||||
|           DecisionTree<Key, double>(gmf->factors(), probability)); | ||||
| 
 | ||||
|     } else if (auto orphan = dynamic_pointer_cast<OrphanWrapper>(f)) { | ||||
|       // Ignore orphaned clique. | ||||
|       // TODO(dellaert): is this correct? If so explain here. | ||||
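Since such a leaf factor has no continuous keys, its error is a constant and the probability lambda above collapses each leaf to a number. A standalone sketch of that mapping (plain C++, hypothetical values):

    #include <cmath>
    #include <optional>

    // A leaf with constant error E contributes exp(-E) as its unnormalized
    // discrete probability; a pruned (null) leaf contributes 0.
    double leafProbability(std::optional<double> error) {
      return error ? std::exp(-*error) : 0.0;
    }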
|  | @ -279,21 +291,37 @@ GaussianFactorGraphTree removeEmpty(const GaussianFactorGraphTree &sum) { | |||
| using Result = std::pair<std::shared_ptr<GaussianConditional>, | ||||
|                          GaussianMixtureFactor::sharedFactor>; | ||||
| 
 | ||||
| // Integrate the probability mass in the last continuous conditional using | ||||
| // the unnormalized probability q(μ;m) = exp(-error(μ;m)) at the mean. | ||||
| //   discrete_probability = exp(-error(μ;m)) * sqrt(det(2π Σ_m)) | ||||
| /** | ||||
|  * Compute the probability q(μ;m) = exp(-error(μ;m)) * sqrt(det(2π Σ_m)) | ||||
|  * from the residual error at the mean μ. | ||||
|  * The residual factor contains no keys, and only | ||||
|  * depends on the discrete separator if present. | ||||
|  */ | ||||
| static std::shared_ptr<Factor> createDiscreteFactor( | ||||
|     const DecisionTree<Key, Result> &eliminationResults, | ||||
|     const DiscreteKeys &discreteSeparator) { | ||||
|   auto probability = [&](const Result &pair) -> double { | ||||
|   auto logProbability = [&](const Result &pair) -> double { | ||||
|     const auto &[conditional, factor] = pair; | ||||
|     static const VectorValues kEmpty; | ||||
|     // If the factor is not null, it has no keys, just contains the residual. | ||||
|     if (!factor) return 0.0;  // log(1.0); TODO(dellaert): not loving this. | ||||
|     return exp(-factor->error(kEmpty)) / conditional->normalizationConstant(); | ||||
|  | ||||
|     // Logspace version of: | ||||
|     // exp(-factor->error(kEmpty)) / conditional->normalizationConstant(); | ||||
|     // We take the negative of the logNormalizationConstant `log(1/k)` | ||||
|     // to get `log(k)`. | ||||
|     return -factor->error(kEmpty) + (-conditional->logNormalizationConstant()); | ||||
|   }; | ||||
| 
 | ||||
|   DecisionTree<Key, double> probabilities(eliminationResults, probability); | ||||
|   AlgebraicDecisionTree<Key> logProbabilities( | ||||
|       DecisionTree<Key, double>(eliminationResults, logProbability)); | ||||
| 
 | ||||
|   // Perform normalization | ||||
|   double max_log = logProbabilities.max(); | ||||
|   AlgebraicDecisionTree probabilities = DecisionTree<Key, double>( | ||||
|       logProbabilities, | ||||
|       [&max_log](const double x) { return exp(x - max_log); }); | ||||
|   probabilities = probabilities.normalize(probabilities.sum()); | ||||
| 
 | ||||
|   return std::make_shared<DecisionTreeFactor>(discreteSeparator, probabilities); | ||||
| } | ||||
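The max-shift here is the standard log-sum-exp trick: subtracting the largest log-probability before exponentiating keeps every exponential in (0, 1] and avoids overflow and underflow. A minimal standalone sketch of the same normalization (plain C++, no GTSAM types):

    #include <algorithm>
    #include <cmath>
    #include <vector>

    // Turn log-probabilities into probabilities that sum to 1.
    std::vector<double> normalizeLog(std::vector<double> logp) {
      const double maxLog = *std::max_element(logp.begin(), logp.end());
      double sum = 0.0;
      for (double& v : logp) {
        v = std::exp(v - maxLog);  // in (0, 1], exactly 1 at the maximum
        sum += v;
      }
      for (double& v : logp) v /= sum;
      return logp;
    }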
|  |  | |||
|  | @ -22,9 +22,13 @@ | |||
| #include <gtsam/discrete/DiscreteValues.h> | ||||
| #include <gtsam/hybrid/GaussianMixture.h> | ||||
| #include <gtsam/hybrid/GaussianMixtureFactor.h> | ||||
| #include <gtsam/hybrid/HybridBayesNet.h> | ||||
| #include <gtsam/hybrid/HybridGaussianFactorGraph.h> | ||||
| #include <gtsam/hybrid/HybridValues.h> | ||||
| #include <gtsam/inference/Symbol.h> | ||||
| #include <gtsam/linear/GaussianFactorGraph.h> | ||||
| #include <gtsam/nonlinear/PriorFactor.h> | ||||
| #include <gtsam/slam/BetweenFactor.h> | ||||
| 
 | ||||
| // Include for test suite | ||||
| #include <CppUnitLite/TestHarness.h> | ||||
|  | @ -34,6 +38,7 @@ using namespace gtsam; | |||
| using noiseModel::Isotropic; | ||||
| using symbol_shorthand::M; | ||||
| using symbol_shorthand::X; | ||||
| using symbol_shorthand::Z; | ||||
| 
 | ||||
| /* ************************************************************************* */ | ||||
| // Check iterators of empty mixture. | ||||
|  | @ -56,7 +61,6 @@ TEST(GaussianMixtureFactor, Sum) { | |||
|   auto b = Matrix::Zero(2, 1); | ||||
|   Vector2 sigmas; | ||||
|   sigmas << 1, 2; | ||||
|   auto model = noiseModel::Diagonal::Sigmas(sigmas, true); | ||||
| 
 | ||||
|   auto f10 = std::make_shared<JacobianFactor>(X(1), A1, X(2), A2, b); | ||||
|   auto f11 = std::make_shared<JacobianFactor>(X(1), A1, X(2), A2, b); | ||||
|  | @ -106,7 +110,8 @@ TEST(GaussianMixtureFactor, Printing) { | |||
|   GaussianMixtureFactor mixtureFactor({X(1), X(2)}, {m1}, factors); | ||||
| 
 | ||||
|   std::string expected = | ||||
|       R"(Hybrid [x1 x2; 1]{ | ||||
|       R"(GaussianMixtureFactor | ||||
| Hybrid [x1 x2; 1]{ | ||||
|  Choice(1)  | ||||
|  0 Leaf : | ||||
|   A[x1] = [ | ||||
|  | @ -178,7 +183,8 @@ TEST(GaussianMixtureFactor, Error) { | |||
|   continuousValues.insert(X(2), Vector2(1, 1)); | ||||
| 
 | ||||
|   // errorTree should return a tree of errors, with a leaf for each discrete assignment. | ||||
|   AlgebraicDecisionTree<Key> error_tree = mixtureFactor.errorTree(continuousValues); | ||||
|   AlgebraicDecisionTree<Key> error_tree = | ||||
|       mixtureFactor.errorTree(continuousValues); | ||||
| 
 | ||||
|   std::vector<DiscreteKey> discrete_keys = {m1}; | ||||
|   // Error values for regression test | ||||
|  | @ -191,8 +197,422 @@ TEST(GaussianMixtureFactor, Error) { | |||
|   DiscreteValues discreteValues; | ||||
|   discreteValues[m1.first] = 1; | ||||
|   EXPECT_DOUBLES_EQUAL( | ||||
|       4.0, mixtureFactor.error({continuousValues, discreteValues}), | ||||
|       1e-9); | ||||
|       4.0, mixtureFactor.error({continuousValues, discreteValues}), 1e-9); | ||||
| } | ||||
| 
 | ||||
| /* ************************************************************************* */ | ||||
| /** | ||||
|  * Test a simple Gaussian Mixture Model represented as P(m)P(z|m) | ||||
|  * where m is a discrete variable and z is a continuous variable. | ||||
|  * m is binary and depending on m, we have 2 different means | ||||
|  * μ1 and μ2 for the Gaussian distribution around which we sample z. | ||||
|  * | ||||
|  * The resulting factor graph should eliminate to a Bayes net | ||||
|  * which represents a sigmoid function. | ||||
|  */ | ||||
| TEST(GaussianMixtureFactor, GaussianMixtureModel) { | ||||
|   double mu0 = 1.0, mu1 = 3.0; | ||||
|   double sigma = 2.0; | ||||
|   auto model = noiseModel::Isotropic::Sigma(1, sigma); | ||||
| 
 | ||||
|   DiscreteKey m(M(0), 2); | ||||
|   Key z = Z(0); | ||||
| 
 | ||||
|   auto c0 = make_shared<GaussianConditional>(z, Vector1(mu0), I_1x1, model), | ||||
|        c1 = make_shared<GaussianConditional>(z, Vector1(mu1), I_1x1, model); | ||||
| 
 | ||||
|   auto gm = new GaussianMixture({z}, {}, {m}, {c0, c1}); | ||||
|   auto mixing = new DiscreteConditional(m, "0.5/0.5"); | ||||
| 
 | ||||
|   HybridBayesNet hbn; | ||||
|   hbn.emplace_back(gm); | ||||
|   hbn.emplace_back(mixing); | ||||
| 
 | ||||
|   // The result should be a sigmoid. | ||||
|   // So P(m) should be 0.5/0.5 at z = 3.0 - 1.0 = 2.0. | ||||
|   VectorValues given; | ||||
|   given.insert(z, Vector1(mu1 - mu0)); | ||||
| 
 | ||||
|   HybridGaussianFactorGraph gfg = hbn.toFactorGraph(given); | ||||
|   HybridBayesNet::shared_ptr bn = gfg.eliminateSequential(); | ||||
| 
 | ||||
|   HybridBayesNet expected; | ||||
|   expected.emplace_back(new DiscreteConditional(m, "0.5/0.5")); | ||||
| 
 | ||||
|   EXPECT(assert_equal(expected, *bn)); | ||||
| } | ||||
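For reference, the posterior this test encodes (a quick derivation, not from the source): with equal priors and a shared sigma, Bayes' rule gives a logistic curve in z,

    P(m{=}1 \mid z)
      = \frac{e^{-(z-\mu_1)^2/2\sigma^2}}
             {e^{-(z-\mu_0)^2/2\sigma^2} + e^{-(z-\mu_1)^2/2\sigma^2}}
      = \frac{1}{1 + \exp\big(-\tfrac{\mu_1-\mu_0}{\sigma^2}\,(z - \tfrac{\mu_0+\mu_1}{2})\big)},

and at z = μ1 - μ0 = 2 = (μ0 + μ1)/2 the exponent vanishes, so P(m=1|z) = 0.5 as expected.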
| 
 | ||||
| /* ************************************************************************* */ | ||||
| /** | ||||
|  * Test a simple Gaussian Mixture Model represented as P(m)P(z|m) | ||||
|  * where m is a discrete variable and z is a continuous variable. | ||||
|  * m is binary and depending on m, we have 2 different means | ||||
|  * and covariances each for the | ||||
|  * Gaussian distribution around which we sample z. | ||||
|  * | ||||
|  * The resulting factor graph should eliminate to a Bayes net | ||||
|  * which represents a sigmoid function leaning towards | ||||
|  * the tighter covariance Gaussian. | ||||
|  */ | ||||
| TEST(GaussianMixtureFactor, GaussianMixtureModel2) { | ||||
|   double mu0 = 1.0, mu1 = 3.0; | ||||
|   auto model0 = noiseModel::Isotropic::Sigma(1, 8.0); | ||||
|   auto model1 = noiseModel::Isotropic::Sigma(1, 4.0); | ||||
| 
 | ||||
|   DiscreteKey m(M(0), 2); | ||||
|   Key z = Z(0); | ||||
| 
 | ||||
|   auto c0 = make_shared<GaussianConditional>(z, Vector1(mu0), I_1x1, model0), | ||||
|        c1 = make_shared<GaussianConditional>(z, Vector1(mu1), I_1x1, model1); | ||||
| 
 | ||||
|   auto gm = new GaussianMixture({z}, {}, {m}, {c0, c1}); | ||||
|   auto mixing = new DiscreteConditional(m, "0.5/0.5"); | ||||
| 
 | ||||
|   HybridBayesNet hbn; | ||||
|   hbn.emplace_back(gm); | ||||
|   hbn.emplace_back(mixing); | ||||
| 
 | ||||
|   // The result should be a sigmoid leaning towards model1 | ||||
|   // since it has the tighter covariance. | ||||
|   // So P(m) should be about 0.34/0.66 at z = 3.0 - 1.0 = 2.0. | ||||
|   VectorValues given; | ||||
|   given.insert(z, Vector1(mu1 - mu0)); | ||||
|   HybridGaussianFactorGraph gfg = hbn.toFactorGraph(given); | ||||
|   HybridBayesNet::shared_ptr bn = gfg.eliminateSequential(); | ||||
| 
 | ||||
|   HybridBayesNet expected; | ||||
|   expected.emplace_back( | ||||
|       new DiscreteConditional(m, "0.338561851224/0.661438148776")); | ||||
| 
 | ||||
|   EXPECT(assert_equal(expected, *bn)); | ||||
| } | ||||
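The expected posterior follows directly (a quick check, not from the source): each mode is weighted by its full Gaussian density, including the 1/σ normalization, so

    \frac{P(m{=}1 \mid z{=}2)}{P(m{=}0 \mid z{=}2)}
      = \frac{\sigma_0}{\sigma_1}
        \exp\Big(\frac{(z-\mu_0)^2}{2\sigma_0^2} - \frac{(z-\mu_1)^2}{2\sigma_1^2}\Big)
      = 2\, e^{1/128 - 1/32} = 2\, e^{-3/128} \approx 1.9537,

giving P(m=1|z=2) ≈ 1.9537/2.9537 ≈ 0.6614, matching the regression value above.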
| 
 | ||||
| /* ************************************************************************* */ | ||||
| /** | ||||
|  * Test a model P(x0)P(z0|x0)p(x1|m1)p(z1|x1)p(m1). | ||||
|  * | ||||
|  * p(x1|m1) has different means and same covariance. | ||||
|  * | ||||
|  * Converting to a factor graph gives us | ||||
|  * P(x0)ϕ(x0)P(x1|m1)ϕ(x1)P(m1) | ||||
|  * | ||||
|  * If we only have a measurement on z0, then | ||||
|  * the posterior on m1 should be 0.5/0.5. | ||||
|  * Getting a measurement on z1 gives us more information. | ||||
|  */ | ||||
| TEST(GaussianMixtureFactor, TwoStateModel) { | ||||
|   double mu0 = 1.0, mu1 = 3.0; | ||||
|   auto model = noiseModel::Isotropic::Sigma(1, 2.0); | ||||
| 
 | ||||
|   DiscreteKey m1(M(1), 2); | ||||
|   Key z0 = Z(0), z1 = Z(1), x0 = X(0), x1 = X(1); | ||||
| 
 | ||||
|   auto c0 = make_shared<GaussianConditional>(x1, Vector1(mu0), I_1x1, model), | ||||
|        c1 = make_shared<GaussianConditional>(x1, Vector1(mu1), I_1x1, model); | ||||
| 
 | ||||
|   auto p_x0 = new GaussianConditional(x0, Vector1(0.0), I_1x1, | ||||
|                                       noiseModel::Isotropic::Sigma(1, 1.0)); | ||||
|   auto p_z0x0 = new GaussianConditional(z0, Vector1(0.0), I_1x1, x0, -I_1x1, | ||||
|                                         noiseModel::Isotropic::Sigma(1, 1.0)); | ||||
|   auto p_x1m1 = new GaussianMixture({x1}, {}, {m1}, {c0, c1}); | ||||
|   auto p_z1x1 = new GaussianConditional(z1, Vector1(0.0), I_1x1, x1, -I_1x1, | ||||
|                                         noiseModel::Isotropic::Sigma(1, 3.0)); | ||||
|   auto p_m1 = new DiscreteConditional(m1, "0.5/0.5"); | ||||
| 
 | ||||
|   HybridBayesNet hbn; | ||||
|   hbn.emplace_back(p_x0); | ||||
|   hbn.emplace_back(p_z0x0); | ||||
|   hbn.emplace_back(p_x1m1); | ||||
|   hbn.emplace_back(p_m1); | ||||
| 
 | ||||
|   VectorValues given; | ||||
|   given.insert(z0, Vector1(0.5)); | ||||
| 
 | ||||
|   { | ||||
|     // Start with no measurement on x1, only on x0 | ||||
|     HybridGaussianFactorGraph gfg = hbn.toFactorGraph(given); | ||||
|     HybridBayesNet::shared_ptr bn = gfg.eliminateSequential(); | ||||
| 
 | ||||
|     // Since no measurement on x1, we hedge our bets | ||||
|     DiscreteConditional expected(m1, "0.5/0.5"); | ||||
| 
 | ||||
|     EXPECT(assert_equal(expected, *(bn->at(2)->asDiscrete()))); | ||||
|   } | ||||
| 
 | ||||
|   { | ||||
|     // Now we add a measurement z1 on x1 | ||||
|     hbn.emplace_back(p_z1x1); | ||||
| 
 | ||||
|     given.insert(z1, Vector1(2.2)); | ||||
|     HybridGaussianFactorGraph gfg = hbn.toFactorGraph(given); | ||||
|     HybridBayesNet::shared_ptr bn = gfg.eliminateSequential(); | ||||
| 
 | ||||
|     // Since we have a measurement on z1, we get a definite result | ||||
|     DiscreteConditional expected(m1, "0.4923083/0.5076917"); | ||||
| 
 | ||||
|     EXPECT(assert_equal(expected, *(bn->at(2)->asDiscrete()), 1e-6)); | ||||
|   } | ||||
| } | ||||
| 
 | ||||
| /* ************************************************************************* */ | ||||
| /** | ||||
|  * Test a model P(x0)P(z0|x0)p(x1|m1)p(z1|x1)p(m1). | ||||
|  * | ||||
|  * p(x1|m1) has different means and different covariances. | ||||
|  * | ||||
|  * Converting to a factor graph gives us | ||||
|  * P(x0)ϕ(x0)P(x1|m1)ϕ(x1)P(m1) | ||||
|  * | ||||
|  * If we only have a measurement on z0, then | ||||
|  * the posterior on m1 should follow the ratio of standard deviations. | ||||
|  * Getting a measurement on z1 gives us more information. | ||||
|  */ | ||||
| TEST(GaussianMixtureFactor, TwoStateModel2) { | ||||
|   double mu0 = 1.0, mu1 = 3.0; | ||||
|   auto model0 = noiseModel::Isotropic::Sigma(1, 6.0); | ||||
|   auto model1 = noiseModel::Isotropic::Sigma(1, 4.0); | ||||
| 
 | ||||
|   DiscreteKey m1(M(1), 2); | ||||
|   Key z0 = Z(0), z1 = Z(1), x0 = X(0), x1 = X(1); | ||||
| 
 | ||||
|   auto c0 = make_shared<GaussianConditional>(x1, Vector1(mu0), I_1x1, model0), | ||||
|        c1 = make_shared<GaussianConditional>(x1, Vector1(mu1), I_1x1, model1); | ||||
| 
 | ||||
|   auto p_x0 = new GaussianConditional(x0, Vector1(0.0), I_1x1, | ||||
|                                       noiseModel::Isotropic::Sigma(1, 1.0)); | ||||
|   auto p_z0x0 = new GaussianConditional(z0, Vector1(0.0), I_1x1, x0, -I_1x1, | ||||
|                                         noiseModel::Isotropic::Sigma(1, 1.0)); | ||||
|   auto p_x1m1 = new GaussianMixture({x1}, {}, {m1}, {c0, c1}); | ||||
|   auto p_z1x1 = new GaussianConditional(z1, Vector1(0.0), I_1x1, x1, -I_1x1, | ||||
|                                         noiseModel::Isotropic::Sigma(1, 3.0)); | ||||
|   auto p_m1 = new DiscreteConditional(m1, "0.5/0.5"); | ||||
| 
 | ||||
|   HybridBayesNet hbn; | ||||
|   hbn.emplace_back(p_x0); | ||||
|   hbn.emplace_back(p_z0x0); | ||||
|   hbn.emplace_back(p_x1m1); | ||||
|   hbn.emplace_back(p_m1); | ||||
| 
 | ||||
|   VectorValues given; | ||||
|   given.insert(z0, Vector1(0.5)); | ||||
| 
 | ||||
|   { | ||||
|     // Start with no measurement on x1, only on x0 | ||||
|     HybridGaussianFactorGraph gfg = hbn.toFactorGraph(given); | ||||
|     HybridBayesNet::shared_ptr bn = gfg.eliminateSequential(); | ||||
| 
 | ||||
|     // Since no measurement on x1, we get the ratio of standard deviations, 6:4. | ||||
|     DiscreteConditional expected(m1, "0.6/0.4"); | ||||
| 
 | ||||
|     EXPECT(assert_equal(expected, *(bn->at(2)->asDiscrete()))); | ||||
|   } | ||||
| 
 | ||||
|   { | ||||
|     // Now we add a measurement z1 on x1 | ||||
|     hbn.emplace_back(p_z1x1); | ||||
| 
 | ||||
|     given.insert(z1, Vector1(2.2)); | ||||
|     HybridGaussianFactorGraph gfg = hbn.toFactorGraph(given); | ||||
|     HybridBayesNet::shared_ptr bn = gfg.eliminateSequential(); | ||||
| 
 | ||||
|     // Since we have a measurement on z1, we get a definite result | ||||
|     DiscreteConditional expected(m1, "0.52706646/0.47293354"); | ||||
| 
 | ||||
|     EXPECT(assert_equal(expected, *(bn->at(2)->asDiscrete()), 1e-6)); | ||||
|   } | ||||
| } | ||||
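The 0.6/0.4 prior is consistent with createDiscreteFactor above (a sanity check, not from the source): without a measurement on x1, the residual error at the mean is identical for both modes, so each leaf is weighted only by its normalization constant, sqrt(det(2π Σ_m)) ∝ σ_m in 1-D:

    P(m{=}0) : P(m{=}1) = \sigma_0 : \sigma_1 = 6 : 4 = 0.6 : 0.4.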
| 
 | ||||
| /** | ||||
|  * @brief Helper function to specify a Hybrid Bayes Net | ||||
|  * {P(X1) P(Z1 | X1, X2, M1)} and convert it to a Hybrid Factor Graph | ||||
|  * {P(X1)L(X1, X2, M1; Z1)} by converting to likelihoods given Z1. | ||||
|  * | ||||
|  * We can specify either different means or different sigmas, | ||||
|  * or both for each hybrid factor component. | ||||
|  * | ||||
|  * @param values Initial values for linearization. | ||||
|  * @param means The mean values for the conditional components. | ||||
|  * @param sigmas Noise model sigma values (standard deviation). | ||||
|  * @param m1 The discrete mode key. | ||||
|  * @param z1 The measurement value. | ||||
|  * @return HybridGaussianFactorGraph | ||||
|  */ | ||||
| HybridGaussianFactorGraph GetFactorGraphFromBayesNet( | ||||
|     const gtsam::Values &values, const std::vector<double> &means, | ||||
|     const std::vector<double> &sigmas, DiscreteKey &m1, double z1 = 0.0) { | ||||
|   // Noise models | ||||
|   auto model0 = noiseModel::Isotropic::Sigma(1, sigmas[0]); | ||||
|   auto model1 = noiseModel::Isotropic::Sigma(1, sigmas[1]); | ||||
|   auto prior_noise = noiseModel::Isotropic::Sigma(1, 1e-3); | ||||
| 
 | ||||
|   // GaussianMixtureFactor component factors | ||||
|   auto f0 = | ||||
|       std::make_shared<BetweenFactor<double>>(X(1), X(2), means[0], model0); | ||||
|   auto f1 = | ||||
|       std::make_shared<BetweenFactor<double>>(X(1), X(2), means[1], model1); | ||||
|   std::vector<NonlinearFactor::shared_ptr> factors{f0, f1}; | ||||
| 
 | ||||
|   /// Get terms for each p^m(z1 | x1, x2) | ||||
|   Matrix H0_1, H0_2, H1_1, H1_2; | ||||
|   double x1 = values.at<double>(X(1)), x2 = values.at<double>(X(2)); | ||||
|   Vector d0 = f0->evaluateError(x1, x2, &H0_1, &H0_2); | ||||
|   std::vector<std::pair<Key, Matrix>> terms0 = {{Z(1), gtsam::I_1x1 /*Rx*/}, | ||||
|                                                 // | ||||
|                                                 {X(1), H0_1 /*Sp1*/}, | ||||
|                                                 {X(2), H0_2 /*Tp2*/}}; | ||||
| 
 | ||||
|   Vector d1 = f1->evaluateError(x1, x2, &H1_1, &H1_2); | ||||
|   std::vector<std::pair<Key, Matrix>> terms1 = {{Z(1), gtsam::I_1x1 /*Rx*/}, | ||||
|                                                 // | ||||
|                                                 {X(1), H1_1 /*Sp1*/}, | ||||
|                                                 {X(2), H1_2 /*Tp2*/}}; | ||||
|   // Create conditional P(Z1 | X1, X2, M1) | ||||
|   auto gm = new gtsam::GaussianMixture( | ||||
|       {Z(1)}, {X(1), X(2)}, {m1}, | ||||
|       {std::make_shared<GaussianConditional>(terms0, 1, -d0, model0), | ||||
|        std::make_shared<GaussianConditional>(terms1, 1, -d1, model1)}); | ||||
|   gtsam::HybridBayesNet bn; | ||||
|   bn.emplace_back(gm); | ||||
|   // bn.print(); | ||||
|  | ||||
|   // Create FG via toFactorGraph | ||||
|   gtsam::VectorValues measurements; | ||||
|   measurements.insert(Z(1), gtsam::I_1x1 * z1);  // Set Z1 = z1 | ||||
|   HybridGaussianFactorGraph mixture_fg = bn.toFactorGraph(measurements); | ||||
| 
 | ||||
|   // Linearized prior factor on X1 | ||||
|   auto prior = PriorFactor<double>(X(1), x1, prior_noise).linearize(values); | ||||
|   mixture_fg.push_back(prior); | ||||
| 
 | ||||
|   return mixture_fg; | ||||
| } | ||||
| 
 | ||||
| /* ************************************************************************* */ | ||||
| /** | ||||
|  * @brief Test components with differing means. | ||||
|  * | ||||
|  * We specify a hybrid Bayes network P(Z | X, M) = p(X1) p(Z1 | X1, X2, M1), | ||||
|  * which is then converted to a factor graph by specifying Z1. | ||||
|  * This is a different case since now we have a hybrid factor | ||||
|  * with 2 continuous variables ϕ(x1, x2, m1). | ||||
|  * p(Z1 | X1, X2, M1) has one component per value of the binary | ||||
|  * mode m1, with only the means being different. | ||||
|  */ | ||||
| TEST(GaussianMixtureFactor, DifferentMeans) { | ||||
|   DiscreteKey m1(M(1), 2); | ||||
| 
 | ||||
|   Values values; | ||||
|   double x1 = 0.0, x2 = 1.75; | ||||
|   values.insert(X(1), x1); | ||||
|   values.insert(X(2), x2); | ||||
| 
 | ||||
|   // Different means, same sigma | ||||
|   std::vector<double> means{0.0, 2.0}, sigmas{1e-0, 1e-0}; | ||||
| 
 | ||||
|   HybridGaussianFactorGraph hfg = | ||||
|       GetFactorGraphFromBayesNet(values, means, sigmas, m1, 0.0); | ||||
| 
 | ||||
|   { | ||||
|     // With no measurement on X2, each mode should be equally likely | ||||
|     auto bn = hfg.eliminateSequential(); | ||||
|     HybridValues actual = bn->optimize(); | ||||
| 
 | ||||
|     HybridValues expected( | ||||
|         VectorValues{{X(1), Vector1(0.0)}, {X(2), Vector1(-1.75)}}, | ||||
|         DiscreteValues{{M(1), 0}}); | ||||
| 
 | ||||
|     EXPECT(assert_equal(expected, actual)); | ||||
| 
 | ||||
|     { | ||||
|       DiscreteValues dv{{M(1), 0}}; | ||||
|       VectorValues cont = bn->optimize(dv); | ||||
|       double error = bn->error(HybridValues(cont, dv)); | ||||
|       // regression | ||||
|       EXPECT_DOUBLES_EQUAL(0.69314718056, error, 1e-9); | ||||
|     } | ||||
|     { | ||||
|       DiscreteValues dv{{M(1), 1}}; | ||||
|       VectorValues cont = bn->optimize(dv); | ||||
|       double error = bn->error(HybridValues(cont, dv)); | ||||
|       // regression | ||||
|       EXPECT_DOUBLES_EQUAL(0.69314718056, error, 1e-9); | ||||
|     } | ||||
|   } | ||||
|   { | ||||
|     // Adding a measurement on X2 gives us more information to work with. | ||||
|     auto prior_noise = noiseModel::Isotropic::Sigma(1, 1e-3); | ||||
|     GaussianConditional meas_z2(Z(2), Vector1(2.0), I_1x1, X(2), I_1x1, | ||||
|                                 prior_noise); | ||||
|     auto prior_x2 = meas_z2.likelihood(Vector1(x2)); | ||||
| 
 | ||||
|     hfg.push_back(prior_x2); | ||||
| 
 | ||||
|     auto bn = hfg.eliminateSequential(); | ||||
|     HybridValues actual = bn->optimize(); | ||||
| 
 | ||||
|     HybridValues expected( | ||||
|         VectorValues{{X(1), Vector1(0.0)}, {X(2), Vector1(0.25)}}, | ||||
|         DiscreteValues{{M(1), 1}}); | ||||
| 
 | ||||
|     EXPECT(assert_equal(expected, actual)); | ||||
| 
 | ||||
|     { | ||||
|       DiscreteValues dv{{M(1), 0}}; | ||||
|       VectorValues cont = bn->optimize(dv); | ||||
|       double error = bn->error(HybridValues(cont, dv)); | ||||
|       // regression | ||||
|       EXPECT_DOUBLES_EQUAL(2.12692448787, error, 1e-9); | ||||
|     } | ||||
|     { | ||||
|       DiscreteValues dv{{M(1), 1}}; | ||||
|       VectorValues cont = bn->optimize(dv); | ||||
|       double error = bn->error(HybridValues(cont, dv)); | ||||
|       // regression | ||||
|       EXPECT_DOUBLES_EQUAL(0.126928487854, error, 1e-9); | ||||
|     } | ||||
|   } | ||||
| } | ||||
| 
 | ||||
| /* ************************************************************************* */ | ||||
| /** | ||||
|  * @brief Test components with differing covariances | ||||
|  * but with a Bayes net P(Z|X, M) converted to a FG. | ||||
|  * Same as the DifferentMeans example but in this case, | ||||
|  * we keep the means the same and vary the covariances. | ||||
|  */ | ||||
| TEST(GaussianMixtureFactor, DifferentCovariances) { | ||||
|   DiscreteKey m1(M(1), 2); | ||||
| 
 | ||||
|   Values values; | ||||
|   double x1 = 1.0, x2 = 1.0; | ||||
|   values.insert(X(1), x1); | ||||
|   values.insert(X(2), x2); | ||||
| 
 | ||||
|   std::vector<double> means{0.0, 0.0}, sigmas{1e2, 1e-2}; | ||||
|   HybridGaussianFactorGraph mixture_fg = | ||||
|       GetFactorGraphFromBayesNet(values, means, sigmas, m1); | ||||
| 
 | ||||
|   auto hbn = mixture_fg.eliminateSequential(); | ||||
| 
 | ||||
|   VectorValues cv; | ||||
|   cv.insert(X(1), Vector1(0.0)); | ||||
|   cv.insert(X(2), Vector1(0.0)); | ||||
| 
 | ||||
|   // Check the error values at the MLE point μ. | ||||
|   AlgebraicDecisionTree<Key> errorTree = hbn->errorTree(cv); | ||||
| 
 | ||||
|   DiscreteValues dv0{{M(1), 0}}; | ||||
|   DiscreteValues dv1{{M(1), 1}}; | ||||
| 
 | ||||
|   // regression | ||||
|   EXPECT_DOUBLES_EQUAL(9.90348755254, errorTree(dv0), 1e-9); | ||||
|   EXPECT_DOUBLES_EQUAL(0.69314718056, errorTree(dv1), 1e-9); | ||||
| 
 | ||||
|   DiscreteConditional expected_m1(m1, "0.5/0.5"); | ||||
|   DiscreteConditional actual_m1 = *(hbn->at(2)->asDiscrete()); | ||||
| 
 | ||||
|   EXPECT(assert_equal(expected_m1, actual_m1)); | ||||
| } | ||||
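These regression values decompose cleanly (a sanity check, using the errorTree convention above of adding logConstant_ - logNormalizationConstant per leaf): both modes sit at their means, so the residual error is zero and what remains is -log P(m) plus the constant gap between the two modes' normalizers,

    E(m{=}1) = -\log(0.5) \approx 0.693147,
    E(m{=}0) = -\log(0.5) + \log(\sigma_0/\sigma_1) = 0.693147 + \log(10^4) \approx 9.903488.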
| 
 | ||||
| /* ************************************************************************* */ | ||||
|  |  | |||
|  | @ -510,6 +510,7 @@ factor 0: | |||
|   b = [ -10 ] | ||||
|   No noise model | ||||
| factor 1:  | ||||
| GaussianMixtureFactor | ||||
| Hybrid [x0 x1; m0]{ | ||||
|  Choice(m0)  | ||||
|  0 Leaf : | ||||
|  | @ -534,6 +535,7 @@ Hybrid [x0 x1; m0]{ | |||
| 
 | ||||
| } | ||||
| factor 2:  | ||||
| GaussianMixtureFactor | ||||
| Hybrid [x1 x2; m1]{ | ||||
|  Choice(m1)  | ||||
|  0 Leaf : | ||||
|  |  | |||
|  | @ -18,6 +18,9 @@ | |||
| 
 | ||||
| #include <gtsam/base/TestableAssertions.h> | ||||
| #include <gtsam/discrete/DiscreteValues.h> | ||||
| #include <gtsam/hybrid/HybridBayesNet.h> | ||||
| #include <gtsam/hybrid/HybridGaussianFactorGraph.h> | ||||
| #include <gtsam/hybrid/HybridNonlinearFactorGraph.h> | ||||
| #include <gtsam/hybrid/MixtureFactor.h> | ||||
| #include <gtsam/inference/Symbol.h> | ||||
| #include <gtsam/slam/BetweenFactor.h> | ||||
|  | @ -115,6 +118,156 @@ TEST(MixtureFactor, Dim) { | |||
|   EXPECT_LONGS_EQUAL(1, mixtureFactor.dim()); | ||||
| } | ||||
| 
 | ||||
| /* ************************************************************************* */ | ||||
| // Test components with differing means | ||||
| TEST(MixtureFactor, DifferentMeans) { | ||||
|   DiscreteKey m1(M(1), 2), m2(M(2), 2); | ||||
| 
 | ||||
|   Values values; | ||||
|   double x1 = 0.0, x2 = 1.75, x3 = 2.60; | ||||
|   values.insert(X(1), x1); | ||||
|   values.insert(X(2), x2); | ||||
|   values.insert(X(3), x3); | ||||
| 
 | ||||
|   auto model0 = noiseModel::Isotropic::Sigma(1, 1e-0); | ||||
|   auto model1 = noiseModel::Isotropic::Sigma(1, 1e-0); | ||||
|   auto prior_noise = noiseModel::Isotropic::Sigma(1, 1e-0); | ||||
| 
 | ||||
|   auto f0 = std::make_shared<BetweenFactor<double>>(X(1), X(2), 0.0, model0); | ||||
|   auto f1 = std::make_shared<BetweenFactor<double>>(X(1), X(2), 2.0, model1); | ||||
|   std::vector<NonlinearFactor::shared_ptr> factors{f0, f1}; | ||||
| 
 | ||||
|   MixtureFactor mixtureFactor({X(1), X(2)}, {m1}, factors); | ||||
|   HybridNonlinearFactorGraph hnfg; | ||||
|   hnfg.push_back(mixtureFactor); | ||||
| 
 | ||||
|   f0 = std::make_shared<BetweenFactor<double>>(X(2), X(3), 0.0, model0); | ||||
|   f1 = std::make_shared<BetweenFactor<double>>(X(2), X(3), 2.0, model1); | ||||
|   std::vector<NonlinearFactor::shared_ptr> factors23{f0, f1}; | ||||
|   hnfg.push_back(MixtureFactor({X(2), X(3)}, {m2}, factors23)); | ||||
| 
 | ||||
|   auto prior = PriorFactor<double>(X(1), x1, prior_noise); | ||||
|   hnfg.push_back(prior); | ||||
| 
 | ||||
|   hnfg.emplace_shared<PriorFactor<double>>(X(2), 2.0, prior_noise); | ||||
| 
 | ||||
|   auto hgfg = hnfg.linearize(values); | ||||
|   auto bn = hgfg->eliminateSequential(); | ||||
|   HybridValues actual = bn->optimize(); | ||||
| 
 | ||||
|   HybridValues expected( | ||||
|       VectorValues{ | ||||
|           {X(1), Vector1(0.0)}, {X(2), Vector1(0.25)}, {X(3), Vector1(-0.6)}}, | ||||
|       DiscreteValues{{M(1), 1}, {M(2), 0}}); | ||||
| 
 | ||||
|   EXPECT(assert_equal(expected, actual)); | ||||
| 
 | ||||
|   { | ||||
|     DiscreteValues dv{{M(1), 0}, {M(2), 0}}; | ||||
|     VectorValues cont = bn->optimize(dv); | ||||
|     double error = bn->error(HybridValues(cont, dv)); | ||||
|     // regression | ||||
|     EXPECT_DOUBLES_EQUAL(1.77418393408, error, 1e-9); | ||||
|   } | ||||
|   { | ||||
|     DiscreteValues dv{{M(1), 0}, {M(2), 1}}; | ||||
|     VectorValues cont = bn->optimize(dv); | ||||
|     double error = bn->error(HybridValues(cont, dv)); | ||||
|     // regression | ||||
|     EXPECT_DOUBLES_EQUAL(1.77418393408, error, 1e-9); | ||||
|   } | ||||
|   { | ||||
|     DiscreteValues dv{{M(1), 1}, {M(2), 0}}; | ||||
|     VectorValues cont = bn->optimize(dv); | ||||
|     double error = bn->error(HybridValues(cont, dv)); | ||||
|     // regression | ||||
|     EXPECT_DOUBLES_EQUAL(1.10751726741, error, 1e-9); | ||||
|   } | ||||
|   { | ||||
|     DiscreteValues dv{{M(1), 1}, {M(2), 1}}; | ||||
|     VectorValues cont = bn->optimize(dv); | ||||
|     double error = bn->error(HybridValues(cont, dv)); | ||||
|     // regression | ||||
|     EXPECT_DOUBLES_EQUAL(1.10751726741, error, 1e-9); | ||||
|   } | ||||
| } | ||||
| 
 | ||||
| /* ************************************************************************* */ | ||||
| // Test components with differing covariances | ||||
| TEST(MixtureFactor, DifferentCovariances) { | ||||
|   DiscreteKey m1(M(1), 2); | ||||
| 
 | ||||
|   Values values; | ||||
|   double x1 = 1.0, x2 = 1.0; | ||||
|   values.insert(X(1), x1); | ||||
|   values.insert(X(2), x2); | ||||
| 
 | ||||
|   double between = 0.0; | ||||
| 
 | ||||
|   auto model0 = noiseModel::Isotropic::Sigma(1, 1e2); | ||||
|   auto model1 = noiseModel::Isotropic::Sigma(1, 1e-2); | ||||
|   auto prior_noise = noiseModel::Isotropic::Sigma(1, 1e-3); | ||||
| 
 | ||||
|   auto f0 = | ||||
|       std::make_shared<BetweenFactor<double>>(X(1), X(2), between, model0); | ||||
|   auto f1 = | ||||
|       std::make_shared<BetweenFactor<double>>(X(1), X(2), between, model1); | ||||
|   std::vector<NonlinearFactor::shared_ptr> factors{f0, f1}; | ||||
| 
 | ||||
|   // Create via toFactorGraph | ||||
|   using symbol_shorthand::Z; | ||||
|   Matrix H0_1, H0_2, H1_1, H1_2; | ||||
|   Vector d0 = f0->evaluateError(x1, x2, &H0_1, &H0_2); | ||||
|   std::vector<std::pair<Key, Matrix>> terms0 = {{Z(1), gtsam::I_1x1 /*Rx*/}, | ||||
|                                                 // | ||||
|                                                 {X(1), H0_1 /*Sp1*/}, | ||||
|                                                 {X(2), H0_2 /*Tp2*/}}; | ||||
| 
 | ||||
|   Vector d1 = f1->evaluateError(x1, x2, &H1_1, &H1_2); | ||||
|   std::vector<std::pair<Key, Matrix>> terms1 = {{Z(1), gtsam::I_1x1 /*Rx*/}, | ||||
|                                                 // | ||||
|                                                 {X(1), H1_1 /*Sp1*/}, | ||||
|                                                 {X(2), H1_2 /*Tp2*/}}; | ||||
|   auto gm = new gtsam::GaussianMixture( | ||||
|       {Z(1)}, {X(1), X(2)}, {m1}, | ||||
|       {std::make_shared<GaussianConditional>(terms0, 1, -d0, model0), | ||||
|        std::make_shared<GaussianConditional>(terms1, 1, -d1, model1)}); | ||||
|   gtsam::HybridBayesNet bn; | ||||
|   bn.emplace_back(gm); | ||||
| 
 | ||||
|   gtsam::VectorValues measurements; | ||||
|   measurements.insert(Z(1), gtsam::Z_1x1); | ||||
|   // Create FG with single GaussianMixtureFactor | ||||
|   HybridGaussianFactorGraph mixture_fg = bn.toFactorGraph(measurements); | ||||
| 
 | ||||
|   // Linearized prior factor on X1 | ||||
|   auto prior = PriorFactor<double>(X(1), x1, prior_noise).linearize(values); | ||||
|   mixture_fg.push_back(prior); | ||||
| 
 | ||||
|   auto hbn = mixture_fg.eliminateSequential(); | ||||
| 
 | ||||
|   VectorValues cv; | ||||
|   cv.insert(X(1), Vector1(0.0)); | ||||
|   cv.insert(X(2), Vector1(0.0)); | ||||
|   // P(m1) = [0.5, 0.5], so we should pick 0 | ||||
|   DiscreteValues dv; | ||||
|   dv.insert({M(1), 0}); | ||||
|   HybridValues expected_values(cv, dv); | ||||
| 
 | ||||
|   HybridValues actual_values = hbn->optimize(); | ||||
|   EXPECT(assert_equal(expected_values, actual_values)); | ||||
| 
 | ||||
|   // Check that we get different error values at the MLE point μ. | ||||
|   AlgebraicDecisionTree<Key> errorTree = hbn->errorTree(cv); | ||||
| 
 | ||||
|   HybridValues hv0(cv, DiscreteValues{{M(1), 0}}); | ||||
|   HybridValues hv1(cv, DiscreteValues{{M(1), 1}}); | ||||
| 
 | ||||
|   AlgebraicDecisionTree<Key> expectedErrorTree(m1, 9.90348755254, | ||||
|                                                0.69314718056); | ||||
|   EXPECT(assert_equal(expectedErrorTree, errorTree)); | ||||
| } | ||||
| 
 | ||||
| /* ************************************************************************* */ | ||||
| int main() { | ||||
|   TestResult tr; | ||||
|  |  | |||
|  | @ -263,11 +263,6 @@ namespace gtsam { | |||
|     /** equals required by Testable for unit testing */ | ||||
|     bool equals(const VectorValues& x, double tol = 1e-9) const; | ||||
| 
 | ||||
|     /// Check equality. | ||||
|     friend bool operator==(const VectorValues& lhs, const VectorValues& rhs) { | ||||
|       return lhs.equals(rhs); | ||||
|     } | ||||
| 
 | ||||
|     /// @{ | ||||
|     /// @name Advanced Interface | ||||
|     /// @{ | ||||
|  |  | |||