Merge branch 'develop' into improved-api-3

commit 796d85d7fa
@@ -38,7 +38,7 @@ namespace gtsam {
 * Gaussian factor in factors.
 * @return HybridGaussianFactor::Factors
 */
HybridGaussianFactor::Factors augment(
static HybridGaussianFactor::Factors augment(
    const HybridGaussianFactor::FactorValuePairs &factors) {
  // Find the minimum value so we can shift all values to be positive.
  // Done because we can't have sqrt of negative numbers.

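Aside (not part of the diff): the two comments above are the whole reason augment() shifts by the minimum value. Each per-mode scalar is folded into its Gaussian factor as an extra constant residual entry, and that entry is a square root, so the shifted scalar must be non-negative. A minimal sketch of the arithmetic, assuming the usual 1/2·‖r‖² error convention:

    E'_m = E_m − min_k E_k ≥ 0,    1/2 · (√(2·E'_m))² = E'_m

so each component's error picks up its original scalar up to a constant that is common to all modes and therefore does not change their relative weighting.
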
@@ -89,25 +89,28 @@ class GTSAM_EXPORT HybridGaussianFactor : public HybridFactor {
  HybridGaussianFactor() = default;

  /**
   * @brief Construct a new hybrid Gaussian factor.
   *
   * @param continuousKeys A vector of keys representing continuous variables.
   * @param discreteKeys A vector of keys representing discrete variables and
   * their cardinalities.
   * @param factors The decision tree of Gaussian factors and arbitrary scalars.
   */
  HybridGaussianFactor(const KeyVector &continuousKeys,
                       const DiscreteKeys &discreteKeys,
                       const FactorValuePairs &factors);

  /**
   * @brief Construct a new HybridGaussianFactor object using a vector of
   * GaussianFactor shared pointers.
   * @brief Construct a new HybridGaussianFactor on a single discrete key,
   * providing the factors for each mode m as a vector of factors ϕ_m(x).
   * The value ϕ(x,m) for the factor is simply ϕ_m(x).
   *
   * @param continuousKeys Vector of keys for continuous factors.
   * @param discreteKey The discrete key to index each component.
   * @param factors Vector of Gaussian factor shared pointers
   *  and arbitrary scalars. Same size as the cardinality of discreteKey.
   * @param discreteKey The discrete key for the "mode", indexing components.
   * @param factors Vector of Gaussian factors, one for each mode.
   */
  HybridGaussianFactor(const KeyVector &continuousKeys,
                       const DiscreteKey &discreteKey,
                       const std::vector<GaussianFactor::shared_ptr> &factors)
      : Base(continuousKeys, {discreteKey}), factors_({discreteKey}, factors) {}

  /**
   * @brief Construct a new HybridGaussianFactor on a single discrete key,
   * including a scalar error value for each mode m. The factors and scalars are
   * provided as a vector of pairs (ϕ_m(x), E_m).
   * The value ϕ(x,m) for the factor is now ϕ_m(x) + E_m.
   *
   * @param continuousKeys Vector of keys for continuous factors.
   * @param discreteKey The discrete key for the "mode", indexing components.
   * @param factors Vector of Gaussian factor-scalar pairs, one per mode.
   */
  HybridGaussianFactor(const KeyVector &continuousKeys,
                       const DiscreteKey &discreteKey,
@@ -115,6 +118,20 @@ class GTSAM_EXPORT HybridGaussianFactor : public HybridFactor {
      : HybridGaussianFactor(continuousKeys, {discreteKey},
                             FactorValuePairs({discreteKey}, factors)) {}

  /**
   * @brief Construct a new HybridGaussianFactor on several discrete keys M,
   * including a scalar error value for each assignment m. The factors and
   * scalars are provided as a DecisionTree<Key> of pairs (ϕ_M(x), E_M).
   * The value ϕ(x,M) for the factor is again ϕ_M(x) + E_M.
   *
   * @param continuousKeys A vector of keys representing continuous variables.
   * @param discreteKeys Discrete variables and their cardinalities.
   * @param factors The decision tree of Gaussian factor/scalar pairs.
   */
  HybridGaussianFactor(const KeyVector &continuousKeys,
                       const DiscreteKeys &discreteKeys,
                       const FactorValuePairs &factors);

  /// @}
  /// @name Testable
  /// @{

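To make the three overloads above concrete, here is a small usage sketch. It is illustrative only (it mirrors the ConstructorVariants test further down) and assumes the usual GTSAM headers; the names constructorSketch, mode, g0 and g1 are made up for the example.

#include <gtsam/hybrid/HybridGaussianFactor.h>
#include <gtsam/inference/Symbol.h>
#include <gtsam/linear/JacobianFactor.h>

void constructorSketch() {
  using namespace gtsam;
  using symbol_shorthand::X;

  // One binary mode and two simple Jacobian factors on (x1, x2).
  DiscreteKey mode(1, 2);
  auto g0 = std::make_shared<JacobianFactor>(X(1), I_3x3, X(2), -I_3x3, Z_3x1);
  auto g1 = std::make_shared<JacobianFactor>(X(1), I_3x3, X(2), -I_3x3,
                                             Vector3::Ones());

  // (1) Factors only: per the doc above, ϕ(x,m) = ϕ_m(x).
  HybridGaussianFactor justFactors({X(1), X(2)}, mode, {g0, g1});

  // (2) Factor/scalar pairs: ϕ(x,m) = ϕ_m(x) + E_m.
  std::vector<GaussianFactorValuePair> pairs{{g0, 0.0}, {g1, 0.5}};
  HybridGaussianFactor withScalars({X(1), X(2)}, mode, pairs);

  // (3) Several discrete keys via a decision tree of factor/scalar pairs.
  HybridGaussianFactor::FactorValuePairs tree({mode}, pairs);
  HybridGaussianFactor fromTree({X(1), X(2)}, {mode}, tree);
}
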
@@ -31,6 +31,9 @@

#include <vector>

#include "gtsam/linear/GaussianFactor.h"
#include "gtsam/linear/GaussianFactorGraph.h"

#pragma once

namespace gtsam {
@@ -44,33 +47,28 @@ using symbol_shorthand::X;
 * system which depends on a discrete mode at each time step of the chain.
 *
 * @param n The number of chain elements.
 * @param keyFunc The functional to help specify the continuous key.
 * @param dKeyFunc The functional to help specify the discrete key.
 * @param x The functional to help specify the continuous key.
 * @param m The functional to help specify the discrete key.
 * @return HybridGaussianFactorGraph::shared_ptr
 */
inline HybridGaussianFactorGraph::shared_ptr makeSwitchingChain(
    size_t n, std::function<Key(int)> keyFunc = X,
    std::function<Key(int)> dKeyFunc = M) {
    size_t n, std::function<Key(int)> x = X, std::function<Key(int)> m = M) {
  HybridGaussianFactorGraph hfg;

  hfg.add(JacobianFactor(keyFunc(1), I_3x3, Z_3x1));
  hfg.add(JacobianFactor(x(1), I_3x3, Z_3x1));

  // keyFunc(1) to keyFunc(n+1)
  // x(1) to x(n+1)
  for (size_t t = 1; t < n; t++) {
    DiscreteKeys dKeys{{dKeyFunc(t), 2}};
    HybridGaussianFactor::FactorValuePairs components(
        dKeys, {{std::make_shared<JacobianFactor>(keyFunc(t), I_3x3,
                                                  keyFunc(t + 1), I_3x3, Z_3x1),
                 0.0},
                {std::make_shared<JacobianFactor>(
                     keyFunc(t), I_3x3, keyFunc(t + 1), I_3x3, Vector3::Ones()),
                 0.0}});
    hfg.add(
        HybridGaussianFactor({keyFunc(t), keyFunc(t + 1)}, dKeys, components));
    DiscreteKeys dKeys{{m(t), 2}};
    std::vector<GaussianFactor::shared_ptr> components;
    components.emplace_back(
        new JacobianFactor(x(t), I_3x3, x(t + 1), I_3x3, Z_3x1));
    components.emplace_back(
        new JacobianFactor(x(t), I_3x3, x(t + 1), I_3x3, Vector3::Ones()));
    hfg.add(HybridGaussianFactor({x(t), x(t + 1)}, {m(t), 2}, components));

    if (t > 1) {
      hfg.add(DecisionTreeFactor({{dKeyFunc(t - 1), 2}, {dKeyFunc(t), 2}},
                                 "0 1 1 3"));
      hfg.add(DecisionTreeFactor({{m(t - 1), 2}, {m(t), 2}}, "0 1 1 3"));
    }
  }

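For orientation, a small sketch of how the helper above is typically consumed (illustrative only; it assumes this test utility header and GTSAM's hybrid types are in scope, and the function name switchingChainSketch is made up):

void switchingChainSketch() {
  using namespace gtsam;

  // n = 3 gives continuous keys X(1)..X(3) and binary modes M(1), M(2)
  // with the default key functions.
  HybridGaussianFactorGraph::shared_ptr hfg = makeSwitchingChain(3);

  // Sequential elimination yields a hybrid Bayes net over the X's and M's,
  // as in the tests below.
  HybridBayesNet::shared_ptr bayesNet = hfg->eliminateSequential();
  bayesNet->print("switching chain:\n");
}
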
@@ -52,10 +52,10 @@ TEST(HybridFactorGraph, Keys) {

  // Add a hybrid Gaussian factor ϕ(x1, c1)
  DiscreteKey m1(M(1), 2);
  DecisionTree<Key, GaussianFactorValuePair> dt(
      M(1), {std::make_shared<JacobianFactor>(X(1), I_3x3, Z_3x1), 0.0},
      {std::make_shared<JacobianFactor>(X(1), I_3x3, Vector3::Ones()), 0.0});
  hfg.add(HybridGaussianFactor({X(1)}, {m1}, dt));
  std::vector<GaussianFactor::shared_ptr> components{
      std::make_shared<JacobianFactor>(X(1), I_3x3, Z_3x1),
      std::make_shared<JacobianFactor>(X(1), I_3x3, Vector3::Ones())};
  hfg.add(HybridGaussianFactor({X(1)}, m1, components));

  KeySet expected_continuous{X(0), X(1)};
  EXPECT(

@@ -54,32 +54,49 @@ TEST(HybridGaussianFactor, Constructor) {
  CHECK(it == factor.end());
}

/* ************************************************************************* */
namespace test_constructor {
DiscreteKey m1(1, 2);

auto A1 = Matrix::Zero(2, 1);
auto A2 = Matrix::Zero(2, 2);
auto b = Matrix::Zero(2, 1);

auto f10 = std::make_shared<JacobianFactor>(X(1), A1, X(2), A2, b);
auto f11 = std::make_shared<JacobianFactor>(X(1), A1, X(2), A2, b);
}  // namespace test_constructor

/* ************************************************************************* */
// Test simple to complex constructors...
TEST(HybridGaussianFactor, ConstructorVariants) {
  using namespace test_constructor;
  HybridGaussianFactor fromFactors({X(1), X(2)}, m1, {f10, f11});

  std::vector<GaussianFactorValuePair> pairs{{f10, 0.0}, {f11, 0.0}};
  HybridGaussianFactor fromPairs({X(1), X(2)}, {m1}, pairs);
  assert_equal(fromFactors, fromPairs);

  HybridGaussianFactor::FactorValuePairs decisionTree({m1}, pairs);
  HybridGaussianFactor fromDecisionTree({X(1), X(2)}, {m1}, decisionTree);
  assert_equal(fromDecisionTree, fromPairs);
}

/* ************************************************************************* */
// "Add" two hybrid factors together.
TEST(HybridGaussianFactor, Sum) {
  DiscreteKey m1(1, 2), m2(2, 3);
  using namespace test_constructor;
  DiscreteKey m2(2, 3);

  auto A1 = Matrix::Zero(2, 1);
  auto A2 = Matrix::Zero(2, 2);
  auto A3 = Matrix::Zero(2, 3);
  auto b = Matrix::Zero(2, 1);
  Vector2 sigmas;
  sigmas << 1, 2;

  auto f10 = std::make_shared<JacobianFactor>(X(1), A1, X(2), A2, b);
  auto f11 = std::make_shared<JacobianFactor>(X(1), A1, X(2), A2, b);
  auto f20 = std::make_shared<JacobianFactor>(X(1), A1, X(3), A3, b);
  auto f21 = std::make_shared<JacobianFactor>(X(1), A1, X(3), A3, b);
  auto f22 = std::make_shared<JacobianFactor>(X(1), A1, X(3), A3, b);
  std::vector<GaussianFactorValuePair> factorsA{{f10, 0.0}, {f11, 0.0}};
  std::vector<GaussianFactorValuePair> factorsB{
      {f20, 0.0}, {f21, 0.0}, {f22, 0.0}};

  // TODO(Frank): why specify keys at all? And: keys in factor should be *all*
  // keys, deviating from Kevin's scheme. Should we index DT on DiscreteKey?
  // Design review!
  HybridGaussianFactor hybridFactorA({X(1), X(2)}, {m1}, factorsA);
  HybridGaussianFactor hybridFactorB({X(1), X(3)}, {m2}, factorsB);
  HybridGaussianFactor hybridFactorA({X(1), X(2)}, m1, {f10, f11});
  HybridGaussianFactor hybridFactorB({X(1), X(3)}, m2, {f20, f21, f22});

  // Check that number of keys is 3
  EXPECT_LONGS_EQUAL(3, hybridFactorA.keys().size());
@@ -104,15 +121,8 @@ TEST(HybridGaussianFactor, Sum) {

/* ************************************************************************* */
TEST(HybridGaussianFactor, Printing) {
  DiscreteKey m1(1, 2);
  auto A1 = Matrix::Zero(2, 1);
  auto A2 = Matrix::Zero(2, 2);
  auto b = Matrix::Zero(2, 1);
  auto f10 = std::make_shared<JacobianFactor>(X(1), A1, X(2), A2, b);
  auto f11 = std::make_shared<JacobianFactor>(X(1), A1, X(2), A2, b);
  std::vector<GaussianFactorValuePair> factors{{f10, 0.0}, {f11, 0.0}};

  HybridGaussianFactor hybridFactor({X(1), X(2)}, {m1}, factors);
  using namespace test_constructor;
  HybridGaussianFactor hybridFactor({X(1), X(2)}, m1, {f10, f11});

  std::string expected =
      R"(HybridGaussianFactor
@@ -179,9 +189,7 @@ TEST(HybridGaussianFactor, Error) {

  auto f0 = std::make_shared<JacobianFactor>(X(1), A01, X(2), A02, b);
  auto f1 = std::make_shared<JacobianFactor>(X(1), A11, X(2), A12, b);
  std::vector<GaussianFactorValuePair> factors{{f0, 0.0}, {f1, 0.0}};

  HybridGaussianFactor hybridFactor({X(1), X(2)}, {m1}, factors);
  HybridGaussianFactor hybridFactor({X(1), X(2)}, m1, {f0, f1});

  VectorValues continuousValues;
  continuousValues.insert(X(1), Vector2(0, 0));

@@ -114,6 +114,14 @@ TEST(HybridGaussianFactorGraph, EliminateMultifrontal) {
  EXPECT_LONGS_EQUAL(result.first->size(), 1);
  EXPECT_LONGS_EQUAL(result.second->size(), 1);
}
/* ************************************************************************* */

namespace two {
std::vector<GaussianFactor::shared_ptr> components(Key key) {
  return {std::make_shared<JacobianFactor>(key, I_3x3, Z_3x1),
          std::make_shared<JacobianFactor>(key, I_3x3, Vector3::Ones())};
}
}  // namespace two

/* ************************************************************************* */
TEST(HybridGaussianFactorGraph, eliminateFullSequentialEqualChance) {
@@ -127,10 +135,7 @@ TEST(HybridGaussianFactorGraph, eliminateFullSequentialEqualChance) {

  // Add a hybrid Gaussian factor ϕ(x1, c1)
  DiscreteKey m1(M(1), 2);
  DecisionTree<Key, GaussianFactorValuePair> dt(
      M(1), {std::make_shared<JacobianFactor>(X(1), I_3x3, Z_3x1), 0.0},
      {std::make_shared<JacobianFactor>(X(1), I_3x3, Vector3::Ones()), 0.0});
  hfg.add(HybridGaussianFactor({X(1)}, {m1}, dt));
  hfg.add(HybridGaussianFactor({X(1)}, m1, two::components(X(1))));

  auto result = hfg.eliminateSequential();

@@ -153,10 +158,7 @@ TEST(HybridGaussianFactorGraph, eliminateFullSequentialSimple) {
  // Add factor between x0 and x1
  hfg.add(JacobianFactor(X(0), I_3x3, X(1), -I_3x3, Z_3x1));

  std::vector<GaussianFactorValuePair> factors = {
      {std::make_shared<JacobianFactor>(X(1), I_3x3, Z_3x1), 0.0},
      {std::make_shared<JacobianFactor>(X(1), I_3x3, Vector3::Ones()), 0.0}};
  hfg.add(HybridGaussianFactor({X(1)}, {m1}, factors));
  hfg.add(HybridGaussianFactor({X(1)}, m1, two::components(X(1))));

  // Discrete probability table for c1
  hfg.add(DecisionTreeFactor(m1, {2, 8}));
@@ -178,10 +180,7 @@ TEST(HybridGaussianFactorGraph, eliminateFullMultifrontalSimple) {
  hfg.add(JacobianFactor(X(0), I_3x3, Z_3x1));
  hfg.add(JacobianFactor(X(0), I_3x3, X(1), -I_3x3, Z_3x1));

  std::vector<GaussianFactorValuePair> factors = {
      {std::make_shared<JacobianFactor>(X(1), I_3x3, Z_3x1), 0.0},
      {std::make_shared<JacobianFactor>(X(1), I_3x3, Vector3::Ones()), 0.0}};
  hfg.add(HybridGaussianFactor({X(1)}, {M(1), 2}, factors));
  hfg.add(HybridGaussianFactor({X(1)}, {M(1), 2}, two::components(X(1))));

  hfg.add(DecisionTreeFactor(m1, {2, 8}));
  // TODO(Varun) Adding extra discrete variable not connected to continuous
@@ -207,13 +206,8 @@ TEST(HybridGaussianFactorGraph, eliminateFullMultifrontalCLG) {
  // Factor between x0-x1
  hfg.add(JacobianFactor(X(0), I_3x3, X(1), -I_3x3, Z_3x1));

  // Decision tree with different modes on x1
  DecisionTree<Key, GaussianFactorValuePair> dt(
      M(1), {std::make_shared<JacobianFactor>(X(1), I_3x3, Z_3x1), 0.0},
      {std::make_shared<JacobianFactor>(X(1), I_3x3, Vector3::Ones()), 0.0});

  // Hybrid factor P(x1|c1)
  hfg.add(HybridGaussianFactor({X(1)}, {m}, dt));
  hfg.add(HybridGaussianFactor({X(1)}, m, two::components(X(1))));
  // Prior factor on c1
  hfg.add(DecisionTreeFactor(m, {2, 8}));

@@ -238,16 +232,8 @@ TEST(HybridGaussianFactorGraph, eliminateFullMultifrontalTwoClique) {
  hfg.add(JacobianFactor(X(1), I_3x3, X(2), -I_3x3, Z_3x1));

  {
    std::vector<GaussianFactorValuePair> factors = {
        {std::make_shared<JacobianFactor>(X(0), I_3x3, Z_3x1), 0.0},
        {std::make_shared<JacobianFactor>(X(0), I_3x3, Vector3::Ones()), 0.0}};
    hfg.add(HybridGaussianFactor({X(0)}, {M(0), 2}, factors));

    DecisionTree<Key, GaussianFactorValuePair> dt1(
        M(1), {std::make_shared<JacobianFactor>(X(2), I_3x3, Z_3x1), 0.0},
        {std::make_shared<JacobianFactor>(X(2), I_3x3, Vector3::Ones()), 0.0});

    hfg.add(HybridGaussianFactor({X(2)}, {{M(1), 2}}, dt1));
    hfg.add(HybridGaussianFactor({X(0)}, {M(0), 2}, two::components(X(0))));
    hfg.add(HybridGaussianFactor({X(2)}, {M(1), 2}, two::components(X(2))));
  }

  hfg.add(DecisionTreeFactor({{M(1), 2}, {M(2), 2}}, "1 2 3 4"));
@@ -256,17 +242,8 @@ TEST(HybridGaussianFactorGraph, eliminateFullMultifrontalTwoClique) {
  hfg.add(JacobianFactor(X(4), I_3x3, X(5), -I_3x3, Z_3x1));

  {
    DecisionTree<Key, GaussianFactorValuePair> dt(
        M(3), {std::make_shared<JacobianFactor>(X(3), I_3x3, Z_3x1), 0.0},
        {std::make_shared<JacobianFactor>(X(3), I_3x3, Vector3::Ones()), 0.0});

    hfg.add(HybridGaussianFactor({X(3)}, {{M(3), 2}}, dt));

    DecisionTree<Key, GaussianFactorValuePair> dt1(
        M(2), {std::make_shared<JacobianFactor>(X(5), I_3x3, Z_3x1), 0.0},
        {std::make_shared<JacobianFactor>(X(5), I_3x3, Vector3::Ones()), 0.0});

    hfg.add(HybridGaussianFactor({X(5)}, {{M(2), 2}}, dt1));
    hfg.add(HybridGaussianFactor({X(3)}, {M(3), 2}, two::components(X(3))));
    hfg.add(HybridGaussianFactor({X(5)}, {M(2), 2}, two::components(X(5))));
  }

  auto ordering_full =
@@ -279,7 +256,7 @@ TEST(HybridGaussianFactorGraph, eliminateFullMultifrontalTwoClique) {
  EXPECT_LONGS_EQUAL(0, remaining->size());

  /*
  (Fan) Explanation: the Junction tree will need to reeliminate to get to the
  (Fan) Explanation: the Junction tree will need to re-eliminate to get to the
  marginal on X(1), which is not possible because it involves eliminating
  discrete before continuous. The solution to this, however, is in Murphy02.
  TLDR is that this is 1. expensive and 2. inexact. Nevertheless it is doable.
@@ -551,12 +528,7 @@ TEST(HybridGaussianFactorGraph, optimize) {

  hfg.add(JacobianFactor(X(0), I_3x3, Z_3x1));
  hfg.add(JacobianFactor(X(0), I_3x3, X(1), -I_3x3, Z_3x1));

  DecisionTree<Key, GaussianFactorValuePair> dt(
      C(1), {std::make_shared<JacobianFactor>(X(1), I_3x3, Z_3x1), 0.0},
      {std::make_shared<JacobianFactor>(X(1), I_3x3, Vector3::Ones()), 0.0});

  hfg.add(HybridGaussianFactor({X(1)}, {c1}, dt));
  hfg.add(HybridGaussianFactor({X(1)}, c1, two::components(X(1))));

  auto result = hfg.eliminateSequential();

@@ -642,13 +614,13 @@ TEST(HybridGaussianFactorGraph, ErrorAndProbPrimeTree) {
  // regression
  EXPECT(assert_equal(expected_error, error_tree, 1e-7));

  auto probs = graph.probPrime(delta.continuous());
  auto probabilities = graph.probPrime(delta.continuous());
  std::vector<double> prob_leaves = {0.36793249, 0.61247742, 0.59489556,
                                     0.99029064};
  AlgebraicDecisionTree<Key> expected_probs(discrete_keys, prob_leaves);
  AlgebraicDecisionTree<Key> expected_probabilities(discrete_keys, prob_leaves);

  // regression
  EXPECT(assert_equal(expected_probs, probs, 1e-7));
  EXPECT(assert_equal(expected_probabilities, probabilities, 1e-7));
}

/* ****************************************************************************/
@@ -706,6 +678,55 @@ TEST(HybridGaussianFactorGraph, ErrorTreeWithConditional) {
  EXPECT(assert_equal(expected, errorTree, 1e-9));
}

/* ****************************************************************************/
// Test hybrid gaussian factor graph errorTree during
// incremental operation
TEST(HybridGaussianFactorGraph, IncrementalErrorTree) {
  Switching s(4);

  HybridGaussianFactorGraph graph;
  graph.push_back(s.linearizedFactorGraph.at(0));  // f(X0)
  graph.push_back(s.linearizedFactorGraph.at(1));  // f(X0, X1, M0)
  graph.push_back(s.linearizedFactorGraph.at(2));  // f(X1, X2, M1)
  graph.push_back(s.linearizedFactorGraph.at(4));  // f(X1)
  graph.push_back(s.linearizedFactorGraph.at(5));  // f(X2)
  graph.push_back(s.linearizedFactorGraph.at(7));  // f(M0)
  graph.push_back(s.linearizedFactorGraph.at(8));  // f(M0, M1)

  HybridBayesNet::shared_ptr hybridBayesNet = graph.eliminateSequential();
  EXPECT_LONGS_EQUAL(5, hybridBayesNet->size());

  HybridValues delta = hybridBayesNet->optimize();
  auto error_tree = graph.errorTree(delta.continuous());

  std::vector<DiscreteKey> discrete_keys = {{M(0), 2}, {M(1), 2}};
  std::vector<double> leaves = {0.99985581, 0.4902432, 0.51936941,
                                0.0097568009};
  AlgebraicDecisionTree<Key> expected_error(discrete_keys, leaves);

  // regression
  EXPECT(assert_equal(expected_error, error_tree, 1e-7));

  graph = HybridGaussianFactorGraph();
  graph.push_back(*hybridBayesNet);
  graph.push_back(s.linearizedFactorGraph.at(3));  // f(X2, X3, M2)
  graph.push_back(s.linearizedFactorGraph.at(6));  // f(X3)

  hybridBayesNet = graph.eliminateSequential();
  EXPECT_LONGS_EQUAL(7, hybridBayesNet->size());

  delta = hybridBayesNet->optimize();
  auto error_tree2 = graph.errorTree(delta.continuous());

  discrete_keys = {{M(0), 2}, {M(1), 2}, {M(2), 2}};
  leaves = {0.50985198, 0.0097577296, 0.50009425, 0,
            0.52922138, 0.029127133,  0.50985105, 0.0097567964};
  AlgebraicDecisionTree<Key> expected_error2(discrete_keys, leaves);

  // regression
  EXPECT(assert_equal(expected_error2, error_tree2, 1e-7));
}

/* ****************************************************************************/
// Check that assembleGraphTree assembles Gaussian factor graphs for each
// assignment.

@@ -0,0 +1,85 @@
# pylint: disable=consider-using-from-import,invalid-name,no-name-in-module,no-member,missing-function-docstring
"""
This is a 1:1 transcription of CameraResectioning.cpp.
"""
import numpy as np
from gtsam import Cal3_S2, CustomFactor, LevenbergMarquardtOptimizer, KeyVector
from gtsam import NonlinearFactor, NonlinearFactorGraph
from gtsam import PinholeCameraCal3_S2, Point2, Point3, Pose3, Rot3, Values
from gtsam.noiseModel import Base as SharedNoiseModel, Diagonal
from gtsam.symbol_shorthand import X


def resectioning_factor(
    model: SharedNoiseModel,
    key: int,
    calib: Cal3_S2,
    p: Point2,
    P: Point3,
) -> NonlinearFactor:

    def error_func(this: CustomFactor, v: Values, H: list[np.ndarray]) -> np.ndarray:
        pose = v.atPose3(this.keys()[0])
        camera = PinholeCameraCal3_S2(pose, calib)
        if H is None:
            # Jacobians not requested: return only the reprojection error.
            return camera.project(P) - p
        Dpose = np.zeros((2, 6), order="F")
        Dpoint = np.zeros((2, 3), order="F")
        Dcal = np.zeros((2, 5), order="F")
        result = camera.project(P, Dpose, Dpoint, Dcal) - p
        # The pose is this factor's only variable, so only H[0] is filled.
        H[0] = Dpose
        return result

    return CustomFactor(model, KeyVector([key]), error_func)


def main() -> None:
    """
    Camera: f = 1, Image: 100x100, center: 50, 50.0
    Pose (ground truth): (Xw, -Yw, -Zw, [0,0,2.0]')
    Known landmarks:
       3D Points: (10,10,0) (-10,10,0) (-10,-10,0) (10,-10,0)
    Perfect measurements:
       2D Point:  (55,45)   (45,45)    (45,55)     (55,55)
    """

    # read camera intrinsic parameters
    calib = Cal3_S2(1, 1, 0, 50, 50)

    # 1. create graph
    graph = NonlinearFactorGraph()

    # 2. add factors to the graph
    measurement_noise = Diagonal.Sigmas(np.array([0.5, 0.5]))
    graph.add(
        resectioning_factor(
            measurement_noise, X(1), calib, Point2(55, 45), Point3(10, 10, 0)
        )
    )
    graph.add(
        resectioning_factor(
            measurement_noise, X(1), calib, Point2(45, 45), Point3(-10, 10, 0)
        )
    )
    graph.add(
        resectioning_factor(
            measurement_noise, X(1), calib, Point2(45, 55), Point3(-10, -10, 0)
        )
    )
    graph.add(
        resectioning_factor(
            measurement_noise, X(1), calib, Point2(55, 55), Point3(10, -10, 0)
        )
    )

    # 3. Create an initial estimate for the camera pose
    initial: Values = Values()
    initial.insert(X(1), Pose3(Rot3(1, 0, 0, 0, -1, 0, 0, 0, -1), Point3(0, 0, 1)))

    # 4. Optimize the graph using Levenberg-Marquardt
    result: Values = LevenbergMarquardtOptimizer(graph, initial).optimize()
    result.print("Final result:\n")


if __name__ == "__main__":
    main()
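
Aside (illustration, not part of the file): the "perfect measurements" in the docstring can be checked by hand. The ground-truth pose has R = diag(1, −1, −1) and camera position t = (0, 0, 2), so the first landmark P = (10, 10, 0) lands in camera coordinates at

    p_c = Rᵀ (P − t) = (10, −10, 2),

and with focal length f = 1 and principal point (50, 50) it projects to

    u = 50 + 10/2 = 55,    v = 50 + (−10)/2 = 45,

which is exactly the Point2(55, 45) paired with Point3(10, 10, 0) in the first factor.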