clean up testHybridEstimation
parent 42052d07a1
commit 0f403c7d28
				|  | @ -47,6 +47,14 @@ using namespace gtsam; | |||
| using symbol_shorthand::X; | ||||
| using symbol_shorthand::Z; | ||||
| 
 | ||||
namespace estimation_fixture {
// Shared fixture data for the K=15 estimation tests below.
// Declared const: fixture globals are read-only reference data, and const
// prevents one test from accidentally mutating state seen by another.
const std::vector<double> measurements = {0, 1, 2, 2, 2, 2,  3,  4,  5,  6, 6,
                                          7, 8, 9, 9, 9, 10, 11, 11, 11, 11};
// Ground truth discrete sequence (one mode choice per motion step,
// hence one fewer entry than there are measurements).
const std::vector<size_t> discrete_seq = {1, 1, 0, 0, 0, 1, 1, 1, 1, 0,
                                          1, 1, 1, 0, 0, 1, 1, 0, 0, 0};
}  // namespace estimation_fixture
 | ||||
| 
 | ||||
| TEST(HybridEstimation, Full) { | ||||
|   size_t K = 6; | ||||
|   std::vector<double> measurements = {0, 1, 2, 2, 2, 3}; | ||||
|  | @ -90,12 +98,10 @@ TEST(HybridEstimation, Full) { | |||
| /****************************************************************************/ | ||||
| // Test approximate inference with an additional pruning step.
 | ||||
| TEST(HybridEstimation, IncrementalSmoother) { | ||||
|   using namespace estimation_fixture; | ||||
| 
 | ||||
|   size_t K = 15; | ||||
|   std::vector<double> measurements = {0, 1, 2, 2, 2, 2,  3,  4,  5,  6, 6, | ||||
|                                       7, 8, 9, 9, 9, 10, 11, 11, 11, 11}; | ||||
|   // Ground truth discrete seq
 | ||||
|   std::vector<size_t> discrete_seq = {1, 1, 0, 0, 0, 1, 1, 1, 1, 0, | ||||
|                                       1, 1, 1, 0, 0, 1, 1, 0, 0, 0}; | ||||
| 
 | ||||
|   // Switching example of robot moving in 1D
 | ||||
|   // with given measurements and equal mode priors.
 | ||||
|   Switching switching(K, 1.0, 0.1, measurements, "1/1 1/1"); | ||||
|  | @ -123,10 +129,6 @@ TEST(HybridEstimation, IncrementalSmoother) { | |||
| 
 | ||||
|     smoother.update(linearized, maxNrLeaves, ordering); | ||||
|     graph.resize(0); | ||||
| 
 | ||||
|     // Uncomment to print out pruned discrete marginal:
 | ||||
|     // smoother.hybridBayesNet().at(0)->asDiscrete()->dot("smoother_" +
 | ||||
|     //                                                    std::to_string(k));
 | ||||
|   } | ||||
| 
 | ||||
|   HybridValues delta = smoother.hybridBayesNet().optimize(); | ||||
|  | @ -149,12 +151,10 @@ TEST(HybridEstimation, IncrementalSmoother) { | |||
| /****************************************************************************/ | ||||
| // Test approximate inference with an additional pruning step.
 | ||||
| TEST(HybridEstimation, ISAM) { | ||||
|   using namespace estimation_fixture; | ||||
| 
 | ||||
|   size_t K = 15; | ||||
|   std::vector<double> measurements = {0, 1, 2, 2, 2, 2,  3,  4,  5,  6, 6, | ||||
|                                       7, 8, 9, 9, 9, 10, 11, 11, 11, 11}; | ||||
|   // Ground truth discrete seq
 | ||||
|   std::vector<size_t> discrete_seq = {1, 1, 0, 0, 0, 1, 1, 1, 1, 0, | ||||
|                                       1, 1, 1, 0, 0, 1, 1, 0, 0, 0}; | ||||
| 
 | ||||
|   // Switching example of robot moving in 1D
 | ||||
|   // with given measurements and equal mode priors.
 | ||||
|   Switching switching(K, 1.0, 0.1, measurements, "1/1 1/1"); | ||||
|  | @ -179,7 +179,6 @@ TEST(HybridEstimation, ISAM) { | |||
|     initial.insert(X(k), switching.linearizationPoint.at<double>(X(k))); | ||||
| 
 | ||||
|     isam.update(graph, initial, 3); | ||||
|     // isam.bayesTree().print("\n\n");
 | ||||
| 
 | ||||
|     graph.resize(0); | ||||
|     initial.clear(); | ||||
|  | @ -201,65 +200,6 @@ TEST(HybridEstimation, ISAM) { | |||
|   EXPECT(assert_equal(expected_continuous, result)); | ||||
| } | ||||
| 
 | ||||
| /**
 | ||||
|  * @brief A function to get a specific 1D robot motion problem as a linearized | ||||
|  * factor graph. This is the problem P(X|Z, M), i.e. estimating the continuous | ||||
|  * positions given the measurements and discrete sequence. | ||||
|  * | ||||
|  * @param K The number of timesteps. | ||||
|  * @param measurements The vector of measurements for each timestep. | ||||
|  * @param discrete_seq The discrete sequence governing the motion of the robot. | ||||
|  * @param measurement_sigma Noise model sigma for measurements. | ||||
|  * @param between_sigma Noise model sigma for the between factor. | ||||
|  * @return GaussianFactorGraph::shared_ptr | ||||
|  */ | ||||
| GaussianFactorGraph::shared_ptr specificModesFactorGraph( | ||||
|     size_t K, const std::vector<double>& measurements, | ||||
|     const std::vector<size_t>& discrete_seq, double measurement_sigma = 0.1, | ||||
|     double between_sigma = 1.0) { | ||||
|   NonlinearFactorGraph graph; | ||||
|   Values linearizationPoint; | ||||
| 
 | ||||
|   // Add measurement factors
 | ||||
|   auto measurement_noise = noiseModel::Isotropic::Sigma(1, measurement_sigma); | ||||
|   for (size_t k = 0; k < K; k++) { | ||||
|     graph.emplace_shared<PriorFactor<double>>(X(k), measurements.at(k), | ||||
|                                               measurement_noise); | ||||
|     linearizationPoint.insert<double>(X(k), static_cast<double>(k + 1)); | ||||
|   } | ||||
| 
 | ||||
|   using MotionModel = BetweenFactor<double>; | ||||
| 
 | ||||
|   // Add "motion models".
 | ||||
|   auto motion_noise_model = noiseModel::Isotropic::Sigma(1, between_sigma); | ||||
|   for (size_t k = 0; k < K - 1; k++) { | ||||
|     auto motion_model = std::make_shared<MotionModel>( | ||||
|         X(k), X(k + 1), discrete_seq.at(k), motion_noise_model); | ||||
|     graph.push_back(motion_model); | ||||
|   } | ||||
|   GaussianFactorGraph::shared_ptr linear_graph = | ||||
|       graph.linearize(linearizationPoint); | ||||
|   return linear_graph; | ||||
| } | ||||
| 
 | ||||
/**
 * @brief Get the discrete sequence from the integer `x`: the binary digits
 * of `x`, most-significant first.
 *
 * @tparam K Template parameter so we can set the correct bitset size.
 * @param x The integer to convert to a discrete binary sequence.
 * @return std::vector<size_t>
 */
template <size_t K>
std::vector<size_t> getDiscreteSequence(size_t x) {
  constexpr size_t len = K - 1;
  const std::bitset<len> bits{x};
  std::vector<size_t> modes(len);
  // bits[0] is the least significant digit, so it lands at the back of the
  // output vector (reverse order).
  for (size_t bit = 0; bit < len; ++bit) {
    modes[len - 1 - bit] = bits[bit];
  }
  return modes;
}
| 
 | ||||
| /**
 | ||||
|  * @brief Helper method to get the tree of | ||||
|  * unnormalized probabilities as per the elimination scheme. | ||||
|  | @ -270,7 +210,7 @@ std::vector<size_t> getDiscreteSequence(size_t x) { | |||
|  * @param graph The HybridGaussianFactorGraph to eliminate. | ||||
|  * @return AlgebraicDecisionTree<Key> | ||||
|  */ | ||||
| AlgebraicDecisionTree<Key> getProbPrimeTree( | ||||
| AlgebraicDecisionTree<Key> GetProbPrimeTree( | ||||
|     const HybridGaussianFactorGraph& graph) { | ||||
|   Ordering continuous(graph.continuousKeySet()); | ||||
|   const auto [bayesNet, remainingGraph] = | ||||
|  | @ -316,8 +256,9 @@ AlgebraicDecisionTree<Key> getProbPrimeTree( | |||
|  * The values should match those of P'(Continuous) for each discrete mode. | ||||
|  ********************************************************************************/ | ||||
| TEST(HybridEstimation, Probability) { | ||||
|   using namespace estimation_fixture; | ||||
| 
 | ||||
|   constexpr size_t K = 4; | ||||
|   std::vector<double> measurements = {0, 1, 2, 2}; | ||||
|   double between_sigma = 1.0, measurement_sigma = 0.1; | ||||
| 
 | ||||
|   // Switching example of robot moving in 1D with
 | ||||
|  | @ -358,8 +299,9 @@ TEST(HybridEstimation, Probability) { | |||
|  * for each discrete mode. | ||||
|  */ | ||||
| TEST(HybridEstimation, ProbabilityMultifrontal) { | ||||
|   using namespace estimation_fixture; | ||||
| 
 | ||||
|   constexpr size_t K = 4; | ||||
|   std::vector<double> measurements = {0, 1, 2, 2}; | ||||
| 
 | ||||
|   double between_sigma = 1.0, measurement_sigma = 0.1; | ||||
| 
 | ||||
|  | @ -370,7 +312,7 @@ TEST(HybridEstimation, ProbabilityMultifrontal) { | |||
|   auto graph = switching.linearizedFactorGraph; | ||||
| 
 | ||||
|   // Get the tree of unnormalized probabilities for each mode sequence.
 | ||||
|   AlgebraicDecisionTree<Key> expected_probPrimeTree = getProbPrimeTree(graph); | ||||
|   AlgebraicDecisionTree<Key> expected_probPrimeTree = GetProbPrimeTree(graph); | ||||
| 
 | ||||
|   // Eliminate continuous
 | ||||
|   Ordering continuous_ordering(graph.continuousKeySet()); | ||||
|  | @ -451,7 +393,7 @@ static HybridNonlinearFactorGraph createHybridNonlinearFactorGraph() { | |||
| /*********************************************************************************
 | ||||
|   // Create a hybrid linear factor graph f(x0, x1, m0; z0, z1)
 | ||||
|  ********************************************************************************/ | ||||
| static HybridGaussianFactorGraph::shared_ptr createHybridGaussianFactorGraph() { | ||||
| static HybridGaussianFactorGraph::shared_ptr CreateHybridGaussianFactorGraph() { | ||||
|   HybridNonlinearFactorGraph nfg = createHybridNonlinearFactorGraph(); | ||||
| 
 | ||||
|   Values initial; | ||||
|  | @ -466,7 +408,7 @@ static HybridGaussianFactorGraph::shared_ptr createHybridGaussianFactorGraph() { | |||
|  ********************************************************************************/ | ||||
| TEST(HybridEstimation, eliminateSequentialRegression) { | ||||
|   // Create the factor graph from the nonlinear factor graph.
 | ||||
|   HybridGaussianFactorGraph::shared_ptr fg = createHybridGaussianFactorGraph(); | ||||
|   HybridGaussianFactorGraph::shared_ptr fg = CreateHybridGaussianFactorGraph(); | ||||
| 
 | ||||
|   // Create expected discrete conditional on m0.
 | ||||
|   DiscreteKey m(M(0), 2); | ||||
|  | @ -501,7 +443,7 @@ TEST(HybridEstimation, eliminateSequentialRegression) { | |||
|  ********************************************************************************/ | ||||
| TEST(HybridEstimation, CorrectnessViaSampling) { | ||||
|   // 1. Create the factor graph from the nonlinear factor graph.
 | ||||
|   const auto fg = createHybridGaussianFactorGraph(); | ||||
|   const auto fg = CreateHybridGaussianFactorGraph(); | ||||
| 
 | ||||
|   // 2. Eliminate into BN
 | ||||
|   const HybridBayesNet::shared_ptr bn = fg->eliminateSequential(); | ||||
|  |  | |||
		Loading…
	
		Reference in New Issue