Get rid of all (pre-C++11) += calls to create Orderings.
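The pattern removed throughout this commit is the boost::assign-style operator+= on gtsam::Ordering; call sites now either brace-initialize the ordering or push_back keys in a loop. A minimal sketch of the two replacement styles follows, assuming the usual gtsam/inference/Ordering.h header; the helper function and key values are illustrative only, not taken from any one file in this diff:

#include <gtsam/inference/Ordering.h>

using gtsam::Key;
using gtsam::Ordering;

// Hypothetical helper showing the call-site migration.
void buildOrderings() {
  // Old (pre-C++11, boost::assign) style removed by this commit:
  //   Ordering ordering;
  //   ordering += Key(0), Key(1), Key(2);

  // New style 1: brace-initialize when the keys are known up front.
  const Ordering fixed{0, 1, 2};

  // New style 2: push_back in a loop when the keys are generated.
  Ordering generated;
  for (Key k = 0; k < 3; k++) generated.push_back(k);
}

Brace initialization also lets most of these orderings be declared const, which is what the majority of the hunks below do.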
parent a638f3a991
commit 285cbe4f22
@@ -51,8 +51,7 @@ int main(int argc, char **argv) {
   DiscreteFactorGraph fg(asia);
 
   // Create solver and eliminate
-  Ordering ordering;
-  ordering += Key(0), Key(1), Key(2), Key(3), Key(4), Key(5), Key(6), Key(7);
+  const Ordering ordering{0, 1, 2, 3, 4, 5, 6, 7};
 
   // solve
   auto mpe = fg.optimize();

@@ -94,8 +94,7 @@ TEST(DiscreteBayesNet, Asia) {
   EXPECT(assert_equal(vs, marginals.marginalProbabilities(Smoking)));
 
   // Create solver and eliminate
-  Ordering ordering;
-  ordering += Key(0), Key(1), Key(2), Key(3), Key(4), Key(5), Key(6), Key(7);
+  const Ordering ordering{0, 1, 2, 3, 4, 5, 6, 7};
   DiscreteBayesNet::shared_ptr chordal = fg.eliminateSequential(ordering);
   DiscreteConditional expected2(Bronchitis % "11/9");
   EXPECT(assert_equal(expected2, *chordal->back()));

@@ -107,8 +107,7 @@ TEST(DiscreteFactorGraph, test) {
   graph.add(C & B, "3 1 1 3");
 
   // Test EliminateDiscrete
-  Ordering frontalKeys;
-  frontalKeys += Key(0);
+  const Ordering frontalKeys{0};
   const auto [conditional, newFactor] = EliminateDiscrete(graph, frontalKeys);
 
   // Check Conditional
@@ -123,8 +122,7 @@ TEST(DiscreteFactorGraph, test) {
   EXPECT(assert_equal(expectedFactor, *newFactor));
 
   // Test using elimination tree
-  Ordering ordering;
-  ordering += Key(0), Key(1), Key(2);
+  const Ordering ordering{0, 1, 2};
   DiscreteEliminationTree etree(graph, ordering);
   const auto [actual, remainingGraph] = etree.eliminate(&EliminateDiscrete);
 
@@ -231,8 +229,7 @@ TEST(DiscreteFactorGraph, testMPE_Darwiche09book_p244) {
   EXPECT(assert_equal(mpe, actualMPE));
 
   // Check Bayes Net
-  Ordering ordering;
-  ordering += Key(0), Key(1), Key(2), Key(3), Key(4);
+  const Ordering ordering{0, 1, 2, 3, 4};
   auto chordal = graph.eliminateSequential(ordering);
   EXPECT_LONGS_EQUAL(5, chordal->size());
 

@@ -91,7 +91,7 @@ TEST(SimpleParser, GibberishInMiddle) {
 
 // A test with slash in the end
 TEST(SimpleParser, SlashInEnd) {
-  const auto table = SignatureParser::parse("1/1 2/");
+  const auto table = SignatureParser::Parse("1/1 2/");
   EXPECT(!table);
 }
 

@@ -97,7 +97,7 @@ TEST(HybridBayesTree, OptimizeAssignment) {
 
   // Create ordering.
   Ordering ordering;
-  for (size_t k = 0; k < s.K; k++) ordering += X(k);
+  for (size_t k = 0; k < s.K; k++) ordering.push_back(X(k));
 
   const auto [hybridBayesNet, remainingFactorGraph] =
       s.linearizedFactorGraph.eliminatePartialSequential(ordering);
@@ -139,7 +139,7 @@ TEST(HybridBayesTree, Optimize) {
 
   // Create ordering.
   Ordering ordering;
-  for (size_t k = 0; k < s.K; k++) ordering += X(k);
+  for (size_t k = 0; k < s.K; k++) ordering.push_back(X(k));
 
   const auto [hybridBayesNet, remainingFactorGraph] =
       s.linearizedFactorGraph.eliminatePartialSequential(ordering);

@@ -83,10 +83,10 @@ TEST(HybridEstimation, Full) {
 
   Ordering hybridOrdering;
   for (size_t k = 0; k < K; k++) {
-    hybridOrdering += X(k);
+    hybridOrdering.push_back(X(k));
   }
   for (size_t k = 0; k < K - 1; k++) {
-    hybridOrdering += M(k);
+    hybridOrdering.push_back(M(k));
   }
 
   HybridBayesNet::shared_ptr bayesNet =
@@ -442,8 +442,7 @@ TEST(HybridEstimation, eliminateSequentialRegression) {
   DiscreteConditional expected(m % "0.51341712/1");  // regression
 
   // Eliminate into BN using one ordering
-  Ordering ordering1;
-  ordering1 += X(0), X(1), M(0);
+  const Ordering ordering1{X(0), X(1), M(0)};
   HybridBayesNet::shared_ptr bn1 = fg->eliminateSequential(ordering1);
 
   // Check that the discrete conditional matches the expected.
@@ -451,8 +450,7 @@ TEST(HybridEstimation, eliminateSequentialRegression) {
   EXPECT(assert_equal(expected, *dc1, 1e-9));
 
   // Eliminate into BN using a different ordering
-  Ordering ordering2;
-  ordering2 += X(0), X(1), M(0);
+  const Ordering ordering2{X(0), X(1), M(0)};
   HybridBayesNet::shared_ptr bn2 = fg->eliminateSequential(ordering2);
 
   // Check that the discrete conditional matches the expected.

@@ -126,10 +126,7 @@ TEST(HybridGaussianElimination, IncrementalInference) {
 
   /********************************************************/
   // Run batch elimination so we can compare results.
-  Ordering ordering;
-  ordering += X(0);
-  ordering += X(1);
-  ordering += X(2);
+  const Ordering ordering {X(0), X(1), X(2)};
 
   // Now we calculate the expected factors using full elimination
   const auto [expectedHybridBayesTree, expectedRemainingGraph] =
@@ -158,12 +155,10 @@ TEST(HybridGaussianElimination, IncrementalInference) {
 
   // We only perform manual continuous elimination for 0,0.
   // The other discrete probabilities on M(2) are calculated the same way
-  Ordering discrete_ordering;
-  discrete_ordering += M(0);
-  discrete_ordering += M(1);
+  const Ordering discreteOrdering{M(0), M(1)};
   HybridBayesTree::shared_ptr discreteBayesTree =
       expectedRemainingGraph->BaseEliminateable::eliminateMultifrontal(
-          discrete_ordering);
+          discreteOrdering);
 
   DiscreteValues m00;
   m00[M(0)] = 0, m00[M(1)] = 0;
@@ -225,7 +220,7 @@ TEST(HybridGaussianElimination, Approx_inference) {
   // Create ordering.
   Ordering ordering;
   for (size_t j = 0; j < 4; j++) {
-    ordering += X(j);
+    ordering.push_back(X(j));
   }
 
   // Now we calculate the actual factors using full elimination

@@ -265,7 +265,7 @@ TEST(HybridFactorGraph, EliminationTree) {
 
   // Create ordering.
   Ordering ordering;
-  for (size_t k = 0; k < self.K; k++) ordering += X(k);
+  for (size_t k = 0; k < self.K; k++) ordering.push_back(X(k));
 
   // Create elimination tree.
   HybridEliminationTree etree(self.linearizedFactorGraph, ordering);
@@ -286,8 +286,7 @@ TEST(GaussianElimination, Eliminate_x0) {
   factors.push_back(self.linearizedFactorGraph[1]);
 
   // Eliminate x0
-  Ordering ordering;
-  ordering += X(0);
+  const Ordering ordering{X(0)};
 
   auto result = EliminateHybrid(factors, ordering);
   CHECK(result.first);
@@ -310,8 +309,7 @@ TEST(HybridsGaussianElimination, Eliminate_x1) {
   factors.push_back(self.linearizedFactorGraph[2]);  // involves m1
 
   // Eliminate x1
-  Ordering ordering;
-  ordering += X(1);
+  const Ordering ordering{X(1)};
 
   auto result = EliminateHybrid(factors, ordering);
   CHECK(result.first);
@@ -346,9 +344,7 @@ TEST(HybridGaussianElimination, EliminateHybrid_2_Variable) {
   auto factors = self.linearizedFactorGraph;
 
   // Eliminate x0
-  Ordering ordering;
-  ordering += X(0);
-  ordering += X(1);
+  const Ordering ordering{X(0), X(1)};
 
   const auto [hybridConditionalMixture, factorOnModes] =
       EliminateHybrid(factors, ordering);
@@ -379,7 +375,7 @@ TEST(HybridFactorGraph, Partial_Elimination) {
 
   // Create ordering of only continuous variables.
   Ordering ordering;
-  for (size_t k = 0; k < self.K; k++) ordering += X(k);
+  for (size_t k = 0; k < self.K; k++) ordering.push_back(X(k));
 
   // Eliminate partially i.e. only continuous part.
   const auto [hybridBayesNet, remainingFactorGraph] =
@@ -415,7 +411,7 @@ TEST(HybridFactorGraph, Full_Elimination) {
   {
     // Create ordering.
     Ordering ordering;
-    for (size_t k = 0; k < self.K; k++) ordering += X(k);
+    for (size_t k = 0; k < self.K; k++) ordering.push_back(X(k));
 
     // Eliminate partially.
     const auto [hybridBayesNet_partial, remainingFactorGraph_partial] =
@@ -430,15 +426,15 @@ TEST(HybridFactorGraph, Full_Elimination) {
     }
 
     ordering.clear();
-    for (size_t k = 0; k < self.K - 1; k++) ordering += M(k);
+    for (size_t k = 0; k < self.K - 1; k++) ordering.push_back(M(k));
     discreteBayesNet =
         *discrete_fg.eliminateSequential(ordering, EliminateDiscrete);
   }
 
   // Create ordering.
   Ordering ordering;
-  for (size_t k = 0; k < self.K; k++) ordering += X(k);
-  for (size_t k = 0; k < self.K - 1; k++) ordering += M(k);
+  for (size_t k = 0; k < self.K; k++) ordering.push_back(X(k));
+  for (size_t k = 0; k < self.K - 1; k++) ordering.push_back(M(k));
 
   // Eliminate partially.
   HybridBayesNet::shared_ptr hybridBayesNet =
@@ -479,7 +475,7 @@ TEST(HybridFactorGraph, Printing) {
 
   // Create ordering.
   Ordering ordering;
-  for (size_t k = 0; k < self.K; k++) ordering += X(k);
+  for (size_t k = 0; k < self.K; k++) ordering.push_back(X(k));
 
   // Eliminate partially.
   const auto [hybridBayesNet, remainingFactorGraph] =
@@ -705,11 +701,7 @@ TEST(HybridFactorGraph, DefaultDecisionTree) {
   initialEstimate.insert(L(1), Point2(4.1, 1.8));
 
   // We want to eliminate variables not connected to DCFactors first.
-  Ordering ordering;
-  ordering += L(0);
-  ordering += L(1);
-  ordering += X(0);
-  ordering += X(1);
+  const Ordering ordering {L(0), L(1), X(0), X(1)};
 
   HybridGaussianFactorGraph linearized = *fg.linearize(initialEstimate);
 

@@ -143,10 +143,7 @@ TEST(HybridNonlinearISAM, IncrementalInference) {
 
   /********************************************************/
   // Run batch elimination so we can compare results.
-  Ordering ordering;
-  ordering += X(0);
-  ordering += X(1);
-  ordering += X(2);
+  const Ordering ordering {X(0), X(1), X(2)};
 
   // Now we calculate the actual factors using full elimination
   const auto [expectedHybridBayesTree, expectedRemainingGraph] =
@@ -176,12 +173,10 @@ TEST(HybridNonlinearISAM, IncrementalInference) {
 
   // We only perform manual continuous elimination for 0,0.
   // The other discrete probabilities on M(1) are calculated the same way
-  Ordering discrete_ordering;
-  discrete_ordering += M(0);
-  discrete_ordering += M(1);
+  const Ordering discreteOrdering{M(0), M(1)};
   HybridBayesTree::shared_ptr discreteBayesTree =
       expectedRemainingGraph->BaseEliminateable::eliminateMultifrontal(
-          discrete_ordering);
+          discreteOrdering);
 
   DiscreteValues m00;
   m00[M(0)] = 0, m00[M(1)] = 0;
@@ -244,7 +239,7 @@ TEST(HybridNonlinearISAM, Approx_inference) {
   // Create ordering.
   Ordering ordering;
   for (size_t j = 0; j < 4; j++) {
-    ordering += X(j);
+    ordering.push_back(X(j));
   }
 
   // Now we calculate the actual factors using full elimination

@@ -66,15 +66,6 @@ public:
     return boost::assign::make_list_inserter(
         boost::assign_detail::call_push_back<This>(*this))(key);
   }
-#else
-  /** A simple inserter to insert one key at a time
-   * @param key The key to insert
-   * @return The ordering
-   */
-  This& operator+=(Key key) {
-    push_back(key);
-    return *this;
-  }
 #endif
 
   /**

@@ -211,8 +211,7 @@ TEST(GaussianBayesNet, MonteCarloIntegration) {
 /* ************************************************************************* */
 TEST(GaussianBayesNet, ordering)
 {
-  Ordering expected;
-  expected += _x_, _y_;
+  const Ordering expected{_x_, _y_};
   const auto actual = noisyBayesNet.ordering();
   EXPECT(assert_equal(expected, actual));
 }

@@ -35,8 +35,7 @@ namespace gtsam {
     // Compute the marginal on the last key
     // Solve the linear factor graph, converting it into a linear Bayes Network
     // P(x0,x1) = P(x0|x1)*P(x1)
-    Ordering lastKeyAsOrdering;
-    lastKeyAsOrdering += lastKey;
+    const Ordering lastKeyAsOrdering{lastKey};
     const GaussianConditional::shared_ptr marginal =
       linearFactorGraph.marginalMultifrontalBayesNet(lastKeyAsOrdering)->front();
 

@@ -80,8 +80,7 @@ TEST(SymbolicFactorGraph, eliminatePartialSequential) {
 
 /* ************************************************************************* */
 TEST(SymbolicFactorGraph, eliminateFullMultifrontal) {
-  Ordering ordering;
-  ordering += 0, 1, 2, 3;
+  Ordering ordering{0, 1, 2, 3};
   SymbolicBayesTree actual1 = *simpleChain.eliminateMultifrontal(ordering);
   EXPECT(assert_equal(simpleChainBayesTree, actual1));
 
@@ -223,8 +222,7 @@ TEST(SymbolicFactorGraph, eliminate_disconnected_graph) {
   expected.emplace_shared<SymbolicConditional>(3, 4);
   expected.emplace_shared<SymbolicConditional>(4);
 
-  Ordering order;
-  order += 0, 1, 2, 3, 4;
+  const Ordering order{0, 1, 2, 3, 4};
   SymbolicBayesNet actual = *fg.eliminateSequential(order);
 
   EXPECT(assert_equal(expected, actual));

@@ -35,7 +35,7 @@ using namespace std;
  ****************************************************************************/
 TEST( JunctionTree, constructor )
 {
-  Ordering order; order += 0, 1, 2, 3;
+  const Ordering order{0, 1, 2, 3};
 
   SymbolicJunctionTree actual(SymbolicEliminationTree(simpleChain, order));
 

@@ -248,7 +248,7 @@ DiscreteBayesNet::shared_ptr Scheduler::eliminate() const {
   // TODO: fix this!!
   size_t maxKey = keys().size();
   Ordering defaultKeyOrdering;
-  for (size_t i = 0; i < maxKey; ++i) defaultKeyOrdering += Key(i);
+  for (size_t i = 0; i < maxKey; ++i) defaultKeyOrdering.push_back(i);
   DiscreteBayesNet::shared_ptr chordal =
       this->eliminateSequential(defaultKeyOrdering);
   gttoc(my_eliminate);

@@ -173,9 +173,7 @@ TEST(CSP, WesternUS) {
                      {6, 3}, {7, 2}, {8, 0}, {9, 1}, {10, 0}};
 
   // Create ordering according to example in ND-CSP.lyx
-  Ordering ordering;
-  ordering += Key(0), Key(1), Key(2), Key(3), Key(4), Key(5), Key(6), Key(7),
-      Key(8), Key(9), Key(10);
+  const Ordering ordering{0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10};
 
   // Solve using that ordering:
   auto actualMPE = csp.optimize(ordering);

@@ -34,7 +34,7 @@ TEST ( NestedDissection, oneIsland )
   fg.addBearingRange(2, 1, Rot2(), 0., bearingRangeNoise);
   fg.addPoseConstraint(1, Pose2());
 
-  Ordering ordering; ordering += x1, x2, l1;
+  const Ordering ordering{x1, x2, l1};
 
   int numNodeStopPartition = 1e3;
   int minNodesPerMap = 1e3;
@@ -61,7 +61,7 @@ TEST ( NestedDissection, TwoIslands )
   fg.addOdometry(3, 5, Pose2(), odoNoise);
   fg.addPoseConstraint(1, Pose2());
   fg.addPoseConstraint(4, Pose2());
-  Ordering ordering; ordering += x1, x2, x3, x4, x5;
+  const Ordering ordering{x1, x2, x3, x4, x5};
 
   int numNodeStopPartition = 2;
   int minNodesPerMap = 1;
@@ -96,7 +96,7 @@ TEST ( NestedDissection, FourIslands )
   fg.addOdometry(3, 5, Pose2(), odoNoise);
   fg.addPoseConstraint(1, Pose2());
   fg.addPoseConstraint(4, Pose2());
-  Ordering ordering; ordering += x1, x2, x3, x4, x5;
+  const Ordering ordering{x1, x2, x3, x4, x5};
 
   int numNodeStopPartition = 2;
   int minNodesPerMap = 1;
@@ -144,7 +144,7 @@ TEST ( NestedDissection, weekLinks )
   fg.addPoseConstraint(1, Pose2());
   fg.addPoseConstraint(4, Pose2());
   fg.addPoseConstraint(5, Pose2());
-  Ordering ordering; ordering += x1, x2, x3, x4, x5, l6;
+  const Ordering ordering{x1, x2, x3, x4, x5, l6};
 
   int numNodeStopPartition = 2;
   int minNodesPerMap = 1;
@@ -206,7 +206,7 @@ TEST ( NestedDissection, manual_cuts )
   fg.addPrior(x0, Pose2(0.1, 0, 0), priorNoise);
 
   // generate ordering
-  Ordering ordering; ordering += x0, x1, x2, l1, l2, l3, l4, l5, l6;
+  const Ordering ordering{x0, x1, x2, l1, l2, l3, l4, l5, l6};
 
   // define cuts
   std::shared_ptr<Cuts> cuts(new Cuts());
@@ -301,9 +301,9 @@ TEST( NestedDissection, Graph3D) {
     graph.addMeasurement(3, j, cameras[3].project(points[j-1]).expmap(measurementZeroNoise->sample()), measurementNoise);
 
   // make an easy ordering
-  Ordering ordering; ordering += x0, x1, x2, x3;
+  const Ordering ordering{x0, x1, x2, x3};
   for (int j=1; j<=24; j++)
-    ordering += Symbol('l', j);
+    ordering.push_back(Symbol('l', j));
 
   // nested dissection
   const int numNodeStopPartition = 10;

@@ -114,8 +114,7 @@ TEST(GaussianBayesTree, balanced_smoother_marginals) {
   GaussianFactorGraph smoother = createSmoother(7);
 
   // Create the Bayes tree
-  Ordering ordering;
-  ordering += X(1), X(3), X(5), X(7), X(2), X(6), X(4);
+  const Ordering ordering{X(1), X(3), X(5), X(7), X(2), X(6), X(4)};
   GaussianBayesTree bayesTree = *smoother.eliminateMultifrontal(ordering);
 
   VectorValues actualSolution = bayesTree.optimize();
@@ -162,8 +161,7 @@ TEST( GaussianBayesTree, balanced_smoother_shortcuts )
   GaussianFactorGraph smoother = createSmoother(7);
 
   // Create the Bayes tree
-  Ordering ordering;
-  ordering += X(1),X(3),X(5),X(7),X(2),X(6),X(4);
+  const Ordering ordering{X(1), X(3), X(5), X(7), X(2), X(6), X(4)};
   GaussianBayesTree bayesTree = *smoother.eliminateMultifrontal(ordering);
 
   // Check the conditional P(Root|Root)
@@ -194,8 +192,7 @@ TEST( GaussianBayesTree, balanced_smoother_shortcuts )
 //TEST( BayesTree, balanced_smoother_clique_marginals )
 //{
 //  // Create smoother with 7 nodes
-//  Ordering ordering;
-//  ordering += X(1),X(3),X(5),X(7),X(2),X(6),X(4);
+//  const Ordering ordering{X(1),X(3),X(5),X(7),X(2),X(6),X(4)};
 //  GaussianFactorGraph smoother = createSmoother(7, ordering).first;
 //
 //  // Create the Bayes tree
@@ -223,8 +220,7 @@ TEST( GaussianBayesTree, balanced_smoother_shortcuts )
 TEST( GaussianBayesTree, balanced_smoother_joint )
 {
   // Create smoother with 7 nodes
-  Ordering ordering;
-  ordering += X(1),X(3),X(5),X(7),X(2),X(6),X(4);
+  const Ordering ordering{X(1), X(3), X(5), X(7), X(2), X(6), X(4)};
   GaussianFactorGraph smoother = createSmoother(7);
 
   // Create the Bayes tree, expected to look like:

@@ -78,8 +78,7 @@ TEST(GaussianFactorGraph, eliminateOne_x1) {
 
 /* ************************************************************************* */
 TEST(GaussianFactorGraph, eliminateOne_x2) {
-  Ordering ordering;
-  ordering += X(2), L(1), X(1);
+  const Ordering ordering{X(2), L(1), X(1)};
   GaussianFactorGraph fg = createGaussianFactorGraph();
   auto actual = EliminateQR(fg, Ordering{X(2)}).first;
 
@@ -94,8 +93,7 @@ TEST(GaussianFactorGraph, eliminateOne_x2) {
 
 /* ************************************************************************* */
 TEST(GaussianFactorGraph, eliminateOne_l1) {
-  Ordering ordering;
-  ordering += L(1), X(1), X(2);
+  const Ordering ordering{L(1), X(1), X(2)};
   GaussianFactorGraph fg = createGaussianFactorGraph();
   auto actual = EliminateQR(fg, Ordering{L(1)}).first;
 
@@ -282,8 +280,7 @@ TEST(GaussianFactorGraph, elimination) {
   fg.emplace_shared<JacobianFactor>(X(2), Ap, b, sigma);
 
   // Eliminate
-  Ordering ordering;
-  ordering += X(1), X(2);
+  const Ordering ordering{X(1), X(2)};
   GaussianBayesNet bayesNet = *fg.eliminateSequential();
 
   // Check matrix
@@ -348,7 +345,6 @@ static SharedDiagonal model = noiseModel::Isotropic::Sigma(2,1);
 /* ************************************************************************* */
 TEST(GaussianFactorGraph, replace)
 {
-  Ordering ord; ord += X(1),X(2),X(3),X(4),X(5),X(6);
   SharedDiagonal noise(noiseModel::Isotropic::Sigma(3, 1.0));
 
   GaussianFactorGraph::sharedFactor f1(new JacobianFactor(

@@ -33,7 +33,7 @@ using symbol_shorthand::L;
 TEST( ISAM, iSAM_smoother )
 {
   Ordering ordering;
-  for (int t = 1; t <= 7; t++) ordering += X(t);
+  for (int t = 1; t <= 7; t++) ordering.push_back(X(t));
 
   // Create smoother with 7 nodes
   GaussianFactorGraph smoother = createSmoother(7);

@@ -78,20 +78,7 @@ TEST( GaussianJunctionTreeB, constructor2 ) {
   SymbolicEliminationTree stree(*symbolic, ordering);
   GaussianJunctionTree actual(etree);
 
-  Ordering o324;
-  o324 += X(3), X(2), X(4);
-  Ordering o56;
-  o56 += X(5), X(6);
-  Ordering o7;
-  o7 += X(7);
-  Ordering o1;
-  o1 += X(1);
-
-  // Can no longer test these:
-//  Ordering sep1;
-//  Ordering sep2; sep2 += X(4);
-//  Ordering sep3; sep3 += X(6);
-//  Ordering sep4; sep4 += X(2);
+  const Ordering o324{X(3), X(2), X(4)}, o56{X(5), X(6)}, o7{X(7)}, o1{X(1)};
 
   GaussianJunctionTree::sharedNode x324 = actual.roots().front();
   LONGS_EQUAL(2, x324->children.size());
@@ -228,8 +215,7 @@ TEST(GaussianJunctionTreeB, optimizeMultiFrontal2) {
 //  init.insert(X(0), Pose2());
 //  init.insert(X(1), Pose2(1.0, 0.0, 0.0));
 //
-//  Ordering ordering;
-//  ordering += X(1), X(0);
+//  const Ordering ordering{X(1), X(0)};
 //
 //  GaussianFactorGraph gfg = *fg.linearize(init, ordering);
 //

@@ -78,13 +78,13 @@ TEST( NonlinearFactorGraph, keys )
 /* ************************************************************************* */
 TEST( NonlinearFactorGraph, GET_ORDERING)
 {
-  Ordering expected; expected += L(1), X(2), X(1); // For starting with l1,x1,x2
+  const Ordering expected{L(1), X(2), X(1)};  // For starting with l1,x1,x2
   NonlinearFactorGraph nlfg = createNonlinearFactorGraph();
   Ordering actual = Ordering::Colamd(nlfg);
   EXPECT(assert_equal(expected,actual));
 
   // Constrained ordering - put x2 at the end
-  Ordering expectedConstrained; expectedConstrained += L(1), X(1), X(2);
+  const Ordering expectedConstrained{L(1), X(1), X(2)};
   FastMap<Key, int> constraints;
   constraints[X(2)] = 1;
   Ordering actualConstrained = Ordering::ColamdConstrained(nlfg, constraints);
@@ -198,8 +198,7 @@ TEST(NonlinearFactorGraph, UpdateCholesky) {
   EXPECT(assert_equal(expected, fg.updateCholesky(initial)));
 
   // solve with Ordering
-  Ordering ordering;
-  ordering += L(1), X(2), X(1);
+  const Ordering ordering{L(1), X(2), X(1)};
   EXPECT(assert_equal(expected, fg.updateCholesky(initial, ordering)));
 
   // solve with new method, heavily damped
@@ -251,8 +250,7 @@ TEST(testNonlinearFactorGraph, eliminate) {
   auto linearized = graph.linearize(values);
 
   // Eliminate
-  Ordering ordering;
-  ordering += 11, 21, 12, 22;
+  const Ordering ordering{11, 21, 12, 22};
   auto bn = linearized->eliminateSequential(ordering);
   EXPECT_LONGS_EQUAL(4, bn->size());
 }

@@ -217,7 +217,7 @@ BOOST_CLASS_EXPORT_GUID(GenericStereoFactor3D, "gtsam::GenericStereoFactor3D")
 TEST (testSerializationSLAM, smallExample_linear) {
   using namespace example;
 
-  Ordering ordering; ordering += X(1),X(2),L(1);
+  const Ordering ordering{X(1), X(2), L(1)};
   EXPECT(equalsObj(ordering));
   EXPECT(equalsXML(ordering));
   EXPECT(equalsBinary(ordering));

@@ -42,11 +42,9 @@ Symbol key(int x, int y) { return symbol_shorthand::X(1000 * x + y); }
 /* ************************************************************************* */
 TEST(SubgraphPreconditioner, planarOrdering) {
   // Check canonical ordering
-  Ordering expected, ordering = planarOrdering(3);
-  expected +=
-      key(3, 3), key(2, 3), key(1, 3),
-      key(3, 2), key(2, 2), key(1, 2),
-      key(3, 1), key(2, 1), key(1, 1);
+  Ordering ordering = planarOrdering(3),
+           expected{key(3, 3), key(2, 3), key(1, 3), key(3, 2), key(2, 2),
+                    key(1, 2), key(3, 1), key(2, 1), key(1, 1)};
   EXPECT(assert_equal(expected, ordering));
 }
 

@@ -115,8 +115,7 @@ int main()
   cout << ((double)n/seconds) << " calls/second" << endl;
 
   // time matrix_augmented
-//  Ordering ordering;
-//  ordering += _x2_, _l1_, _x1_;
+//  const Ordering ordering{_x2_, _l1_, _x1_};
 //  size_t n1 = 10000000;
 //  timeLog = clock();
 //