diff --git a/gtsam/hybrid/tests/testHybridNonlinearISAM.cpp b/gtsam/hybrid/tests/testHybridNonlinearISAM.cpp
index 67cec8319..fa25407ff 100644
--- a/gtsam/hybrid/tests/testHybridNonlinearISAM.cpp
+++ b/gtsam/hybrid/tests/testHybridNonlinearISAM.cpp
@@ -265,16 +265,12 @@ TEST(HybridNonlinearISAM, ApproxInference) {
    1 1 1 Leaf 0.5
   */
-  auto discreteConditional_m0 = *dynamic_pointer_cast<DiscreteConditional>(
+  auto discreteConditional_m0 = *dynamic_pointer_cast<TableDistribution>(
       bayesTree[M(0)]->conditional()->inner());
   EXPECT(discreteConditional_m0.keys() == KeyVector({M(0), M(1), M(2)}));
 
-  // Get the number of elements which are greater than 0.
-  auto count = [](const double &value, int count) {
-    return value > 0 ? count + 1 : count;
-  };
   // Check that the number of leaves after pruning is 5.
-  EXPECT_LONGS_EQUAL(5, discreteConditional_m0.fold(count, 0));
+  EXPECT_LONGS_EQUAL(5, discreteConditional_m0.nrValues());
 
   // Check that the hybrid nodes of the bayes net match those of the pre-pruning
   // bayes net, at the same positions.
@@ -520,12 +516,13 @@ TEST(HybridNonlinearISAM, NonTrivial) {
 
   // The final discrete graph should not be empty since we have eliminated
   // all continuous variables.
-  auto discreteTree = bayesTree[M(3)]->conditional()->asDiscrete();
+  auto discreteTree =
+      bayesTree[M(3)]->conditional()->asDiscrete<TableDistribution>();
   EXPECT_LONGS_EQUAL(3, discreteTree->size());
 
   // Test if the optimal discrete mode assignment is (1, 1, 1).
   DiscreteFactorGraph discreteGraph;
-  discreteGraph.push_back(discreteTree);
+  discreteGraph.push_back(discreteTree->toDecisionTreeFactor());
   DiscreteValues optimal_assignment = discreteGraph.optimize();
 
   DiscreteValues expected_assignment;