Merge pull request #1309 from borglab/varun/pruning-fix

commit 256c664aaa
@@ -129,7 +129,6 @@ void GaussianMixture::print(const std::string &s,
}

/* ************************************************************************* */
/// Return the DiscreteKey vector as a set.
std::set<DiscreteKey> DiscreteKeysAsSet(const DiscreteKeys &dkeys) {
  std::set<DiscreteKey> s;
  s.insert(dkeys.begin(), dkeys.end());

@@ -163,6 +163,9 @@ class GTSAM_EXPORT GaussianMixture
  Sum add(const Sum &sum) const;
};

+/// Return the DiscreteKey vector as a set.
+std::set<DiscreteKey> DiscreteKeysAsSet(const DiscreteKeys &dkeys);
+
// traits
template <>
struct traits<GaussianMixture> : public Testable<GaussianMixture> {};

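Note: DiscreteKeysAsSet is promoted to a public helper because the pruner below needs ordered-set semantics. A minimal sketch of why (hypothetical, not part of the diff; assumes a decisionTree and conditional as in the pruner that follows): with both key lists as std::set, std::set_difference can find the modes the decision tree carries but a given conditional does not.

  // Sketch: which discrete keys does the tree have that the conditional lacks?
  std::set<DiscreteKey> treeKeys = DiscreteKeysAsSet(decisionTree.discreteKeys());
  std::set<DiscreteKey> condKeys = DiscreteKeysAsSet(conditional.discreteKeys());
  std::vector<DiscreteKey> extra;
  std::set_difference(treeKeys.begin(), treeKeys.end(), condKeys.begin(),
                      condKeys.end(), std::back_inserter(extra));
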
@@ -42,13 +42,100 @@ DecisionTreeFactor::shared_ptr HybridBayesNet::discreteConditionals() const {
}

/* ************************************************************************* */
-HybridBayesNet HybridBayesNet::prune(size_t maxNrLeaves) const {
+/**
+ * @brief Helper function to get the pruner functional.
+ *
+ * @param decisionTree The probability decision tree of only discrete keys.
+ * @return std::function<double(const Assignment<Key> &, double)>
+ */
+std::function<double(const Assignment<Key> &, double)> prunerFunc(
+    const DecisionTreeFactor &decisionTree,
+    const HybridConditional &conditional) {
+  // Get the discrete keys as sets for the decision tree
+  // and the gaussian mixture.
+  auto decisionTreeKeySet = DiscreteKeysAsSet(decisionTree.discreteKeys());
+  auto conditionalKeySet = DiscreteKeysAsSet(conditional.discreteKeys());
+
+  auto pruner = [decisionTree, decisionTreeKeySet, conditionalKeySet](
+                    const Assignment<Key> &choices,
+                    double probability) -> double {
+    // Typecast so we can use this to get the probability value.
+    DiscreteValues values(choices);
+    // Case where the gaussian mixture has the same
+    // discrete keys as the decision tree.
+    if (conditionalKeySet == decisionTreeKeySet) {
+      if (decisionTree(values) == 0) {
+        return 0.0;
+      } else {
+        return probability;
+      }
+    } else {
+      std::vector<DiscreteKey> set_diff;
+      std::set_difference(decisionTreeKeySet.begin(), decisionTreeKeySet.end(),
+                          conditionalKeySet.begin(), conditionalKeySet.end(),
+                          std::back_inserter(set_diff));
+
+      const std::vector<DiscreteValues> assignments =
+          DiscreteValues::CartesianProduct(set_diff);
+      for (const DiscreteValues &assignment : assignments) {
+        DiscreteValues augmented_values(values);
+        augmented_values.insert(assignment.begin(), assignment.end());
+
+        // If any one of the sub-branches is non-zero,
+        // we need this probability.
+        if (decisionTree(augmented_values) > 0.0) {
+          return probability;
+        }
+      }
+      // If we are here, it means that all the sub-branches are 0,
+      // so we prune.
+      return 0.0;
+    }
+  };
+  return pruner;
+}
+
+/* ************************************************************************* */
+void HybridBayesNet::updateDiscreteConditionals(
+    const DecisionTreeFactor::shared_ptr &prunedDecisionTree) {
+  KeyVector prunedTreeKeys = prunedDecisionTree->keys();
+
+  for (size_t i = 0; i < this->size(); i++) {
+    HybridConditional::shared_ptr conditional = this->at(i);
+    if (conditional->isDiscrete()) {
+      // std::cout << demangle(typeid(conditional).name()) << std::endl;
+      auto discrete = conditional->asDiscreteConditional();
+      KeyVector frontals(discrete->frontals().begin(),
+                         discrete->frontals().end());
+
+      // Apply prunerFunc to the underlying AlgebraicDecisionTree.
+      auto discreteTree =
+          boost::dynamic_pointer_cast<DecisionTreeFactor::ADT>(discrete);
+      DecisionTreeFactor::ADT prunedDiscreteTree =
+          discreteTree->apply(prunerFunc(*prunedDecisionTree, *conditional));
+
+      // Create the new (hybrid) conditional.
+      auto prunedDiscrete = boost::make_shared<DiscreteLookupTable>(
+          frontals.size(), conditional->discreteKeys(), prunedDiscreteTree);
+      conditional = boost::make_shared<HybridConditional>(prunedDiscrete);
+
+      // Add it back to the BayesNet.
+      this->at(i) = conditional;
+    }
+  }
+}
+
+/* ************************************************************************* */
+HybridBayesNet HybridBayesNet::prune(size_t maxNrLeaves) {
  // Get the decision tree of only the discrete keys.
  auto discreteConditionals = this->discreteConditionals();
-  const DecisionTreeFactor::shared_ptr discreteFactor =
+  const DecisionTreeFactor::shared_ptr decisionTree =
      boost::make_shared<DecisionTreeFactor>(
          discreteConditionals->prune(maxNrLeaves));
+
+  this->updateDiscreteConditionals(decisionTree);
+
  /* To prune, we visitWith every leaf in the GaussianMixture.
   * For each leaf, using the assignment we can check the discrete decision tree
   * for 0.0 probability, then just set the leaf to a nullptr.

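Note: prunerFunc returns a leaf-wise functional over (assignment, probability) pairs; updateDiscreteConditionals then maps it over each discrete conditional's AlgebraicDecisionTree. A minimal sketch of the pattern (hypothetical, assuming tree is a DecisionTreeFactor::ADT and pruned is the pruned DecisionTreeFactor over the same keys):

  // Zero every leaf whose assignment was pruned away in `pruned`.
  auto zeroPrunedLeaves = [&pruned](const Assignment<Key> &choices,
                                    double probability) -> double {
    DiscreteValues values(choices);
    return pruned(values) == 0.0 ? 0.0 : probability;
  };
  DecisionTreeFactor::ADT result = tree.apply(zeroPrunedLeaves);
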
@@ -59,7 +146,7 @@ HybridBayesNet HybridBayesNet::prune(size_t maxNrLeaves) const {
  HybridBayesNet prunedBayesNetFragment;

  // Go through all the conditionals in the
-  // Bayes Net and prune them as per discreteFactor.
+  // Bayes Net and prune them as per decisionTree.
  for (size_t i = 0; i < this->size(); i++) {
    HybridConditional::shared_ptr conditional = this->at(i);

@@ -69,7 +156,7 @@ HybridBayesNet HybridBayesNet::prune(size_t maxNrLeaves) const {
      // Make a copy of the gaussian mixture and prune it!
      auto prunedGaussianMixture =
          boost::make_shared<GaussianMixture>(*gaussianMixture);
-      prunedGaussianMixture->prune(*discreteFactor);
+      prunedGaussianMixture->prune(*decisionTree);

      // Type-erase and add to the pruned Bayes Net fragment.
      prunedBayesNetFragment.push_back(

@@ -113,7 +113,6 @@ class GTSAM_EXPORT HybridBayesNet : public BayesNet<HybridConditional> {
   */
  VectorValues optimize(const DiscreteValues &assignment) const;

- protected:
  /**
   * @brief Get all the discrete conditionals as a decision tree factor.
   *

@@ -123,11 +122,19 @@ class GTSAM_EXPORT HybridBayesNet : public BayesNet<HybridConditional> {

 public:
  /// Prune the Hybrid Bayes Net such that we have at most maxNrLeaves leaves.
-  HybridBayesNet prune(size_t maxNrLeaves) const;
+  HybridBayesNet prune(size_t maxNrLeaves);

  /// @}

+ private:
+  /**
+   * @brief Update the discrete conditionals with the pruned versions.
+   *
+   * @param prunedDecisionTree The pruned discrete decision tree.
+   */
+  void updateDiscreteConditionals(
+      const DecisionTreeFactor::shared_ptr &prunedDecisionTree);
+
  /** Serialization function */
  friend class boost::serialization::access;
  template <class ARCHIVE>

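A minimal usage sketch of the new non-const prune, assuming hfg is a linearized HybridGaussianFactorGraph:

  Ordering ordering = hfg.getHybridOrdering();
  HybridBayesNet::shared_ptr posterior = hfg.eliminateSequential(ordering);
  HybridBayesNet pruned = posterior->prune(10);  // keep at most 10 leaves

Dropping const is deliberate: prune(maxNrLeaves) now also rewrites the discrete conditionals of the Bayes net in place via updateDiscreteConditionals, so the discrete probabilities stay consistent with the pruned Gaussian components.
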
@@ -146,8 +146,8 @@ VectorValues HybridBayesTree::optimize(const DiscreteValues& assignment) const {

/* ************************************************************************* */
void HybridBayesTree::prune(const size_t maxNrLeaves) {
-  auto decisionTree = boost::dynamic_pointer_cast<DecisionTreeFactor>(
-      this->roots_.at(0)->conditional()->inner());
+  auto decisionTree =
+      this->roots_.at(0)->conditional()->asDiscreteConditional();

  DecisionTreeFactor prunedDecisionTree = decisionTree->prune(maxNrLeaves);
  decisionTree->root_ = prunedDecisionTree.root_;

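The Bayes tree version swaps the root conditional's decision-tree root in place (decisionTree->root_ = prunedDecisionTree.root_), so every clique referencing that conditional sees the pruned distribution. A hypothetical call site (a sketch, assuming hfg as above):

  HybridBayesTree::shared_ptr bayesTree =
      hfg.eliminateMultifrontal(hfg.getHybridOrdering());
  bayesTree->prune(5);  // keep at most 5 discrete leaves
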
@@ -0,0 +1,112 @@
/* ----------------------------------------------------------------------------

 * GTSAM Copyright 2010, Georgia Tech Research Corporation,
 * Atlanta, Georgia 30332-0415
 * All Rights Reserved
 * Authors: Frank Dellaert, et al. (see THANKS for the full author list)

 * See LICENSE for the license information

 * -------------------------------------------------------------------------- */

/**
 * @file    HybridSmoother.cpp
 * @brief   An incremental smoother for hybrid factor graphs
 * @author  Varun Agrawal
 * @date    October 2022
 */

#include <gtsam/hybrid/HybridSmoother.h>

#include <algorithm>
#include <unordered_set>

namespace gtsam {

/* ************************************************************************* */
void HybridSmoother::update(HybridGaussianFactorGraph graph,
                            const Ordering &ordering,
                            boost::optional<size_t> maxNrLeaves) {
  // Add the necessary conditionals from the previous timestep(s).
  std::tie(graph, hybridBayesNet_) =
      addConditionals(graph, hybridBayesNet_, ordering);

  // Eliminate.
  auto bayesNetFragment = graph.eliminateSequential(ordering);

  /// Prune
  if (maxNrLeaves) {
    // `pruneBayesNet` sets the leaves with 0 in discreteFactor to nullptr in
    // all the conditionals with the same keys in bayesNetFragment.
    HybridBayesNet prunedBayesNetFragment =
        bayesNetFragment->prune(*maxNrLeaves);
    // Set the bayes net fragment to the pruned version.
    bayesNetFragment =
        boost::make_shared<HybridBayesNet>(prunedBayesNetFragment);
  }

  // Add the partial bayes net to the posterior bayes net.
  hybridBayesNet_.push_back<HybridBayesNet>(*bayesNetFragment);
}

/* ************************************************************************* */
std::pair<HybridGaussianFactorGraph, HybridBayesNet>
HybridSmoother::addConditionals(const HybridGaussianFactorGraph &originalGraph,
                                const HybridBayesNet &originalHybridBayesNet,
                                const Ordering &ordering) const {
  HybridGaussianFactorGraph graph(originalGraph);
  HybridBayesNet hybridBayesNet(originalHybridBayesNet);

  // If we are not at the first iteration, we have conditionals to add.
  if (!hybridBayesNet.empty()) {
    // We add all relevant conditional mixtures on the last continuous variable
    // in the previous `hybridBayesNet` to the graph.

    // Conditionals to remove from the bayes net,
    // since the conditional will be updated.
    std::vector<HybridConditional::shared_ptr> conditionals_to_erase;

    // New conditionals to add to the graph.
    gtsam::HybridBayesNet newConditionals;

    // NOTE(Varun) Using a for-range loop doesn't work since some of the
    // conditionals are invalid pointers.
    for (size_t i = 0; i < hybridBayesNet.size(); i++) {
      auto conditional = hybridBayesNet.at(i);

      for (auto &key : conditional->frontals()) {
        if (std::find(ordering.begin(), ordering.end(), key) !=
            ordering.end()) {
          newConditionals.push_back(conditional);
          conditionals_to_erase.push_back(conditional);

          break;
        }
      }
    }
    // Remove conditionals at the end so we don't affect the order in the
    // original bayes net.
    for (auto &&conditional : conditionals_to_erase) {
      auto it = find(hybridBayesNet.begin(), hybridBayesNet.end(), conditional);
      hybridBayesNet.erase(it);
    }

    graph.push_back(newConditionals);
    // newConditionals.print("\n\n\nNew Conditionals to add back");
  }
  return {graph, hybridBayesNet};
}

/* ************************************************************************* */
GaussianMixture::shared_ptr HybridSmoother::gaussianMixture(
    size_t index) const {
  return boost::dynamic_pointer_cast<GaussianMixture>(
      hybridBayesNet_.at(index));
}

/* ************************************************************************* */
const HybridBayesNet &HybridSmoother::hybridBayesNet() const {
  return hybridBayesNet_;
}

}  // namespace gtsam

@@ -0,0 +1,73 @@
/* ----------------------------------------------------------------------------

 * GTSAM Copyright 2010, Georgia Tech Research Corporation,
 * Atlanta, Georgia 30332-0415
 * All Rights Reserved
 * Authors: Frank Dellaert, et al. (see THANKS for the full author list)

 * See LICENSE for the license information

 * -------------------------------------------------------------------------- */

/**
 * @file    HybridSmoother.h
 * @brief   An incremental smoother for hybrid factor graphs
 * @author  Varun Agrawal
 * @date    October 2022
 */

#include <gtsam/discrete/DiscreteFactorGraph.h>
#include <gtsam/hybrid/HybridBayesNet.h>
#include <gtsam/hybrid/HybridGaussianFactorGraph.h>

namespace gtsam {

class HybridSmoother {
 private:
  HybridBayesNet hybridBayesNet_;
  HybridGaussianFactorGraph remainingFactorGraph_;

 public:
  /**
   * Given new factors, perform an incremental update.
   * The relevant densities in the `hybridBayesNet` will be added to the input
   * graph (fragment), and then eliminated according to the `ordering`
   * presented. The remaining factor graph contains Gaussian mixture factors
   * that are not connected to the variables in the ordering, or a single
   * discrete factor on all discrete keys, plus all discrete factors in the
   * original graph.
   *
   * \note If maxNrLeaves is given, we look at the discrete factor resulting
   * from this elimination, and prune it and the Gaussian components
   * corresponding to the pruned choices.
   *
   * @param graph The new factors; should be linear only.
   * @param ordering The ordering for elimination; only continuous variables
   * are allowed.
   * @param maxNrLeaves The maximum number of leaves in the new discrete
   * factor, if applicable.
   */
  void update(HybridGaussianFactorGraph graph, const Ordering& ordering,
              boost::optional<size_t> maxNrLeaves = boost::none);

  /**
   * @brief Add conditionals from the previous timestep as part of
   * liquefication.
   *
   * @param graph The new factor graph for the current time step.
   * @param hybridBayesNet The hybrid bayes net containing all conditionals so
   * far.
   * @param ordering The elimination ordering.
   * @return std::pair<HybridGaussianFactorGraph, HybridBayesNet>
   */
  std::pair<HybridGaussianFactorGraph, HybridBayesNet> addConditionals(
      const HybridGaussianFactorGraph& graph,
      const HybridBayesNet& hybridBayesNet, const Ordering& ordering) const;

  /// Get the Gaussian Mixture from the Bayes Net posterior at `index`.
  GaussianMixture::shared_ptr gaussianMixture(size_t index) const;

  /// Return the Bayes Net posterior.
  const HybridBayesNet& hybridBayesNet() const;
};

}  // namespace gtsam

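A usage sketch of the smoother loop (names illustrative; this mirrors the testHybridEstimation.cpp test added later in this PR):

  HybridSmoother smoother;
  HybridNonlinearFactorGraph graph;
  Values initial;
  // ... push this timestep's factors into graph, values into initial ...
  HybridGaussianFactorGraph linearized = *graph.linearize(initial);
  HybridGaussianFactorGraph bayesNet = smoother.hybridBayesNet();
  Ordering ordering = getOrdering(bayesNet, linearized);  // see test below
  smoother.update(linearized, ordering, 3);  // prune to at most 3 leaves
  HybridValues delta = smoother.hybridBayesNet().optimize();
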
@@ -131,21 +131,29 @@ struct Switching {
   * @param between_sigma The stddev between poses.
   * @param prior_sigma The stddev on priors (also used for measurements).
   */
-  Switching(size_t K, double between_sigma = 1.0, double prior_sigma = 0.1)
+  Switching(size_t K, double between_sigma = 1.0, double prior_sigma = 0.1,
+            std::vector<double> measurements = {})
      : K(K) {
-    // Create DiscreteKeys for binary K modes, modes[0] will not be used.
-    for (size_t k = 0; k <= K; k++) {
+    // Create DiscreteKeys for binary K modes.
+    for (size_t k = 0; k < K; k++) {
      modes.emplace_back(M(k), 2);
    }

+    // If measurements are not provided, we just have the robot moving forward.
+    if (measurements.size() == 0) {
+      for (size_t k = 0; k < K; k++) {
+        measurements.push_back(k);
+      }
+    }
+
    // Create hybrid factor graph.
    // Add a prior on X(1).
    auto prior = boost::make_shared<PriorFactor<double>>(
-        X(1), 0, noiseModel::Isotropic::Sigma(1, prior_sigma));
+        X(0), measurements.at(0), noiseModel::Isotropic::Sigma(1, prior_sigma));
    nonlinearFactorGraph.push_nonlinear(prior);

    // Add "motion models".
-    for (size_t k = 1; k < K; k++) {
+    for (size_t k = 0; k < K - 1; k++) {
      KeyVector keys = {X(k), X(k + 1)};
      auto motion_models = motionModels(k, between_sigma);
      std::vector<NonlinearFactor::shared_ptr> components;

@@ -158,17 +166,17 @@ struct Switching {

    // Add measurement factors.
    auto measurement_noise = noiseModel::Isotropic::Sigma(1, prior_sigma);
-    for (size_t k = 2; k <= K; k++) {
+    for (size_t k = 1; k < K; k++) {
      nonlinearFactorGraph.emplace_nonlinear<PriorFactor<double>>(
-          X(k), 1.0 * (k - 1), measurement_noise);
+          X(k), measurements.at(k), measurement_noise);
    }

    // Add "mode chain".
    addModeChain(&nonlinearFactorGraph);

    // Create the linearization point.
-    for (size_t k = 1; k <= K; k++) {
-      linearizationPoint.insert<double>(X(k), static_cast<double>(k));
+    for (size_t k = 0; k < K; k++) {
+      linearizationPoint.insert<double>(X(k), static_cast<double>(k + 1));
    }

    // The ground truth is robot moving forward.

@@ -187,11 +195,16 @@ struct Switching {
    return {still, moving};
  }

-  // Add "mode chain" to HybridNonlinearFactorGraph
+  /**
+   * @brief Add "mode chain" to HybridNonlinearFactorGraph from M(0) to M(K-2).
+   * E.g. if K=4, we want M0, M1 and M2.
+   *
+   * @param fg The nonlinear factor graph to which the mode chain is added.
+   */
  void addModeChain(HybridNonlinearFactorGraph *fg) {
-    auto prior = boost::make_shared<DiscreteDistribution>(modes[1], "1/1");
+    auto prior = boost::make_shared<DiscreteDistribution>(modes[0], "1/1");
    fg->push_discrete(prior);
-    for (size_t k = 1; k < K - 1; k++) {
+    for (size_t k = 0; k < K - 2; k++) {
      auto parents = {modes[k]};
      auto conditional = boost::make_shared<DiscreteConditional>(
          modes[k + 1], parents, "1/2 3/2");

@@ -199,11 +212,16 @@ struct Switching {
    }
  }

-  // Add "mode chain" to HybridGaussianFactorGraph
+  /**
+   * @brief Add "mode chain" to HybridGaussianFactorGraph from M(0) to M(K-2).
+   * E.g. if K=4, we want M0, M1 and M2.
+   *
+   * @param fg The gaussian factor graph to which the mode chain is added.
+   */
  void addModeChain(HybridGaussianFactorGraph *fg) {
-    auto prior = boost::make_shared<DiscreteDistribution>(modes[1], "1/1");
+    auto prior = boost::make_shared<DiscreteDistribution>(modes[0], "1/1");
    fg->push_discrete(prior);
-    for (size_t k = 1; k < K - 1; k++) {
+    for (size_t k = 0; k < K - 2; k++) {
      auto parents = {modes[k]};
      auto conditional = boost::make_shared<DiscreteConditional>(
          modes[k + 1], parents, "1/2 3/2");

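With this fixture change a test can pin the measurement sequence instead of the default "robot moving forward". A hypothetical construction:

  // K = 5 gives modes M(0)..M(4), a prior on X(0) at measurements[0],
  // and measurement priors on X(1)..X(4) at measurements[k].
  std::vector<double> measurements = {0, 1, 2, 2, 2};
  Switching switching(5, 1.0, 0.1, measurements);
  auto graph = switching.linearizedFactorGraph;
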
@@ -82,9 +82,9 @@ TEST(HybridBayesNet, Choose) {
      s.linearizedFactorGraph.eliminatePartialSequential(ordering);

  DiscreteValues assignment;
+  assignment[M(0)] = 1;
  assignment[M(1)] = 1;
-  assignment[M(2)] = 1;
-  assignment[M(3)] = 0;
+  assignment[M(2)] = 0;

  GaussianBayesNet gbn = hybridBayesNet->choose(assignment);

@@ -120,20 +120,20 @@ TEST(HybridBayesNet, OptimizeAssignment) {
      s.linearizedFactorGraph.eliminatePartialSequential(ordering);

  DiscreteValues assignment;
+  assignment[M(0)] = 1;
  assignment[M(1)] = 1;
  assignment[M(2)] = 1;
-  assignment[M(3)] = 1;

  VectorValues delta = hybridBayesNet->optimize(assignment);

  // The linearization point has the same value as the key index,
-  // e.g. X(1) = 1, X(2) = 2,
+  // e.g. X(0) = 1, X(1) = 2,
  // but the factors specify X(k) = k-1, so delta should be -1.
  VectorValues expected_delta;
+  expected_delta.insert(make_pair(X(0), -Vector1::Ones()));
  expected_delta.insert(make_pair(X(1), -Vector1::Ones()));
  expected_delta.insert(make_pair(X(2), -Vector1::Ones()));
  expected_delta.insert(make_pair(X(3), -Vector1::Ones()));
-  expected_delta.insert(make_pair(X(4), -Vector1::Ones()));

  EXPECT(assert_equal(expected_delta, delta));
}

@@ -150,16 +150,16 @@ TEST(HybridBayesNet, Optimize) {
  HybridValues delta = hybridBayesNet->optimize();

  DiscreteValues expectedAssignment;
-  expectedAssignment[M(1)] = 1;
-  expectedAssignment[M(2)] = 0;
-  expectedAssignment[M(3)] = 1;
+  expectedAssignment[M(0)] = 1;
+  expectedAssignment[M(1)] = 0;
+  expectedAssignment[M(2)] = 1;
  EXPECT(assert_equal(expectedAssignment, delta.discrete()));

  VectorValues expectedValues;
-  expectedValues.insert(X(1), -0.999904 * Vector1::Ones());
-  expectedValues.insert(X(2), -0.99029 * Vector1::Ones());
-  expectedValues.insert(X(3), -1.00971 * Vector1::Ones());
-  expectedValues.insert(X(4), -1.0001 * Vector1::Ones());
+  expectedValues.insert(X(0), -0.999904 * Vector1::Ones());
+  expectedValues.insert(X(1), -0.99029 * Vector1::Ones());
+  expectedValues.insert(X(2), -1.00971 * Vector1::Ones());
+  expectedValues.insert(X(3), -1.0001 * Vector1::Ones());

  EXPECT(assert_equal(expectedValues, delta.continuous(), 1e-5));
}

@@ -175,10 +175,10 @@ TEST(HybridBayesNet, OptimizeMultifrontal) {
  HybridValues delta = hybridBayesTree->optimize();

  VectorValues expectedValues;
-  expectedValues.insert(X(1), -0.999904 * Vector1::Ones());
-  expectedValues.insert(X(2), -0.99029 * Vector1::Ones());
-  expectedValues.insert(X(3), -1.00971 * Vector1::Ones());
-  expectedValues.insert(X(4), -1.0001 * Vector1::Ones());
+  expectedValues.insert(X(0), -0.999904 * Vector1::Ones());
+  expectedValues.insert(X(1), -0.99029 * Vector1::Ones());
+  expectedValues.insert(X(2), -1.00971 * Vector1::Ones());
+  expectedValues.insert(X(3), -1.0001 * Vector1::Ones());

  EXPECT(assert_equal(expectedValues, delta.continuous(), 1e-5));
}

@@ -201,6 +201,55 @@ TEST(HybridBayesNet, Prune) {
  EXPECT(assert_equal(delta.continuous(), pruned_delta.continuous()));
}

+/* ****************************************************************************/
+// Test bayes net updateDiscreteConditionals
+TEST(HybridBayesNet, UpdateDiscreteConditionals) {
+  Switching s(4);
+
+  Ordering hybridOrdering = s.linearizedFactorGraph.getHybridOrdering();
+  HybridBayesNet::shared_ptr hybridBayesNet =
+      s.linearizedFactorGraph.eliminateSequential(hybridOrdering);
+
+  size_t maxNrLeaves = 3;
+  auto discreteConditionals = hybridBayesNet->discreteConditionals();
+  const DecisionTreeFactor::shared_ptr prunedDecisionTree =
+      boost::make_shared<DecisionTreeFactor>(
+          discreteConditionals->prune(maxNrLeaves));
+
+  EXPECT_LONGS_EQUAL(maxNrLeaves + 2 /*2 zero leaves*/,
+                     prunedDecisionTree->nrLeaves());
+
+  auto original_discrete_conditionals =
+      *(hybridBayesNet->at(4)->asDiscreteConditional());
+
+  // Prune!
+  hybridBayesNet->prune(maxNrLeaves);
+
+  // Functor to verify values against the original_discrete_conditionals.
+  auto checker = [&](const Assignment<Key>& assignment,
+                     double probability) -> double {
+    // Typecast so we can use this to get the probability value.
+    DiscreteValues choices(assignment);
+    if (prunedDecisionTree->operator()(choices) == 0) {
+      EXPECT_DOUBLES_EQUAL(0.0, probability, 1e-9);
+    } else {
+      EXPECT_DOUBLES_EQUAL(original_discrete_conditionals(choices), probability,
+                           1e-9);
+    }
+    return 0.0;
+  };
+
+  // Get the pruned discrete conditionals as an AlgebraicDecisionTree.
+  auto pruned_discrete_conditionals =
+      hybridBayesNet->at(4)->asDiscreteConditional();
+  auto discrete_conditional_tree =
+      boost::dynamic_pointer_cast<DecisionTreeFactor::ADT>(
+          pruned_discrete_conditionals);
+
+  // The checker functor verifies the values for us.
+  discrete_conditional_tree->apply(checker);
+}
+
/* ****************************************************************************/
// Test HybridBayesNet serialization.
TEST(HybridBayesNet, Serialization) {

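On the maxNrLeaves + 2 assertion above: DecisionTreeFactor::prune keeps the maxNrLeaves highest-probability assignments and zeroes the rest, but the zeroed regions can remain as separate leaves of the decision tree. A sketch reusing the test's variables:

  DecisionTreeFactor pruned = discreteConditionals->prune(3);
  // 3 surviving assignments + 2 distinct zero leaves = 5 total
  EXPECT_LONGS_EQUAL(5, pruned.nrLeaves());
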
@@ -60,9 +60,9 @@ TEST(HybridBayesTree, OptimizeAssignment) {
  isam.update(graph1);

  DiscreteValues assignment;
+  assignment[M(0)] = 1;
  assignment[M(1)] = 1;
  assignment[M(2)] = 1;
-  assignment[M(3)] = 1;

  VectorValues delta = isam.optimize(assignment);

@@ -70,16 +70,16 @@ TEST(HybridBayesTree, OptimizeAssignment) {
  // e.g. X(1) = 1, X(2) = 2,
  // but the factors specify X(k) = k-1, so delta should be -1.
  VectorValues expected_delta;
+  expected_delta.insert(make_pair(X(0), -Vector1::Ones()));
  expected_delta.insert(make_pair(X(1), -Vector1::Ones()));
  expected_delta.insert(make_pair(X(2), -Vector1::Ones()));
  expected_delta.insert(make_pair(X(3), -Vector1::Ones()));
-  expected_delta.insert(make_pair(X(4), -Vector1::Ones()));

  EXPECT(assert_equal(expected_delta, delta));

  // Create ordering.
  Ordering ordering;
-  for (size_t k = 1; k <= s.K; k++) ordering += X(k);
+  for (size_t k = 0; k < s.K; k++) ordering += X(k);

  HybridBayesNet::shared_ptr hybridBayesNet;
  HybridGaussianFactorGraph::shared_ptr remainingFactorGraph;

@@ -123,7 +123,7 @@ TEST(HybridBayesTree, Optimize) {

  // Create ordering.
  Ordering ordering;
-  for (size_t k = 1; k <= s.K; k++) ordering += X(k);
+  for (size_t k = 0; k < s.K; k++) ordering += X(k);

  HybridBayesNet::shared_ptr hybridBayesNet;
  HybridGaussianFactorGraph::shared_ptr remainingFactorGraph;

@@ -0,0 +1,130 @@
/* ----------------------------------------------------------------------------

 * GTSAM Copyright 2010, Georgia Tech Research Corporation,
 * Atlanta, Georgia 30332-0415
 * All Rights Reserved
 * Authors: Frank Dellaert, et al. (see THANKS for the full author list)

 * See LICENSE for the license information

 * -------------------------------------------------------------------------- */

/**
 * @file    testHybridEstimation.cpp
 * @brief   Unit tests for end-to-end Hybrid Estimation
 * @author  Varun Agrawal
 */

#include <gtsam/geometry/Pose2.h>
#include <gtsam/hybrid/HybridBayesNet.h>
#include <gtsam/hybrid/HybridNonlinearFactorGraph.h>
#include <gtsam/hybrid/HybridNonlinearISAM.h>
#include <gtsam/hybrid/HybridSmoother.h>
#include <gtsam/hybrid/MixtureFactor.h>
#include <gtsam/inference/Symbol.h>
#include <gtsam/linear/GaussianBayesNet.h>
#include <gtsam/linear/GaussianFactorGraph.h>
#include <gtsam/linear/JacobianFactor.h>
#include <gtsam/linear/NoiseModel.h>
#include <gtsam/nonlinear/NonlinearFactorGraph.h>
#include <gtsam/nonlinear/PriorFactor.h>
#include <gtsam/slam/BetweenFactor.h>

// Include for test suite
#include <CppUnitLite/TestHarness.h>

#include "Switching.h"

using namespace std;
using namespace gtsam;

using symbol_shorthand::M;
using symbol_shorthand::X;

Ordering getOrdering(HybridGaussianFactorGraph& factors,
                     const HybridGaussianFactorGraph& newFactors) {
  factors += newFactors;
  // Get all the discrete keys from the factors.
  KeySet allDiscrete = factors.discreteKeys();

  // Create KeyVector with continuous keys followed by discrete keys.
  KeyVector newKeysDiscreteLast;
  const KeySet newFactorKeys = newFactors.keys();
  // Insert continuous keys first.
  for (auto& k : newFactorKeys) {
    if (!allDiscrete.exists(k)) {
      newKeysDiscreteLast.push_back(k);
    }
  }

  // Insert discrete keys at the end.
  std::copy(allDiscrete.begin(), allDiscrete.end(),
            std::back_inserter(newKeysDiscreteLast));

  const VariableIndex index(factors);

  // Get an ordering where the new keys are eliminated last.
  Ordering ordering = Ordering::ColamdConstrainedLast(
      index, KeyVector(newKeysDiscreteLast.begin(), newKeysDiscreteLast.end()),
      true);
  return ordering;
}

/****************************************************************************/
// Test approximate inference with an additional pruning step.
TEST(HybridNonlinearISAM, Incremental) {
  size_t K = 10;
  std::vector<double> measurements = {0, 1, 2, 2, 2, 2, 3, 4, 5, 6, 6};
  // Ground truth discrete sequence.
  std::vector<size_t> discrete_seq = {1, 1, 0, 0, 0, 1, 1, 1, 1, 0};
  Switching switching(K, 1.0, 0.1, measurements);
  // HybridNonlinearISAM smoother;
  HybridSmoother smoother;
  HybridNonlinearFactorGraph graph;
  Values initial;

  // Add the X(0) prior.
  graph.push_back(switching.nonlinearFactorGraph.at(0));
  initial.insert(X(0), switching.linearizationPoint.at<double>(X(0)));

  HybridGaussianFactorGraph linearized;
  HybridGaussianFactorGraph bayesNet;

  for (size_t k = 1; k < K; k++) {
    std::cout << ">>>>>>>>>>>>>>>>>>> k=" << k << std::endl;
    // Motion model
    graph.push_back(switching.nonlinearFactorGraph.at(k));
    // Measurement
    graph.push_back(switching.nonlinearFactorGraph.at(k + K - 1));

    initial.insert(X(k), switching.linearizationPoint.at<double>(X(k)));

    bayesNet = smoother.hybridBayesNet();
    linearized = *graph.linearize(initial);
    Ordering ordering = getOrdering(bayesNet, linearized);

    smoother.update(linearized, ordering, 3);
    graph.resize(0);
  }
  HybridValues delta = smoother.hybridBayesNet().optimize();

  Values result = initial.retract(delta.continuous());

  DiscreteValues expected_discrete;
  for (size_t k = 0; k < K - 1; k++) {
    expected_discrete[M(k)] = discrete_seq[k];
  }
  EXPECT(assert_equal(expected_discrete, delta.discrete()));

  Values expected_continuous;
  for (size_t k = 0; k < K; k++) {
    expected_continuous.insert(X(k), measurements[k]);
  }
  EXPECT(assert_equal(expected_continuous, result));
}

/* ************************************************************************* */
int main() {
  TestResult tr;
  return TestRegistry::runAllTests(tr);
}
/* ************************************************************************* */

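Note on getOrdering above: it leans on constrained COLAMD so the new keys, with the discrete modes last of all, are eliminated after everything else; this is what lets elimination emit Gaussian mixture conditionals first and a purely discrete factor at the end. The same idea in isolation (hypothetical keys, a sketch assuming a graph `factors`):

  const VariableIndex index(factors);
  KeyVector constrainLast = {M(0), M(1)};  // eliminate the modes last
  Ordering ordering = Ordering::ColamdConstrainedLast(index, constrainLast, true);
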
@@ -531,34 +531,34 @@ TEST(HybridGaussianFactorGraph, Conditionals) {

  hfg.push_back(switching.linearizedFactorGraph.at(0));  // P(X1)
  Ordering ordering;
-  ordering.push_back(X(1));
+  ordering.push_back(X(0));
  HybridBayesNet::shared_ptr bayes_net = hfg.eliminateSequential(ordering);

  hfg.push_back(switching.linearizedFactorGraph.at(1));  // P(X1, X2 | M1)
  hfg.push_back(*bayes_net);
  hfg.push_back(switching.linearizedFactorGraph.at(2));  // P(X2, X3 | M2)
  hfg.push_back(switching.linearizedFactorGraph.at(5));  // P(M1)
+  ordering.push_back(X(1));
  ordering.push_back(X(2));
-  ordering.push_back(X(3));
+  ordering.push_back(M(0));
  ordering.push_back(M(1));
-  ordering.push_back(M(2));

  bayes_net = hfg.eliminateSequential(ordering);

  HybridValues result = bayes_net->optimize();

  Values expected_continuous;
-  expected_continuous.insert<double>(X(1), 0);
-  expected_continuous.insert<double>(X(2), 1);
-  expected_continuous.insert<double>(X(3), 2);
-  expected_continuous.insert<double>(X(4), 4);
+  expected_continuous.insert<double>(X(0), 0);
+  expected_continuous.insert<double>(X(1), 1);
+  expected_continuous.insert<double>(X(2), 2);
+  expected_continuous.insert<double>(X(3), 4);
  Values result_continuous =
      switching.linearizationPoint.retract(result.continuous());
  EXPECT(assert_equal(expected_continuous, result_continuous));

  DiscreteValues expected_discrete;
+  expected_discrete[M(0)] = 1;
  expected_discrete[M(1)] = 1;
-  expected_discrete[M(2)] = 1;
  EXPECT(assert_equal(expected_discrete, result.discrete()));
}

@@ -54,40 +54,40 @@ TEST(HybridGaussianElimination, IncrementalElimination) {
  // Create initial factor graph
  // * * *
  // | | |
-  // X1 -*- X2 -*- X3
-  //  \*-M1-*/
-  graph1.push_back(switching.linearizedFactorGraph.at(0));  // P(X1)
-  graph1.push_back(switching.linearizedFactorGraph.at(1));  // P(X1, X2 | M1)
-  graph1.push_back(switching.linearizedFactorGraph.at(2));  // P(X2, X3 | M2)
-  graph1.push_back(switching.linearizedFactorGraph.at(5));  // P(M1)
+  // X0 -*- X1 -*- X2
+  //  \*-M0-*/
+  graph1.push_back(switching.linearizedFactorGraph.at(0));  // P(X0)
+  graph1.push_back(switching.linearizedFactorGraph.at(1));  // P(X0, X1 | M0)
+  graph1.push_back(switching.linearizedFactorGraph.at(2));  // P(X1, X2 | M1)
+  graph1.push_back(switching.linearizedFactorGraph.at(5));  // P(M0)

  // Run update step
  isam.update(graph1);

-  // Check that after update we have 3 hybrid Bayes net nodes:
-  // P(X1 | X2, M1) and P(X2, X3 | M1, M2), P(M1, M2)
+  // Check that after update we have 2 hybrid Bayes net nodes:
+  // P(X0 | X1, M0) and P(X1, X2 | M0, M1), P(M0, M1)
  EXPECT_LONGS_EQUAL(3, isam.size());
-  EXPECT(isam[X(1)]->conditional()->frontals() == KeyVector{X(1)});
-  EXPECT(isam[X(1)]->conditional()->parents() == KeyVector({X(2), M(1)}));
-  EXPECT(isam[X(2)]->conditional()->frontals() == KeyVector({X(2), X(3)}));
-  EXPECT(isam[X(2)]->conditional()->parents() == KeyVector({M(1), M(2)}));
+  EXPECT(isam[X(0)]->conditional()->frontals() == KeyVector{X(0)});
+  EXPECT(isam[X(0)]->conditional()->parents() == KeyVector({X(1), M(0)}));
+  EXPECT(isam[X(1)]->conditional()->frontals() == KeyVector({X(1), X(2)}));
+  EXPECT(isam[X(1)]->conditional()->parents() == KeyVector({M(0), M(1)}));

  /********************************************************/
  // New factor graph for incremental update.
  HybridGaussianFactorGraph graph2;

-  graph1.push_back(switching.linearizedFactorGraph.at(3));  // P(X2)
-  graph2.push_back(switching.linearizedFactorGraph.at(4));  // P(X3)
-  graph2.push_back(switching.linearizedFactorGraph.at(6));  // P(M1, M2)
+  graph1.push_back(switching.linearizedFactorGraph.at(3));  // P(X1)
+  graph2.push_back(switching.linearizedFactorGraph.at(4));  // P(X2)
+  graph2.push_back(switching.linearizedFactorGraph.at(6));  // P(M0, M1)

  isam.update(graph2);

  // Check that after the second update we have
  // 1 additional hybrid Bayes net node:
-  // P(X2, X3 | M1, M2)
+  // P(X1, X2 | M0, M1)
  EXPECT_LONGS_EQUAL(3, isam.size());
-  EXPECT(isam[X(3)]->conditional()->frontals() == KeyVector({X(2), X(3)}));
-  EXPECT(isam[X(3)]->conditional()->parents() == KeyVector({M(1), M(2)}));
+  EXPECT(isam[X(2)]->conditional()->frontals() == KeyVector({X(1), X(2)}));
+  EXPECT(isam[X(2)]->conditional()->parents() == KeyVector({M(0), M(1)}));
}

/* ****************************************************************************/

@@ -100,104 +100,104 @@ TEST(HybridGaussianElimination, IncrementalInference) {
  // Create initial factor graph
  // * * *
  // | | |
-  // X1 -*- X2 -*- X3
+  // X0 -*- X1 -*- X2
  //  |        |
-  //  *-M1 - * - M2
-  graph1.push_back(switching.linearizedFactorGraph.at(0));  // P(X1)
-  graph1.push_back(switching.linearizedFactorGraph.at(1));  // P(X1, X2 | M1)
-  graph1.push_back(switching.linearizedFactorGraph.at(3));  // P(X2)
-  graph1.push_back(switching.linearizedFactorGraph.at(5));  // P(M1)
+  //  *-M0 - * - M1
+  graph1.push_back(switching.linearizedFactorGraph.at(0));  // P(X0)
+  graph1.push_back(switching.linearizedFactorGraph.at(1));  // P(X0, X1 | M0)
+  graph1.push_back(switching.linearizedFactorGraph.at(3));  // P(X1)
+  graph1.push_back(switching.linearizedFactorGraph.at(5));  // P(M0)

  // Run update step
  isam.update(graph1);

-  auto discreteConditional_m1 =
-      isam[M(1)]->conditional()->asDiscreteConditional();
-  EXPECT(discreteConditional_m1->keys() == KeyVector({M(1)}));
+  auto discreteConditional_m0 =
+      isam[M(0)]->conditional()->asDiscreteConditional();
+  EXPECT(discreteConditional_m0->keys() == KeyVector({M(0)}));

  /********************************************************/
  // New factor graph for incremental update.
  HybridGaussianFactorGraph graph2;

-  graph2.push_back(switching.linearizedFactorGraph.at(2));  // P(X2, X3 | M2)
-  graph2.push_back(switching.linearizedFactorGraph.at(4));  // P(X3)
-  graph2.push_back(switching.linearizedFactorGraph.at(6));  // P(M1, M2)
+  graph2.push_back(switching.linearizedFactorGraph.at(2));  // P(X1, X2 | M1)
+  graph2.push_back(switching.linearizedFactorGraph.at(4));  // P(X2)
+  graph2.push_back(switching.linearizedFactorGraph.at(6));  // P(M0, M1)

  isam.update(graph2);

  /********************************************************/
  // Run batch elimination so we can compare results.
  Ordering ordering;
+  ordering += X(0);
  ordering += X(1);
  ordering += X(2);
-  ordering += X(3);

-  // Now we calculate the actual factors using full elimination
+  // Now we calculate the expected factors using full elimination
  HybridBayesTree::shared_ptr expectedHybridBayesTree;
  HybridGaussianFactorGraph::shared_ptr expectedRemainingGraph;
  std::tie(expectedHybridBayesTree, expectedRemainingGraph) =
      switching.linearizedFactorGraph.eliminatePartialMultifrontal(ordering);

+  // The densities on X(0) should be the same
+  auto x0_conditional =
+      dynamic_pointer_cast<GaussianMixture>(isam[X(0)]->conditional()->inner());
+  auto expected_x0_conditional = dynamic_pointer_cast<GaussianMixture>(
+      (*expectedHybridBayesTree)[X(0)]->conditional()->inner());
+  EXPECT(assert_equal(*x0_conditional, *expected_x0_conditional));

  // The densities on X(1) should be the same
  auto x1_conditional =
      dynamic_pointer_cast<GaussianMixture>(isam[X(1)]->conditional()->inner());
-  auto actual_x1_conditional = dynamic_pointer_cast<GaussianMixture>(
+  auto expected_x1_conditional = dynamic_pointer_cast<GaussianMixture>(
      (*expectedHybridBayesTree)[X(1)]->conditional()->inner());
-  EXPECT(assert_equal(*x1_conditional, *actual_x1_conditional));
+  EXPECT(assert_equal(*x1_conditional, *expected_x1_conditional));

  // The densities on X(2) should be the same
  auto x2_conditional =
      dynamic_pointer_cast<GaussianMixture>(isam[X(2)]->conditional()->inner());
-  auto actual_x2_conditional = dynamic_pointer_cast<GaussianMixture>(
+  auto expected_x2_conditional = dynamic_pointer_cast<GaussianMixture>(
      (*expectedHybridBayesTree)[X(2)]->conditional()->inner());
-  EXPECT(assert_equal(*x2_conditional, *actual_x2_conditional));
-
-  // The densities on X(3) should be the same
-  auto x3_conditional =
-      dynamic_pointer_cast<GaussianMixture>(isam[X(3)]->conditional()->inner());
-  auto actual_x3_conditional = dynamic_pointer_cast<GaussianMixture>(
-      (*expectedHybridBayesTree)[X(2)]->conditional()->inner());
-  EXPECT(assert_equal(*x3_conditional, *actual_x3_conditional));
+  EXPECT(assert_equal(*x2_conditional, *expected_x2_conditional));

  // We only perform manual continuous elimination for 0,0.
  // The other discrete probabilities on M(2) are calculated the same way
  Ordering discrete_ordering;
+  discrete_ordering += M(0);
  discrete_ordering += M(1);
-  discrete_ordering += M(2);
  HybridBayesTree::shared_ptr discreteBayesTree =
      expectedRemainingGraph->eliminateMultifrontal(discrete_ordering);

  DiscreteValues m00;
-  m00[M(1)] = 0, m00[M(2)] = 0;
+  m00[M(0)] = 0, m00[M(1)] = 0;
  DiscreteConditional decisionTree =
-      *(*discreteBayesTree)[M(2)]->conditional()->asDiscreteConditional();
+      *(*discreteBayesTree)[M(1)]->conditional()->asDiscreteConditional();
  double m00_prob = decisionTree(m00);

-  auto discreteConditional = isam[M(2)]->conditional()->asDiscreteConditional();
+  auto discreteConditional = isam[M(1)]->conditional()->asDiscreteConditional();

  // Test if the probability values are as expected with regression tests.
  DiscreteValues assignment;
  EXPECT(assert_equal(m00_prob, 0.0619233, 1e-5));
+  assignment[M(0)] = 0;
  assignment[M(1)] = 0;
-  assignment[M(2)] = 0;
  EXPECT(assert_equal(m00_prob, (*discreteConditional)(assignment), 1e-5));
-  assignment[M(1)] = 1;
-  assignment[M(2)] = 0;
-  EXPECT(assert_equal(0.183743, (*discreteConditional)(assignment), 1e-5));
+  assignment[M(0)] = 1;
  assignment[M(1)] = 0;
-  assignment[M(2)] = 1;
-  EXPECT(assert_equal(0.204159, (*discreteConditional)(assignment), 1e-5));
+  EXPECT(assert_equal(0.183743, (*discreteConditional)(assignment), 1e-5));
+  assignment[M(0)] = 0;
+  assignment[M(1)] = 1;
+  EXPECT(assert_equal(0.204159, (*discreteConditional)(assignment), 1e-5));
+  assignment[M(0)] = 1;
  assignment[M(1)] = 1;
-  assignment[M(2)] = 1;
  EXPECT(assert_equal(0.2, (*discreteConditional)(assignment), 1e-5));

  // Check if the clique conditional generated from incremental elimination
  // matches that of batch elimination.
  auto expectedChordal = expectedRemainingGraph->eliminateMultifrontal();
  auto expectedConditional = dynamic_pointer_cast<DecisionTreeFactor>(
-      (*expectedChordal)[M(2)]->conditional()->inner());
+      (*expectedChordal)[M(1)]->conditional()->inner());
  auto actualConditional = dynamic_pointer_cast<DecisionTreeFactor>(
-      isam[M(2)]->conditional()->inner());
+      isam[M(1)]->conditional()->inner());
  EXPECT(assert_equal(*actualConditional, *expectedConditional, 1e-6));
}

@@ -208,13 +208,13 @@ TEST(HybridGaussianElimination, Approx_inference) {
  HybridGaussianISAM incrementalHybrid;
  HybridGaussianFactorGraph graph1;

-  // Add the 3 hybrid factors, x1-x2, x2-x3, x3-x4
+  // Add the 3 hybrid factors, x0-x1, x1-x2, x2-x3
  for (size_t i = 1; i < 4; i++) {
    graph1.push_back(switching.linearizedFactorGraph.at(i));
  }

-  // Add the Gaussian factors, 1 prior on X(1),
-  // 3 measurements on X(2), X(3), X(4)
+  // Add the Gaussian factors, 1 prior on X(0),
+  // 3 measurements on X(1), X(2), X(3)
  graph1.push_back(switching.linearizedFactorGraph.at(0));
  for (size_t i = 4; i <= 7; i++) {
    graph1.push_back(switching.linearizedFactorGraph.at(i));

@@ -222,7 +222,7 @@ TEST(HybridGaussianElimination, Approx_inference) {

  // Create ordering.
  Ordering ordering;
-  for (size_t j = 1; j <= 4; j++) {
+  for (size_t j = 0; j < 4; j++) {
    ordering += X(j);
  }

@@ -271,26 +271,26 @@ TEST(HybridGaussianElimination, Approx_inference) {
  1 1 1 Leaf 0.5
  */

-  auto discreteConditional_m1 = *dynamic_pointer_cast<DiscreteConditional>(
-      incrementalHybrid[M(1)]->conditional()->inner());
-  EXPECT(discreteConditional_m1.keys() == KeyVector({M(1), M(2), M(3)}));
+  auto discreteConditional_m0 = *dynamic_pointer_cast<DiscreteConditional>(
+      incrementalHybrid[M(0)]->conditional()->inner());
+  EXPECT(discreteConditional_m0.keys() == KeyVector({M(0), M(1), M(2)}));

  // Get the number of elements which are greater than 0.
  auto count = [](const double &value, int count) {
    return value > 0 ? count + 1 : count;
  };
  // Check that the number of leaves after pruning is 5.
-  EXPECT_LONGS_EQUAL(5, discreteConditional_m1.fold(count, 0));
+  EXPECT_LONGS_EQUAL(5, discreteConditional_m0.fold(count, 0));

  // Check that the hybrid nodes of the bayes net match those of the pre-pruning
  // bayes net, at the same positions.
  auto &unprunedLastDensity = *dynamic_pointer_cast<GaussianMixture>(
-      unprunedHybridBayesTree->clique(X(4))->conditional()->inner());
+      unprunedHybridBayesTree->clique(X(3))->conditional()->inner());
  auto &lastDensity = *dynamic_pointer_cast<GaussianMixture>(
-      incrementalHybrid[X(4)]->conditional()->inner());
+      incrementalHybrid[X(3)]->conditional()->inner());

  std::vector<std::pair<DiscreteValues, double>> assignments =
-      discreteConditional_m1.enumerate();
+      discreteConditional_m0.enumerate();
  // Loop over all assignments and check the pruned components
  for (auto &&av : assignments) {
    const DiscreteValues &assignment = av.first;

@@ -314,13 +314,13 @@ TEST(HybridGaussianElimination, Incremental_approximate) {
  HybridGaussianFactorGraph graph1;

  /***** Run Round 1 *****/
-  // Add the 3 hybrid factors, x1-x2, x2-x3, x3-x4
+  // Add the 3 hybrid factors, x0-x1, x1-x2, x2-x3
  for (size_t i = 1; i < 4; i++) {
    graph1.push_back(switching.linearizedFactorGraph.at(i));
  }

-  // Add the Gaussian factors, 1 prior on X(1),
-  // 3 measurements on X(2), X(3), X(4)
+  // Add the Gaussian factors, 1 prior on X(0),
+  // 3 measurements on X(1), X(2), X(3)
  graph1.push_back(switching.linearizedFactorGraph.at(0));
  for (size_t i = 5; i <= 7; i++) {
    graph1.push_back(switching.linearizedFactorGraph.at(i));

@@ -335,13 +335,13 @@ TEST(HybridGaussianElimination, Incremental_approximate) {
  // each with 2, 4, 8, and 5 (pruned) leaves respectively.
  EXPECT_LONGS_EQUAL(4, incrementalHybrid.size());
  EXPECT_LONGS_EQUAL(
-      2, incrementalHybrid[X(1)]->conditional()->asMixture()->nrComponents());
+      2, incrementalHybrid[X(0)]->conditional()->asMixture()->nrComponents());
  EXPECT_LONGS_EQUAL(
-      3, incrementalHybrid[X(2)]->conditional()->asMixture()->nrComponents());
+      3, incrementalHybrid[X(1)]->conditional()->asMixture()->nrComponents());
+  EXPECT_LONGS_EQUAL(
+      5, incrementalHybrid[X(2)]->conditional()->asMixture()->nrComponents());
  EXPECT_LONGS_EQUAL(
      5, incrementalHybrid[X(3)]->conditional()->asMixture()->nrComponents());
-  EXPECT_LONGS_EQUAL(
-      5, incrementalHybrid[X(4)]->conditional()->asMixture()->nrComponents());

  /***** Run Round 2 *****/
  HybridGaussianFactorGraph graph2;

@@ -356,9 +356,9 @@ TEST(HybridGaussianElimination, Incremental_approximate) {
  // with 5 (pruned) leaves.
  CHECK_EQUAL(5, incrementalHybrid.size());
  EXPECT_LONGS_EQUAL(
-      5, incrementalHybrid[X(4)]->conditional()->asMixture()->nrComponents());
+      5, incrementalHybrid[X(3)]->conditional()->asMixture()->nrComponents());
  EXPECT_LONGS_EQUAL(
-      5, incrementalHybrid[X(5)]->conditional()->asMixture()->nrComponents());
+      5, incrementalHybrid[X(4)]->conditional()->asMixture()->nrComponents());
}

/* ************************************************************************/

@@ -370,7 +370,7 @@ TEST(HybridGaussianISAM, NonTrivial) {
  /*************** Run Round 1 ***************/
  HybridNonlinearFactorGraph fg;

-  // Add a prior on pose x1 at the origin.
+  // Add a prior on pose x0 at the origin.
  // A prior factor consists of a mean and
  // a noise model (covariance matrix)
  Pose2 prior(0.0, 0.0, 0.0);  // prior mean is at origin

@@ -263,7 +263,7 @@ TEST(HybridFactorGraph, EliminationTree) {

  // Create ordering.
  Ordering ordering;
-  for (size_t k = 1; k <= self.K; k++) ordering += X(k);
+  for (size_t k = 0; k < self.K; k++) ordering += X(k);

  // Create elimination tree.
  HybridEliminationTree etree(self.linearizedFactorGraph, ordering);

@@ -271,9 +271,9 @@ TEST(HybridFactorGraph, EliminationTree) {
}

/****************************************************************************
-*Test elimination function by eliminating x1 in *-x1-*-x2 graph.
+*Test elimination function by eliminating x0 in *-x0-*-x1 graph.
*/
-TEST(GaussianElimination, Eliminate_x1) {
+TEST(GaussianElimination, Eliminate_x0) {
  Switching self(3);

  // Gather factors on x1, has a simple Gaussian and a mixture factor.

@@ -283,9 +283,9 @@ TEST(GaussianElimination, Eliminate_x1) {
  // Add first hybrid factor
  factors.push_back(self.linearizedFactorGraph[1]);

-  // Eliminate x1
+  // Eliminate x0
  Ordering ordering;
-  ordering += X(1);
+  ordering += X(0);

  auto result = EliminateHybrid(factors, ordering);
  CHECK(result.first);

@@ -296,20 +296,20 @@ TEST(GaussianElimination, Eliminate_x1) {
}

/****************************************************************************
-* Test elimination function by eliminating x2 in x1-*-x2-*-x3 chain.
-*                                                m1/      \m2
+* Test elimination function by eliminating x1 in x0-*-x1-*-x2 chain.
+*                                                m0/      \m1
*/
-TEST(HybridsGaussianElimination, Eliminate_x2) {
+TEST(HybridsGaussianElimination, Eliminate_x1) {
  Switching self(3);

-  // Gather factors on x2, will be two mixture factors (with x1 and x3, resp.).
+  // Gather factors on x1, will be two mixture factors (with x0 and x2, resp.).
  HybridGaussianFactorGraph factors;
-  factors.push_back(self.linearizedFactorGraph[1]);  // involves m1
-  factors.push_back(self.linearizedFactorGraph[2]);  // involves m2
+  factors.push_back(self.linearizedFactorGraph[1]);  // involves m0
+  factors.push_back(self.linearizedFactorGraph[2]);  // involves m1

-  // Eliminate x2
+  // Eliminate x1
  Ordering ordering;
-  ordering += X(2);
+  ordering += X(1);

  std::pair<HybridConditional::shared_ptr, HybridFactor::shared_ptr> result =
      EliminateHybrid(factors, ordering);

@@ -326,28 +326,28 @@ TEST(HybridsGaussianElimination, Eliminate_x2) {
GaussianFactorGraph::shared_ptr batchGFG(double between,
                                         Values linearizationPoint) {
  NonlinearFactorGraph graph;
-  graph.addPrior<double>(X(1), 0, Isotropic::Sigma(1, 0.1));
+  graph.addPrior<double>(X(0), 0, Isotropic::Sigma(1, 0.1));

-  auto between_x1_x2 = boost::make_shared<MotionModel>(
-      X(1), X(2), between, Isotropic::Sigma(1, 1.0));
+  auto between_x0_x1 = boost::make_shared<MotionModel>(
+      X(0), X(1), between, Isotropic::Sigma(1, 1.0));

-  graph.push_back(between_x1_x2);
+  graph.push_back(between_x0_x1);

  return graph.linearize(linearizationPoint);
}

/****************************************************************************
-* Test elimination function by eliminating x1 and x2 in graph.
+* Test elimination function by eliminating x0 and x1 in graph.
*/
TEST(HybridGaussianElimination, EliminateHybrid_2_Variable) {
  Switching self(2, 1.0, 0.1);

  auto factors = self.linearizedFactorGraph;

-  // Eliminate x1
+  // Eliminate x0
  Ordering ordering;
+  ordering += X(0);
  ordering += X(1);
-  ordering += X(2);

  HybridConditional::shared_ptr hybridConditionalMixture;
  HybridFactor::shared_ptr factorOnModes;

@@ -359,7 +359,7 @@ TEST(HybridGaussianElimination, EliminateHybrid_2_Variable) {
      dynamic_pointer_cast<GaussianMixture>(hybridConditionalMixture->inner());

  CHECK(gaussianConditionalMixture);
-  // Frontals = [x1, x2]
+  // Frontals = [x0, x1]
  EXPECT_LONGS_EQUAL(2, gaussianConditionalMixture->nrFrontals());
  // 1 parent, which is the mode
  EXPECT_LONGS_EQUAL(1, gaussianConditionalMixture->nrParents());

@@ -387,7 +387,7 @@ TEST(HybridFactorGraph, Partial_Elimination) {

  // Create ordering.
  Ordering ordering;
-  for (size_t k = 1; k <= self.K; k++) ordering += X(k);
+  for (size_t k = 0; k < self.K; k++) ordering += X(k);

  // Eliminate partially.
  HybridBayesNet::shared_ptr hybridBayesNet;

@@ -397,18 +397,18 @@ TEST(HybridFactorGraph, Partial_Elimination) {

  CHECK(hybridBayesNet);
  EXPECT_LONGS_EQUAL(3, hybridBayesNet->size());
-  EXPECT(hybridBayesNet->at(0)->frontals() == KeyVector{X(1)});
-  EXPECT(hybridBayesNet->at(0)->parents() == KeyVector({X(2), M(1)}));
-  EXPECT(hybridBayesNet->at(1)->frontals() == KeyVector{X(2)});
-  EXPECT(hybridBayesNet->at(1)->parents() == KeyVector({X(3), M(1), M(2)}));
-  EXPECT(hybridBayesNet->at(2)->frontals() == KeyVector{X(3)});
-  EXPECT(hybridBayesNet->at(2)->parents() == KeyVector({M(1), M(2)}));
+  EXPECT(hybridBayesNet->at(0)->frontals() == KeyVector{X(0)});
+  EXPECT(hybridBayesNet->at(0)->parents() == KeyVector({X(1), M(0)}));
+  EXPECT(hybridBayesNet->at(1)->frontals() == KeyVector{X(1)});
+  EXPECT(hybridBayesNet->at(1)->parents() == KeyVector({X(2), M(0), M(1)}));
+  EXPECT(hybridBayesNet->at(2)->frontals() == KeyVector{X(2)});
+  EXPECT(hybridBayesNet->at(2)->parents() == KeyVector({M(0), M(1)}));

  CHECK(remainingFactorGraph);
  EXPECT_LONGS_EQUAL(3, remainingFactorGraph->size());
-  EXPECT(remainingFactorGraph->at(0)->keys() == KeyVector({M(1)}));
-  EXPECT(remainingFactorGraph->at(1)->keys() == KeyVector({M(2), M(1)}));
-  EXPECT(remainingFactorGraph->at(2)->keys() == KeyVector({M(1), M(2)}));
+  EXPECT(remainingFactorGraph->at(0)->keys() == KeyVector({M(0)}));
+  EXPECT(remainingFactorGraph->at(1)->keys() == KeyVector({M(1), M(0)}));
+  EXPECT(remainingFactorGraph->at(2)->keys() == KeyVector({M(0), M(1)}));
}

/****************************************************************************

@@ -427,7 +427,7 @@ TEST(HybridFactorGraph, Full_Elimination) {
  {
    // Create ordering.
    Ordering ordering;
-    for (size_t k = 1; k <= self.K; k++) ordering += X(k);
+    for (size_t k = 0; k < self.K; k++) ordering += X(k);

    // Eliminate partially.
    std::tie(hybridBayesNet_partial, remainingFactorGraph_partial) =

@ -440,15 +440,15 @@ TEST(HybridFactorGraph, Full_Elimination) {
      discrete_fg.push_back(df->inner());
    }
    ordering.clear();
    for (size_t k = 1; k < self.K; k++) ordering += M(k);
    for (size_t k = 0; k < self.K - 1; k++) ordering += M(k);
    discreteBayesNet =
        *discrete_fg.eliminateSequential(ordering, EliminateForMPE);
  }

  // Create ordering.
  Ordering ordering;
  for (size_t k = 1; k <= self.K; k++) ordering += X(k);
  for (size_t k = 1; k < self.K; k++) ordering += M(k);
  for (size_t k = 0; k < self.K; k++) ordering += X(k);
  for (size_t k = 0; k < self.K - 1; k++) ordering += M(k);

  // Eliminate partially.
  HybridBayesNet::shared_ptr hybridBayesNet =
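
Note: the recurring change in these ordering hunks is the move from 1-based keys (x1..xK, m1..m(K-1)) to 0-based keys (x0..x(K-1), m0..m(K-2)). A minimal standalone sketch of the re-indexed ordering construction, assuming gtsam's symbol_shorthand and a fixture exposing K; the helper name is illustrative and not part of this PR:

#include <gtsam/inference/Ordering.h>
#include <gtsam/inference/Symbol.h>

using gtsam::symbol_shorthand::M;
using gtsam::symbol_shorthand::X;

// Build the elimination ordering used after the re-indexing:
// continuous keys x0..x(K-1) first, then discrete modes m0..m(K-2).
gtsam::Ordering makeOrdering(size_t K) {
  gtsam::Ordering ordering;
  for (size_t k = 0; k < K; k++) ordering += X(k);
  for (size_t k = 0; k < K - 1; k++) ordering += M(k);
  return ordering;
}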
@ -456,23 +456,23 @@ TEST(HybridFactorGraph, Full_Elimination) {

  CHECK(hybridBayesNet);
  EXPECT_LONGS_EQUAL(5, hybridBayesNet->size());
  // p(x1 | x2, m1)
  EXPECT(hybridBayesNet->at(0)->frontals() == KeyVector{X(1)});
  EXPECT(hybridBayesNet->at(0)->parents() == KeyVector({X(2), M(1)}));
  // p(x2 | x3, m1, m2)
  EXPECT(hybridBayesNet->at(1)->frontals() == KeyVector{X(2)});
  EXPECT(hybridBayesNet->at(1)->parents() == KeyVector({X(3), M(1), M(2)}));
  // p(x3 | m1, m2)
  EXPECT(hybridBayesNet->at(2)->frontals() == KeyVector{X(3)});
  EXPECT(hybridBayesNet->at(2)->parents() == KeyVector({M(1), M(2)}));
  // P(m1 | m2)
  EXPECT(hybridBayesNet->at(3)->frontals() == KeyVector{M(1)});
  EXPECT(hybridBayesNet->at(3)->parents() == KeyVector({M(2)}));
  // p(x0 | x1, m0)
  EXPECT(hybridBayesNet->at(0)->frontals() == KeyVector{X(0)});
  EXPECT(hybridBayesNet->at(0)->parents() == KeyVector({X(1), M(0)}));
  // p(x1 | x2, m0, m1)
  EXPECT(hybridBayesNet->at(1)->frontals() == KeyVector{X(1)});
  EXPECT(hybridBayesNet->at(1)->parents() == KeyVector({X(2), M(0), M(1)}));
  // p(x2 | m0, m1)
  EXPECT(hybridBayesNet->at(2)->frontals() == KeyVector{X(2)});
  EXPECT(hybridBayesNet->at(2)->parents() == KeyVector({M(0), M(1)}));
  // P(m0 | m1)
  EXPECT(hybridBayesNet->at(3)->frontals() == KeyVector{M(0)});
  EXPECT(hybridBayesNet->at(3)->parents() == KeyVector({M(1)}));
  EXPECT(
      dynamic_pointer_cast<DiscreteConditional>(hybridBayesNet->at(3)->inner())
          ->equals(*discreteBayesNet.at(0)));
  // P(m2)
  EXPECT(hybridBayesNet->at(4)->frontals() == KeyVector{M(2)});
  // P(m1)
  EXPECT(hybridBayesNet->at(4)->frontals() == KeyVector{M(1)});
  EXPECT_LONGS_EQUAL(0, hybridBayesNet->at(4)->nrParents());
  EXPECT(
      dynamic_pointer_cast<DiscreteConditional>(hybridBayesNet->at(4)->inner())
@ -489,7 +489,7 @@ TEST(HybridFactorGraph, Printing) {

  // Create ordering.
  Ordering ordering;
  for (size_t k = 1; k <= self.K; k++) ordering += X(k);
  for (size_t k = 0; k < self.K; k++) ordering += X(k);

  // Eliminate partially.
  HybridBayesNet::shared_ptr hybridBayesNet;
@ -499,14 +499,37 @@ TEST(HybridFactorGraph, Printing) {

  string expected_hybridFactorGraph = R"(
size: 7
factor 0: Continuous [x1]
factor 0: Continuous [x0]

A[x1] = [
A[x0] = [
10
]
b = [ -10 ]
No noise model
factor 1: Hybrid [x1 x2; m1]{
factor 1: Hybrid [x0 x1; m0]{
Choice(m0)
0 Leaf :
A[x0] = [
-1
]
A[x1] = [
1
]
b = [ -1 ]
No noise model

1 Leaf :
A[x0] = [
-1
]
A[x1] = [
1
]
b = [ -0 ]
No noise model

}
factor 2: Hybrid [x1 x2; m1]{
Choice(m1)
0 Leaf :
A[x1] = [
@ -529,54 +552,31 @@ factor 1: Hybrid [x1 x2; m1]{
No noise model

}
factor 2: Hybrid [x2 x3; m2]{
Choice(m2)
0 Leaf :
A[x2] = [
-1
]
A[x3] = [
1
]
b = [ -1 ]
No noise model
factor 3: Continuous [x1]

1 Leaf :
A[x2] = [
-1
A[x1] = [
10
]
A[x3] = [
1
]
b = [ -0 ]
b = [ -10 ]
No noise model

}
factor 3: Continuous [x2]
factor 4: Continuous [x2]

A[x2] = [
10
]
b = [ -10 ]
No noise model
factor 4: Continuous [x3]

A[x3] = [
10
]
b = [ -10 ]
No noise model
factor 5: Discrete [m1]
P( m1 ):
factor 5: Discrete [m0]
P( m0 ):
Leaf 0.5

factor 6: Discrete [m2 m1]
P( m2 | m1 ):
Choice(m2)
0 Choice(m1)
factor 6: Discrete [m1 m0]
P( m1 | m0 ):
Choice(m1)
0 Choice(m0)
0 0 Leaf 0.33333333
0 1 Leaf 0.6
1 Choice(m1)
1 Choice(m0)
1 0 Leaf 0.66666667
1 1 Leaf 0.4

@ -586,71 +586,71 @@ factor 6: Discrete [m2 m1]
  // Expected output for hybridBayesNet.
  string expected_hybridBayesNet = R"(
size: 3
factor 0: Hybrid P( x1 | x2 m1)
Discrete Keys = (m1, 2),
Choice(m1)
0 Leaf p(x1 | x2)
factor 0: Hybrid P( x0 | x1 m0)
Discrete Keys = (m0, 2),
Choice(m0)
0 Leaf p(x0 | x1)
R = [ 10.0499 ]
S[x2] = [ -0.0995037 ]
S[x1] = [ -0.0995037 ]
d = [ -9.85087 ]
No noise model

1 Leaf p(x1 | x2)
1 Leaf p(x0 | x1)
R = [ 10.0499 ]
S[x2] = [ -0.0995037 ]
S[x1] = [ -0.0995037 ]
d = [ -9.95037 ]
No noise model

factor 1: Hybrid P( x2 | x3 m1 m2)
Discrete Keys = (m1, 2), (m2, 2),
Choice(m2)
0 Choice(m1)
0 0 Leaf p(x2 | x3)
factor 1: Hybrid P( x1 | x2 m0 m1)
Discrete Keys = (m0, 2), (m1, 2),
Choice(m1)
0 Choice(m0)
0 0 Leaf p(x1 | x2)
R = [ 10.099 ]
S[x3] = [ -0.0990196 ]
S[x2] = [ -0.0990196 ]
d = [ -9.99901 ]
No noise model

0 1 Leaf p(x2 | x3)
0 1 Leaf p(x1 | x2)
R = [ 10.099 ]
S[x3] = [ -0.0990196 ]
S[x2] = [ -0.0990196 ]
d = [ -9.90098 ]
No noise model

1 Choice(m1)
1 0 Leaf p(x2 | x3)
1 Choice(m0)
1 0 Leaf p(x1 | x2)
R = [ 10.099 ]
S[x3] = [ -0.0990196 ]
S[x2] = [ -0.0990196 ]
d = [ -10.098 ]
No noise model

1 1 Leaf p(x2 | x3)
1 1 Leaf p(x1 | x2)
R = [ 10.099 ]
S[x3] = [ -0.0990196 ]
S[x2] = [ -0.0990196 ]
d = [ -10 ]
No noise model

factor 2: Hybrid P( x3 | m1 m2)
Discrete Keys = (m1, 2), (m2, 2),
Choice(m2)
0 Choice(m1)
0 0 Leaf p(x3)
factor 2: Hybrid P( x2 | m0 m1)
Discrete Keys = (m0, 2), (m1, 2),
Choice(m1)
0 Choice(m0)
0 0 Leaf p(x2)
R = [ 10.0494 ]
d = [ -10.1489 ]
No noise model

0 1 Leaf p(x3)
0 1 Leaf p(x2)
R = [ 10.0494 ]
d = [ -10.1479 ]
No noise model

1 Choice(m1)
1 0 Leaf p(x3)
1 Choice(m0)
1 0 Leaf p(x2)
R = [ 10.0494 ]
d = [ -10.0504 ]
No noise model

1 1 Leaf p(x3)
1 1 Leaf p(x2)
R = [ 10.0494 ]
d = [ -10.0494 ]
No noise model
@ -669,7 +669,7 @@ factor 2: Hybrid P( x3 | m1 m2)
TEST(HybridFactorGraph, DefaultDecisionTree) {
  HybridNonlinearFactorGraph fg;

  // Add a prior on pose x1 at the origin.
  // Add a prior on pose x0 at the origin.
  // A prior factor consists of a mean and a noise model (covariance matrix)
  Pose2 prior(0.0, 0.0, 0.0);  // prior mean is at origin
  auto priorNoise = noiseModel::Diagonal::Sigmas(

@ -55,47 +55,47 @@ TEST(HybridNonlinearISAM, IncrementalElimination) {
  // Create initial factor graph
  // * * *
  // | | |
  // X1 -*- X2 -*- X3
  //  \*-M1-*/
  graph1.push_back(switching.nonlinearFactorGraph.at(0));  // P(X1)
  graph1.push_back(switching.nonlinearFactorGraph.at(1));  // P(X1, X2 | M1)
  graph1.push_back(switching.nonlinearFactorGraph.at(2));  // P(X2, X3 | M2)
  graph1.push_back(switching.nonlinearFactorGraph.at(5));  // P(M1)
  // X0 -*- X1 -*- X2
  //  \*-M0-*/
  graph1.push_back(switching.nonlinearFactorGraph.at(0));  // P(X0)
  graph1.push_back(switching.nonlinearFactorGraph.at(1));  // P(X0, X1 | M0)
  graph1.push_back(switching.nonlinearFactorGraph.at(2));  // P(X1, X2 | M1)
  graph1.push_back(switching.nonlinearFactorGraph.at(5));  // P(M0)

  initial.insert<double>(X(1), 1);
  initial.insert<double>(X(2), 2);
  initial.insert<double>(X(3), 3);
  initial.insert<double>(X(0), 1);
  initial.insert<double>(X(1), 2);
  initial.insert<double>(X(2), 3);

  // Run update step
  isam.update(graph1, initial);

  // Check that after update we have 3 hybrid Bayes net nodes:
  // P(X1 | X2, M1) and P(X2, X3 | M1, M2), P(M1, M2)
  // P(X0 | X1, M0) and P(X1, X2 | M0, M1), P(M0, M1)
  HybridGaussianISAM bayesTree = isam.bayesTree();
  EXPECT_LONGS_EQUAL(3, bayesTree.size());
  EXPECT(bayesTree[X(1)]->conditional()->frontals() == KeyVector{X(1)});
  EXPECT(bayesTree[X(1)]->conditional()->parents() == KeyVector({X(2), M(1)}));
  EXPECT(bayesTree[X(2)]->conditional()->frontals() == KeyVector({X(2), X(3)}));
  EXPECT(bayesTree[X(2)]->conditional()->parents() == KeyVector({M(1), M(2)}));
  EXPECT(bayesTree[X(0)]->conditional()->frontals() == KeyVector{X(0)});
  EXPECT(bayesTree[X(0)]->conditional()->parents() == KeyVector({X(1), M(0)}));
  EXPECT(bayesTree[X(1)]->conditional()->frontals() == KeyVector({X(1), X(2)}));
  EXPECT(bayesTree[X(1)]->conditional()->parents() == KeyVector({M(0), M(1)}));

  /********************************************************/
  // New factor graph for incremental update.
  HybridNonlinearFactorGraph graph2;
  initial = Values();

  graph1.push_back(switching.nonlinearFactorGraph.at(3));  // P(X2)
  graph2.push_back(switching.nonlinearFactorGraph.at(4));  // P(X3)
  graph2.push_back(switching.nonlinearFactorGraph.at(6));  // P(M1, M2)
  graph1.push_back(switching.nonlinearFactorGraph.at(3));  // P(X1)
  graph2.push_back(switching.nonlinearFactorGraph.at(4));  // P(X2)
  graph2.push_back(switching.nonlinearFactorGraph.at(6));  // P(M0, M1)

  isam.update(graph2, initial);

  bayesTree = isam.bayesTree();
  // Check that after the second update we have
  // 1 additional hybrid Bayes net node:
  // P(X2, X3 | M1, M2)
  // P(X1, X2 | M0, M1)
  EXPECT_LONGS_EQUAL(3, bayesTree.size());
  EXPECT(bayesTree[X(3)]->conditional()->frontals() == KeyVector({X(2), X(3)}));
  EXPECT(bayesTree[X(3)]->conditional()->parents() == KeyVector({M(1), M(2)}));
  EXPECT(bayesTree[X(2)]->conditional()->frontals() == KeyVector({X(1), X(2)}));
  EXPECT(bayesTree[X(2)]->conditional()->parents() == KeyVector({M(0), M(1)}));
}

/* ****************************************************************************/
@ -109,35 +109,35 @@ TEST(HybridNonlinearISAM, IncrementalInference) {
  // Create initial factor graph
  // * * *
  // | | |
  // X1 -*- X2 -*- X3
  // X0 -*- X1 -*- X2
  // | |
  // *-M1 - * - M2
  graph1.push_back(switching.nonlinearFactorGraph.at(0));  // P(X1)
  graph1.push_back(switching.nonlinearFactorGraph.at(1));  // P(X1, X2 | M1)
  graph1.push_back(switching.nonlinearFactorGraph.at(3));  // P(X2)
  graph1.push_back(switching.nonlinearFactorGraph.at(5));  // P(M1)
  // *-M0 - * - M1
  graph1.push_back(switching.nonlinearFactorGraph.at(0));  // P(X0)
  graph1.push_back(switching.nonlinearFactorGraph.at(1));  // P(X0, X1 | M0)
  graph1.push_back(switching.nonlinearFactorGraph.at(3));  // P(X1)
  graph1.push_back(switching.nonlinearFactorGraph.at(5));  // P(M0)

  initial.insert<double>(X(1), 1);
  initial.insert<double>(X(2), 2);
  initial.insert<double>(X(0), 1);
  initial.insert<double>(X(1), 2);

  // Run update step
  isam.update(graph1, initial);
  HybridGaussianISAM bayesTree = isam.bayesTree();

  auto discreteConditional_m1 =
      bayesTree[M(1)]->conditional()->asDiscreteConditional();
  EXPECT(discreteConditional_m1->keys() == KeyVector({M(1)}));
  auto discreteConditional_m0 =
      bayesTree[M(0)]->conditional()->asDiscreteConditional();
  EXPECT(discreteConditional_m0->keys() == KeyVector({M(0)}));

  /********************************************************/
  // New factor graph for incremental update.
  HybridNonlinearFactorGraph graph2;
  initial = Values();

  initial.insert<double>(X(3), 3);
  initial.insert<double>(X(2), 3);

  graph2.push_back(switching.nonlinearFactorGraph.at(2));  // P(X2, X3 | M2)
  graph2.push_back(switching.nonlinearFactorGraph.at(4));  // P(X3)
  graph2.push_back(switching.nonlinearFactorGraph.at(6));  // P(M1, M2)
  graph2.push_back(switching.nonlinearFactorGraph.at(2));  // P(X1, X2 | M1)
  graph2.push_back(switching.nonlinearFactorGraph.at(4));  // P(X2)
  graph2.push_back(switching.nonlinearFactorGraph.at(6));  // P(M0, M1)

  isam.update(graph2, initial);
  bayesTree = isam.bayesTree();
@ -145,9 +145,9 @@ TEST(HybridNonlinearISAM, IncrementalInference) {
  /********************************************************/
  // Run batch elimination so we can compare results.
  Ordering ordering;
  ordering += X(0);
  ordering += X(1);
  ordering += X(2);
  ordering += X(3);

  // Now we calculate the actual factors using full elimination
  HybridBayesTree::shared_ptr expectedHybridBayesTree;
@ -155,67 +155,67 @@ TEST(HybridNonlinearISAM, IncrementalInference) {
  std::tie(expectedHybridBayesTree, expectedRemainingGraph) =
      switching.linearizedFactorGraph.eliminatePartialMultifrontal(ordering);

  // The densities on X(0) should be the same
  auto x0_conditional = dynamic_pointer_cast<GaussianMixture>(
      bayesTree[X(0)]->conditional()->inner());
  auto expected_x0_conditional = dynamic_pointer_cast<GaussianMixture>(
      (*expectedHybridBayesTree)[X(0)]->conditional()->inner());
  EXPECT(assert_equal(*x0_conditional, *expected_x0_conditional));

  // The densities on X(1) should be the same
  auto x1_conditional = dynamic_pointer_cast<GaussianMixture>(
      bayesTree[X(1)]->conditional()->inner());
  auto actual_x1_conditional = dynamic_pointer_cast<GaussianMixture>(
  auto expected_x1_conditional = dynamic_pointer_cast<GaussianMixture>(
      (*expectedHybridBayesTree)[X(1)]->conditional()->inner());
  EXPECT(assert_equal(*x1_conditional, *actual_x1_conditional));
  EXPECT(assert_equal(*x1_conditional, *expected_x1_conditional));

  // The densities on X(2) should be the same
  auto x2_conditional = dynamic_pointer_cast<GaussianMixture>(
      bayesTree[X(2)]->conditional()->inner());
  auto actual_x2_conditional = dynamic_pointer_cast<GaussianMixture>(
  auto expected_x2_conditional = dynamic_pointer_cast<GaussianMixture>(
      (*expectedHybridBayesTree)[X(2)]->conditional()->inner());
  EXPECT(assert_equal(*x2_conditional, *actual_x2_conditional));

  // The densities on X(3) should be the same
  auto x3_conditional = dynamic_pointer_cast<GaussianMixture>(
      bayesTree[X(3)]->conditional()->inner());
  auto actual_x3_conditional = dynamic_pointer_cast<GaussianMixture>(
      (*expectedHybridBayesTree)[X(2)]->conditional()->inner());
  EXPECT(assert_equal(*x3_conditional, *actual_x3_conditional));
  EXPECT(assert_equal(*x2_conditional, *expected_x2_conditional));

  // We only perform manual continuous elimination for 0,0.
  // The other discrete probabilities on M(2) are calculated the same way
  // The other discrete probabilities on M(1) are calculated the same way
  Ordering discrete_ordering;
  discrete_ordering += M(0);
  discrete_ordering += M(1);
  discrete_ordering += M(2);
  HybridBayesTree::shared_ptr discreteBayesTree =
      expectedRemainingGraph->eliminateMultifrontal(discrete_ordering);

  DiscreteValues m00;
  m00[M(1)] = 0, m00[M(2)] = 0;
  m00[M(0)] = 0, m00[M(1)] = 0;
  DiscreteConditional decisionTree =
      *(*discreteBayesTree)[M(2)]->conditional()->asDiscreteConditional();
      *(*discreteBayesTree)[M(1)]->conditional()->asDiscreteConditional();
  double m00_prob = decisionTree(m00);

  auto discreteConditional =
      bayesTree[M(2)]->conditional()->asDiscreteConditional();
      bayesTree[M(1)]->conditional()->asDiscreteConditional();

  // Test if the probability values are as expected with regression tests.
  DiscreteValues assignment;
  EXPECT(assert_equal(m00_prob, 0.0619233, 1e-5));
  assignment[M(0)] = 0;
  assignment[M(1)] = 0;
  assignment[M(2)] = 0;
  EXPECT(assert_equal(m00_prob, (*discreteConditional)(assignment), 1e-5));
  assignment[M(1)] = 1;
  assignment[M(2)] = 0;
  EXPECT(assert_equal(0.183743, (*discreteConditional)(assignment), 1e-5));
  assignment[M(0)] = 1;
  assignment[M(1)] = 0;
  assignment[M(2)] = 1;
  EXPECT(assert_equal(0.204159, (*discreteConditional)(assignment), 1e-5));
  EXPECT(assert_equal(0.183743, (*discreteConditional)(assignment), 1e-5));
  assignment[M(0)] = 0;
  assignment[M(1)] = 1;
  EXPECT(assert_equal(0.204159, (*discreteConditional)(assignment), 1e-5));
  assignment[M(0)] = 1;
  assignment[M(1)] = 1;
  assignment[M(2)] = 1;
  EXPECT(assert_equal(0.2, (*discreteConditional)(assignment), 1e-5));

  // Check if the clique conditional generated from incremental elimination
  // matches that of batch elimination.
  auto expectedChordal = expectedRemainingGraph->eliminateMultifrontal();
  auto expectedConditional = dynamic_pointer_cast<DecisionTreeFactor>(
      (*expectedChordal)[M(2)]->conditional()->inner());
      (*expectedChordal)[M(1)]->conditional()->inner());
  auto actualConditional = dynamic_pointer_cast<DecisionTreeFactor>(
      bayesTree[M(2)]->conditional()->inner());
      bayesTree[M(1)]->conditional()->inner());
  EXPECT(assert_equal(*actualConditional, *expectedConditional, 1e-6));
}
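
Note: the regression block above evaluates the pruned discrete posterior at specific 0-based mode assignments. A self-contained sketch of that evaluation pattern; the helper name is illustrative and not from the PR:

#include <gtsam/discrete/DiscreteConditional.h>
#include <gtsam/inference/Symbol.h>

using gtsam::symbol_shorthand::M;

// Evaluate a discrete conditional at a given (m0, m1) mode assignment,
// mirroring the (*discreteConditional)(assignment) calls in the test.
double evalModes(const gtsam::DiscreteConditional &conditional,
                 size_t m0, size_t m1) {
  gtsam::DiscreteValues assignment;
  assignment[M(0)] = m0;
  assignment[M(1)] = m1;
  return conditional(assignment);
}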
@ -227,22 +227,22 @@ TEST(HybridNonlinearISAM, Approx_inference) {
  HybridNonlinearFactorGraph graph1;
  Values initial;

  // Add the 3 hybrid factors, x1-x2, x2-x3, x3-x4
  // Add the 3 hybrid factors, x0-x1, x1-x2, x2-x3
  for (size_t i = 1; i < 4; i++) {
    graph1.push_back(switching.nonlinearFactorGraph.at(i));
  }

  // Add the Gaussian factors, 1 prior on X(1),
  // 3 measurements on X(2), X(3), X(4)
  // Add the Gaussian factors, 1 prior on X(0),
  // 3 measurements on X(1), X(2), X(3)
  graph1.push_back(switching.nonlinearFactorGraph.at(0));
  for (size_t i = 4; i <= 7; i++) {
    graph1.push_back(switching.nonlinearFactorGraph.at(i));
    initial.insert<double>(X(i - 3), i - 3);
    initial.insert<double>(X(i - 4), i - 3);
  }

  // Create ordering.
  Ordering ordering;
  for (size_t j = 1; j <= 4; j++) {
  for (size_t j = 0; j < 4; j++) {
    ordering += X(j);
  }
@ -292,26 +292,26 @@ TEST(HybridNonlinearISAM, Approx_inference) {
  1 1 1 Leaf 0.5
  */

  auto discreteConditional_m1 = *dynamic_pointer_cast<DiscreteConditional>(
      bayesTree[M(1)]->conditional()->inner());
  EXPECT(discreteConditional_m1.keys() == KeyVector({M(1), M(2), M(3)}));
  auto discreteConditional_m0 = *dynamic_pointer_cast<DiscreteConditional>(
      bayesTree[M(0)]->conditional()->inner());
  EXPECT(discreteConditional_m0.keys() == KeyVector({M(0), M(1), M(2)}));

  // Get the number of elements which are greater than 0.
  auto count = [](const double &value, int count) {
    return value > 0 ? count + 1 : count;
  };
  // Check that the number of leaves after pruning is 5.
  EXPECT_LONGS_EQUAL(5, discreteConditional_m1.fold(count, 0));
  EXPECT_LONGS_EQUAL(5, discreteConditional_m0.fold(count, 0));

  // Check that the hybrid nodes of the bayes net match those of the pre-pruning
  // bayes net, at the same positions.
  auto &unprunedLastDensity = *dynamic_pointer_cast<GaussianMixture>(
      unprunedHybridBayesTree->clique(X(4))->conditional()->inner());
      unprunedHybridBayesTree->clique(X(3))->conditional()->inner());
  auto &lastDensity = *dynamic_pointer_cast<GaussianMixture>(
      bayesTree[X(4)]->conditional()->inner());
      bayesTree[X(3)]->conditional()->inner());

  std::vector<std::pair<DiscreteValues, double>> assignments =
      discreteConditional_m1.enumerate();
      discreteConditional_m0.enumerate();
  // Loop over all assignments and check the pruned components
  for (auto &&av : assignments) {
    const DiscreteValues &assignment = av.first;
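
Note: the fold-based leaf count above is how the test verifies pruning took effect. A small sketch of the same idiom on any DecisionTreeFactor-derived object (standalone, for illustration only):

#include <gtsam/discrete/DecisionTreeFactor.h>

// Count the leaves with non-zero probability, as the test's `count`
// lambda does; pruned branches collapse to zero-probability leaves.
int countNonZeroLeaves(const gtsam::DecisionTreeFactor &factor) {
  auto count = [](const double &value, int total) {
    return value > 0 ? total + 1 : total;
  };
  return factor.fold(count, 0);
}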
@ -336,18 +336,18 @@ TEST(HybridNonlinearISAM, Incremental_approximate) {
  Values initial;

  /***** Run Round 1 *****/
  // Add the 3 hybrid factors, x1-x2, x2-x3, x3-x4
  // Add the 3 hybrid factors, x0-x1, x1-x2, x2-x3
  for (size_t i = 1; i < 4; i++) {
    graph1.push_back(switching.nonlinearFactorGraph.at(i));
  }

  // Add the Gaussian factors, 1 prior on X(1),
  // 3 measurements on X(2), X(3), X(4)
  // Add the Gaussian factors, 1 prior on X(0),
  // 3 measurements on X(1), X(2), X(3)
  graph1.push_back(switching.nonlinearFactorGraph.at(0));
  initial.insert<double>(X(1), 1);
  initial.insert<double>(X(0), 1);
  for (size_t i = 5; i <= 7; i++) {
    graph1.push_back(switching.nonlinearFactorGraph.at(i));
    initial.insert<double>(X(i - 3), i - 3);
    initial.insert<double>(X(i - 4), i - 3);
  }

  // Run update with pruning
@ -361,20 +361,20 @@ TEST(HybridNonlinearISAM, Incremental_approximate) {
  // each with 2, 4, 8, and 5 (pruned) leaves respectively.
  EXPECT_LONGS_EQUAL(4, bayesTree.size());
  EXPECT_LONGS_EQUAL(
      2, bayesTree[X(1)]->conditional()->asMixture()->nrComponents());
      2, bayesTree[X(0)]->conditional()->asMixture()->nrComponents());
  EXPECT_LONGS_EQUAL(
      3, bayesTree[X(2)]->conditional()->asMixture()->nrComponents());
      3, bayesTree[X(1)]->conditional()->asMixture()->nrComponents());
  EXPECT_LONGS_EQUAL(
      5, bayesTree[X(2)]->conditional()->asMixture()->nrComponents());
  EXPECT_LONGS_EQUAL(
      5, bayesTree[X(3)]->conditional()->asMixture()->nrComponents());
  EXPECT_LONGS_EQUAL(
      5, bayesTree[X(4)]->conditional()->asMixture()->nrComponents());

  /***** Run Round 2 *****/
  HybridGaussianFactorGraph graph2;
  graph2.push_back(switching.nonlinearFactorGraph.at(4));  // x4-x5
  graph2.push_back(switching.nonlinearFactorGraph.at(8));  // x5 measurement
  graph2.push_back(switching.nonlinearFactorGraph.at(4));  // x3-x4
  graph2.push_back(switching.nonlinearFactorGraph.at(8));  // x4 measurement
  initial = Values();
  initial.insert<double>(X(5), 5);
  initial.insert<double>(X(4), 5);

  // Run update with pruning a second time.
  incrementalHybrid.update(graph2, initial);
@ -386,9 +386,9 @@ TEST(HybridNonlinearISAM, Incremental_approximate) {
  // with 5 (pruned) leaves.
  CHECK_EQUAL(5, bayesTree.size());
  EXPECT_LONGS_EQUAL(
      5, bayesTree[X(4)]->conditional()->asMixture()->nrComponents());
      5, bayesTree[X(3)]->conditional()->asMixture()->nrComponents());
  EXPECT_LONGS_EQUAL(
      5, bayesTree[X(5)]->conditional()->asMixture()->nrComponents());
      5, bayesTree[X(4)]->conditional()->asMixture()->nrComponents());
}

/* ************************************************************************/
@ -401,7 +401,7 @@ TEST(HybridNonlinearISAM, NonTrivial) {
  HybridNonlinearFactorGraph fg;
  HybridNonlinearISAM inc;

  // Add a prior on pose x1 at the origin.
  // Add a prior on pose x0 at the origin.
  // A prior factor consists of a mean and
  // a noise model (covariance matrix)
  Pose2 prior(0.0, 0.0, 0.0);  // prior mean is at origin
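
Note: the NonTrivial test continues beyond this hunk; the prior it adds now sits on pose x0. A hedged sketch of such a prior, with placeholder sigma values since the diff cuts off before the actual noise model:

#include <gtsam/geometry/Pose2.h>
#include <gtsam/inference/Symbol.h>
#include <gtsam/nonlinear/NonlinearFactorGraph.h>

using gtsam::symbol_shorthand::X;

// Add a prior on pose x0 at the origin; the sigma values below are
// assumptions for illustration, not the ones used in the test.
void addOriginPrior(gtsam::NonlinearFactorGraph &fg) {
  gtsam::Pose2 prior(0.0, 0.0, 0.0);  // prior mean is at origin
  auto priorNoise = gtsam::noiseModel::Diagonal::Sigmas(
      gtsam::Vector3(0.1, 0.1, 0.05));
  fg.addPrior(X(0), prior, priorNoise);
}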