Merge pull request #1339 from borglab/hybrid/new-elimination
commit da5d3a242b
@@ -105,7 +105,7 @@ bool GaussianMixture::equals(const HybridFactor &lf, double tol) const {
 /* *******************************************************************************/
 void GaussianMixture::print(const std::string &s,
                             const KeyFormatter &formatter) const {
-  std::cout << s;
+  std::cout << (s.empty() ? "" : s + "\n");
   if (isContinuous()) std::cout << "Continuous ";
   if (isDiscrete()) std::cout << "Discrete ";
   if (isHybrid()) std::cout << "Hybrid ";

@@ -24,7 +24,7 @@
 namespace gtsam {

 /**
- * Elimination Tree type for Hybrid
+ * Elimination Tree type for Hybrid Factor Graphs.
  *
  * @ingroup hybrid
  */

@@ -257,13 +257,14 @@ hybridElimination(const HybridGaussianFactorGraph &factors,
   // If there are no more continuous parents, then we should create here a
   // DiscreteFactor, with the error for each discrete choice.
   if (keysOfSeparator.empty()) {
     // TODO(Varun) Use the math from the iMHS_Math-1-indexed document
     VectorValues empty_values;
     auto factorProb = [&](const GaussianFactor::shared_ptr &factor) {
       if (!factor) {
         return 0.0;  // If nullptr, return 0.0 probability
       } else {
-        return 1.0;
+        double error =
+            0.5 * std::abs(factor->augmentedInformation().determinant());
+        return std::exp(-error);
       }
     };
     DecisionTree<Key, double> fdt(separatorFactors, factorProb);

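A minimal, self-contained sketch (not part of the diff) of the quantity the updated factorProb lambda now assigns to each discrete choice; the unary JacobianFactor below is made up purely for illustration:

    #include <gtsam/inference/Symbol.h>
    #include <gtsam/linear/JacobianFactor.h>

    #include <cmath>
    #include <iostream>

    int main() {
      using namespace gtsam;
      // A made-up unary factor on x0, standing in for one discrete-choice branch.
      JacobianFactor factor(symbol_shorthand::X(0), I_3x3, Vector3::Ones());
      // Same formula as the new lambda: half the absolute determinant of the
      // augmented information matrix, mapped through exp(-error).
      double error = 0.5 * std::abs(factor.augmentedInformation().determinant());
      std::cout << "unnormalized probability = " << std::exp(-error) << "\n";
      return 0;
    }

The per-choice values produced this way are what the DecisionTree<Key, double> fdt above collects.
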
@@ -551,166 +552,4 @@ HybridGaussianFactorGraph::separateContinuousDiscreteOrdering(
   return std::make_pair(continuous_ordering, discrete_ordering);
 }

-/* ************************************************************************ */
-boost::shared_ptr<HybridGaussianFactorGraph::BayesNetType>
-HybridGaussianFactorGraph::eliminateHybridSequential(
-    const boost::optional<Ordering> continuous,
-    const boost::optional<Ordering> discrete, const Eliminate &function,
-    OptionalVariableIndex variableIndex) const {
-  const Ordering continuous_ordering =
-      continuous ? *continuous : Ordering(this->continuousKeys());
-  const Ordering discrete_ordering =
-      discrete ? *discrete : Ordering(this->discreteKeys());
-
-  // Eliminate continuous
-  HybridBayesNet::shared_ptr bayesNet;
-  HybridGaussianFactorGraph::shared_ptr discreteGraph;
-  std::tie(bayesNet, discreteGraph) =
-      BaseEliminateable::eliminatePartialSequential(continuous_ordering,
-                                                    function, variableIndex);
-
-  // Get the last continuous conditional which will have all the discrete keys
-  HybridConditional::shared_ptr last_conditional =
-      bayesNet->at(bayesNet->size() - 1);
-  DiscreteKeys discrete_keys = last_conditional->discreteKeys();
-
-  // If not discrete variables, return the eliminated bayes net.
-  if (discrete_keys.size() == 0) {
-    return bayesNet;
-  }
-
-  // DecisionTree for P'(X|M, Z) for all mode sequences M
-  const AlgebraicDecisionTree<Key> probPrimeTree =
-      this->continuousProbPrimes(discrete_keys, bayesNet);
-
-  // Add the model selection factor P(M|Z)
-  discreteGraph->add(DecisionTreeFactor(discrete_keys, probPrimeTree));
-
-  // Perform discrete elimination
-  HybridBayesNet::shared_ptr discreteBayesNet =
-      discreteGraph->BaseEliminateable::eliminateSequential(
-          discrete_ordering, function, variableIndex);
-
-  bayesNet->add(*discreteBayesNet);
-
-  return bayesNet;
-}
-
-/* ************************************************************************ */
-boost::shared_ptr<HybridGaussianFactorGraph::BayesNetType>
-HybridGaussianFactorGraph::eliminateSequential(
-    OptionalOrderingType orderingType, const Eliminate &function,
-    OptionalVariableIndex variableIndex) const {
-  return BaseEliminateable::eliminateSequential(orderingType, function,
-                                                variableIndex);
-}
-
-/* ************************************************************************ */
-boost::shared_ptr<HybridGaussianFactorGraph::BayesNetType>
-HybridGaussianFactorGraph::eliminateSequential(
-    const Ordering &ordering, const Eliminate &function,
-    OptionalVariableIndex variableIndex) const {
-  // Segregate the continuous and the discrete keys
-  Ordering continuous_ordering, discrete_ordering;
-  std::tie(continuous_ordering, discrete_ordering) =
-      this->separateContinuousDiscreteOrdering(ordering);
-
-  return this->eliminateHybridSequential(continuous_ordering, discrete_ordering,
-                                         function, variableIndex);
-}
-
-/* ************************************************************************ */
-boost::shared_ptr<HybridGaussianFactorGraph::BayesTreeType>
-HybridGaussianFactorGraph::eliminateHybridMultifrontal(
-    const boost::optional<Ordering> continuous,
-    const boost::optional<Ordering> discrete, const Eliminate &function,
-    OptionalVariableIndex variableIndex) const {
-  const Ordering continuous_ordering =
-      continuous ? *continuous : Ordering(this->continuousKeys());
-  const Ordering discrete_ordering =
-      discrete ? *discrete : Ordering(this->discreteKeys());
-
-  // Eliminate continuous
-  HybridBayesTree::shared_ptr bayesTree;
-  HybridGaussianFactorGraph::shared_ptr discreteGraph;
-  std::tie(bayesTree, discreteGraph) =
-      BaseEliminateable::eliminatePartialMultifrontal(continuous_ordering,
-                                                      function, variableIndex);
-
-  // Get the last continuous conditional which will have all the discrete
-  const Key last_continuous_key = continuous_ordering.back();
-  HybridConditional::shared_ptr last_conditional =
-      (*bayesTree)[last_continuous_key]->conditional();
-  DiscreteKeys discrete_keys = last_conditional->discreteKeys();
-
-  // If not discrete variables, return the eliminated bayes net.
-  if (discrete_keys.size() == 0) {
-    return bayesTree;
-  }
-
-  // DecisionTree for P'(X|M, Z) for all mode sequences M
-  const AlgebraicDecisionTree<Key> probPrimeTree =
-      this->continuousProbPrimes(discrete_keys, bayesTree);
-
-  // Add the model selection factor P(M|Z)
-  discreteGraph->add(DecisionTreeFactor(discrete_keys, probPrimeTree));
-
-  // Eliminate discrete variables to get the discrete bayes tree.
-  // This bayes tree will be updated with the
-  // continuous variables as the child nodes.
-  HybridBayesTree::shared_ptr updatedBayesTree =
-      discreteGraph->BaseEliminateable::eliminateMultifrontal(discrete_ordering,
-                                                              function);
-
-  // Get the clique with all the discrete keys.
-  // There should only be 1 clique.
-  const HybridBayesTree::sharedClique discrete_clique =
-      (*updatedBayesTree)[discrete_ordering.at(0)];
-
-  std::set<HybridBayesTreeClique::shared_ptr> clique_set;
-  for (auto node : bayesTree->nodes()) {
-    clique_set.insert(node.second);
-  }
-
-  // Set the root of the bayes tree as the discrete clique
-  for (auto clique : clique_set) {
-    if (clique->conditional()->parents() ==
-        discrete_clique->conditional()->frontals()) {
-      updatedBayesTree->addClique(clique, discrete_clique);
-
-    } else {
-      // Remove the clique from the children of the parents since it will get
-      // added again in addClique.
-      auto clique_it = std::find(clique->parent()->children.begin(),
-                                 clique->parent()->children.end(), clique);
-      clique->parent()->children.erase(clique_it);
-      updatedBayesTree->addClique(clique, clique->parent());
-    }
-  }
-  return updatedBayesTree;
-}
-
-/* ************************************************************************ */
-boost::shared_ptr<HybridGaussianFactorGraph::BayesTreeType>
-HybridGaussianFactorGraph::eliminateMultifrontal(
-    OptionalOrderingType orderingType, const Eliminate &function,
-    OptionalVariableIndex variableIndex) const {
-  return BaseEliminateable::eliminateMultifrontal(orderingType, function,
-                                                  variableIndex);
-}
-
-/* ************************************************************************ */
-boost::shared_ptr<HybridGaussianFactorGraph::BayesTreeType>
-HybridGaussianFactorGraph::eliminateMultifrontal(
-    const Ordering &ordering, const Eliminate &function,
-    OptionalVariableIndex variableIndex) const {
-  // Segregate the continuous and the discrete keys
-  Ordering continuous_ordering, discrete_ordering;
-  std::tie(continuous_ordering, discrete_ordering) =
-      this->separateContinuousDiscreteOrdering(ordering);
-
-  return this->eliminateHybridMultifrontal(
-      continuous_ordering, discrete_ordering, function, variableIndex);
-}
-
 } // namespace gtsam

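With the custom overloads above removed, hybrid elimination goes through the stock Eliminateable entry points, which is the pattern the updated HybridSmoother and the tests in this PR use. A minimal sketch under that assumption; the free function and its name are invented for illustration:

    #include <gtsam/hybrid/HybridBayesNet.h>
    #include <gtsam/hybrid/HybridGaussianFactorGraph.h>
    #include <gtsam/inference/Ordering.h>

    // Hypothetical helper: eliminate a hybrid Gaussian factor graph with the
    // standard interface instead of the removed eliminateHybrid* overloads.
    gtsam::HybridBayesNet::shared_ptr eliminateHybridExample(
        const gtsam::HybridGaussianFactorGraph& graph) {
      // getHybridOrdering() orders the continuous keys before the discrete modes.
      const gtsam::Ordering ordering = graph.getHybridOrdering();
      return graph.eliminateSequential(ordering);
    }

The multifrontal analogue is graph.eliminateMultifrontal(ordering), as the tests below do.
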
@@ -302,57 +302,7 @@ class GTSAM_EXPORT HybridGaussianFactorGraph
   std::pair<Ordering, Ordering> separateContinuousDiscreteOrdering(
       const Ordering& ordering) const;

-  /**
-   * @brief Custom elimination function which computes the correct
-   * continuous probabilities. Returns a bayes net.
-   *
-   * @param continuous Optional ordering for all continuous variables.
-   * @param discrete Optional ordering for all discrete variables.
-   * @return boost::shared_ptr<BayesNetType>
-   */
-  boost::shared_ptr<BayesNetType> eliminateHybridSequential(
-      const boost::optional<Ordering> continuous = boost::none,
-      const boost::optional<Ordering> discrete = boost::none,
-      const Eliminate& function = EliminationTraitsType::DefaultEliminate,
-      OptionalVariableIndex variableIndex = boost::none) const;
-
-  /// Sequential elimination overload for hybrid
-  boost::shared_ptr<BayesNetType> eliminateSequential(
-      OptionalOrderingType orderingType = boost::none,
-      const Eliminate& function = EliminationTraitsType::DefaultEliminate,
-      OptionalVariableIndex variableIndex = boost::none) const;
-
-  /// Sequential elimination overload for hybrid
-  boost::shared_ptr<BayesNetType> eliminateSequential(
-      const Ordering& ordering,
-      const Eliminate& function = EliminationTraitsType::DefaultEliminate,
-      OptionalVariableIndex variableIndex = boost::none) const;
-
-  /**
-   * @brief Custom elimination function which computes the correct
-   * continuous probabilities. Returns a bayes tree.
-   *
-   * @param continuous Optional ordering for all continuous variables.
-   * @param discrete Optional ordering for all discrete variables.
-   * @return boost::shared_ptr<BayesTreeType>
-   */
-  boost::shared_ptr<BayesTreeType> eliminateHybridMultifrontal(
-      const boost::optional<Ordering> continuous = boost::none,
-      const boost::optional<Ordering> discrete = boost::none,
-      const Eliminate& function = EliminationTraitsType::DefaultEliminate,
-      OptionalVariableIndex variableIndex = boost::none) const;
-
-  /// Multifrontal elimination overload for hybrid
-  boost::shared_ptr<BayesTreeType> eliminateMultifrontal(
-      OptionalOrderingType orderingType = boost::none,
-      const Eliminate& function = EliminationTraitsType::DefaultEliminate,
-      OptionalVariableIndex variableIndex = boost::none) const;
-
-  /// Multifrontal elimination overload for hybrid
-  boost::shared_ptr<BayesTreeType> eliminateMultifrontal(
-      const Ordering& ordering,
-      const Eliminate& function = EliminationTraitsType::DefaultEliminate,
-      OptionalVariableIndex variableIndex = boost::none) const;
-
   /**
    * @brief Return a Colamd constrained ordering where the discrete keys are

@@ -51,10 +51,11 @@ class HybridEliminationTree;
 */
class GTSAM_EXPORT HybridJunctionTree
    : public JunctionTree<HybridBayesTree, HybridGaussianFactorGraph> {

 public:
  typedef JunctionTree<HybridBayesTree, HybridGaussianFactorGraph>
      Base;  ///< Base class
  typedef HybridJunctionTree This;  ///< This class
  typedef HybridJunctionTree This;  ///< This class
  typedef boost::shared_ptr<This> shared_ptr;  ///< Shared pointer to this class

  /**

@@ -32,7 +32,7 @@ void HybridSmoother::update(HybridGaussianFactorGraph graph,
   addConditionals(graph, hybridBayesNet_, ordering);

   // Eliminate.
-  auto bayesNetFragment = graph.eliminateHybridSequential();
+  auto bayesNetFragment = graph.eliminateSequential(ordering);

   /// Prune
   if (maxNrLeaves) {

@@ -164,25 +164,6 @@ TEST(HybridBayesNet, Optimize) {
   EXPECT(assert_equal(expectedValues, delta.continuous(), 1e-5));
 }

-/* ****************************************************************************/
-// Test bayes net multifrontal optimize
-TEST(HybridBayesNet, OptimizeMultifrontal) {
-  Switching s(4);
-
-  Ordering hybridOrdering = s.linearizedFactorGraph.getHybridOrdering();
-  HybridBayesTree::shared_ptr hybridBayesTree =
-      s.linearizedFactorGraph.eliminateMultifrontal(hybridOrdering);
-  HybridValues delta = hybridBayesTree->optimize();
-
-  VectorValues expectedValues;
-  expectedValues.insert(X(0), -0.999904 * Vector1::Ones());
-  expectedValues.insert(X(1), -0.99029 * Vector1::Ones());
-  expectedValues.insert(X(2), -1.00971 * Vector1::Ones());
-  expectedValues.insert(X(3), -1.0001 * Vector1::Ones());
-
-  EXPECT(assert_equal(expectedValues, delta.continuous(), 1e-5));
-}
-
 /* ****************************************************************************/
 // Test bayes net error
 TEST(HybridBayesNet, Error) {

@@ -32,6 +32,25 @@ using noiseModel::Isotropic;
 using symbol_shorthand::M;
 using symbol_shorthand::X;

+/* ****************************************************************************/
+// Test multifrontal optimize
+TEST(HybridBayesTree, OptimizeMultifrontal) {
+  Switching s(4);
+
+  Ordering hybridOrdering = s.linearizedFactorGraph.getHybridOrdering();
+  HybridBayesTree::shared_ptr hybridBayesTree =
+      s.linearizedFactorGraph.eliminateMultifrontal(hybridOrdering);
+  HybridValues delta = hybridBayesTree->optimize();
+
+  VectorValues expectedValues;
+  expectedValues.insert(X(0), -0.999904 * Vector1::Ones());
+  expectedValues.insert(X(1), -0.99029 * Vector1::Ones());
+  expectedValues.insert(X(2), -1.00971 * Vector1::Ones());
+  expectedValues.insert(X(3), -1.0001 * Vector1::Ones());
+
+  EXPECT(assert_equal(expectedValues, delta.continuous(), 1e-5));
+}
+
 /* ****************************************************************************/
 // Test for optimizing a HybridBayesTree with a given assignment.
 TEST(HybridBayesTree, OptimizeAssignment) {

@@ -15,6 +15,7 @@
  * @author Varun Agrawal
  */

+#include <gtsam/discrete/DiscreteBayesNet.h>
 #include <gtsam/geometry/Pose2.h>
 #include <gtsam/hybrid/HybridBayesNet.h>
 #include <gtsam/hybrid/HybridNonlinearFactorGraph.h>

@@ -70,6 +71,28 @@ Ordering getOrdering(HybridGaussianFactorGraph& factors,
   return ordering;
 }

+TEST(HybridEstimation, Full) {
+  size_t K = 3;
+  std::vector<double> measurements = {0, 1, 2};
+  // Ground truth discrete seq
+  std::vector<size_t> discrete_seq = {1, 1, 0};
+  // Switching example of robot moving in 1D
+  // with given measurements and equal mode priors.
+  Switching switching(K, 1.0, 0.1, measurements, "1/1 1/1");
+  HybridGaussianFactorGraph graph = switching.linearizedFactorGraph;
+
+  Ordering hybridOrdering;
+  hybridOrdering += X(0);
+  hybridOrdering += X(1);
+  hybridOrdering += X(2);
+  hybridOrdering += M(0);
+  hybridOrdering += M(1);
+  HybridBayesNet::shared_ptr bayesNet =
+      graph.eliminateSequential(hybridOrdering);
+
+  EXPECT_LONGS_EQUAL(5, bayesNet->size());
+}
+
 /****************************************************************************/
 // Test approximate inference with an additional pruning step.
 TEST(HybridEstimation, Incremental) {

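A quick sanity check on the expected size (my reading, not stated in the diff): sequential elimination produces one conditional per eliminated variable, and the ordering above lists three continuous keys X(0..2) and two modes M(0..1), so the resulting Bayes net has 3 + 2 = 5 conditionals.
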
@@ -258,8 +281,10 @@ TEST(HybridEstimation, Probability) {

     VectorValues values = bayes_net->optimize();

-    expected_errors.push_back(linear_graph->error(values));
-    expected_prob_primes.push_back(linear_graph->probPrime(values));
+    double error = linear_graph->error(values);
+    expected_errors.push_back(error);
+    double prob_prime = linear_graph->probPrime(values);
+    expected_prob_primes.push_back(prob_prime);
   }

   // Switching example of robot moving in 1D with given measurements and equal

@@ -269,52 +294,21 @@ TEST(HybridEstimation, Probability) {
   auto graph = switching.linearizedFactorGraph;
   Ordering ordering = getOrdering(graph, HybridGaussianFactorGraph());

   AlgebraicDecisionTree<Key> expected_probPrimeTree = probPrimeTree(graph);

-  // Eliminate continuous
-  Ordering continuous_ordering(graph.continuousKeys());
-  HybridBayesNet::shared_ptr bayesNet;
-  HybridGaussianFactorGraph::shared_ptr discreteGraph;
-  std::tie(bayesNet, discreteGraph) =
-      graph.eliminatePartialSequential(continuous_ordering);
-
-  // Get the last continuous conditional which will have all the discrete keys
-  auto last_conditional = bayesNet->at(bayesNet->size() - 1);
-  DiscreteKeys discrete_keys = last_conditional->discreteKeys();
-
-  const std::vector<DiscreteValues> assignments =
-      DiscreteValues::CartesianProduct(discrete_keys);
-
-  // Reverse discrete keys order for correct tree construction
-  std::reverse(discrete_keys.begin(), discrete_keys.end());
-
-  // Create a decision tree of all the different VectorValues
-  DecisionTree<Key, VectorValues::shared_ptr> delta_tree =
-      graph.continuousDelta(discrete_keys, bayesNet, assignments);
-
-  AlgebraicDecisionTree<Key> probPrimeTree =
-      graph.continuousProbPrimes(discrete_keys, bayesNet);
-
-  EXPECT(assert_equal(expected_probPrimeTree, probPrimeTree));
+  HybridBayesNet::shared_ptr bayesNet = graph.eliminateSequential(ordering);
+  auto discreteConditional = bayesNet->atDiscrete(bayesNet->size() - 3);

   // Test if the probPrimeTree matches the probability of
   // the individual factor graphs
   for (size_t i = 0; i < pow(2, K - 1); i++) {
-    Assignment<Key> discrete_assignment;
+    DiscreteValues discrete_assignment;
     for (size_t v = 0; v < discrete_seq_map[i].size(); v++) {
       discrete_assignment[M(v)] = discrete_seq_map[i][v];
     }
-    EXPECT_DOUBLES_EQUAL(expected_prob_primes.at(i),
-                         probPrimeTree(discrete_assignment), 1e-8);
+    double discrete_transition_prob = 0.25;
+    EXPECT_DOUBLES_EQUAL(expected_prob_primes.at(i) * discrete_transition_prob,
+                         (*discreteConditional)(discrete_assignment), 1e-8);
   }

-  discreteGraph->add(DecisionTreeFactor(discrete_keys, probPrimeTree));
-
-  Ordering discrete(graph.discreteKeys());
-  auto discreteBayesNet =
-      discreteGraph->BaseEliminateable::eliminateSequential(discrete);
-  bayesNet->add(*discreteBayesNet);
-
   HybridValues hybrid_values = bayesNet->optimize();

   // This is the correct sequence as designed

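An aside on the 0.25 factor (my reading, not stated in the diff): with the equal mode priors this Switching fixture uses, each specific mode sequence has prior probability (1/2)^(K-1); for K = 3 that is 0.5 * 0.5 = 0.25, which appears to be why the regression multiplies the expected prob-primes by discrete_transition_prob = 0.25 before comparing against the discrete conditional.
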
@@ -182,8 +182,9 @@ TEST(HybridGaussianFactorGraph, eliminateFullMultifrontalSimple) {
       boost::make_shared<JacobianFactor>(X(1), I_3x3, Vector3::Ones())}));

   hfg.add(DecisionTreeFactor(m1, {2, 8}));
-  //TODO(Varun) Adding extra discrete variable not connected to continuous variable throws segfault
-  // hfg.add(DecisionTreeFactor({{M(1), 2}, {M(2), 2}}, "1 2 3 4"));
+  // TODO(Varun) Adding extra discrete variable not connected to continuous
+  // variable throws segfault
+  // hfg.add(DecisionTreeFactor({{M(1), 2}, {M(2), 2}}, "1 2 3 4"));

   HybridBayesTree::shared_ptr result =
       hfg.eliminateMultifrontal(hfg.getHybridOrdering());

@@ -178,7 +178,7 @@ TEST(HybridGaussianElimination, IncrementalInference) {

   // Test the probability values with regression tests.
   DiscreteValues assignment;
-  EXPECT(assert_equal(0.166667, m00_prob, 1e-5));
+  EXPECT(assert_equal(0.0619233, m00_prob, 1e-5));
   assignment[M(0)] = 0;
   assignment[M(1)] = 0;
   EXPECT(assert_equal(0.0619233, (*discreteConditional)(assignment), 1e-5));

@@ -372,8 +372,7 @@ TEST(HybridGaussianElimination, EliminateHybrid_2_Variable) {
       dynamic_pointer_cast<DecisionTreeFactor>(hybridDiscreteFactor->inner());
   CHECK(discreteFactor);
   EXPECT_LONGS_EQUAL(1, discreteFactor->discreteKeys().size());
-  // All leaves should be probability 1 since this is not P*(X|M,Z)
-  EXPECT(discreteFactor->root_->isLeaf());
+  EXPECT(discreteFactor->root_->isLeaf() == false);

   // TODO(Varun) Test emplace_discrete
 }

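Why this expectation flips (my reading, not stated in the diff): with the updated factorProb lambda above, each discrete choice now gets its own value exp(-0.5 * |det(augmented information)|) instead of a constant 1.0, so the resulting decision tree generally has more than one distinct leaf and its root is no longer a single leaf.
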
@@ -386,11 +385,11 @@ TEST(HybridFactorGraph, Partial_Elimination) {

   auto linearizedFactorGraph = self.linearizedFactorGraph;

-  // Create ordering.
+  // Create ordering of only continuous variables.
   Ordering ordering;
   for (size_t k = 0; k < self.K; k++) ordering += X(k);

-  // Eliminate partially.
+  // Eliminate partially i.e. only continuous part.
   HybridBayesNet::shared_ptr hybridBayesNet;
   HybridGaussianFactorGraph::shared_ptr remainingFactorGraph;
   std::tie(hybridBayesNet, remainingFactorGraph) =

@@ -441,14 +440,6 @@ TEST(HybridFactorGraph, Full_Elimination) {
     discrete_fg.push_back(df->inner());
   }

-  // Get the probabilit P*(X | M, Z)
-  DiscreteKeys discrete_keys =
-      remainingFactorGraph_partial->at(2)->discreteKeys();
-  AlgebraicDecisionTree<Key> probPrimeTree =
-      linearizedFactorGraph.continuousProbPrimes(discrete_keys,
-                                                 hybridBayesNet_partial);
-  discrete_fg.add(DecisionTreeFactor(discrete_keys, probPrimeTree));
-
   ordering.clear();
   for (size_t k = 0; k < self.K - 1; k++) ordering += M(k);
   discreteBayesNet =

@@ -197,7 +197,7 @@ TEST(HybridNonlinearISAM, IncrementalInference) {

   // Test the probability values with regression tests.
   DiscreteValues assignment;
-  EXPECT(assert_equal(0.166667, m00_prob, 1e-5));
+  EXPECT(assert_equal(0.0619233, m00_prob, 1e-5));
   assignment[M(0)] = 0;
   assignment[M(1)] = 0;
   EXPECT(assert_equal(0.0619233, (*discreteConditional)(assignment), 1e-5));