Merge pull request #1288 from borglab/misc/fixes
commit 3e25e7d493
@@ -33,8 +33,6 @@ static const Point2 P(0.2, 0.7);
 static const Rot2 R = Rot2::fromAngle(0.3);
 static const double s = 4;
 
-const double degree = M_PI / 180;
-
 //******************************************************************************
 TEST(Similarity2, Concepts) {
   BOOST_CONCEPT_ASSERT((IsGroup<Similarity2>));
@@ -52,7 +52,6 @@ DiscreteKeys CollectDiscreteKeys(const DiscreteKeys &key1,
 HybridFactor::HybridFactor(const KeyVector &keys)
     : Base(keys),
       isContinuous_(true),
-      nrContinuous_(keys.size()),
       continuousKeys_(keys) {}
 
 /* ************************************************************************ */
@@ -62,7 +61,6 @@ HybridFactor::HybridFactor(const KeyVector &continuousKeys,
       isDiscrete_((continuousKeys.size() == 0) && (discreteKeys.size() != 0)),
       isContinuous_((continuousKeys.size() != 0) && (discreteKeys.size() == 0)),
       isHybrid_((continuousKeys.size() != 0) && (discreteKeys.size() != 0)),
-      nrContinuous_(continuousKeys.size()),
       discreteKeys_(discreteKeys),
       continuousKeys_(continuousKeys) {}
 
@@ -47,9 +47,6 @@ class GTSAM_EXPORT HybridFactor : public Factor {
   bool isContinuous_ = false;
   bool isHybrid_ = false;
 
-  // TODO(Varun) remove
-  size_t nrContinuous_ = 0;
-
 protected:
   // Set of DiscreteKeys for this factor.
   DiscreteKeys discreteKeys_;
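Note: with nrContinuous_ removed from HybridFactor, the number of continuous
variables is already implied by the stored keys. A minimal sketch of an
accessor that could stand in for the deleted member (an illustration under
that assumption, not something added by this PR):

  /// Number of continuous variables, recovered from the stored keys.
  size_t nrContinuous() const { return continuousKeys_.size(); }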
@@ -31,9 +31,7 @@ template class EliminatableClusterTree<HybridBayesTree,
 template class JunctionTree<HybridBayesTree, HybridGaussianFactorGraph>;
 
 struct HybridConstructorTraversalData {
-  typedef
-      typename JunctionTree<HybridBayesTree, HybridGaussianFactorGraph>::Node
-          Node;
+  typedef HybridJunctionTree::Node Node;
   typedef
       typename JunctionTree<HybridBayesTree,
                             HybridGaussianFactorGraph>::sharedNode sharedNode;
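Note: the shorter typedef relies on HybridJunctionTree deriving from
JunctionTree<HybridBayesTree, HybridGaussianFactorGraph>, so the inherited
Node type can be named through the derived class. A sketch of the assumed
relationship (not part of this diff):

  class HybridJunctionTree
      : public JunctionTree<HybridBayesTree, HybridGaussianFactorGraph> {
    // ... inherits Node, so HybridJunctionTree::Node names the same type as
    // JunctionTree<HybridBayesTree, HybridGaussianFactorGraph>::Node.
  };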
@@ -62,6 +60,7 @@ struct HybridConstructorTraversalData {
     data.junctionTreeNode = boost::make_shared<Node>(node->key, node->factors);
     parentData.junctionTreeNode->addChild(data.junctionTreeNode);
 
+    // Add all the discrete keys in the hybrid factors to the current data
     for (HybridFactor::shared_ptr& f : node->factors) {
       for (auto& k : f->discreteKeys()) {
         data.discreteKeys.insert(k.first);
@@ -72,8 +71,8 @@ struct HybridConstructorTraversalData {
   }
 
   // Post-order visitor function
-  static void ConstructorTraversalVisitorPostAlg2(
-      const boost::shared_ptr<HybridEliminationTree::Node>& ETreeNode,
+  static void ConstructorTraversalVisitorPost(
+      const boost::shared_ptr<HybridEliminationTree::Node>& node,
       const HybridConstructorTraversalData& data) {
     // In this post-order visitor, we combine the symbolic elimination results
     // from the elimination tree children and symbolically eliminate the current
@@ -86,15 +85,15 @@ struct HybridConstructorTraversalData {
 
     // Do symbolic elimination for this node
     SymbolicFactors symbolicFactors;
-    symbolicFactors.reserve(ETreeNode->factors.size() +
+    symbolicFactors.reserve(node->factors.size() +
                             data.childSymbolicFactors.size());
     // Add ETree node factors
-    symbolicFactors += ETreeNode->factors;
+    symbolicFactors += node->factors;
     // Add symbolic factors passed up from children
     symbolicFactors += data.childSymbolicFactors;
 
     Ordering keyAsOrdering;
-    keyAsOrdering.push_back(ETreeNode->key);
+    keyAsOrdering.push_back(node->key);
     SymbolicConditional::shared_ptr conditional;
     SymbolicFactor::shared_ptr separatorFactor;
     boost::tie(conditional, separatorFactor) =
@@ -105,19 +104,19 @@ struct HybridConstructorTraversalData {
     data.parentData->childSymbolicFactors.push_back(separatorFactor);
     data.parentData->discreteKeys.merge(data.discreteKeys);
 
-    sharedNode node = data.junctionTreeNode;
+    sharedNode jt_node = data.junctionTreeNode;
     const FastVector<SymbolicConditional::shared_ptr>& childConditionals =
         data.childSymbolicConditionals;
-    node->problemSize_ = (int)(conditional->size() * symbolicFactors.size());
+    jt_node->problemSize_ = (int)(conditional->size() * symbolicFactors.size());
 
     // Merge our children if they are in our clique - if our conditional has
     // exactly one fewer parent than our child's conditional.
     const size_t nrParents = conditional->nrParents();
-    const size_t nrChildren = node->nrChildren();
+    const size_t nrChildren = jt_node->nrChildren();
     assert(childConditionals.size() == nrChildren);
 
     // decide which children to merge, as index into children
-    std::vector<size_t> nrChildrenFrontals = node->nrFrontalsOfChildren();
+    std::vector<size_t> nrChildrenFrontals = jt_node->nrFrontalsOfChildren();
     std::vector<bool> merge(nrChildren, false);
     size_t nrFrontals = 1;
     for (size_t i = 0; i < nrChildren; i++) {
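Note: the body of this loop lies outside the hunk. Assuming it mirrors the
generic ConstructorTraversalVisitorPostAlg2 in JunctionTree-inst.h, the
per-child merge decision looks roughly like this sketch (an assumption, not
part of the diff):

  for (size_t i = 0; i < nrChildren; i++) {
    // Merge child i if our conditional has exactly one fewer parent than
    // the child's conditional, i.e. the child fits in the same clique.
    if (nrParents + nrFrontals == childConditionals[i]->nrParents()) {
      nrFrontals += nrChildrenFrontals[i];
      merge[i] = true;
    }
  }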
@@ -137,7 +136,7 @@ struct HybridConstructorTraversalData {
     }
 
     // now really merge
-    node->mergeChildren(merge);
+    jt_node->mergeChildren(merge);
   }
 };
 
@@ -161,7 +160,7 @@ HybridJunctionTree::HybridJunctionTree(
   // the junction tree roots
   treeTraversal::DepthFirstForest(eliminationTree, rootData,
                                   Data::ConstructorTraversalVisitorPre,
-                                  Data::ConstructorTraversalVisitorPostAlg2);
+                                  Data::ConstructorTraversalVisitorPost);
 
   // Assign roots from the dummy node
   this->addChildrenAsRoots(rootData.junctionTreeNode);
@@ -128,9 +128,6 @@ struct Switching {
   /// Create with given number of time steps.
   Switching(size_t K, double between_sigma = 1.0, double prior_sigma = 0.1)
       : K(K) {
-    using symbol_shorthand::M;
-    using symbol_shorthand::X;
-
     // Create DiscreteKeys for binary K modes, modes[0] will not be used.
     for (size_t k = 0; k <= K; k++) {
       modes.emplace_back(M(k), 2);
@@ -175,9 +172,6 @@ struct Switching {
   // Create motion models for a given time step
   static std::vector<MotionModel::shared_ptr> motionModels(size_t k,
                                                            double sigma = 1.0) {
-    using symbol_shorthand::M;
-    using symbol_shorthand::X;
-
     auto noise_model = noiseModel::Isotropic::Sigma(1, sigma);
     auto still =
         boost::make_shared<MotionModel>(X(k), X(k + 1), 0.0, noise_model),
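Note: dropping the per-method using-declarations only compiles if the
symbol_shorthand aliases M and X remain visible at an enclosing scope. A
plausible arrangement (an assumption, not shown in this diff) is a single
pair of aliases near the top of the Switching fixture header:

  using symbol_shorthand::M;  // discrete mode keys M(k)
  using symbol_shorthand::X;  // continuous state keys X(k)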
@@ -257,14 +257,6 @@ TEST(GaussianElimination, Eliminate_x1) {
   // Add first hybrid factor
   factors.push_back(self.linearizedFactorGraph[1]);
 
-  // TODO(Varun) remove this block since sum is no longer exposed.
-  // // Check that sum works:
-  // auto sum = factors.sum();
-  // Assignment<Key> mode;
-  // mode[M(1)] = 1;
-  // auto actual = sum(mode); // Selects one of 2 modes.
-  // EXPECT_LONGS_EQUAL(2, actual.size()); // Prior and motion model.
-
   // Eliminate x1
   Ordering ordering;
   ordering += X(1);
@@ -289,15 +281,6 @@ TEST(HybridsGaussianElimination, Eliminate_x2) {
   factors.push_back(self.linearizedFactorGraph[1]);  // involves m1
   factors.push_back(self.linearizedFactorGraph[2]);  // involves m2
 
-  // TODO(Varun) remove this block since sum is no longer exposed.
-  // // Check that sum works:
-  // auto sum = factors.sum();
-  // Assignment<Key> mode;
-  // mode[M(1)] = 0;
-  // mode[M(2)] = 1;
-  // auto actual = sum(mode); // Selects one of 4 mode
-  // combinations. EXPECT_LONGS_EQUAL(2, actual.size()); // 2 motion models.
-
   // Eliminate x2
   Ordering ordering;
   ordering += X(2);
@@ -364,51 +347,10 @@ TEST(HybridGaussianElimination, EliminateHybrid_2_Variable) {
   CHECK(discreteFactor);
   EXPECT_LONGS_EQUAL(1, discreteFactor->discreteKeys().size());
   EXPECT(discreteFactor->root_->isLeaf() == false);
 
+  //TODO(Varun) Test emplace_discrete
 }
 
-// /*
-// ****************************************************************************/
-// /// Test the toDecisionTreeFactor method
-// TEST(HybridFactorGraph, ToDecisionTreeFactor) {
-//   size_t K = 3;
-
-//   // Provide tight sigma values so that the errors are visibly different.
-//   double between_sigma = 5e-8, prior_sigma = 1e-7;
-
-//   Switching self(K, between_sigma, prior_sigma);
-
-//   // Clear out discrete factors since sum() cannot hanldle those
-//   HybridGaussianFactorGraph linearizedFactorGraph(
-//       self.linearizedFactorGraph.gaussianGraph(), DiscreteFactorGraph(),
-//       self.linearizedFactorGraph.dcGraph());
-
-//   auto decisionTreeFactor = linearizedFactorGraph.toDecisionTreeFactor();
-
-//   auto allAssignments =
-//       DiscreteValues::CartesianProduct(linearizedFactorGraph.discreteKeys());
-
-//   // Get the error of the discrete assignment m1=0, m2=1.
-//   double actual = (*decisionTreeFactor)(allAssignments[1]);
-
-//   /********************************************/
-//   // Create equivalent factor graph for m1=0, m2=1
-//   GaussianFactorGraph graph = linearizedFactorGraph.gaussianGraph();
-
-//   for (auto &p : linearizedFactorGraph.dcGraph()) {
-//     if (auto mixture =
-//             boost::dynamic_pointer_cast<DCGaussianMixtureFactor>(p)) {
-//       graph.add((*mixture)(allAssignments[1]));
-//     }
-//   }
-
-//   VectorValues values = graph.optimize();
-//   double expected = graph.probPrime(values);
-//   /********************************************/
-//   EXPECT_DOUBLES_EQUAL(expected, actual, 1e-12);
-//   // REGRESSION:
-//   EXPECT_DOUBLES_EQUAL(0.6125, actual, 1e-4);
-// }
-
 /****************************************************************************
  * Test partial elimination
  */
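Note: the new TODO mentions emplace_discrete. Assuming it forwards
constructor arguments the way emplace_hybrid<MixtureFactor> does later in
this file, a future test could exercise it roughly like this (hypothetical
usage, not part of the PR):

  // Hypothetical: add a discrete prior on mode m1 directly to the graph.
  fg.emplace_discrete<DecisionTreeFactor>(DiscreteKey(M(1), 2), "0.6 0.4");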
@@ -428,7 +370,6 @@ TEST(HybridFactorGraph, Partial_Elimination) {
       linearizedFactorGraph.eliminatePartialSequential(ordering);
 
   CHECK(hybridBayesNet);
-  // GTSAM_PRINT(*hybridBayesNet);  // HybridBayesNet
   EXPECT_LONGS_EQUAL(3, hybridBayesNet->size());
   EXPECT(hybridBayesNet->at(0)->frontals() == KeyVector{X(1)});
   EXPECT(hybridBayesNet->at(0)->parents() == KeyVector({X(2), M(1)}));
@@ -438,7 +379,6 @@ TEST(HybridFactorGraph, Partial_Elimination) {
   EXPECT(hybridBayesNet->at(2)->parents() == KeyVector({M(1), M(2)}));
 
   CHECK(remainingFactorGraph);
-  // GTSAM_PRINT(*remainingFactorGraph);  // HybridFactorGraph
   EXPECT_LONGS_EQUAL(3, remainingFactorGraph->size());
   EXPECT(remainingFactorGraph->at(0)->keys() == KeyVector({M(1)}));
   EXPECT(remainingFactorGraph->at(1)->keys() == KeyVector({M(2), M(1)}));
@@ -721,13 +661,8 @@ TEST(HybridFactorGraph, DefaultDecisionTree) {
   moving = boost::make_shared<PlanarMotionModel>(X(0), X(1), odometry,
                                                  noise_model);
   std::vector<PlanarMotionModel::shared_ptr> motion_models = {still, moving};
-  // TODO(Varun) Make a templated constructor for MixtureFactor which does this?
-  std::vector<NonlinearFactor::shared_ptr> components;
-  for (auto&& f : motion_models) {
-    components.push_back(boost::dynamic_pointer_cast<NonlinearFactor>(f));
-  }
   fg.emplace_hybrid<MixtureFactor>(
-      contKeys, DiscreteKeys{gtsam::DiscreteKey(M(1), 2)}, components);
+      contKeys, DiscreteKeys{gtsam::DiscreteKey(M(1), 2)}, motion_models);
 
   // Add Range-Bearing measurements to from X0 to L0 and X1 to L1.
   // create a noise model for the landmark measurements
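Note: passing motion_models (a std::vector<PlanarMotionModel::shared_ptr>)
straight to emplace_hybrid implies MixtureFactor now performs the upcast to
NonlinearFactor itself, which is what the deleted TODO asked for. A minimal
sketch of such a templated constructor, with a hypothetical Upcast helper
(an illustration, not the code added by this PR):

  template <typename FACTOR>
  MixtureFactor(const KeyVector& keys, const DiscreteKeys& discreteKeys,
                const std::vector<boost::shared_ptr<FACTOR>>& factors)
      : MixtureFactor(keys, discreteKeys, Upcast(factors)) {}

  // Hypothetical helper doing what the deleted test-side loop did.
  template <typename FACTOR>
  static std::vector<NonlinearFactor::shared_ptr> Upcast(
      const std::vector<boost::shared_ptr<FACTOR>>& factors) {
    std::vector<NonlinearFactor::shared_ptr> components;
    components.reserve(factors.size());
    for (auto&& f : factors)
      components.push_back(boost::dynamic_pointer_cast<NonlinearFactor>(f));
    return components;
  }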
@@ -33,7 +33,7 @@ struct ConstructorTraversalData {
   typedef typename JunctionTree<BAYESTREE, GRAPH>::sharedNode sharedNode;
 
   ConstructorTraversalData* const parentData;
-  sharedNode myJTNode;
+  sharedNode junctionTreeNode;
   FastVector<SymbolicConditional::shared_ptr> childSymbolicConditionals;
   FastVector<SymbolicFactor::shared_ptr> childSymbolicFactors;
 
@@ -53,8 +53,9 @@ struct ConstructorTraversalData {
     // a traversal data structure with its own JT node, and create a child
     // pointer in its parent.
     ConstructorTraversalData myData = ConstructorTraversalData(&parentData);
-    myData.myJTNode = boost::make_shared<Node>(node->key, node->factors);
-    parentData.myJTNode->addChild(myData.myJTNode);
+    myData.junctionTreeNode =
+        boost::make_shared<Node>(node->key, node->factors);
+    parentData.junctionTreeNode->addChild(myData.junctionTreeNode);
     return myData;
   }
 
@@ -91,7 +92,7 @@ struct ConstructorTraversalData {
     myData.parentData->childSymbolicConditionals.push_back(myConditional);
     myData.parentData->childSymbolicFactors.push_back(mySeparatorFactor);
 
-    sharedNode node = myData.myJTNode;
+    sharedNode node = myData.junctionTreeNode;
     const FastVector<SymbolicConditional::shared_ptr>& childConditionals =
         myData.childSymbolicConditionals;
     node->problemSize_ = (int) (myConditional->size() * symbolicFactors.size());
@@ -138,14 +139,14 @@ JunctionTree<BAYESTREE, GRAPH>::JunctionTree(
   typedef typename EliminationTree<ETREE_BAYESNET, ETREE_GRAPH>::Node ETreeNode;
   typedef ConstructorTraversalData<BAYESTREE, GRAPH, ETreeNode> Data;
   Data rootData(0);
-  rootData.myJTNode = boost::make_shared<typename Base::Node>();  // Make a dummy node to gather
-  // the junction tree roots
+  // Make a dummy node to gather the junction tree roots
+  rootData.junctionTreeNode = boost::make_shared<typename Base::Node>();
   treeTraversal::DepthFirstForest(eliminationTree, rootData,
                                   Data::ConstructorTraversalVisitorPre,
                                   Data::ConstructorTraversalVisitorPostAlg2);
 
   // Assign roots from the dummy node
-  this->addChildrenAsRoots(rootData.myJTNode);
+  this->addChildrenAsRoots(rootData.junctionTreeNode);
 
   // Transfer remaining factors from elimination tree
   Base::remainingFactors_ = eliminationTree.remainingFactors();