Merge pull request #156 from borglab/fix/boost-optional

boost::optional<const Ordering &>

commit ed7f949385
Tag: release/4.3a0
@@ -46,16 +46,20 @@ DiscreteConditional::DiscreteConditional(const size_t nrFrontals,

 /* ******************************************************************************** */
 DiscreteConditional::DiscreteConditional(const DecisionTreeFactor& joint,
-    const DecisionTreeFactor& marginal, const boost::optional<Ordering>& orderedKeys) :
+    const DecisionTreeFactor& marginal) :
     BaseFactor(
         ISDEBUG("DiscreteConditional::COUNT") ? joint : joint / marginal), BaseConditional(
         joint.size()-marginal.size()) {
   if (ISDEBUG("DiscreteConditional::DiscreteConditional"))
     cout << (firstFrontalKey()) << endl; //TODO Print all keys
-  if (orderedKeys) {
-    keys_.clear();
-    keys_.insert(keys_.end(), orderedKeys->begin(), orderedKeys->end());
-  }
+}
+
+/* ******************************************************************************** */
+DiscreteConditional::DiscreteConditional(const DecisionTreeFactor& joint,
+    const DecisionTreeFactor& marginal, const Ordering& orderedKeys) :
+    DiscreteConditional(joint, marginal) {
+  keys_.clear();
+  keys_.insert(keys_.end(), orderedKeys.begin(), orderedKeys.end());
 }

 /* ******************************************************************************** */
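The hunk above shows the pattern this PR applies throughout: a parameter of type boost::optional<Ordering> (or boost::optional<const Ordering&>) is removed, and the ordering-taking variant becomes a separate overload that delegates to the plain one. The sketch below illustrates only that idiom with made-up stand-ins; Key, Ordering, and Conditional here are not GTSAM types.

    #include <iostream>
    #include <utility>
    #include <vector>

    // Illustrative stand-ins for gtsam::Key and gtsam::Ordering.
    using Key = unsigned long;
    using Ordering = std::vector<Key>;

    class Conditional {
     public:
      // Overload without an ordering: keeps the keys produced by the base setup.
      explicit Conditional(std::vector<Key> baseKeys) : keys_(std::move(baseKeys)) {}

      // Overload with an ordering: delegate to the first constructor, then
      // overwrite keys_ with the requested order (mirrors the new
      // DiscreteConditional(joint, marginal, orderedKeys) above).
      Conditional(std::vector<Key> baseKeys, const Ordering& orderedKeys)
          : Conditional(std::move(baseKeys)) {
        keys_.clear();
        keys_.insert(keys_.end(), orderedKeys.begin(), orderedKeys.end());
      }

      const std::vector<Key>& keys() const { return keys_; }

     private:
      std::vector<Key> keys_;
    };

    int main() {
      Conditional a({1, 2, 3});             // no ordering requested
      Conditional b({1, 2, 3}, {3, 1, 2});  // explicit ordering
      std::cout << a.keys().size() << " " << b.keys().front() << "\n";  // prints "3 3"
      return 0;
    }

Delegating keeps the shared initialization in one place; the ordering overload only rewrites keys_ afterwards, exactly as the new DiscreteConditional constructor does.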
@@ -60,7 +60,11 @@ public:

   /** construct P(X|Y)=P(X,Y)/P(Y) from P(X,Y) and P(Y) */
   DiscreteConditional(const DecisionTreeFactor& joint,
-      const DecisionTreeFactor& marginal, const boost::optional<Ordering>& orderedKeys = boost::none);
+      const DecisionTreeFactor& marginal);

+  /** construct P(X|Y)=P(X,Y)/P(Y) from P(X,Y) and P(Y) */
+  DiscreteConditional(const DecisionTreeFactor& joint,
+      const DecisionTreeFactor& marginal, const Ordering& orderedKeys);
+
   /**
    * Combine several conditional into a single one.
@@ -262,7 +262,7 @@ namespace gtsam {

     // Now, marginalize out everything that is not variable j
     BayesNetType marginalBN = *cliqueMarginal.marginalMultifrontalBayesNet(
-        Ordering(cref_list_of<1,Key>(j)), boost::none, function);
+        Ordering(cref_list_of<1,Key>(j)), function);

     // The Bayes net should contain only one conditional for variable j, so return it
     return marginalBN.front();
@@ -383,7 +383,7 @@ namespace gtsam {
     }

     // now, marginalize out everything that is not variable j1 or j2
-    return p_BC1C2.marginalMultifrontalBayesNet(Ordering(cref_list_of<2,Key>(j1)(j2)), boost::none, function);
+    return p_BC1C2.marginalMultifrontalBayesNet(Ordering(cref_list_of<2,Key>(j1)(j2)), function);
   }

   /* ************************************************************************* */
@@ -171,7 +171,7 @@ namespace gtsam {

       // The variables we want to keepSet are exactly the ones in S
       KeyVector indicesS(this->conditional()->beginParents(), this->conditional()->endParents());
-      cachedSeparatorMarginal_ = *p_Cp.marginalMultifrontalBayesNet(Ordering(indicesS), boost::none, function);
+      cachedSeparatorMarginal_ = *p_Cp.marginalMultifrontalBayesNet(Ordering(indicesS), function);
     }
   }

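The three call-site hunks above make the same mechanical edit: with the marginalization ordering no longer an optional parameter of marginalMultifrontalBayesNet, the boost::none placeholder is dropped and the remaining arguments shift left onto the overload without that parameter. A small, hypothetical illustration of why ordinary overload resolution handles this (none of these names are GTSAM's):

    #include <functional>
    #include <iostream>
    #include <string>
    #include <vector>

    using Ordering = std::vector<int>;
    using Eliminate = std::function<std::string()>;

    // New-style API: two overloads instead of one optional parameter.
    std::string marginalize(const Ordering& variables, const Eliminate& function) {
      return "explicit ordering of " + std::to_string(variables.size()) + ", " + function();
    }
    std::string marginalize(const Eliminate& function) {
      return "default ordering, " + function();
    }

    int main() {
      Eliminate fn = [] { return std::string("eliminated"); };
      // Old call shape: marginalize(vars, boost::none, fn);
      // New call shape: the placeholder is gone, arguments shift left.
      std::cout << marginalize(Ordering{1, 2}, fn) << "\n";
      std::cout << marginalize(fn) << "\n";
      return 0;
    }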
@@ -28,33 +28,19 @@ namespace gtsam {
   template<class FACTORGRAPH>
   boost::shared_ptr<typename EliminateableFactorGraph<FACTORGRAPH>::BayesNetType>
   EliminateableFactorGraph<FACTORGRAPH>::eliminateSequential(
-    OptionalOrdering ordering, const Eliminate& function,
-    OptionalVariableIndex variableIndex, OptionalOrderingType orderingType) const
-  {
-    if(ordering && variableIndex) {
-      gttic(eliminateSequential);
-      // Do elimination
-      EliminationTreeType etree(asDerived(), *variableIndex, *ordering);
-      boost::shared_ptr<BayesNetType> bayesNet;
-      boost::shared_ptr<FactorGraphType> factorGraph;
-      boost::tie(bayesNet,factorGraph) = etree.eliminate(function);
-      // If any factors are remaining, the ordering was incomplete
-      if(!factorGraph->empty())
-        throw InconsistentEliminationRequested();
-      // Return the Bayes net
-      return bayesNet;
-    }
-    else if(!variableIndex) {
+    OptionalOrderingType orderingType, const Eliminate& function,
+    OptionalVariableIndex variableIndex) const {
+    if(!variableIndex) {
       // If no VariableIndex provided, compute one and call this function again IMPORTANT: we check
       // for no variable index first so that it's always computed if we need to call COLAMD because
-      // no Ordering is provided.
+      // no Ordering is provided. When removing optional from VariableIndex, create VariableIndex
+      // before creating ordering.
       VariableIndex computedVariableIndex(asDerived());
-      return eliminateSequential(ordering, function, computedVariableIndex, orderingType);
+      return eliminateSequential(function, computedVariableIndex, orderingType);
     }
-    else /*if(!ordering)*/ {
-      // If no Ordering provided, compute one and call this function again. We are guaranteed to
-      // have a VariableIndex already here because we computed one if needed in the previous 'else'
-      // block.
+    else {
+      // Compute an ordering and call this function again. We are guaranteed to have a
+      // VariableIndex already here because we computed one if needed in the previous 'if' block.
       if (orderingType == Ordering::METIS) {
         Ordering computedOrdering = Ordering::Metis(asDerived());
         return eliminateSequential(computedOrdering, function, variableIndex, orderingType);
@@ -65,17 +51,75 @@ namespace gtsam {
       }
     }

+  /* ************************************************************************* */
+  template<class FACTORGRAPH>
+  boost::shared_ptr<typename EliminateableFactorGraph<FACTORGRAPH>::BayesNetType>
+  EliminateableFactorGraph<FACTORGRAPH>::eliminateSequential(
+    const Ordering& ordering, const Eliminate& function,
+    OptionalVariableIndex variableIndex) const
+  {
+    if(!variableIndex) {
+      // If no VariableIndex provided, compute one and call this function again
+      VariableIndex computedVariableIndex(asDerived());
+      return eliminateSequential(ordering, function, computedVariableIndex);
+    } else {
+      gttic(eliminateSequential);
+      // Do elimination
+      EliminationTreeType etree(asDerived(), *variableIndex, ordering);
+      boost::shared_ptr<BayesNetType> bayesNet;
+      boost::shared_ptr<FactorGraphType> factorGraph;
+      boost::tie(bayesNet,factorGraph) = etree.eliminate(function);
+      // If any factors are remaining, the ordering was incomplete
+      if(!factorGraph->empty())
+        throw InconsistentEliminationRequested();
+      // Return the Bayes net
+      return bayesNet;
+    }
+  }
+
   /* ************************************************************************* */
   template<class FACTORGRAPH>
   boost::shared_ptr<typename EliminateableFactorGraph<FACTORGRAPH>::BayesTreeType>
   EliminateableFactorGraph<FACTORGRAPH>::eliminateMultifrontal(
-    OptionalOrdering ordering, const Eliminate& function,
-    OptionalVariableIndex variableIndex, OptionalOrderingType orderingType) const
+    OptionalOrderingType orderingType, const Eliminate& function,
+    OptionalVariableIndex variableIndex) const
   {
-    if(ordering && variableIndex) {
+    if(!variableIndex) {
+      // If no VariableIndex provided, compute one and call this function again IMPORTANT: we check
+      // for no variable index first so that it's always computed if we need to call COLAMD because
+      // no Ordering is provided. When removing optional from VariableIndex, create VariableIndex
+      // before creating ordering.
+      VariableIndex computedVariableIndex(asDerived());
+      return eliminateMultifrontal(function, computedVariableIndex, orderingType);
+    }
+    else {
+      // Compute an ordering and call this function again. We are guaranteed to have a
+      // VariableIndex already here because we computed one if needed in the previous 'if' block.
+      if (orderingType == Ordering::METIS) {
+        Ordering computedOrdering = Ordering::Metis(asDerived());
+        return eliminateMultifrontal(computedOrdering, function, variableIndex, orderingType);
+      } else {
+        Ordering computedOrdering = Ordering::Colamd(*variableIndex);
+        return eliminateMultifrontal(computedOrdering, function, variableIndex, orderingType);
+      }
+    }
+  }
+
+  /* ************************************************************************* */
+  template<class FACTORGRAPH>
+  boost::shared_ptr<typename EliminateableFactorGraph<FACTORGRAPH>::BayesTreeType>
+  EliminateableFactorGraph<FACTORGRAPH>::eliminateMultifrontal(
+    const Ordering& ordering, const Eliminate& function,
+    OptionalVariableIndex variableIndex) const
+  {
+    if(!variableIndex) {
+      // If no VariableIndex provided, compute one and call this function again
+      VariableIndex computedVariableIndex(asDerived());
+      return eliminateMultifrontal(ordering, function, computedVariableIndex);
+    } else {
       gttic(eliminateMultifrontal);
       // Do elimination with given ordering
-      EliminationTreeType etree(asDerived(), *variableIndex, *ordering);
+      EliminationTreeType etree(asDerived(), *variableIndex, ordering);
       JunctionTreeType junctionTree(etree);
       boost::shared_ptr<BayesTreeType> bayesTree;
       boost::shared_ptr<FactorGraphType> factorGraph;
@@ -86,25 +130,6 @@ namespace gtsam {
       // Return the Bayes tree
       return bayesTree;
     }
-    else if(!variableIndex) {
-      // If no VariableIndex provided, compute one and call this function again IMPORTANT: we check
-      // for no variable index first so that it's always computed if we need to call COLAMD because
-      // no Ordering is provided.
-      VariableIndex computedVariableIndex(asDerived());
-      return eliminateMultifrontal(ordering, function, computedVariableIndex, orderingType);
-    }
-    else /*if(!ordering)*/ {
-      // If no Ordering provided, compute one and call this function again. We are guaranteed to
-      // have a VariableIndex already here because we computed one if needed in the previous 'else'
-      // block.
-      if (orderingType == Ordering::METIS) {
-        Ordering computedOrdering = Ordering::Metis(asDerived());
-        return eliminateMultifrontal(computedOrdering, function, variableIndex, orderingType);
-      } else {
-        Ordering computedOrdering = Ordering::Colamd(*variableIndex);
-        return eliminateMultifrontal(computedOrdering, function, variableIndex, orderingType);
-      }
-    }
   }

   /* ************************************************************************* */
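After these hunks, each of eliminateSequential and eliminateMultifrontal is a pair of overloads: the one taking an (optional) ordering *type* builds a VariableIndex if needed, computes a COLAMD or METIS ordering, and forwards to the overload taking a concrete const Ordering&, which performs the actual elimination. The toy sketch below mirrors only that dispatch structure; every name in it is a made-up stand-in, and a raw pointer plays the role of OptionalVariableIndex.

    #include <iostream>
    #include <string>
    #include <vector>

    // Toy stand-ins, for illustrating the dispatch only.
    enum class OrderingType { COLAMD, METIS };
    using Ordering = std::vector<int>;
    struct VariableIndex { int nVars; };

    struct Graph {
      std::vector<int> keys{1, 2, 3};

      // Overload 1: no ordering given; pick one (COLAMD or METIS) and forward.
      std::string eliminate(OrderingType type, const VariableIndex* index = nullptr) const {
        if (!index) {
          VariableIndex computed{static_cast<int>(keys.size())};  // build the index first
          return eliminate(type, &computed);                      // recurse once
        }
        Ordering ordering = (type == OrderingType::METIS) ? Ordering{3, 2, 1}
                                                          : Ordering{1, 2, 3};
        return eliminate(ordering, index);  // forward to the concrete-ordering overload
      }

      // Overload 2: ordering given; this is where elimination would actually run.
      std::string eliminate(const Ordering& ordering, const VariableIndex* index = nullptr) const {
        if (!index) {
          VariableIndex computed{static_cast<int>(keys.size())};
          return eliminate(ordering, &computed);
        }
        return "eliminated " + std::to_string(ordering.size()) + " variables";
      }
    };

    int main() {
      Graph g;
      std::cout << g.eliminate(OrderingType::METIS) << "\n";  // library picks the ordering
      std::cout << g.eliminate(Ordering{2, 1, 3}) << "\n";    // caller picks the ordering
      return 0;
    }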
@@ -191,57 +216,65 @@ namespace gtsam {
   boost::shared_ptr<typename EliminateableFactorGraph<FACTORGRAPH>::BayesNetType>
   EliminateableFactorGraph<FACTORGRAPH>::marginalMultifrontalBayesNet(
     boost::variant<const Ordering&, const KeyVector&> variables,
-    OptionalOrdering marginalizedVariableOrdering,
     const Eliminate& function, OptionalVariableIndex variableIndex) const
   {
-    if(variableIndex)
-    {
-      if(marginalizedVariableOrdering)
-      {
-        gttic(marginalMultifrontalBayesNet);
-        // An ordering was provided for the marginalized variables, so we can first eliminate them
-        // in the order requested.
-        boost::shared_ptr<BayesTreeType> bayesTree;
-        boost::shared_ptr<FactorGraphType> factorGraph;
-        boost::tie(bayesTree,factorGraph) =
-          eliminatePartialMultifrontal(*marginalizedVariableOrdering, function, *variableIndex);
-
-        if(const Ordering* varsAsOrdering = boost::get<const Ordering&>(&variables))
-        {
-          // An ordering was also provided for the unmarginalized variables, so we can also
-          // eliminate them in the order requested.
-          return factorGraph->eliminateSequential(*varsAsOrdering, function);
-        }
-        else
-        {
-          // No ordering was provided for the unmarginalized variables, so order them with COLAMD.
-          return factorGraph->eliminateSequential(boost::none, function);
-        }
-      }
-      else
-      {
-        // No ordering was provided for the marginalized variables, so order them using constrained
-        // COLAMD.
-        bool unmarginalizedAreOrdered = (boost::get<const Ordering&>(&variables) != 0);
-        const KeyVector* variablesOrOrdering =
-          unmarginalizedAreOrdered ?
-          boost::get<const Ordering&>(&variables) : boost::get<const KeyVector&>(&variables);
-
-        Ordering totalOrdering =
-          Ordering::ColamdConstrainedLast(*variableIndex, *variablesOrOrdering, unmarginalizedAreOrdered);
-
-        // Split up ordering
-        const size_t nVars = variablesOrOrdering->size();
-        Ordering marginalizationOrdering(totalOrdering.begin(), totalOrdering.end() - nVars);
-        Ordering marginalVarsOrdering(totalOrdering.end() - nVars, totalOrdering.end());
-
-        // Call this function again with the computed orderings
-        return marginalMultifrontalBayesNet(marginalVarsOrdering, marginalizationOrdering, function, *variableIndex);
-      }
+    if(!variableIndex) {
+      // If no variable index is provided, compute one and call this function again
+      VariableIndex index(asDerived());
+      return marginalMultifrontalBayesNet(variables, function, index);
     } else {
+      // No ordering was provided for the marginalized variables, so order them using constrained
+      // COLAMD.
+      bool unmarginalizedAreOrdered = (boost::get<const Ordering&>(&variables) != 0);
+      const KeyVector* variablesOrOrdering =
+        unmarginalizedAreOrdered ?
+        boost::get<const Ordering&>(&variables) : boost::get<const KeyVector&>(&variables);
+
+      Ordering totalOrdering =
+        Ordering::ColamdConstrainedLast(*variableIndex, *variablesOrOrdering, unmarginalizedAreOrdered);
+
+      // Split up ordering
+      const size_t nVars = variablesOrOrdering->size();
+      Ordering marginalizationOrdering(totalOrdering.begin(), totalOrdering.end() - nVars);
+      Ordering marginalVarsOrdering(totalOrdering.end() - nVars, totalOrdering.end());
+
+      // Call this function again with the computed orderings
+      return marginalMultifrontalBayesNet(marginalVarsOrdering, marginalizationOrdering, function, *variableIndex);
+    }
+  }
+
+  /* ************************************************************************* */
+  template<class FACTORGRAPH>
+  boost::shared_ptr<typename EliminateableFactorGraph<FACTORGRAPH>::BayesNetType>
+  EliminateableFactorGraph<FACTORGRAPH>::marginalMultifrontalBayesNet(
+    boost::variant<const Ordering&, const KeyVector&> variables,
+    const Ordering& marginalizedVariableOrdering,
+    const Eliminate& function, OptionalVariableIndex variableIndex) const
+  {
+    if(!variableIndex) {
       // If no variable index is provided, compute one and call this function again
       VariableIndex index(asDerived());
       return marginalMultifrontalBayesNet(variables, marginalizedVariableOrdering, function, index);
+    } else {
+      gttic(marginalMultifrontalBayesNet);
+      // An ordering was provided for the marginalized variables, so we can first eliminate them
+      // in the order requested.
+      boost::shared_ptr<BayesTreeType> bayesTree;
+      boost::shared_ptr<FactorGraphType> factorGraph;
+      boost::tie(bayesTree,factorGraph) =
+        eliminatePartialMultifrontal(marginalizedVariableOrdering, function, *variableIndex);
+
+      if(const Ordering* varsAsOrdering = boost::get<const Ordering&>(&variables))
+      {
+        // An ordering was also provided for the unmarginalized variables, so we can also
+        // eliminate them in the order requested.
+        return factorGraph->eliminateSequential(*varsAsOrdering, function);
+      }
+      else
+      {
+        // No ordering was provided for the unmarginalized variables, so order them with COLAMD.
+        return factorGraph->eliminateSequential(function);
+      }
     }
   }

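When only the set of marginal variables is given (no marginalization ordering), the retained else-branch orders the whole graph with constrained COLAMD so that the requested variables come last, then splits that total ordering: the front part is eliminated away, and the trailing nVars entries order the marginal itself. The splitting step in isolation, with std::vector standing in for gtsam::Ordering and arbitrary keys:

    #include <cassert>
    #include <iostream>
    #include <vector>

    using Key = unsigned long;
    using Ordering = std::vector<Key>;

    int main() {
      // Pretend this came from a constrained-COLAMD run in which keys 7 and 9
      // (the variables we want to keep) were forced to the end.
      Ordering totalOrdering = {2, 5, 1, 7, 9};
      const std::size_t nVars = 2;  // number of requested marginal variables

      // Same iterator arithmetic as the diff: the front part eliminates everything
      // else, the trailing nVars entries order the marginal variables themselves.
      Ordering marginalizationOrdering(totalOrdering.begin(), totalOrdering.end() - nVars);
      Ordering marginalVarsOrdering(totalOrdering.end() - nVars, totalOrdering.end());

      assert(marginalizationOrdering.size() + marginalVarsOrdering.size() == totalOrdering.size());
      std::cout << "eliminate first: " << marginalizationOrdering.size()
                << " keys, keep last: " << marginalVarsOrdering.size() << " keys\n";
      return 0;
    }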
@@ -250,57 +283,65 @@ namespace gtsam {
   boost::shared_ptr<typename EliminateableFactorGraph<FACTORGRAPH>::BayesTreeType>
   EliminateableFactorGraph<FACTORGRAPH>::marginalMultifrontalBayesTree(
     boost::variant<const Ordering&, const KeyVector&> variables,
-    OptionalOrdering marginalizedVariableOrdering,
     const Eliminate& function, OptionalVariableIndex variableIndex) const
   {
-    if(variableIndex)
-    {
-      if(marginalizedVariableOrdering)
-      {
-        gttic(marginalMultifrontalBayesTree);
-        // An ordering was provided for the marginalized variables, so we can first eliminate them
-        // in the order requested.
-        boost::shared_ptr<BayesTreeType> bayesTree;
-        boost::shared_ptr<FactorGraphType> factorGraph;
-        boost::tie(bayesTree,factorGraph) =
-          eliminatePartialMultifrontal(*marginalizedVariableOrdering, function, *variableIndex);
-
-        if(const Ordering* varsAsOrdering = boost::get<const Ordering&>(&variables))
-        {
-          // An ordering was also provided for the unmarginalized variables, so we can also
-          // eliminate them in the order requested.
-          return factorGraph->eliminateMultifrontal(*varsAsOrdering, function);
-        }
-        else
-        {
-          // No ordering was provided for the unmarginalized variables, so order them with COLAMD.
-          return factorGraph->eliminateMultifrontal(boost::none, function);
-        }
-      }
-      else
-      {
-        // No ordering was provided for the marginalized variables, so order them using constrained
-        // COLAMD.
-        bool unmarginalizedAreOrdered = (boost::get<const Ordering&>(&variables) != 0);
-        const KeyVector* variablesOrOrdering =
-          unmarginalizedAreOrdered ?
-          boost::get<const Ordering&>(&variables) : boost::get<const KeyVector&>(&variables);
-
-        Ordering totalOrdering =
-          Ordering::ColamdConstrainedLast(*variableIndex, *variablesOrOrdering, unmarginalizedAreOrdered);
-
-        // Split up ordering
-        const size_t nVars = variablesOrOrdering->size();
-        Ordering marginalizationOrdering(totalOrdering.begin(), totalOrdering.end() - nVars);
-        Ordering marginalVarsOrdering(totalOrdering.end() - nVars, totalOrdering.end());
-
-        // Call this function again with the computed orderings
-        return marginalMultifrontalBayesTree(marginalVarsOrdering, marginalizationOrdering, function, *variableIndex);
-      }
+    if(!variableIndex) {
+      // If no variable index is provided, compute one and call this function again
+      VariableIndex computedVariableIndex(asDerived());
+      return marginalMultifrontalBayesTree(variables, function, computedVariableIndex);
     } else {
+      // No ordering was provided for the marginalized variables, so order them using constrained
+      // COLAMD.
+      bool unmarginalizedAreOrdered = (boost::get<const Ordering&>(&variables) != 0);
+      const KeyVector* variablesOrOrdering =
+        unmarginalizedAreOrdered ?
+        boost::get<const Ordering&>(&variables) : boost::get<const KeyVector&>(&variables);
+
+      Ordering totalOrdering =
+        Ordering::ColamdConstrainedLast(*variableIndex, *variablesOrOrdering, unmarginalizedAreOrdered);
+
+      // Split up ordering
+      const size_t nVars = variablesOrOrdering->size();
+      Ordering marginalizationOrdering(totalOrdering.begin(), totalOrdering.end() - nVars);
+      Ordering marginalVarsOrdering(totalOrdering.end() - nVars, totalOrdering.end());
+
+      // Call this function again with the computed orderings
+      return marginalMultifrontalBayesTree(marginalVarsOrdering, marginalizationOrdering, function, *variableIndex);
+    }
+  }
+
+  /* ************************************************************************* */
+  template<class FACTORGRAPH>
+  boost::shared_ptr<typename EliminateableFactorGraph<FACTORGRAPH>::BayesTreeType>
+  EliminateableFactorGraph<FACTORGRAPH>::marginalMultifrontalBayesTree(
+    boost::variant<const Ordering&, const KeyVector&> variables,
+    const Ordering& marginalizedVariableOrdering,
+    const Eliminate& function, OptionalVariableIndex variableIndex) const
+  {
+    if(!variableIndex) {
      // If no variable index is provided, compute one and call this function again
      VariableIndex computedVariableIndex(asDerived());
      return marginalMultifrontalBayesTree(variables, marginalizedVariableOrdering, function, computedVariableIndex);
+    } else {
+      gttic(marginalMultifrontalBayesTree);
+      // An ordering was provided for the marginalized variables, so we can first eliminate them
+      // in the order requested.
+      boost::shared_ptr<BayesTreeType> bayesTree;
+      boost::shared_ptr<FactorGraphType> factorGraph;
+      boost::tie(bayesTree,factorGraph) =
+        eliminatePartialMultifrontal(marginalizedVariableOrdering, function, *variableIndex);
+
+      if(const Ordering* varsAsOrdering = boost::get<const Ordering&>(&variables))
+      {
+        // An ordering was also provided for the unmarginalized variables, so we can also
+        // eliminate them in the order requested.
+        return factorGraph->eliminateMultifrontal(*varsAsOrdering, function);
+      }
+      else
+      {
+        // No ordering was provided for the unmarginalized variables, so order them with COLAMD.
+        return factorGraph->eliminateMultifrontal(function);
+      }
     }
   }

@@ -88,9 +88,6 @@ namespace gtsam {
     /// The function type that does a single dense elimination step on a subgraph.
     typedef boost::function<EliminationResult(const FactorGraphType&, const Ordering&)> Eliminate;

-    /// Typedef for an optional ordering as an argument to elimination functions
-    typedef boost::optional<const Ordering&> OptionalOrdering;
-
     /// Typedef for an optional variable index as an argument to elimination functions
    typedef boost::optional<const VariableIndex&> OptionalVariableIndex;

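Only the OptionalOrdering typedef is removed here; OptionalVariableIndex (and OptionalOrderingType) stay. User code that used to carry a boost::optional<const Ordering&> and pass it straight through now has to choose between the two concrete overloads itself. A hedged sketch of that caller-side migration; the eliminate functions below are stand-ins, not GTSAM signatures:

    #include <boost/optional.hpp>
    #include <iostream>
    #include <string>
    #include <vector>

    using Ordering = std::vector<int>;

    // Stand-ins for the two concrete overloads that replace the old
    // eliminate(OptionalOrdering) entry point.
    std::string eliminate() { return "ordering chosen internally (COLAMD/METIS)"; }
    std::string eliminate(const Ordering& ordering) {
      return "caller-supplied ordering of size " + std::to_string(ordering.size());
    }

    // Before, callers could hold a boost::optional<Ordering> and forward it as one
    // argument; the choice between the two paths now has to be made at the call site.
    std::string eliminateMaybe(const boost::optional<Ordering>& maybeOrdering) {
      return maybeOrdering ? eliminate(*maybeOrdering) : eliminate();
    }

    int main() {
      std::cout << eliminateMaybe(boost::none) << "\n";
      std::cout << eliminateMaybe(Ordering{3, 1, 2}) << "\n";
      return 0;
    }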
@@ -108,24 +105,38 @@ namespace gtsam {
     * <b> Example - METIS ordering for elimination
     * \code
     * boost::shared_ptr<GaussianBayesNet> result = graph.eliminateSequential(OrderingType::METIS);
-    *
-    * <b> Example - Full QR elimination in specified order:
-    * \code
-    * boost::shared_ptr<GaussianBayesNet> result = graph.eliminateSequential(EliminateQR, myOrdering);
     * \endcode
     *
     * <b> Example - Reusing an existing VariableIndex to improve performance, and using COLAMD ordering: </b>
     * \code
     * VariableIndex varIndex(graph); // Build variable index
     * Data data = otherFunctionUsingVariableIndex(graph, varIndex); // Other code that uses variable index
-    * boost::shared_ptr<GaussianBayesNet> result = graph.eliminateSequential(EliminateQR, boost::none, varIndex);
+    * boost::shared_ptr<GaussianBayesNet> result = graph.eliminateSequential(EliminateQR, varIndex, boost::none);
     * \endcode
     * */
    boost::shared_ptr<BayesNetType> eliminateSequential(
-        OptionalOrdering ordering = boost::none,
+        OptionalOrderingType orderingType = boost::none,
         const Eliminate& function = EliminationTraitsType::DefaultEliminate,
-        OptionalVariableIndex variableIndex = boost::none,
-        OptionalOrderingType orderingType = boost::none) const;
+        OptionalVariableIndex variableIndex = boost::none) const;
+
+    /** Do sequential elimination of all variables to produce a Bayes net.
+     *
+     * <b> Example - Full QR elimination in specified order:
+     * \code
+     * boost::shared_ptr<GaussianBayesNet> result = graph.eliminateSequential(myOrdering, EliminateQR);
+     * \endcode
+     *
+     * <b> Example - Reusing an existing VariableIndex to improve performance: </b>
+     * \code
+     * VariableIndex varIndex(graph); // Build variable index
+     * Data data = otherFunctionUsingVariableIndex(graph, varIndex); // Other code that uses variable index
+     * boost::shared_ptr<GaussianBayesNet> result = graph.eliminateSequential(myOrdering, EliminateQR, varIndex, boost::none);
+     * \endcode
+     * */
+    boost::shared_ptr<BayesNetType> eliminateSequential(
+        const Ordering& ordering,
+        const Eliminate& function = EliminationTraitsType::DefaultEliminate,
+        OptionalVariableIndex variableIndex = boost::none) const;

     /** Do multifrontal elimination of all variables to produce a Bayes tree. If an ordering is not
      * provided, the ordering will be computed using either COLAMD or METIS, dependeing on
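Taken together with the Doxygen examples above, the call shapes for sequential elimination now look as follows. This is a usage sketch only: the include paths, EliminateQR, and VariableIndex are assumed from GTSAM at this revision, and it needs a real factor graph to do anything.

    #include <gtsam/inference/VariableIndex.h>
    #include <gtsam/linear/GaussianBayesNet.h>
    #include <gtsam/linear/GaussianFactorGraph.h>

    void demo(const gtsam::GaussianFactorGraph& graph, const gtsam::Ordering& myOrdering) {
      using namespace gtsam;

      // 1. Ordering computed internally (COLAMD by default). The Doxygen above also
      //    documents graph.eliminateSequential(Ordering::METIS) for a METIS ordering.
      boost::shared_ptr<GaussianBayesNet> bn1 = graph.eliminateSequential();

      // 2. Eliminate in a caller-specified order, optionally with a specific
      //    dense elimination function.
      boost::shared_ptr<GaussianBayesNet> bn2 = graph.eliminateSequential(myOrdering);
      boost::shared_ptr<GaussianBayesNet> bn3 = graph.eliminateSequential(myOrdering, EliminateQR);

      // 3. Reuse a precomputed VariableIndex to avoid rebuilding it.
      VariableIndex varIndex(graph);
      boost::shared_ptr<GaussianBayesNet> bn4 =
          graph.eliminateSequential(myOrdering, EliminateQR, varIndex);

      (void)bn1; (void)bn2; (void)bn3; (void)bn4;
    }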
@@ -136,11 +147,6 @@ namespace gtsam {
     * boost::shared_ptr<GaussianBayesTree> result = graph.eliminateMultifrontal(EliminateCholesky);
     * \endcode
     *
-    * <b> Example - Full QR elimination in specified order:
-    * \code
-    * boost::shared_ptr<GaussianBayesTree> result = graph.eliminateMultifrontal(EliminateQR, myOrdering);
-    * \endcode
-    *
     * <b> Example - Reusing an existing VariableIndex to improve performance, and using COLAMD ordering: </b>
     * \code
     * VariableIndex varIndex(graph); // Build variable index
@@ -149,10 +155,23 @@ namespace gtsam {
     * \endcode
     * */
    boost::shared_ptr<BayesTreeType> eliminateMultifrontal(
-        OptionalOrdering ordering = boost::none,
+        OptionalOrderingType orderingType = boost::none,
         const Eliminate& function = EliminationTraitsType::DefaultEliminate,
-        OptionalVariableIndex variableIndex = boost::none,
-        OptionalOrderingType orderingType = boost::none) const;
+        OptionalVariableIndex variableIndex = boost::none) const;
+
+    /** Do multifrontal elimination of all variables to produce a Bayes tree. If an ordering is not
+     * provided, the ordering will be computed using either COLAMD or METIS, dependeing on
+     * the parameter orderingType (Ordering::COLAMD or Ordering::METIS)
+     *
+     * <b> Example - Full QR elimination in specified order:
+     * \code
+     * boost::shared_ptr<GaussianBayesTree> result = graph.eliminateMultifrontal(EliminateQR, myOrdering);
+     * \endcode
+     * */
+    boost::shared_ptr<BayesTreeType> eliminateMultifrontal(
+        const Ordering& ordering,
+        const Eliminate& function = EliminationTraitsType::DefaultEliminate,
+        OptionalVariableIndex variableIndex = boost::none) const;

     /** Do sequential elimination of some variables, in \c ordering provided, to produce a Bayes net
      * and a remaining factor graph. This computes the factorization \f$ p(X) = p(A|B) p(B) \f$,
@@ -194,20 +213,47 @@ namespace gtsam {
         const Eliminate& function = EliminationTraitsType::DefaultEliminate,
         OptionalVariableIndex variableIndex = boost::none) const;

-    /** Compute the marginal of the requested variables and return the result as a Bayes net.
+    /** Compute the marginal of the requested variables and return the result as a Bayes net. Uses
+     * COLAMD marginalization ordering by default
      * @param variables Determines the variables whose marginal to compute, if provided as an
      *        Ordering they will be ordered in the returned BayesNet as specified, and if provided
      *        as a KeyVector they will be ordered using constrained COLAMD.
-     * @param marginalizedVariableOrdering Optional ordering for the variables being marginalized
-     *        out, i.e. all variables not in \c variables. If this is boost::none, the ordering
-     *        will be computed with COLAMD.
      * @param function Optional dense elimination function, if not provided the default will be
      *        used.
      * @param variableIndex Optional pre-computed VariableIndex for the factor graph, if not
      *        provided one will be computed. */
    boost::shared_ptr<BayesNetType> marginalMultifrontalBayesNet(
        boost::variant<const Ordering&, const KeyVector&> variables,
-        OptionalOrdering marginalizedVariableOrdering = boost::none,
+        const Eliminate& function = EliminationTraitsType::DefaultEliminate,
+        OptionalVariableIndex variableIndex = boost::none) const;
+
+    /** Compute the marginal of the requested variables and return the result as a Bayes net.
+     * @param variables Determines the variables whose marginal to compute, if provided as an
+     *        Ordering they will be ordered in the returned BayesNet as specified, and if provided
+     *        as a KeyVector they will be ordered using constrained COLAMD.
+     * @param marginalizedVariableOrdering Ordering for the variables being marginalized out,
+     *        i.e. all variables not in \c variables.
+     * @param function Optional dense elimination function, if not provided the default will be
+     *        used.
+     * @param variableIndex Optional pre-computed VariableIndex for the factor graph, if not
+     *        provided one will be computed. */
+    boost::shared_ptr<BayesNetType> marginalMultifrontalBayesNet(
+        boost::variant<const Ordering&, const KeyVector&> variables,
+        const Ordering& marginalizedVariableOrdering,
+        const Eliminate& function = EliminationTraitsType::DefaultEliminate,
+        OptionalVariableIndex variableIndex = boost::none) const;
+
+    /** Compute the marginal of the requested variables and return the result as a Bayes tree. Uses
+     * COLAMD marginalization order by default
+     * @param variables Determines the variables whose marginal to compute, if provided as an
+     *        Ordering they will be ordered in the returned BayesNet as specified, and if provided
+     *        as a KeyVector they will be ordered using constrained COLAMD.
+     * @param function Optional dense elimination function, if not provided the default will be
+     *        used.
+     * @param variableIndex Optional pre-computed VariableIndex for the factor graph, if not
+     *        provided one will be computed. */
+    boost::shared_ptr<BayesTreeType> marginalMultifrontalBayesTree(
+        boost::variant<const Ordering&, const KeyVector&> variables,
         const Eliminate& function = EliminationTraitsType::DefaultEliminate,
         OptionalVariableIndex variableIndex = boost::none) const;

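The marginal computations follow the same pairing: one overload picks the marginalization ordering with constrained COLAMD, the other takes it explicitly, and the variables themselves may be passed either as an Ordering (kept in that order) or as a KeyVector (reordered by constrained COLAMD). A hedged usage sketch, with GTSAM types and include paths assumed and arbitrary integer keys:

    #include <gtsam/inference/Ordering.h>
    #include <gtsam/linear/GaussianFactorGraph.h>

    void marginals(const gtsam::GaussianFactorGraph& graph) {
      using namespace gtsam;

      // Variables given as a KeyVector: the returned conditionals are ordered by
      // constrained COLAMD, and the marginalization ordering is chosen internally.
      KeyVector keep{1, 2};
      auto bn1 = graph.marginalMultifrontalBayesNet(keep);

      // Variables given as an Ordering: the result keeps exactly this order.
      Ordering keepOrdered(KeyVector{2, 1});
      auto bn2 = graph.marginalMultifrontalBayesNet(keepOrdered);

      // Explicit ordering for the variables being marginalized out.
      Ordering marginalizeOut(KeyVector{3, 4, 5});
      auto bn3 = graph.marginalMultifrontalBayesNet(keepOrdered, marginalizeOut);

      (void)bn1; (void)bn2; (void)bn3;
    }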
@@ -215,16 +261,15 @@ namespace gtsam {
      * @param variables Determines the variables whose marginal to compute, if provided as an
      *        Ordering they will be ordered in the returned BayesNet as specified, and if provided
      *        as a KeyVector they will be ordered using constrained COLAMD.
-     * @param marginalizedVariableOrdering Optional ordering for the variables being marginalized
-     *        out, i.e. all variables not in \c variables. If this is boost::none, the ordering
-     *        will be computed with COLAMD.
+     * @param marginalizedVariableOrdering Ordering for the variables being marginalized out,
+     *        i.e. all variables not in \c variables.
      * @param function Optional dense elimination function, if not provided the default will be
      *        used.
      * @param variableIndex Optional pre-computed VariableIndex for the factor graph, if not
      *        provided one will be computed. */
    boost::shared_ptr<BayesTreeType> marginalMultifrontalBayesTree(
        boost::variant<const Ordering&, const KeyVector&> variables,
-        OptionalOrdering marginalizedVariableOrdering = boost::none,
+        const Ordering& marginalizedVariableOrdering,
        const Eliminate& function = EliminationTraitsType::DefaultEliminate,
        OptionalVariableIndex variableIndex = boost::none) const;

@@ -241,6 +286,59 @@ namespace gtsam {

     // Access the derived factor graph class
     FactorGraphType& asDerived() { return static_cast<FactorGraphType&>(*this); }

+  public:
+    /** \deprecated ordering and orderingType shouldn't both be specified */
+    boost::shared_ptr<BayesNetType> eliminateSequential(
+        const Ordering& ordering,
+        const Eliminate& function,
+        OptionalVariableIndex variableIndex,
+        OptionalOrderingType orderingType) const {
+      return eliminateSequential(ordering, function, variableIndex);
+    }
+
+    /** \deprecated orderingType specified first for consistency */
+    boost::shared_ptr<BayesNetType> eliminateSequential(
+        const Eliminate& function,
+        OptionalVariableIndex variableIndex = boost::none,
+        OptionalOrderingType orderingType = boost::none) const {
+      return eliminateSequential(orderingType, function, variableIndex);
+    }
+
+    /** \deprecated ordering and orderingType shouldn't both be specified */
+    boost::shared_ptr<BayesTreeType> eliminateMultifrontal(
+        const Ordering& ordering,
+        const Eliminate& function,
+        OptionalVariableIndex variableIndex,
+        OptionalOrderingType orderingType) const {
+      return eliminateMultifrontal(ordering, function, variableIndex);
+    }
+
+    /** \deprecated orderingType specified first for consistency */
+    boost::shared_ptr<BayesTreeType> eliminateMultifrontal(
+        const Eliminate& function,
+        OptionalVariableIndex variableIndex = boost::none,
+        OptionalOrderingType orderingType = boost::none) const {
+      return eliminateMultifrontal(orderingType, function, variableIndex);
+    }
+
+    /** \deprecated */
+    boost::shared_ptr<BayesNetType> marginalMultifrontalBayesNet(
+        boost::variant<const Ordering&, const KeyVector&> variables,
+        boost::none_t,
+        const Eliminate& function = EliminationTraitsType::DefaultEliminate,
+        OptionalVariableIndex variableIndex = boost::none) const {
+      return marginalMultifrontalBayesNet(variables, function, variableIndex);
+    }
+
+    /** \deprecated */
+    boost::shared_ptr<BayesTreeType> marginalMultifrontalBayesTree(
+        boost::variant<const Ordering&, const KeyVector&> variables,
+        boost::none_t,
+        const Eliminate& function = EliminationTraitsType::DefaultEliminate,
+        OptionalVariableIndex variableIndex = boost::none) const {
+      return marginalMultifrontalBayesTree(variables, function, variableIndex);
+    }
   };

 }
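The deprecated block re-opened with public: above keeps the old call shapes compiling: the four-argument (ordering, function, variableIndex, orderingType) forms forward to the new three-argument overloads, and a bare boost::none passed where the marginalization ordering used to go now binds to a dedicated boost::none_t parameter. The none_t mechanism in isolation, with toy names rather than GTSAM's:

    #include <boost/none.hpp>
    #include <iostream>
    #include <string>
    #include <vector>

    using Ordering = std::vector<int>;

    // Current overloads (toy stand-ins for marginalMultifrontalBayesNet).
    std::string marginal(const Ordering& variables) {
      return "auto marginalization order for " + std::to_string(variables.size()) + " variables";
    }
    std::string marginal(const Ordering& variables, const Ordering& marginalizedOrdering) {
      return "explicit marginalization order of size " + std::to_string(marginalizedOrdering.size())
             + " for " + std::to_string(variables.size()) + " variables";
    }

    // Deprecated shim: accepts the literal boost::none that legacy callers passed in
    // the old optional-ordering slot and forwards to the new overload.
    std::string marginal(const Ordering& variables, boost::none_t) {
      return marginal(variables);
    }

    int main() {
      Ordering vars{1, 2};
      std::cout << marginal(vars) << "\n";                  // new style
      std::cout << marginal(vars, boost::none) << "\n";     // legacy call still compiles
      std::cout << marginal(vars, Ordering{3, 4}) << "\n";  // explicit ordering
      return 0;
    }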
@@ -156,16 +156,17 @@ namespace gtsam {
   }

   /* ************************************************************************* */
-  pair<Matrix, Vector> GaussianBayesNet::matrix(boost::optional<const Ordering&> ordering) const {
-    if (ordering) {
-      // Convert to a GaussianFactorGraph and use its machinery
-      GaussianFactorGraph factorGraph(*this);
-      return factorGraph.jacobian(ordering);
-    } else {
-      // recursively call with default ordering
-      const auto defaultOrdering = this->ordering();
-      return matrix(defaultOrdering);
-    }
+  pair<Matrix, Vector> GaussianBayesNet::matrix(const Ordering& ordering) const {
+    // Convert to a GaussianFactorGraph and use its machinery
+    GaussianFactorGraph factorGraph(*this);
+    return factorGraph.jacobian(ordering);
+  }
+
+  /* ************************************************************************* */
+  pair<Matrix, Vector> GaussianBayesNet::matrix() const {
+    // recursively call with default ordering
+    const auto defaultOrdering = this->ordering();
+    return matrix(defaultOrdering);
   }

   ///* ************************************************************************* */
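GaussianBayesNet::matrix is now two functions: the no-argument form computes the Bayes net's own ordering and forwards to the ordered form. A hedged usage sketch, with the GTSAM type names and include path assumed from this revision:

    #include <gtsam/linear/GaussianBayesNet.h>

    void denseForm(const gtsam::GaussianBayesNet& bayesNet, const gtsam::Ordering& ordering) {
      // Default ordering, computed internally from the Bayes net itself.
      std::pair<gtsam::Matrix, gtsam::Vector> Rd1 = bayesNet.matrix();

      // Caller-chosen column ordering; as the header comment notes, the result is
      // only guaranteed upper-triangular for the net's own elimination ordering.
      std::pair<gtsam::Matrix, gtsam::Vector> Rd2 = bayesNet.matrix(ordering);

      (void)Rd1; (void)Rd2;
    }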
@@ -92,7 +92,14 @@ namespace gtsam {
      * Will return upper-triangular matrix only when using 'ordering' above.
      * In case Bayes net is incomplete zero columns are added to the end.
      */
-    std::pair<Matrix, Vector> matrix(boost::optional<const Ordering&> ordering = boost::none) const;
+    std::pair<Matrix, Vector> matrix(const Ordering& ordering) const;

+    /**
+     * Return (dense) upper-triangular matrix representation
+     * Will return upper-triangular matrix only when using 'ordering' above.
+     * In case Bayes net is incomplete zero columns are added to the end.
+     */
+    std::pair<Matrix, Vector> matrix() const;
+
     /**
      * Optimize along the gradient direction, with a closed-form computation to perform the line
@@ -190,33 +190,62 @@ namespace gtsam {

   /* ************************************************************************* */
   Matrix GaussianFactorGraph::augmentedJacobian(
-      boost::optional<const Ordering&> optionalOrdering) const {
+      const Ordering& ordering) const {
     // combine all factors
-    JacobianFactor combined(*this, optionalOrdering);
+    JacobianFactor combined(*this, ordering);
+    return combined.augmentedJacobian();
+  }
+
+  /* ************************************************************************* */
+  Matrix GaussianFactorGraph::augmentedJacobian() const {
+    // combine all factors
+    JacobianFactor combined(*this);
     return combined.augmentedJacobian();
   }

   /* ************************************************************************* */
   pair<Matrix, Vector> GaussianFactorGraph::jacobian(
-      boost::optional<const Ordering&> optionalOrdering) const {
-    Matrix augmented = augmentedJacobian(optionalOrdering);
+      const Ordering& ordering) const {
+    Matrix augmented = augmentedJacobian(ordering);
+    return make_pair(augmented.leftCols(augmented.cols() - 1),
+        augmented.col(augmented.cols() - 1));
+  }
+
+  /* ************************************************************************* */
+  pair<Matrix, Vector> GaussianFactorGraph::jacobian() const {
+    Matrix augmented = augmentedJacobian();
     return make_pair(augmented.leftCols(augmented.cols() - 1),
         augmented.col(augmented.cols() - 1));
   }

   /* ************************************************************************* */
   Matrix GaussianFactorGraph::augmentedHessian(
-      boost::optional<const Ordering&> optionalOrdering) const {
+      const Ordering& ordering) const {
     // combine all factors and get upper-triangular part of Hessian
-    Scatter scatter(*this, optionalOrdering);
+    Scatter scatter(*this, ordering);
+    HessianFactor combined(*this, scatter);
+    return combined.info().selfadjointView();;
+  }
+
+  /* ************************************************************************* */
+  Matrix GaussianFactorGraph::augmentedHessian() const {
+    // combine all factors and get upper-triangular part of Hessian
+    Scatter scatter(*this);
     HessianFactor combined(*this, scatter);
     return combined.info().selfadjointView();;
   }

   /* ************************************************************************* */
   pair<Matrix, Vector> GaussianFactorGraph::hessian(
-      boost::optional<const Ordering&> optionalOrdering) const {
-    Matrix augmented = augmentedHessian(optionalOrdering);
+      const Ordering& ordering) const {
+    Matrix augmented = augmentedHessian(ordering);
+    size_t n = augmented.rows() - 1;
+    return make_pair(augmented.topLeftCorner(n, n), augmented.topRightCorner(n, 1));
+  }
+
+  /* ************************************************************************* */
+  pair<Matrix, Vector> GaussianFactorGraph::hessian() const {
+    Matrix augmented = augmentedHessian();
     size_t n = augmented.rows() - 1;
     return make_pair(augmented.topLeftCorner(n, n), augmented.topRightCorner(n, 1));
   }
@@ -253,8 +282,13 @@ namespace gtsam {
   }

   /* ************************************************************************* */
-  VectorValues GaussianFactorGraph::optimize(OptionalOrdering ordering, const Eliminate& function) const
-  {
+  VectorValues GaussianFactorGraph::optimize(const Eliminate& function) const {
+    gttic(GaussianFactorGraph_optimize);
+    return BaseEliminateable::eliminateMultifrontal(function)->optimize();
+  }
+
+  /* ************************************************************************* */
+  VectorValues GaussianFactorGraph::optimize(const Ordering& ordering, const Eliminate& function) const {
     gttic(GaussianFactorGraph_optimize);
     return BaseEliminateable::eliminateMultifrontal(ordering, function)->optimize();
   }
@@ -460,4 +494,11 @@ namespace gtsam {
     return e;
   }

+  /* ************************************************************************* */
+  /** \deprecated */
+  VectorValues GaussianFactorGraph::optimize(boost::none_t,
+      const Eliminate& function) const {
+    return optimize(function);
+  }
+
 } // namespace gtsam
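GaussianFactorGraph::optimize gets the same treatment, plus a deprecated boost::none_t overload (declared further down in the header hunk) so that legacy optimize(boost::none, ...) calls keep compiling. A hedged usage sketch with the GTSAM API assumed as in this revision:

    #include <boost/optional.hpp>
    #include <gtsam/linear/GaussianFactorGraph.h>

    gtsam::VectorValues solve(const gtsam::GaussianFactorGraph& graph,
                              const gtsam::Ordering& ordering) {
      gtsam::VectorValues x1 = graph.optimize();             // ordering chosen internally
      gtsam::VectorValues x2 = graph.optimize(ordering);     // caller-specified elimination order
      gtsam::VectorValues x3 = graph.optimize(boost::none);  // legacy call, hits the deprecated shim
      (void)x1; (void)x3;
      return x2;
    }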
@@ -201,7 +201,16 @@ namespace gtsam {
      * \f$ \frac{1}{2} \Vert Ax-b \Vert^2 \f$. See also
      * GaussianFactorGraph::jacobian and GaussianFactorGraph::sparseJacobian.
      */
-    Matrix augmentedJacobian(boost::optional<const Ordering&> optionalOrdering = boost::none) const;
+    Matrix augmentedJacobian(const Ordering& ordering) const;

+    /**
+     * Return a dense \f$ [ \;A\;b\; ] \in \mathbb{R}^{m \times n+1} \f$
+     * Jacobian matrix, augmented with b with the noise models baked
+     * into A and b. The negative log-likelihood is
+     * \f$ \frac{1}{2} \Vert Ax-b \Vert^2 \f$. See also
+     * GaussianFactorGraph::jacobian and GaussianFactorGraph::sparseJacobian.
+     */
+    Matrix augmentedJacobian() const;
+
     /**
      * Return the dense Jacobian \f$ A \f$ and right-hand-side \f$ b \f$,
@@ -210,7 +219,16 @@ namespace gtsam {
      * GaussianFactorGraph::augmentedJacobian and
      * GaussianFactorGraph::sparseJacobian.
      */
-    std::pair<Matrix,Vector> jacobian(boost::optional<const Ordering&> optionalOrdering = boost::none) const;
+    std::pair<Matrix,Vector> jacobian(const Ordering& ordering) const;

+    /**
+     * Return the dense Jacobian \f$ A \f$ and right-hand-side \f$ b \f$,
+     * with the noise models baked into A and b. The negative log-likelihood
+     * is \f$ \frac{1}{2} \Vert Ax-b \Vert^2 \f$. See also
+     * GaussianFactorGraph::augmentedJacobian and
+     * GaussianFactorGraph::sparseJacobian.
+     */
+    std::pair<Matrix,Vector> jacobian() const;
+
     /**
      * Return a dense \f$ \Lambda \in \mathbb{R}^{n+1 \times n+1} \f$ Hessian
@@ -223,7 +241,20 @@ namespace gtsam {
        and the negative log-likelihood is
        \f$ \frac{1}{2} x^T \Lambda x + \eta^T x + c \f$.
      */
-    Matrix augmentedHessian(boost::optional<const Ordering&> optionalOrdering = boost::none) const;
+    Matrix augmentedHessian(const Ordering& ordering) const;

+    /**
+     * Return a dense \f$ \Lambda \in \mathbb{R}^{n+1 \times n+1} \f$ Hessian
+     * matrix, augmented with the information vector \f$ \eta \f$. The
+     * augmented Hessian is
+       \f[ \left[ \begin{array}{ccc}
+       \Lambda & \eta \\
+       \eta^T & c
+       \end{array} \right] \f]
+       and the negative log-likelihood is
+       \f$ \frac{1}{2} x^T \Lambda x + \eta^T x + c \f$.
+     */
+    Matrix augmentedHessian() const;
+
     /**
      * Return the dense Hessian \f$ \Lambda \f$ and information vector
@@ -231,7 +262,15 @@ namespace gtsam {
      * is \frac{1}{2} x^T \Lambda x + \eta^T x + c. See also
      * GaussianFactorGraph::augmentedHessian.
      */
-    std::pair<Matrix,Vector> hessian(boost::optional<const Ordering&> optionalOrdering = boost::none) const;
+    std::pair<Matrix,Vector> hessian(const Ordering& ordering) const;

+    /**
+     * Return the dense Hessian \f$ \Lambda \f$ and information vector
+     * \f$ \eta \f$, with the noise models baked in. The negative log-likelihood
+     * is \frac{1}{2} x^T \Lambda x + \eta^T x + c. See also
+     * GaussianFactorGraph::augmentedHessian.
+     */
+    std::pair<Matrix,Vector> hessian() const;
+
     /** Return only the diagonal of the Hessian A'*A, as a VectorValues */
     virtual VectorValues hessianDiagonal() const;
@@ -243,7 +282,14 @@ namespace gtsam {
      * the dense elimination function specified in \c function (default EliminatePreferCholesky),
      * followed by back-substitution in the Bayes tree resulting from elimination. Is equivalent
      * to calling graph.eliminateMultifrontal()->optimize(). */
-    VectorValues optimize(OptionalOrdering ordering = boost::none,
+    VectorValues optimize(
+        const Eliminate& function = EliminationTraitsType::DefaultEliminate) const;
+
+    /** Solve the factor graph by performing multifrontal variable elimination in COLAMD order using
+     * the dense elimination function specified in \c function (default EliminatePreferCholesky),
+     * followed by back-substitution in the Bayes tree resulting from elimination. Is equivalent
+     * to calling graph.eliminateMultifrontal()->optimize(). */
+    VectorValues optimize(const Ordering&,
         const Eliminate& function = EliminationTraitsType::DefaultEliminate) const;

     /**
@ -329,6 +375,12 @@ namespace gtsam {
|
||||||
ar & BOOST_SERIALIZATION_BASE_OBJECT_NVP(Base);
|
ar & BOOST_SERIALIZATION_BASE_OBJECT_NVP(Base);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
public:
|
||||||
|
|
||||||
|
/** \deprecated */
|
||||||
|
VectorValues optimize(boost::none_t,
|
||||||
|
const Eliminate& function = EliminationTraitsType::DefaultEliminate) const;
|
||||||
|
|
||||||
};
|
};
|
||||||
|
|
||||||
/**
|
/**
|
||||||
|
|
|
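Not part of the diff: a minimal usage sketch of the reworked GaussianFactorGraph API above. The toy 1-D graph and keys 0/1 are made up for illustration; the called overloads are the ones declared in this hunk.

// Sketch only, assuming the overloads introduced by this PR.
#include <gtsam/inference/Ordering.h>
#include <gtsam/linear/GaussianFactorGraph.h>
#include <gtsam/linear/JacobianFactor.h>  // EliminateQR
#include <gtsam/linear/NoiseModel.h>
#include <gtsam/linear/VectorValues.h>

#include <tuple>

using namespace gtsam;

int main() {
  GaussianFactorGraph gfg;
  const auto unit = noiseModel::Unit::Create(1);
  const Matrix I1 = Matrix::Identity(1, 1);
  gfg.add(0, I1, Vector::Zero(1), unit);                  // prior: x0 = 0
  gfg.add(0, -I1, 1, I1, Vector::Constant(1, 1.0), unit); // odometry: x1 - x0 = 1

  // COLAMD ordering, default elimination (new no-ordering overload):
  VectorValues delta1 = gfg.optimize();

  // An explicit ordering is now a separate overload instead of a boost::optional argument:
  Ordering ordering;
  ordering.push_back(1);
  ordering.push_back(0);
  VectorValues delta2 = gfg.optimize(ordering, EliminateQR);

  // The dense views follow the same two-overload pattern:
  Matrix A; Vector b;
  std::tie(A, b) = gfg.jacobian();          // layout chosen by COLAMD
  std::tie(A, b) = gfg.jacobian(ordering);  // layout fixed by the given ordering
  return 0;
}

Splitting the optional parameter into two overloads keeps default call sites unchanged while removing the boost::optional<const Ordering&> pattern.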
@@ -250,10 +250,10 @@ HessianFactor::HessianFactor(const GaussianFactor& gf) :

/* ************************************************************************* */
HessianFactor::HessianFactor(const GaussianFactorGraph& factors,
-    boost::optional<const Scatter&> scatter) {
+    const Scatter& scatter) {
  gttic(HessianFactor_MergeConstructor);

-  Allocate(scatter ? *scatter : Scatter(factors));
+  Allocate(scatter);

  // Form A' * A
  gttic(update);
@@ -176,7 +176,11 @@ namespace gtsam {

    /** Combine a set of factors into a single dense HessianFactor */
    explicit HessianFactor(const GaussianFactorGraph& factors,
-        boost::optional<const Scatter&> scatter = boost::none);
+        const Scatter& scatter);
+
+    /** Combine a set of factors into a single dense HessianFactor */
+    explicit HessianFactor(const GaussianFactorGraph& factors)
+      : HessianFactor(factors, Scatter(factors)) {}

    /** Destructor */
    virtual ~HessianFactor() {}
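Not part of the diff: a small sketch of the two HessianFactor combine constructors after the split; 'gfg' is any GaussianFactorGraph.

// Sketch only, assuming the delegating constructor added above.
#include <gtsam/linear/GaussianFactorGraph.h>
#include <gtsam/linear/HessianFactor.h>
#include <gtsam/linear/Scatter.h>

using namespace gtsam;

void combineToHessian(const GaussianFactorGraph& gfg) {
  // New convenience overload: the Scatter is computed internally.
  HessianFactor combined(gfg);

  // Equivalent explicit form, useful when a Scatter has already been computed:
  Scatter scatter(gfg);
  HessianFactor combinedWithScatter(gfg, scatter);
}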
@@ -220,60 +220,13 @@ FastVector<JacobianFactor::shared_ptr> _convertOrCastToJacobians(
}

/* ************************************************************************* */
-JacobianFactor::JacobianFactor(const GaussianFactorGraph& graph,
-    boost::optional<const Ordering&> ordering,
-    boost::optional<const VariableSlots&> p_variableSlots) {
-  gttic(JacobianFactor_combine_constructor);
-
-  // Compute VariableSlots if one was not provided
-  // Binds reference, does not copy VariableSlots
-  const VariableSlots & variableSlots =
-      p_variableSlots ? p_variableSlots.get() : VariableSlots(graph);
+void JacobianFactor::JacobianFactorHelper(const GaussianFactorGraph& graph,
+    const FastVector<VariableSlots::const_iterator>& orderedSlots) {

  // Cast or convert to Jacobians
  FastVector<JacobianFactor::shared_ptr> jacobians = _convertOrCastToJacobians(
      graph);

-  gttic(Order_slots);
-  // Order variable slots - we maintain the vector of ordered slots, as well as keep a list
-  // 'unorderedSlots' of any variables discovered that are not in the ordering. Those will then
-  // be added after all of the ordered variables.
-  FastVector<VariableSlots::const_iterator> orderedSlots;
-  orderedSlots.reserve(variableSlots.size());
-  if (ordering) {
-    // If an ordering is provided, arrange the slots first that ordering
-    FastList<VariableSlots::const_iterator> unorderedSlots;
-    size_t nOrderingSlotsUsed = 0;
-    orderedSlots.resize(ordering->size());
-    FastMap<Key, size_t> inverseOrdering = ordering->invert();
-    for (VariableSlots::const_iterator item = variableSlots.begin();
-        item != variableSlots.end(); ++item) {
-      FastMap<Key, size_t>::const_iterator orderingPosition =
-          inverseOrdering.find(item->first);
-      if (orderingPosition == inverseOrdering.end()) {
-        unorderedSlots.push_back(item);
-      } else {
-        orderedSlots[orderingPosition->second] = item;
-        ++nOrderingSlotsUsed;
-      }
-    }
-    if (nOrderingSlotsUsed != ordering->size())
-      throw std::invalid_argument(
-          "The ordering provided to the JacobianFactor combine constructor\n"
-          "contained extra variables that did not appear in the factors to combine.");
-    // Add the remaining slots
-    for(VariableSlots::const_iterator item: unorderedSlots) {
-      orderedSlots.push_back(item);
-    }
-  } else {
-    // If no ordering is provided, arrange the slots as they were, which will be sorted
-    // numerically since VariableSlots uses a map sorting on Key.
-    for (VariableSlots::const_iterator item = variableSlots.begin();
-        item != variableSlots.end(); ++item)
-      orderedSlots.push_back(item);
-  }
-  gttoc(Order_slots);
-
  // Count dimensions
  FastVector<DenseIndex> varDims;
  DenseIndex m, n;

@@ -344,6 +297,127 @@ JacobianFactor::JacobianFactor(const GaussianFactorGraph& graph,
  this->setModel(anyConstrained, *sigmas);
}

+/* ************************************************************************* */
+// Order variable slots - we maintain the vector of ordered slots, as well as keep a list
+// 'unorderedSlots' of any variables discovered that are not in the ordering. Those will then
+// be added after all of the ordered variables.
+FastVector<VariableSlots::const_iterator> orderedSlotsHelper(
+    const Ordering& ordering,
+    const VariableSlots& variableSlots) {
+  gttic(Order_slots);
+
+  FastVector<VariableSlots::const_iterator> orderedSlots;
+  orderedSlots.reserve(variableSlots.size());
+
+  // If an ordering is provided, arrange the slots first that ordering
+  FastList<VariableSlots::const_iterator> unorderedSlots;
+  size_t nOrderingSlotsUsed = 0;
+  orderedSlots.resize(ordering.size());
+  FastMap<Key, size_t> inverseOrdering = ordering.invert();
+  for (VariableSlots::const_iterator item = variableSlots.begin();
+      item != variableSlots.end(); ++item) {
+    FastMap<Key, size_t>::const_iterator orderingPosition =
+        inverseOrdering.find(item->first);
+    if (orderingPosition == inverseOrdering.end()) {
+      unorderedSlots.push_back(item);
+    } else {
+      orderedSlots[orderingPosition->second] = item;
+      ++nOrderingSlotsUsed;
+    }
+  }
+  if (nOrderingSlotsUsed != ordering.size())
+    throw std::invalid_argument(
+        "The ordering provided to the JacobianFactor combine constructor\n"
+        "contained extra variables that did not appear in the factors to combine.");
+  // Add the remaining slots
+  for(VariableSlots::const_iterator item: unorderedSlots) {
+    orderedSlots.push_back(item);
+  }
+
+  gttoc(Order_slots);
+
+  return orderedSlots;
+}
+
+/* ************************************************************************* */
+JacobianFactor::JacobianFactor(const GaussianFactorGraph& graph) {
+  gttic(JacobianFactor_combine_constructor);
+
+  // Compute VariableSlots if one was not provided
+  // Binds reference, does not copy VariableSlots
+  const VariableSlots & variableSlots = VariableSlots(graph);
+
+  gttic(Order_slots);
+  // Order variable slots - we maintain the vector of ordered slots, as well as keep a list
+  // 'unorderedSlots' of any variables discovered that are not in the ordering. Those will then
+  // be added after all of the ordered variables.
+  FastVector<VariableSlots::const_iterator> orderedSlots;
+  orderedSlots.reserve(variableSlots.size());
+
+  // If no ordering is provided, arrange the slots as they were, which will be sorted
+  // numerically since VariableSlots uses a map sorting on Key.
+  for (VariableSlots::const_iterator item = variableSlots.begin();
+      item != variableSlots.end(); ++item)
+    orderedSlots.push_back(item);
+  gttoc(Order_slots);
+
+  JacobianFactorHelper(graph, orderedSlots);
+}
+
+/* ************************************************************************* */
+JacobianFactor::JacobianFactor(const GaussianFactorGraph& graph,
+    const VariableSlots& p_variableSlots) {
+  gttic(JacobianFactor_combine_constructor);
+
+  // Binds reference, does not copy VariableSlots
+  const VariableSlots & variableSlots = p_variableSlots;
+
+  gttic(Order_slots);
+  // Order variable slots - we maintain the vector of ordered slots, as well as keep a list
+  // 'unorderedSlots' of any variables discovered that are not in the ordering. Those will then
+  // be added after all of the ordered variables.
+  FastVector<VariableSlots::const_iterator> orderedSlots;
+  orderedSlots.reserve(variableSlots.size());
+
+  // If no ordering is provided, arrange the slots as they were, which will be sorted
+  // numerically since VariableSlots uses a map sorting on Key.
+  for (VariableSlots::const_iterator item = variableSlots.begin();
+      item != variableSlots.end(); ++item)
+    orderedSlots.push_back(item);
+  gttoc(Order_slots);
+
+  JacobianFactorHelper(graph, orderedSlots);
+}
+
+/* ************************************************************************* */
+JacobianFactor::JacobianFactor(const GaussianFactorGraph& graph,
+    const Ordering& ordering) {
+  gttic(JacobianFactor_combine_constructor);
+
+  // Compute VariableSlots if one was not provided
+  // Binds reference, does not copy VariableSlots
+  const VariableSlots & variableSlots = VariableSlots(graph);
+
+  // Order variable slots
+  FastVector<VariableSlots::const_iterator> orderedSlots =
+    orderedSlotsHelper(ordering, variableSlots);
+
+  JacobianFactorHelper(graph, orderedSlots);
+}
+
+/* ************************************************************************* */
+JacobianFactor::JacobianFactor(const GaussianFactorGraph& graph,
+    const Ordering& ordering,
+    const VariableSlots& p_variableSlots) {
+  gttic(JacobianFactor_combine_constructor);
+
+  // Order variable slots
+  FastVector<VariableSlots::const_iterator> orderedSlots =
+    orderedSlotsHelper(ordering, p_variableSlots);
+
+  JacobianFactorHelper(graph, orderedSlots);
+}
+
/* ************************************************************************* */
void JacobianFactor::print(const string& s,
    const KeyFormatter& formatter) const {
@@ -22,6 +22,7 @@
#include <gtsam/linear/NoiseModel.h>
#include <gtsam/base/VerticalBlockMatrix.h>
#include <gtsam/global_includes.h>
+#include <gtsam/inference/VariableSlots.h>

#include <boost/make_shared.hpp>

@@ -147,14 +148,37 @@ namespace gtsam {
    JacobianFactor(
      const KEYS& keys, const VerticalBlockMatrix& augmentedMatrix, const SharedDiagonal& sigmas = SharedDiagonal());

+    /**
+     * Build a dense joint factor from all the factors in a factor graph. If a VariableSlots
+     * structure computed for \c graph is already available, providing it will reduce the amount of
+     * computation performed. */
+    explicit JacobianFactor(
+        const GaussianFactorGraph& graph);
+
    /**
     * Build a dense joint factor from all the factors in a factor graph. If a VariableSlots
     * structure computed for \c graph is already available, providing it will reduce the amount of
     * computation performed. */
    explicit JacobianFactor(
        const GaussianFactorGraph& graph,
-        boost::optional<const Ordering&> ordering = boost::none,
-        boost::optional<const VariableSlots&> p_variableSlots = boost::none);
+        const VariableSlots& p_variableSlots);
+
+    /**
+     * Build a dense joint factor from all the factors in a factor graph. If a VariableSlots
+     * structure computed for \c graph is already available, providing it will reduce the amount of
+     * computation performed. */
+    explicit JacobianFactor(
+        const GaussianFactorGraph& graph,
+        const Ordering& ordering);
+
+    /**
+     * Build a dense joint factor from all the factors in a factor graph. If a VariableSlots
+     * structure computed for \c graph is already available, providing it will reduce the amount of
+     * computation performed. */
+    explicit JacobianFactor(
+        const GaussianFactorGraph& graph,
+        const Ordering& ordering,
+        const VariableSlots& p_variableSlots);

    /** Virtual destructor */
    virtual ~JacobianFactor() {}

@@ -356,6 +380,14 @@ namespace gtsam {

  private:

+    /**
+     * Helper function for public constructors:
+     * Build a dense joint factor from all the factors in a factor graph. Takes in
+     * ordered variable slots */
+    void JacobianFactorHelper(
+        const GaussianFactorGraph& graph,
+        const FastVector<VariableSlots::const_iterator>& orderedSlots);
+
    /** Unsafe Constructor that creates an uninitialized Jacobian of right size
     * @param keys in some order
     * @param diemnsions of the variables in same order
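Not part of the diff: a sketch of the four JacobianFactor combine overloads that replace the old boost::optional parameters. 'gfg' is any GaussianFactorGraph and 'ordering' is assumed to contain only keys that appear in it (otherwise the helper above throws).

// Sketch only, assuming the constructors declared in this hunk.
#include <gtsam/inference/Ordering.h>
#include <gtsam/inference/VariableSlots.h>
#include <gtsam/linear/GaussianFactorGraph.h>
#include <gtsam/linear/JacobianFactor.h>

using namespace gtsam;

void combineToJacobian(const GaussianFactorGraph& gfg, const Ordering& ordering) {
  VariableSlots slots(gfg);

  JacobianFactor j1(gfg);                   // keys sorted numerically
  JacobianFactor j2(gfg, slots);            // reuse a precomputed VariableSlots
  JacobianFactor j3(gfg, ordering);         // column layout follows the ordering
  JacobianFactor j4(gfg, ordering, slots);  // ordering plus precomputed slots
}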
@@ -77,11 +77,17 @@ public:

  /// Construct from a GaussianFactorGraph
  RegularHessianFactor(const GaussianFactorGraph& factors,
-      boost::optional<const Scatter&> scatter = boost::none)
+      const Scatter& scatter)
      : HessianFactor(factors, scatter) {
    checkInvariants();
  }

+  /// Construct from a GaussianFactorGraph
+  RegularHessianFactor(const GaussianFactorGraph& factors)
+      : HessianFactor(factors) {
+    checkInvariants();
+  }
+
private:

  /// Check invariants after construction
@@ -33,16 +33,16 @@ string SlotEntry::toString() const {
  return oss.str();
}

+Scatter::Scatter(const GaussianFactorGraph& gfg) : Scatter(gfg, Ordering()) {}
+
/* ************************************************************************* */
Scatter::Scatter(const GaussianFactorGraph& gfg,
-    boost::optional<const Ordering&> ordering) {
+    const Ordering& ordering) {
  gttic(Scatter_Constructor);

  // If we have an ordering, pre-fill the ordered variables first
-  if (ordering) {
-    for (Key key : *ordering) {
-      add(key, 0);
-    }
+  for (Key key : ordering) {
+    add(key, 0);
  }

  // Now, find dimensions of variables and/or extend

@@ -68,7 +68,7 @@ Scatter::Scatter(const GaussianFactorGraph& gfg,

  // To keep the same behavior as before, sort the keys after the ordering
  iterator first = begin();
-  if (ordering) first += ordering->size();
+  first += ordering.size();
  if (first != end()) std::sort(first, end());
}

@@ -23,8 +23,6 @@
#include <gtsam/base/FastMap.h>
#include <gtsam/dllexport.h>

-#include <boost/optional.hpp>
-
namespace gtsam {

class GaussianFactorGraph;

@@ -53,15 +51,16 @@ class Scatter : public FastVector<SlotEntry> {
  /// Default Constructor
  Scatter() {}

-  /// Construct from gaussian factor graph, with optional (partial or complete) ordering
-  Scatter(const GaussianFactorGraph& gfg,
-          boost::optional<const Ordering&> ordering = boost::none);
+  /// Construct from gaussian factor graph, without ordering
+  explicit Scatter(const GaussianFactorGraph& gfg);
+
+  /// Construct from gaussian factor graph, with (partial or complete) ordering
+  explicit Scatter(const GaussianFactorGraph& gfg, const Ordering& ordering);

  /// Add a key/dim pair
  void add(Key key, size_t dim);

 private:

  /// Find the SlotEntry with the right key (linear time worst case)
  iterator find(Key key);
};
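Not part of the diff: a sketch of the two Scatter constructors after the split. 'gfg' is any GaussianFactorGraph and 'ordering' is assumed to hold a (partial or complete) subset of its keys.

// Sketch only, assuming the constructors declared in this hunk.
#include <gtsam/inference/Ordering.h>
#include <gtsam/linear/GaussianFactorGraph.h>
#include <gtsam/linear/Scatter.h>

using namespace gtsam;

void buildScatters(const GaussianFactorGraph& gfg, const Ordering& ordering) {
  Scatter sorted(gfg);              // delegates to Scatter(gfg, Ordering()): keys sorted by Key
  Scatter prefixed(gfg, ordering);  // ordered keys first, remaining keys sorted after them
}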
@@ -26,8 +26,16 @@ using namespace std;
namespace gtsam {

/* ************************************************************************* */
-Marginals::Marginals(const NonlinearFactorGraph& graph, const Values& solution, Factorization factorization,
-                     EliminateableFactorGraph<GaussianFactorGraph>::OptionalOrdering ordering)
+Marginals::Marginals(const NonlinearFactorGraph& graph, const Values& solution, Factorization factorization)
+  : values_(solution), factorization_(factorization) {
+  gttic(MarginalsConstructor);
+  graph_ = *graph.linearize(solution);
+  computeBayesTree();
+}
+
+/* ************************************************************************* */
+Marginals::Marginals(const NonlinearFactorGraph& graph, const Values& solution, const Ordering& ordering,
+                     Factorization factorization)
  : values_(solution), factorization_(factorization) {
  gttic(MarginalsConstructor);
  graph_ = *graph.linearize(solution);

@@ -35,28 +43,52 @@ Marginals::Marginals(const NonlinearFactorGraph& graph, const Values& solution,
}

/* ************************************************************************* */
-Marginals::Marginals(const GaussianFactorGraph& graph, const VectorValues& solution, Factorization factorization,
-                     EliminateableFactorGraph<GaussianFactorGraph>::OptionalOrdering ordering)
-  : graph_(graph), factorization_(factorization) {
-  gttic(MarginalsConstructor);
-  Values vals;
-  for (const auto& keyValue: solution) {
-    vals.insert(keyValue.first, keyValue.second);
-  }
-  values_ = vals;
-  computeBayesTree(ordering);
-}
-
-/* ************************************************************************* */
-Marginals::Marginals(const GaussianFactorGraph& graph, const Values& solution, Factorization factorization,
-                     EliminateableFactorGraph<GaussianFactorGraph>::OptionalOrdering ordering)
+Marginals::Marginals(const GaussianFactorGraph& graph, const Values& solution, Factorization factorization)
+  : graph_(graph), values_(solution), factorization_(factorization) {
+  gttic(MarginalsConstructor);
+  computeBayesTree();
+}
+
+/* ************************************************************************* */
+Marginals::Marginals(const GaussianFactorGraph& graph, const Values& solution, const Ordering& ordering,
+                     Factorization factorization)
  : graph_(graph), values_(solution), factorization_(factorization) {
  gttic(MarginalsConstructor);
  computeBayesTree(ordering);
}

/* ************************************************************************* */
-void Marginals::computeBayesTree(EliminateableFactorGraph<GaussianFactorGraph>::OptionalOrdering ordering) {
+Marginals::Marginals(const GaussianFactorGraph& graph, const VectorValues& solution, Factorization factorization)
+  : graph_(graph), factorization_(factorization) {
+  gttic(MarginalsConstructor);
+  for (const auto& keyValue: solution) {
+    values_.insert(keyValue.first, keyValue.second);
+  }
+  computeBayesTree();
+}
+
+/* ************************************************************************* */
+Marginals::Marginals(const GaussianFactorGraph& graph, const VectorValues& solution, const Ordering& ordering,
+                     Factorization factorization)
+  : graph_(graph), factorization_(factorization) {
+  gttic(MarginalsConstructor);
+  for (const auto& keyValue: solution) {
+    values_.insert(keyValue.first, keyValue.second);
+  }
+  computeBayesTree(ordering);
+}
+
+/* ************************************************************************* */
+void Marginals::computeBayesTree() {
+  // Compute BayesTree
+  if(factorization_ == CHOLESKY)
+    bayesTree_ = *graph_.eliminateMultifrontal(EliminatePreferCholesky);
+  else if(factorization_ == QR)
+    bayesTree_ = *graph_.eliminateMultifrontal(EliminateQR);
+}
+
+/* ************************************************************************* */
+void Marginals::computeBayesTree(const Ordering& ordering) {
  // Compute BayesTree
  if(factorization_ == CHOLESKY)
    bayesTree_ = *graph_.eliminateMultifrontal(ordering, EliminatePreferCholesky);

@@ -128,9 +160,9 @@ JointMarginal Marginals::jointMarginalInformation(const KeyVector& variables) co
    jointFG = *bayesTree_.joint(variables[0], variables[1], EliminateQR);
  } else {
    if(factorization_ == CHOLESKY)
-      jointFG = GaussianFactorGraph(*graph_.marginalMultifrontalBayesTree(variables, boost::none, EliminatePreferCholesky));
+      jointFG = GaussianFactorGraph(*graph_.marginalMultifrontalBayesTree(variables, EliminatePreferCholesky));
    else if(factorization_ == QR)
-      jointFG = GaussianFactorGraph(*graph_.marginalMultifrontalBayesTree(variables, boost::none, EliminateQR));
+      jointFG = GaussianFactorGraph(*graph_.marginalMultifrontalBayesTree(variables, EliminateQR));
  }

  // Get information matrix
@@ -55,10 +55,33 @@ public:
   * @param graph The factor graph defining the full joint density on all variables.
   * @param solution The linearization point about which to compute Gaussian marginals (usually the MLE as obtained from a NonlinearOptimizer).
   * @param factorization The linear decomposition mode - either Marginals::CHOLESKY (faster and suitable for most problems) or Marginals::QR (slower but more numerically stable for poorly-conditioned problems).
-   * @param ordering An optional variable ordering for elimination.
   */
-  Marginals(const NonlinearFactorGraph& graph, const Values& solution, Factorization factorization = CHOLESKY,
-            EliminateableFactorGraph<GaussianFactorGraph>::OptionalOrdering ordering = boost::none);
+  Marginals(const NonlinearFactorGraph& graph, const Values& solution, Factorization factorization = CHOLESKY);
+
+  /** Construct a marginals class from a nonlinear factor graph.
+   * @param graph The factor graph defining the full joint density on all variables.
+   * @param solution The linearization point about which to compute Gaussian marginals (usually the MLE as obtained from a NonlinearOptimizer).
+   * @param factorization The linear decomposition mode - either Marginals::CHOLESKY (faster and suitable for most problems) or Marginals::QR (slower but more numerically stable for poorly-conditioned problems).
+   * @param ordering The ordering for elimination.
+   */
+  Marginals(const NonlinearFactorGraph& graph, const Values& solution, const Ordering& ordering,
+            Factorization factorization = CHOLESKY);
+
+  /** Construct a marginals class from a linear factor graph.
+   * @param graph The factor graph defining the full joint density on all variables.
+   * @param solution The solution point to compute Gaussian marginals.
+   * @param factorization The linear decomposition mode - either Marginals::CHOLESKY (faster and suitable for most problems) or Marginals::QR (slower but more numerically stable for poorly-conditioned problems).
+   */
+  Marginals(const GaussianFactorGraph& graph, const Values& solution, Factorization factorization = CHOLESKY);
+
+  /** Construct a marginals class from a linear factor graph.
+   * @param graph The factor graph defining the full joint density on all variables.
+   * @param solution The solution point to compute Gaussian marginals.
+   * @param factorization The linear decomposition mode - either Marginals::CHOLESKY (faster and suitable for most problems) or Marginals::QR (slower but more numerically stable for poorly-conditioned problems).
+   * @param ordering The ordering for elimination.
+   */
+  Marginals(const GaussianFactorGraph& graph, const Values& solution, const Ordering& ordering,
+            Factorization factorization = CHOLESKY);

  /** Construct a marginals class from a linear factor graph.
   * @param graph The factor graph defining the full joint density on all variables.

@@ -66,8 +89,7 @@ public:
   * @param factorization The linear decomposition mode - either Marginals::CHOLESKY (faster and suitable for most problems) or Marginals::QR (slower but more numerically stable for poorly-conditioned problems).
-   * @param ordering An optional variable ordering for elimination.
   */
-  Marginals(const GaussianFactorGraph& graph, const Values& solution, Factorization factorization = CHOLESKY,
-            EliminateableFactorGraph<GaussianFactorGraph>::OptionalOrdering ordering = boost::none);
+  Marginals(const GaussianFactorGraph& graph, const VectorValues& solution, Factorization factorization = CHOLESKY);

  /** Construct a marginals class from a linear factor graph.
   * @param graph The factor graph defining the full joint density on all variables.

@@ -75,8 +97,8 @@ public:
   * @param factorization The linear decomposition mode - either Marginals::CHOLESKY (faster and suitable for most problems) or Marginals::QR (slower but more numerically stable for poorly-conditioned problems).
   * @param ordering An optional variable ordering for elimination.
   */
-  Marginals(const GaussianFactorGraph& graph, const VectorValues& solution, Factorization factorization = CHOLESKY,
-            EliminateableFactorGraph<GaussianFactorGraph>::OptionalOrdering ordering = boost::none);
+  Marginals(const GaussianFactorGraph& graph, const VectorValues& solution, const Ordering& ordering,
+            Factorization factorization = CHOLESKY);

  /** print */
  void print(const std::string& str = "Marginals: ", const KeyFormatter& keyFormatter = DefaultKeyFormatter) const;

@@ -99,11 +121,27 @@ public:

  /** Optimize the bayes tree */
  VectorValues optimize() const;

protected:

  /** Compute the Bayes Tree as a helper function to the constructor */
-  void computeBayesTree(EliminateableFactorGraph<GaussianFactorGraph>::OptionalOrdering ordering);
+  void computeBayesTree();
+
+  /** Compute the Bayes Tree as a helper function to the constructor */
+  void computeBayesTree(const Ordering& ordering);
+
+public:
+
+  /** \deprecated argument order changed due to removing boost::optional<Ordering> */
+  Marginals(const NonlinearFactorGraph& graph, const Values& solution, Factorization factorization,
+            const Ordering& ordering) : Marginals(graph, solution, ordering, factorization) {}
+
+  /** \deprecated argument order changed due to removing boost::optional<Ordering> */
+  Marginals(const GaussianFactorGraph& graph, const Values& solution, Factorization factorization,
+            const Ordering& ordering) : Marginals(graph, solution, ordering, factorization) {}
+
+  /** \deprecated argument order changed due to removing boost::optional<Ordering> */
+  Marginals(const GaussianFactorGraph& graph, const VectorValues& solution, Factorization factorization,
+            const Ordering& ordering) : Marginals(graph, solution, ordering, factorization) {}

};
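Not part of the diff: a minimal sketch of the new Marginals constructors. The tiny 1-D linear graph and keys 0/1 are made up; the point is that the ordering now comes before the factorization, so it can no longer be confused with a defaulted optional.

// Sketch only, assuming the constructors declared in this hunk.
#include <gtsam/inference/Ordering.h>
#include <gtsam/linear/GaussianFactorGraph.h>
#include <gtsam/linear/NoiseModel.h>
#include <gtsam/nonlinear/Marginals.h>

using namespace gtsam;

int main() {
  GaussianFactorGraph gfg;
  const auto unit = noiseModel::Unit::Create(1);
  const Matrix I1 = Matrix::Identity(1, 1);
  gfg.add(0, I1, Vector::Zero(1), unit);                  // prior: x0 = 0
  gfg.add(0, -I1, 1, I1, Vector::Constant(1, 1.0), unit); // odometry: x1 - x0 = 1

  VectorValues solution = gfg.optimize();

  Ordering ordering;
  ordering.push_back(1);
  ordering.push_back(0);

  Marginals m1(gfg, solution);                           // COLAMD ordering, Cholesky
  Marginals m2(gfg, solution, ordering);                 // explicit ordering, Cholesky
  Marginals m3(gfg, solution, ordering, Marginals::QR);  // explicit ordering, QR

  Matrix cov0 = m1.marginalCovariance(0);
  return 0;
}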
@@ -344,23 +344,31 @@ GaussianFactorGraph::shared_ptr NonlinearFactorGraph::linearize(const Values& li
}

/* ************************************************************************* */
-static Scatter scatterFromValues(const Values& values, boost::optional<Ordering&> ordering) {
+static Scatter scatterFromValues(const Values& values) {
  gttic(scatterFromValues);

  Scatter scatter;
  scatter.reserve(values.size());

-  if (!ordering) {
-    // use "natural" ordering with keys taken from the initial values
-    for (const auto& key_value : values) {
-      scatter.add(key_value.key, key_value.value.dim());
-    }
-  } else {
-    // copy ordering into keys and lookup dimension in values, is O(n*log n)
-    for (Key key : *ordering) {
-      const Value& value = values.at(key);
-      scatter.add(key, value.dim());
-    }
+  // use "natural" ordering with keys taken from the initial values
+  for (const auto& key_value : values) {
+    scatter.add(key_value.key, key_value.value.dim());
+  }
+
+  return scatter;
+}
+
+/* ************************************************************************* */
+static Scatter scatterFromValues(const Values& values, const Ordering& ordering) {
+  gttic(scatterFromValues);
+
+  Scatter scatter;
+  scatter.reserve(values.size());
+
+  // copy ordering into keys and lookup dimension in values, is O(n*log n)
+  for (Key key : ordering) {
+    const Value& value = values.at(key);
+    scatter.add(key, value.dim());
  }

  return scatter;

@@ -368,11 +376,7 @@ static Scatter scatterFromValues(const Values& values, boost::optional<Ordering&

/* ************************************************************************* */
HessianFactor::shared_ptr NonlinearFactorGraph::linearizeToHessianFactor(
-    const Values& values, boost::optional<Ordering&> ordering, const Dampen& dampen) const {
-  gttic(NonlinearFactorGraph_linearizeToHessianFactor);
-
-  Scatter scatter = scatterFromValues(values, ordering);
-
+    const Values& values, const Scatter& scatter, const Dampen& dampen) const {
  // NOTE(frank): we are heavily leaning on friendship below
  HessianFactor::shared_ptr hessianFactor(new HessianFactor(scatter));

@@ -393,9 +397,36 @@ HessianFactor::shared_ptr NonlinearFactorGraph::linearizeToHessianFactor(
  return hessianFactor;
}

+/* ************************************************************************* */
+HessianFactor::shared_ptr NonlinearFactorGraph::linearizeToHessianFactor(
+    const Values& values, const Ordering& order, const Dampen& dampen) const {
+  gttic(NonlinearFactorGraph_linearizeToHessianFactor);
+
+  Scatter scatter = scatterFromValues(values, order);
+  return linearizeToHessianFactor(values, scatter, dampen);
+}
+
+/* ************************************************************************* */
+HessianFactor::shared_ptr NonlinearFactorGraph::linearizeToHessianFactor(
+    const Values& values, const Dampen& dampen) const {
+  gttic(NonlinearFactorGraph_linearizeToHessianFactor);
+
+  Scatter scatter = scatterFromValues(values);
+  return linearizeToHessianFactor(values, scatter, dampen);
+}
+
/* ************************************************************************* */
Values NonlinearFactorGraph::updateCholesky(const Values& values,
-                                            boost::optional<Ordering&> ordering,
+                                            const Dampen& dampen) const {
+  gttic(NonlinearFactorGraph_updateCholesky);
+  auto hessianFactor = linearizeToHessianFactor(values, dampen);
+  VectorValues delta = hessianFactor->solve();
+  return values.retract(delta);
+}
+
+/* ************************************************************************* */
+Values NonlinearFactorGraph::updateCholesky(const Values& values,
+                                            const Ordering& ordering,
                                            const Dampen& dampen) const {
  gttic(NonlinearFactorGraph_updateCholesky);
  auto hessianFactor = linearizeToHessianFactor(values, ordering, dampen);
@@ -149,17 +149,31 @@ namespace gtsam {
     * Instead of producing a GaussianFactorGraph, pre-allocate and linearize directly
     * into a HessianFactor. Avoids the many mallocs and pointer indirection in constructing
     * a new graph, and hence useful in case a dense solve is appropriate for your problem.
-     * An optional ordering can be given that still decides how the Hessian is laid out.
     * An optional lambda function can be used to apply damping on the filled Hessian.
     * No parallelism is exploited, because all the factors write in the same memory.
     */
    boost::shared_ptr<HessianFactor> linearizeToHessianFactor(
-        const Values& values, boost::optional<Ordering&> ordering = boost::none,
-        const Dampen& dampen = nullptr) const;
+        const Values& values, const Dampen& dampen = nullptr) const;
+
+    /**
+     * Instead of producing a GaussianFactorGraph, pre-allocate and linearize directly
+     * into a HessianFactor. Avoids the many mallocs and pointer indirection in constructing
+     * a new graph, and hence useful in case a dense solve is appropriate for your problem.
+     * An ordering is given that still decides how the Hessian is laid out.
+     * An optional lambda function can be used to apply damping on the filled Hessian.
+     * No parallelism is exploited, because all the factors write in the same memory.
+     */
+    boost::shared_ptr<HessianFactor> linearizeToHessianFactor(
+        const Values& values, const Ordering& ordering, const Dampen& dampen = nullptr) const;

    /// Linearize and solve in one pass.
    /// Calls linearizeToHessianFactor, densely solves the normal equations, and updates the values.
-    Values updateCholesky(const Values& values, boost::optional<Ordering&> ordering = boost::none,
+    Values updateCholesky(const Values& values,
+                          const Dampen& dampen = nullptr) const;
+
+    /// Linearize and solve in one pass.
+    /// Calls linearizeToHessianFactor, densely solves the normal equations, and updates the values.
+    Values updateCholesky(const Values& values, const Ordering& ordering,
                          const Dampen& dampen = nullptr) const;

    /// Clone() performs a deep-copy of the graph, including all of the factors

@@ -190,6 +204,13 @@ namespace gtsam {

  private:

+    /**
+     * Linearize from Scatter rather than from Ordering. Made private because
+     * it doesn't include gttic.
+     */
+    boost::shared_ptr<HessianFactor> linearizeToHessianFactor(
+        const Values& values, const Scatter& scatter, const Dampen& dampen = nullptr) const;
+
    /** Serialization function */
    friend class boost::serialization::access;
    template<class ARCHIVE>

@@ -197,6 +218,19 @@ namespace gtsam {
      ar & boost::serialization::make_nvp("NonlinearFactorGraph",
          boost::serialization::base_object<Base>(*this));
    }

+  public:
+
+    /** \deprecated */
+    boost::shared_ptr<HessianFactor> linearizeToHessianFactor(
+        const Values& values, boost::none_t, const Dampen& dampen = nullptr) const
+      {return linearizeToHessianFactor(values, dampen);}
+
+    /** \deprecated */
+    Values updateCholesky(const Values& values, boost::none_t,
+                          const Dampen& dampen = nullptr) const
+      {return updateCholesky(values, dampen);}
+
  };

  /// traits
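Not part of the diff: a sketch of the new linearizeToHessianFactor / updateCholesky overloads. The one-factor Point2 problem and key 0 are made up, and PriorFactor is assumed to be available from gtsam/slam/PriorFactor.h at this point in the codebase.

// Sketch only, assuming the overloads declared in this hunk.
#include <gtsam/geometry/Point2.h>
#include <gtsam/inference/Ordering.h>
#include <gtsam/linear/NoiseModel.h>
#include <gtsam/nonlinear/NonlinearFactorGraph.h>
#include <gtsam/nonlinear/Values.h>
#include <gtsam/slam/PriorFactor.h>

using namespace gtsam;

int main() {
  NonlinearFactorGraph graph;
  graph.emplace_shared<PriorFactor<Point2> >(0, Point2(1.0, 2.0), noiseModel::Unit::Create(2));

  Values initial;
  initial.insert(0, Point2(0.0, 0.0));

  // Dense linearization: the ordering is a separate overload, no boost::optional.
  auto dense1 = graph.linearizeToHessianFactor(initial);
  Ordering ordering;
  ordering.push_back(0);
  auto dense2 = graph.linearizeToHessianFactor(initial, ordering);

  // One-shot dense solve and retract:
  Values updated = graph.updateCholesky(initial);
  return 0;
}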
@@ -128,18 +128,22 @@ VectorValues NonlinearOptimizer::solve(const GaussianFactorGraph& gfg,
    const NonlinearOptimizerParams& params) const {
  // solution of linear solver is an update to the linearization point
  VectorValues delta;
-  boost::optional<const Ordering&> optionalOrdering;
-  if (params.ordering)
-    optionalOrdering.reset(*params.ordering);

  // Check which solver we are using
  if (params.isMultifrontal()) {
    // Multifrontal QR or Cholesky (decided by params.getEliminationFunction())
-    delta = gfg.optimize(optionalOrdering, params.getEliminationFunction());
+    if (params.ordering)
+      delta = gfg.optimize(*params.ordering, params.getEliminationFunction());
+    else
+      delta = gfg.optimize(params.getEliminationFunction());
  } else if (params.isSequential()) {
    // Sequential QR or Cholesky (decided by params.getEliminationFunction())
-    delta = gfg.eliminateSequential(optionalOrdering, params.getEliminationFunction(), boost::none,
-                                    params.orderingType)->optimize();
+    if (params.ordering)
+      delta = gfg.eliminateSequential(*params.ordering, params.getEliminationFunction(),
+                                      boost::none, params.orderingType)->optimize();
+    else
+      delta = gfg.eliminateSequential(params.getEliminationFunction(), boost::none,
+                                      params.orderingType)->optimize();
  } else if (params.isIterative()) {
    // Conjugate Gradient -> needs params.iterativeParams
    if (!params.iterativeParams)

@@ -82,7 +82,7 @@ public:
  };

  LinearSolverType linearSolverType; ///< The type of linear solver to use in the nonlinear optimizer
-  boost::optional<Ordering> ordering; ///< The variable elimination ordering, or empty to use COLAMD (default: empty)
+  boost::optional<Ordering> ordering; ///< The optional variable elimination ordering, or empty to use COLAMD (default: empty)
  IterativeOptimizationParameters::shared_ptr iterativeParams; ///< The container for iterativeOptimization parameters. used in CG Solvers.

  inline bool isMultifrontal() const {
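Not part of the diff: a sketch of how callers still use the optional params.ordering field, which stays a boost::optional<Ordering>; only NonlinearOptimizer::solve now branches on it instead of forwarding an optional. 'graph' and 'initial' are assumed to come from elsewhere.

// Sketch only, assuming the usual LevenbergMarquardt API.
#include <gtsam/inference/Ordering.h>
#include <gtsam/nonlinear/LevenbergMarquardtOptimizer.h>
#include <gtsam/nonlinear/NonlinearFactorGraph.h>
#include <gtsam/nonlinear/Values.h>

using namespace gtsam;

Values optimizeWithFixedOrdering(const NonlinearFactorGraph& graph, const Values& initial,
                                 const Ordering& ordering) {
  LevenbergMarquardtParams params;
  params.ordering = ordering;  // leave unset to let solve() fall back to COLAMD
  return LevenbergMarquardtOptimizer(graph, initial, params).optimize();
}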
@@ -14,7 +14,14 @@ using namespace std;
namespace gtsam {

/// Find the best total assignment - can be expensive
-CSP::sharedValues CSP::optimalAssignment(OptionalOrdering ordering) const {
+CSP::sharedValues CSP::optimalAssignment() const {
+  DiscreteBayesNet::shared_ptr chordal = this->eliminateSequential();
+  sharedValues mpe = chordal->optimize();
+  return mpe;
+}
+
+/// Find the best total assignment - can be expensive
+CSP::sharedValues CSP::optimalAssignment(const Ordering& ordering) const {
  DiscreteBayesNet::shared_ptr chordal = this->eliminateSequential(ordering);
  sharedValues mpe = chordal->optimize();
  return mpe;

@@ -60,7 +60,10 @@ namespace gtsam {
  // }

  /// Find the best total assignment - can be expensive
-  sharedValues optimalAssignment(OptionalOrdering ordering = boost::none) const;
+  sharedValues optimalAssignment() const;
+
+  /// Find the best total assignment - can be expensive
+  sharedValues optimalAssignment(const Ordering& ordering) const;

  // /*
  //  * Perform loopy belief propagation
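Not part of the diff: call-site sketch for the split CSP::optimalAssignment overloads (gtsam_unstable). 'csp' is assumed to be an already-built constraint problem; the header path is an assumption.

// Sketch only, assuming the overloads declared in this hunk.
#include <gtsam/inference/Ordering.h>
#include <gtsam_unstable/discrete/CSP.h>

using namespace gtsam;

void solveCSP(const CSP& csp, const Ordering& ordering) {
  CSP::sharedValues best = csp.optimalAssignment();                 // COLAMD elimination order
  CSP::sharedValues bestOrdered = csp.optimalAssignment(ordering);  // caller-supplied order
}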
@@ -46,21 +46,26 @@ class NonlinearClusterTree : public ClusterTree<NonlinearFactorGraph> {
  // linearize local custer factors straight into hessianFactor, which is returned
  // If no ordering given, uses colamd
  HessianFactor::shared_ptr linearizeToHessianFactor(
-      const Values& values, boost::optional<Ordering> ordering = boost::none,
+      const Values& values,
      const NonlinearFactorGraph::Dampen& dampen = nullptr) const {
-    if (!ordering)
-      ordering.reset(Ordering::ColamdConstrainedFirst(factors, orderedFrontalKeys, true));
-    return factors.linearizeToHessianFactor(values, *ordering, dampen);
+    Ordering ordering;
+    ordering = Ordering::ColamdConstrainedFirst(factors, orderedFrontalKeys, true);
+    return factors.linearizeToHessianFactor(values, ordering, dampen);
  }

-  // Recursively eliminate subtree rooted at this Cluster into a Bayes net and factor on parent
+  // linearize local custer factors straight into hessianFactor, which is returned
+  // If no ordering given, uses colamd
+  HessianFactor::shared_ptr linearizeToHessianFactor(
+      const Values& values, const Ordering& ordering,
+      const NonlinearFactorGraph::Dampen& dampen = nullptr) const {
+    return factors.linearizeToHessianFactor(values, ordering, dampen);
+  }
+
+  // Helper function: recursively eliminate subtree rooted at this Cluster into a Bayes net and factor on parent
  // TODO(frank): Use TBB to support deep trees and parallelism
  std::pair<GaussianBayesNet, HessianFactor::shared_ptr> linearizeAndEliminate(
-      const Values& values, boost::optional<Ordering> ordering = boost::none,
-      const NonlinearFactorGraph::Dampen& dampen = nullptr) const {
-    // Linearize and create HessianFactor f(front,separator)
-    HessianFactor::shared_ptr localFactor = linearizeToHessianFactor(values, ordering, dampen);
-
+      const Values& values,
+      const HessianFactor::shared_ptr& localFactor) const {
    // Get contributions f(front) from children, as well as p(children|front)
    GaussianBayesNet bayesNet;
    for (const auto& child : children) {

@@ -72,12 +77,45 @@ class NonlinearClusterTree : public ClusterTree<NonlinearFactorGraph> {
    return {bayesNet, localFactor};
  }

+  // Recursively eliminate subtree rooted at this Cluster into a Bayes net and factor on parent
+  // TODO(frank): Use TBB to support deep trees and parallelism
+  std::pair<GaussianBayesNet, HessianFactor::shared_ptr> linearizeAndEliminate(
+      const Values& values,
+      const NonlinearFactorGraph::Dampen& dampen = nullptr) const {
+    // Linearize and create HessianFactor f(front,separator)
+    HessianFactor::shared_ptr localFactor = linearizeToHessianFactor(values, dampen);
+    return linearizeAndEliminate(values, localFactor);
+  }
+
+  // Recursively eliminate subtree rooted at this Cluster into a Bayes net and factor on parent
+  // TODO(frank): Use TBB to support deep trees and parallelism
+  std::pair<GaussianBayesNet, HessianFactor::shared_ptr> linearizeAndEliminate(
+      const Values& values, const Ordering& ordering,
+      const NonlinearFactorGraph::Dampen& dampen = nullptr) const {
+    // Linearize and create HessianFactor f(front,separator)
+    HessianFactor::shared_ptr localFactor = linearizeToHessianFactor(values, ordering, dampen);
+    return linearizeAndEliminate(values, localFactor);
+  }
+
  // Recursively eliminate subtree rooted at this Cluster
  // Version that updates existing Bayes net and returns a new Hessian factor on parent clique
  // It is possible to pass in a nullptr for the bayesNet if only interested in the new factor
  HessianFactor::shared_ptr linearizeAndEliminate(
      const Values& values, GaussianBayesNet* bayesNet,
-      boost::optional<Ordering> ordering = boost::none,
+      const NonlinearFactorGraph::Dampen& dampen = nullptr) const {
+    auto bayesNet_newFactor_pair = linearizeAndEliminate(values, dampen);
+    if (bayesNet) {
+      bayesNet->push_back(bayesNet_newFactor_pair.first);
+    }
+    return bayesNet_newFactor_pair.second;
+  }
+
+  // Recursively eliminate subtree rooted at this Cluster
+  // Version that updates existing Bayes net and returns a new Hessian factor on parent clique
+  // It is possible to pass in a nullptr for the bayesNet if only interested in the new factor
+  HessianFactor::shared_ptr linearizeAndEliminate(
+      const Values& values, GaussianBayesNet* bayesNet,
+      const Ordering& ordering,
      const NonlinearFactorGraph::Dampen& dampen = nullptr) const {
    auto bayesNet_newFactor_pair = linearizeAndEliminate(values, ordering, dampen);
    if (bayesNet) {
@@ -206,7 +206,7 @@ TEST(GaussianFactorGraph, optimize_Cholesky) {
  GaussianFactorGraph fg = createGaussianFactorGraph();

  // optimize the graph
-  VectorValues actual = fg.optimize(boost::none, EliminateCholesky);
+  VectorValues actual = fg.optimize(EliminateCholesky);

  // verify
  VectorValues expected = createCorrectDelta();

@@ -220,7 +220,7 @@ TEST( GaussianFactorGraph, optimize_QR )
  GaussianFactorGraph fg = createGaussianFactorGraph();

  // optimize the graph
-  VectorValues actual = fg.optimize(boost::none, EliminateQR);
+  VectorValues actual = fg.optimize(EliminateQR);

  // verify
  VectorValues expected = createCorrectDelta();

@@ -198,7 +198,7 @@ TEST(NonlinearFactorGraph, UpdateCholesky) {
      }
    }
  };
-  EXPECT(assert_equal(initial, fg.updateCholesky(initial, boost::none, dampen), 1e-6));
+  EXPECT(assert_equal(initial, fg.updateCholesky(initial, dampen), 1e-6));
}

/* ************************************************************************* */