Merge pull request #156 from borglab/fix/boost-optional
boost::optional<const Ordering &> (branch: release/4.3a0)
commit ed7f949385
				|  | @ -46,16 +46,20 @@ DiscreteConditional::DiscreteConditional(const size_t nrFrontals, | |||
| 
 | ||||
| /* ******************************************************************************** */ | ||||
| DiscreteConditional::DiscreteConditional(const DecisionTreeFactor& joint, | ||||
|     const DecisionTreeFactor& marginal, const boost::optional<Ordering>& orderedKeys) : | ||||
|     const DecisionTreeFactor& marginal) : | ||||
|     BaseFactor( | ||||
|         ISDEBUG("DiscreteConditional::COUNT") ? joint : joint / marginal), BaseConditional( | ||||
|             joint.size()-marginal.size()) { | ||||
|   if (ISDEBUG("DiscreteConditional::DiscreteConditional")) | ||||
|     cout << (firstFrontalKey()) << endl; //TODO Print all keys
 | ||||
|   if (orderedKeys) { | ||||
|     keys_.clear(); | ||||
|     keys_.insert(keys_.end(), orderedKeys->begin(), orderedKeys->end()); | ||||
|   } | ||||
| } | ||||
| 
 | ||||
| /* ******************************************************************************** */ | ||||
| DiscreteConditional::DiscreteConditional(const DecisionTreeFactor& joint, | ||||
|     const DecisionTreeFactor& marginal, const Ordering& orderedKeys) : | ||||
|     DiscreteConditional(joint, marginal) { | ||||
|   keys_.clear(); | ||||
|   keys_.insert(keys_.end(), orderedKeys.begin(), orderedKeys.end()); | ||||
| } | ||||
| 
 | ||||
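For orientation, here is a minimal sketch of how the two constructors touched above are called after this change. The keys, cardinalities, and table values are made up for illustration and are not part of the PR:

    #include <gtsam/discrete/DiscreteConditional.h>
    #include <gtsam/inference/Ordering.h>

    using namespace gtsam;

    void discreteConditionalExample() {
      DiscreteKey A(0, 2), B(1, 2);  // two binary variables (illustrative)
      // P(A,B) and P(B); the table entries below are made up
      DecisionTreeFactor joint(A & B, "0.2 0.2 0.2 0.4");
      DecisionTreeFactor marginal(DiscreteKeys(B), "0.4 0.6");

      // Keys taken from the joint, as before
      DiscreteConditional pAgivenB(joint, marginal);

      // New overload: keys rearranged to follow an explicit Ordering
      Ordering ordered;
      ordered.push_back(1);
      ordered.push_back(0);
      DiscreteConditional pAgivenBOrdered(joint, marginal, ordered);
    }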
| /* ******************************************************************************** */ | ||||
|  |  | |||
|  | @ -60,7 +60,11 @@ public: | |||
| 
 | ||||
|   /** construct P(X|Y)=P(X,Y)/P(Y) from P(X,Y) and P(Y) */ | ||||
|   DiscreteConditional(const DecisionTreeFactor& joint, | ||||
|       const DecisionTreeFactor& marginal, const boost::optional<Ordering>& orderedKeys = boost::none); | ||||
|       const DecisionTreeFactor& marginal); | ||||
| 
 | ||||
|   /** construct P(X|Y)=P(X,Y)/P(Y) from P(X,Y) and P(Y) */ | ||||
|   DiscreteConditional(const DecisionTreeFactor& joint, | ||||
|       const DecisionTreeFactor& marginal, const Ordering& orderedKeys); | ||||
| 
 | ||||
|   /**
 | ||||
|    * Combine several conditional into a single one. | ||||
|  |  | |||
|  | @ -262,7 +262,7 @@ namespace gtsam { | |||
| 
 | ||||
|     // Now, marginalize out everything that is not variable j
 | ||||
|     BayesNetType marginalBN = *cliqueMarginal.marginalMultifrontalBayesNet( | ||||
|       Ordering(cref_list_of<1,Key>(j)), boost::none, function); | ||||
|       Ordering(cref_list_of<1,Key>(j)), function); | ||||
| 
 | ||||
|     // The Bayes net should contain only one conditional for variable j, so return it
 | ||||
|     return marginalBN.front(); | ||||
|  | @ -383,7 +383,7 @@ namespace gtsam { | |||
|     } | ||||
| 
 | ||||
|     // now, marginalize out everything that is not variable j1 or j2
 | ||||
|     return p_BC1C2.marginalMultifrontalBayesNet(Ordering(cref_list_of<2,Key>(j1)(j2)), boost::none, function); | ||||
|     return p_BC1C2.marginalMultifrontalBayesNet(Ordering(cref_list_of<2,Key>(j1)(j2)), function); | ||||
|   } | ||||
| 
 | ||||
|   /* ************************************************************************* */ | ||||
|  |  | |||
|  | @ -171,7 +171,7 @@ namespace gtsam { | |||
| 
 | ||||
|         // The variables we want to keep are exactly the ones in S
|         KeyVector indicesS(this->conditional()->beginParents(), this->conditional()->endParents()); | ||||
|         cachedSeparatorMarginal_ = *p_Cp.marginalMultifrontalBayesNet(Ordering(indicesS), boost::none, function); | ||||
|         cachedSeparatorMarginal_ = *p_Cp.marginalMultifrontalBayesNet(Ordering(indicesS), function); | ||||
|       } | ||||
|     } | ||||
| 
 | ||||
|  |  | |||
|  | @ -28,33 +28,19 @@ namespace gtsam { | |||
|   template<class FACTORGRAPH> | ||||
|   boost::shared_ptr<typename EliminateableFactorGraph<FACTORGRAPH>::BayesNetType> | ||||
|     EliminateableFactorGraph<FACTORGRAPH>::eliminateSequential( | ||||
|     OptionalOrdering ordering, const Eliminate& function, | ||||
|     OptionalVariableIndex variableIndex, OptionalOrderingType orderingType) const | ||||
|   { | ||||
|     if(ordering && variableIndex) { | ||||
|       gttic(eliminateSequential); | ||||
|       // Do elimination
 | ||||
|       EliminationTreeType etree(asDerived(), *variableIndex, *ordering); | ||||
|       boost::shared_ptr<BayesNetType> bayesNet; | ||||
|       boost::shared_ptr<FactorGraphType> factorGraph; | ||||
|       boost::tie(bayesNet,factorGraph) = etree.eliminate(function); | ||||
|       // If any factors are remaining, the ordering was incomplete
 | ||||
|       if(!factorGraph->empty()) | ||||
|         throw InconsistentEliminationRequested(); | ||||
|       // Return the Bayes net
 | ||||
|       return bayesNet; | ||||
|     } | ||||
|     else if(!variableIndex) { | ||||
|     OptionalOrderingType orderingType, const Eliminate& function, | ||||
|     OptionalVariableIndex variableIndex) const { | ||||
|     if(!variableIndex) { | ||||
|       // If no VariableIndex provided, compute one and call this function again IMPORTANT: we check
 | ||||
|       // for no variable index first so that it's always computed if we need to call COLAMD because
 | ||||
|       // no Ordering is provided.
 | ||||
|       // no Ordering is provided.  When removing optional from VariableIndex, create VariableIndex
 | ||||
|       // before creating ordering.
 | ||||
|       VariableIndex computedVariableIndex(asDerived()); | ||||
|       return eliminateSequential(ordering, function, computedVariableIndex, orderingType); | ||||
|       return eliminateSequential(function, computedVariableIndex, orderingType); | ||||
|     } | ||||
|     else /*if(!ordering)*/ { | ||||
|       // If no Ordering provided, compute one and call this function again.  We are guaranteed to
 | ||||
|       // have a VariableIndex already here because we computed one if needed in the previous 'else'
 | ||||
|       // block.
 | ||||
|     else { | ||||
|       // Compute an ordering and call this function again.  We are guaranteed to have a 
 | ||||
|       // VariableIndex already here because we computed one if needed in the previous 'if' block.
 | ||||
|       if (orderingType == Ordering::METIS) { | ||||
|         Ordering computedOrdering = Ordering::Metis(asDerived()); | ||||
|         return eliminateSequential(computedOrdering, function, variableIndex, orderingType); | ||||
|  | @ -65,17 +51,75 @@ namespace gtsam { | |||
|     } | ||||
|   } | ||||
| 
 | ||||
|   /* ************************************************************************* */ | ||||
|   template<class FACTORGRAPH> | ||||
|   boost::shared_ptr<typename EliminateableFactorGraph<FACTORGRAPH>::BayesNetType> | ||||
|     EliminateableFactorGraph<FACTORGRAPH>::eliminateSequential( | ||||
|     const Ordering& ordering, const Eliminate& function, | ||||
|     OptionalVariableIndex variableIndex) const | ||||
|   { | ||||
|     if(!variableIndex) { | ||||
|       // If no VariableIndex provided, compute one and call this function again
 | ||||
|       VariableIndex computedVariableIndex(asDerived()); | ||||
|       return eliminateSequential(ordering, function, computedVariableIndex); | ||||
|     } else { | ||||
|       gttic(eliminateSequential); | ||||
|       // Do elimination
 | ||||
|       EliminationTreeType etree(asDerived(), *variableIndex, ordering); | ||||
|       boost::shared_ptr<BayesNetType> bayesNet; | ||||
|       boost::shared_ptr<FactorGraphType> factorGraph; | ||||
|       boost::tie(bayesNet,factorGraph) = etree.eliminate(function); | ||||
|       // If any factors are remaining, the ordering was incomplete
 | ||||
|       if(!factorGraph->empty()) | ||||
|         throw InconsistentEliminationRequested(); | ||||
|       // Return the Bayes net
 | ||||
|       return bayesNet; | ||||
|     } | ||||
|   } | ||||
| 
 | ||||
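A small usage sketch of the reworked eliminateSequential overloads; the toy two-factor graph and key numbers are illustrative, not from the PR:

    #include <gtsam/linear/GaussianFactorGraph.h>
    #include <gtsam/linear/GaussianBayesNet.h>

    using namespace gtsam;

    void eliminateSequentialExample() {
      // Toy graph with two unary factors on keys 0 and 1
      GaussianFactorGraph graph;
      auto unit = noiseModel::Unit::Create(1);
      graph.add(0, Matrix::Identity(1, 1), Vector::Zero(1), unit);
      graph.add(1, Matrix::Identity(1, 1), Vector::Ones(1), unit);

      // No ordering given: a COLAMD ordering (the default orderingType) is computed internally
      boost::shared_ptr<GaussianBayesNet> bn1 = graph.eliminateSequential();

      // Explicit ordering, passed by const reference to the new overload
      Ordering ordering;
      ordering.push_back(1);
      ordering.push_back(0);
      boost::shared_ptr<GaussianBayesNet> bn2 = graph.eliminateSequential(ordering);
    }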
|   /* ************************************************************************* */ | ||||
|   template<class FACTORGRAPH> | ||||
|   boost::shared_ptr<typename EliminateableFactorGraph<FACTORGRAPH>::BayesTreeType> | ||||
|     EliminateableFactorGraph<FACTORGRAPH>::eliminateMultifrontal( | ||||
|     OptionalOrdering ordering, const Eliminate& function, | ||||
|     OptionalVariableIndex variableIndex, OptionalOrderingType orderingType) const | ||||
|     OptionalOrderingType orderingType, const Eliminate& function, | ||||
|     OptionalVariableIndex variableIndex) const | ||||
|   { | ||||
|     if(ordering && variableIndex) { | ||||
|     if(!variableIndex) { | ||||
|       // If no VariableIndex provided, compute one and call this function again IMPORTANT: we check
 | ||||
|       // for no variable index first so that it's always computed if we need to call COLAMD because
 | ||||
|       // no Ordering is provided.  When removing optional from VariableIndex, create VariableIndex
 | ||||
|       // before creating ordering.
 | ||||
|       VariableIndex computedVariableIndex(asDerived()); | ||||
|       return eliminateMultifrontal(function, computedVariableIndex, orderingType); | ||||
|     } | ||||
|     else { | ||||
|       // Compute an ordering and call this function again.  We are guaranteed to have a
 | ||||
|       // VariableIndex already here because we computed one if needed in the previous 'if' block.
 | ||||
|       if (orderingType == Ordering::METIS) { | ||||
|         Ordering computedOrdering = Ordering::Metis(asDerived()); | ||||
|         return eliminateMultifrontal(computedOrdering, function, variableIndex, orderingType); | ||||
|       } else { | ||||
|         Ordering computedOrdering = Ordering::Colamd(*variableIndex); | ||||
|         return eliminateMultifrontal(computedOrdering, function, variableIndex, orderingType); | ||||
|       } | ||||
|     } | ||||
|   } | ||||
| 
 | ||||
|   /* ************************************************************************* */ | ||||
|   template<class FACTORGRAPH> | ||||
|   boost::shared_ptr<typename EliminateableFactorGraph<FACTORGRAPH>::BayesTreeType> | ||||
|     EliminateableFactorGraph<FACTORGRAPH>::eliminateMultifrontal( | ||||
|     const Ordering& ordering, const Eliminate& function, | ||||
|     OptionalVariableIndex variableIndex) const | ||||
|   { | ||||
|     if(!variableIndex) { | ||||
|       // If no VariableIndex provided, compute one and call this function again
 | ||||
|       VariableIndex computedVariableIndex(asDerived()); | ||||
|       return eliminateMultifrontal(ordering, function, computedVariableIndex); | ||||
|     } else { | ||||
|       gttic(eliminateMultifrontal); | ||||
|       // Do elimination with given ordering
 | ||||
|       EliminationTreeType etree(asDerived(), *variableIndex, *ordering); | ||||
|       EliminationTreeType etree(asDerived(), *variableIndex, ordering); | ||||
|       JunctionTreeType junctionTree(etree); | ||||
|       boost::shared_ptr<BayesTreeType> bayesTree; | ||||
|       boost::shared_ptr<FactorGraphType> factorGraph; | ||||
|  | @ -86,25 +130,6 @@ namespace gtsam { | |||
|       // Return the Bayes tree
 | ||||
|       return bayesTree; | ||||
|     } | ||||
|     else if(!variableIndex) { | ||||
|       // If no VariableIndex provided, compute one and call this function again IMPORTANT: we check
 | ||||
|       // for no variable index first so that it's always computed if we need to call COLAMD because
 | ||||
|       // no Ordering is provided.
 | ||||
|       VariableIndex computedVariableIndex(asDerived()); | ||||
|       return eliminateMultifrontal(ordering, function, computedVariableIndex, orderingType); | ||||
|     } | ||||
|     else /*if(!ordering)*/ { | ||||
|       // If no Ordering provided, compute one and call this function again.  We are guaranteed to
 | ||||
|       // have a VariableIndex already here because we computed one if needed in the previous 'else'
 | ||||
|       // block.
 | ||||
|       if (orderingType == Ordering::METIS) { | ||||
|         Ordering computedOrdering = Ordering::Metis(asDerived()); | ||||
|         return eliminateMultifrontal(computedOrdering, function, variableIndex, orderingType); | ||||
|       } else { | ||||
|         Ordering computedOrdering = Ordering::Colamd(*variableIndex); | ||||
|         return eliminateMultifrontal(computedOrdering, function, variableIndex, orderingType); | ||||
|       } | ||||
|     } | ||||
|   } | ||||
| 
 | ||||
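The multifrontal path mirrors the sequential one; a brief sketch, with the graph and ordering assumed to come from elsewhere:

    #include <gtsam/linear/GaussianFactorGraph.h>
    #include <gtsam/linear/GaussianBayesTree.h>

    using namespace gtsam;

    void eliminateMultifrontalExample(const GaussianFactorGraph& graph, const Ordering& ordering) {
      // Ordering computed internally (COLAMD by default)
      boost::shared_ptr<GaussianBayesTree> bt1 = graph.eliminateMultifrontal();

      // Caller-supplied ordering, via the new const-reference overload
      boost::shared_ptr<GaussianBayesTree> bt2 = graph.eliminateMultifrontal(ordering);

      // Back-substitution on the resulting Bayes tree
      VectorValues solution = bt2->optimize();
    }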
|   /* ************************************************************************* */ | ||||
|  | @ -191,57 +216,65 @@ namespace gtsam { | |||
|   boost::shared_ptr<typename EliminateableFactorGraph<FACTORGRAPH>::BayesNetType> | ||||
|     EliminateableFactorGraph<FACTORGRAPH>::marginalMultifrontalBayesNet( | ||||
|     boost::variant<const Ordering&, const KeyVector&> variables, | ||||
|     OptionalOrdering marginalizedVariableOrdering, | ||||
|     const Eliminate& function, OptionalVariableIndex variableIndex) const | ||||
|   { | ||||
|     if(variableIndex) | ||||
|     { | ||||
|       if(marginalizedVariableOrdering) | ||||
|       { | ||||
|         gttic(marginalMultifrontalBayesNet); | ||||
|         // An ordering was provided for the marginalized variables, so we can first eliminate them
 | ||||
|         // in the order requested.
 | ||||
|         boost::shared_ptr<BayesTreeType> bayesTree; | ||||
|         boost::shared_ptr<FactorGraphType> factorGraph; | ||||
|         boost::tie(bayesTree,factorGraph) = | ||||
|           eliminatePartialMultifrontal(*marginalizedVariableOrdering, function, *variableIndex); | ||||
| 
 | ||||
|         if(const Ordering* varsAsOrdering = boost::get<const Ordering&>(&variables)) | ||||
|         { | ||||
|           // An ordering was also provided for the unmarginalized variables, so we can also
 | ||||
|           // eliminate them in the order requested.
 | ||||
|           return factorGraph->eliminateSequential(*varsAsOrdering, function); | ||||
|         } | ||||
|         else | ||||
|         { | ||||
|           // No ordering was provided for the unmarginalized variables, so order them with COLAMD.
 | ||||
|           return factorGraph->eliminateSequential(boost::none, function); | ||||
|         } | ||||
|       } | ||||
|       else | ||||
|       { | ||||
|         // No ordering was provided for the marginalized variables, so order them using constrained
 | ||||
|         // COLAMD.
 | ||||
|         bool unmarginalizedAreOrdered = (boost::get<const Ordering&>(&variables) != 0); | ||||
|         const KeyVector* variablesOrOrdering = | ||||
|           unmarginalizedAreOrdered ? | ||||
|           boost::get<const Ordering&>(&variables) : boost::get<const KeyVector&>(&variables); | ||||
| 
 | ||||
|         Ordering totalOrdering = | ||||
|           Ordering::ColamdConstrainedLast(*variableIndex, *variablesOrOrdering, unmarginalizedAreOrdered); | ||||
| 
 | ||||
|         // Split up ordering
 | ||||
|         const size_t nVars = variablesOrOrdering->size(); | ||||
|         Ordering marginalizationOrdering(totalOrdering.begin(), totalOrdering.end() - nVars); | ||||
|         Ordering marginalVarsOrdering(totalOrdering.end() - nVars, totalOrdering.end()); | ||||
| 
 | ||||
|         // Call this function again with the computed orderings
 | ||||
|         return marginalMultifrontalBayesNet(marginalVarsOrdering, marginalizationOrdering, function, *variableIndex); | ||||
|       } | ||||
|     if(!variableIndex) { | ||||
|       // If no variable index is provided, compute one and call this function again
 | ||||
|       VariableIndex index(asDerived()); | ||||
|       return marginalMultifrontalBayesNet(variables, function, index); | ||||
|     } else { | ||||
|       // No ordering was provided for the marginalized variables, so order them using constrained
 | ||||
|       // COLAMD.
 | ||||
|       bool unmarginalizedAreOrdered = (boost::get<const Ordering&>(&variables) != 0); | ||||
|       const KeyVector* variablesOrOrdering = | ||||
|         unmarginalizedAreOrdered ? | ||||
|         boost::get<const Ordering&>(&variables) : boost::get<const KeyVector&>(&variables); | ||||
| 
 | ||||
|       Ordering totalOrdering = | ||||
|         Ordering::ColamdConstrainedLast(*variableIndex, *variablesOrOrdering, unmarginalizedAreOrdered); | ||||
| 
 | ||||
|       // Split up ordering
 | ||||
|       const size_t nVars = variablesOrOrdering->size(); | ||||
|       Ordering marginalizationOrdering(totalOrdering.begin(), totalOrdering.end() - nVars); | ||||
|       Ordering marginalVarsOrdering(totalOrdering.end() - nVars, totalOrdering.end()); | ||||
| 
 | ||||
|       // Call this function again with the computed orderings
 | ||||
|       return marginalMultifrontalBayesNet(marginalVarsOrdering, marginalizationOrdering, function, *variableIndex); | ||||
|     } | ||||
|   } | ||||
| 
 | ||||
|   /* ************************************************************************* */ | ||||
|   template<class FACTORGRAPH> | ||||
|   boost::shared_ptr<typename EliminateableFactorGraph<FACTORGRAPH>::BayesNetType> | ||||
|     EliminateableFactorGraph<FACTORGRAPH>::marginalMultifrontalBayesNet( | ||||
|     boost::variant<const Ordering&, const KeyVector&> variables, | ||||
|     const Ordering& marginalizedVariableOrdering, | ||||
|     const Eliminate& function, OptionalVariableIndex variableIndex) const | ||||
|   { | ||||
|     if(!variableIndex) { | ||||
|       // If no variable index is provided, compute one and call this function again
 | ||||
|       VariableIndex index(asDerived()); | ||||
|       return marginalMultifrontalBayesNet(variables, marginalizedVariableOrdering, function, index); | ||||
|     } else { | ||||
|       gttic(marginalMultifrontalBayesNet); | ||||
|       // An ordering was provided for the marginalized variables, so we can first eliminate them
 | ||||
|       // in the order requested.
 | ||||
|       boost::shared_ptr<BayesTreeType> bayesTree; | ||||
|       boost::shared_ptr<FactorGraphType> factorGraph; | ||||
|       boost::tie(bayesTree,factorGraph) = | ||||
|         eliminatePartialMultifrontal(marginalizedVariableOrdering, function, *variableIndex); | ||||
| 
 | ||||
|       if(const Ordering* varsAsOrdering = boost::get<const Ordering&>(&variables)) | ||||
|       { | ||||
|         // An ordering was also provided for the unmarginalized variables, so we can also
 | ||||
|         // eliminate them in the order requested.
 | ||||
|         return factorGraph->eliminateSequential(*varsAsOrdering, function); | ||||
|       } | ||||
|       else | ||||
|       { | ||||
|         // No ordering was provided for the unmarginalized variables, so order them with COLAMD.
 | ||||
|         return factorGraph->eliminateSequential(function); | ||||
|       } | ||||
|     } | ||||
|   } | ||||
| 
 | ||||
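A sketch of the split marginalMultifrontalBayesNet API: the one-argument form orders the marginalized-out variables with constrained COLAMD, while the new two-argument form eliminates them in the order the caller supplies. The key numbers are illustrative and assume the graph actually contains them:

    #include <gtsam/linear/GaussianFactorGraph.h>
    #include <gtsam/linear/GaussianBayesNet.h>

    using namespace gtsam;

    void marginalExample(const GaussianFactorGraph& graph) {
      // Marginal over keys 0 and 1; everything else gets a constrained-COLAMD ordering
      KeyVector frontals{0, 1};
      boost::shared_ptr<GaussianBayesNet> m1 = graph.marginalMultifrontalBayesNet(frontals);

      // Same marginal, but marginalize out keys 2 and 3 in a caller-chosen order (second overload)
      Ordering marginalized;
      marginalized.push_back(3);
      marginalized.push_back(2);
      boost::shared_ptr<GaussianBayesNet> m2 = graph.marginalMultifrontalBayesNet(frontals, marginalized);
    }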
|  | @ -250,57 +283,65 @@ namespace gtsam { | |||
|   boost::shared_ptr<typename EliminateableFactorGraph<FACTORGRAPH>::BayesTreeType> | ||||
|     EliminateableFactorGraph<FACTORGRAPH>::marginalMultifrontalBayesTree( | ||||
|     boost::variant<const Ordering&, const KeyVector&> variables, | ||||
|     OptionalOrdering marginalizedVariableOrdering, | ||||
|     const Eliminate& function, OptionalVariableIndex variableIndex) const | ||||
|   { | ||||
|     if(variableIndex) | ||||
|     { | ||||
|       if(marginalizedVariableOrdering) | ||||
|       { | ||||
|         gttic(marginalMultifrontalBayesTree); | ||||
|         // An ordering was provided for the marginalized variables, so we can first eliminate them
 | ||||
|         // in the order requested.
 | ||||
|         boost::shared_ptr<BayesTreeType> bayesTree; | ||||
|         boost::shared_ptr<FactorGraphType> factorGraph; | ||||
|         boost::tie(bayesTree,factorGraph) = | ||||
|           eliminatePartialMultifrontal(*marginalizedVariableOrdering, function, *variableIndex); | ||||
| 
 | ||||
|         if(const Ordering* varsAsOrdering = boost::get<const Ordering&>(&variables)) | ||||
|         { | ||||
|           // An ordering was also provided for the unmarginalized variables, so we can also
 | ||||
|           // eliminate them in the order requested.
 | ||||
|           return factorGraph->eliminateMultifrontal(*varsAsOrdering, function); | ||||
|         } | ||||
|         else | ||||
|         { | ||||
|           // No ordering was provided for the unmarginalized variables, so order them with COLAMD.
 | ||||
|           return factorGraph->eliminateMultifrontal(boost::none, function); | ||||
|         } | ||||
|       } | ||||
|       else | ||||
|       { | ||||
|         // No ordering was provided for the marginalized variables, so order them using constrained
 | ||||
|         // COLAMD.
 | ||||
|         bool unmarginalizedAreOrdered = (boost::get<const Ordering&>(&variables) != 0); | ||||
|         const KeyVector* variablesOrOrdering = | ||||
|           unmarginalizedAreOrdered ? | ||||
|           boost::get<const Ordering&>(&variables) : boost::get<const KeyVector&>(&variables); | ||||
| 
 | ||||
|         Ordering totalOrdering = | ||||
|           Ordering::ColamdConstrainedLast(*variableIndex, *variablesOrOrdering, unmarginalizedAreOrdered); | ||||
| 
 | ||||
|         // Split up ordering
 | ||||
|         const size_t nVars = variablesOrOrdering->size(); | ||||
|         Ordering marginalizationOrdering(totalOrdering.begin(), totalOrdering.end() - nVars); | ||||
|         Ordering marginalVarsOrdering(totalOrdering.end() - nVars, totalOrdering.end()); | ||||
| 
 | ||||
|         // Call this function again with the computed orderings
 | ||||
|         return marginalMultifrontalBayesTree(marginalVarsOrdering, marginalizationOrdering, function, *variableIndex); | ||||
|       } | ||||
|     if(!variableIndex) { | ||||
|       // If no variable index is provided, compute one and call this function again
 | ||||
|       VariableIndex computedVariableIndex(asDerived()); | ||||
|       return marginalMultifrontalBayesTree(variables, function, computedVariableIndex); | ||||
|     } else { | ||||
|       // No ordering was provided for the marginalized variables, so order them using constrained
 | ||||
|       // COLAMD.
 | ||||
|       bool unmarginalizedAreOrdered = (boost::get<const Ordering&>(&variables) != 0); | ||||
|       const KeyVector* variablesOrOrdering = | ||||
|         unmarginalizedAreOrdered ? | ||||
|         boost::get<const Ordering&>(&variables) : boost::get<const KeyVector&>(&variables); | ||||
| 
 | ||||
|       Ordering totalOrdering = | ||||
|         Ordering::ColamdConstrainedLast(*variableIndex, *variablesOrOrdering, unmarginalizedAreOrdered); | ||||
| 
 | ||||
|       // Split up ordering
 | ||||
|       const size_t nVars = variablesOrOrdering->size(); | ||||
|       Ordering marginalizationOrdering(totalOrdering.begin(), totalOrdering.end() - nVars); | ||||
|       Ordering marginalVarsOrdering(totalOrdering.end() - nVars, totalOrdering.end()); | ||||
| 
 | ||||
|       // Call this function again with the computed orderings
 | ||||
|       return marginalMultifrontalBayesTree(marginalVarsOrdering, marginalizationOrdering, function, *variableIndex); | ||||
|     } | ||||
|   } | ||||
| 
 | ||||
|   /* ************************************************************************* */ | ||||
|   template<class FACTORGRAPH> | ||||
|   boost::shared_ptr<typename EliminateableFactorGraph<FACTORGRAPH>::BayesTreeType> | ||||
|     EliminateableFactorGraph<FACTORGRAPH>::marginalMultifrontalBayesTree( | ||||
|     boost::variant<const Ordering&, const KeyVector&> variables, | ||||
|     const Ordering& marginalizedVariableOrdering, | ||||
|     const Eliminate& function, OptionalVariableIndex variableIndex) const | ||||
|   { | ||||
|     if(!variableIndex) { | ||||
|       // If no variable index is provided, compute one and call this function again
 | ||||
|       VariableIndex computedVariableIndex(asDerived()); | ||||
|       return marginalMultifrontalBayesTree(variables, marginalizedVariableOrdering, function, computedVariableIndex); | ||||
|     } else { | ||||
|       gttic(marginalMultifrontalBayesTree); | ||||
|       // An ordering was provided for the marginalized variables, so we can first eliminate them
 | ||||
|       // in the order requested.
 | ||||
|       boost::shared_ptr<BayesTreeType> bayesTree; | ||||
|       boost::shared_ptr<FactorGraphType> factorGraph; | ||||
|       boost::tie(bayesTree,factorGraph) = | ||||
|         eliminatePartialMultifrontal(marginalizedVariableOrdering, function, *variableIndex); | ||||
| 
 | ||||
|       if(const Ordering* varsAsOrdering = boost::get<const Ordering&>(&variables)) | ||||
|       { | ||||
|         // An ordering was also provided for the unmarginalized variables, so we can also
 | ||||
|         // eliminate them in the order requested.
 | ||||
|         return factorGraph->eliminateMultifrontal(*varsAsOrdering, function); | ||||
|       } | ||||
|       else | ||||
|       { | ||||
|         // No ordering was provided for the unmarginalized variables, so order them with COLAMD.
 | ||||
|         return factorGraph->eliminateMultifrontal(function); | ||||
|       } | ||||
|     } | ||||
|   } | ||||
| 
 | ||||
|  |  | |||
|  | @ -88,9 +88,6 @@ namespace gtsam { | |||
|     /// The function type that does a single dense elimination step on a subgraph.
 | ||||
|     typedef boost::function<EliminationResult(const FactorGraphType&, const Ordering&)> Eliminate; | ||||
| 
 | ||||
|     /// Typedef for an optional ordering as an argument to elimination functions
 | ||||
|     typedef boost::optional<const Ordering&> OptionalOrdering; | ||||
| 
 | ||||
|     /// Typedef for an optional variable index as an argument to elimination functions
 | ||||
|     typedef boost::optional<const VariableIndex&> OptionalVariableIndex; | ||||
| 
 | ||||
|  | @ -108,24 +105,38 @@ namespace gtsam { | |||
|      *  <b> Example - METIS ordering for elimination | ||||
|      *  \code | ||||
|      *  boost::shared_ptr<GaussianBayesNet> result = graph.eliminateSequential(OrderingType::METIS);
|      *  \endcode
|      *
|      *  <b> Example - Full QR elimination in specified order: | ||||
|      *  \code | ||||
|      *  boost::shared_ptr<GaussianBayesNet> result = graph.eliminateSequential(EliminateQR, myOrdering); | ||||
|      *  \endcode | ||||
|      * | ||||
|      *  <b> Example - Reusing an existing VariableIndex to improve performance, and using COLAMD ordering: </b> | ||||
|      *  \code | ||||
|      *  VariableIndex varIndex(graph); // Build variable index
 | ||||
|      *  Data data = otherFunctionUsingVariableIndex(graph, varIndex); // Other code that uses variable index
 | ||||
|      *  boost::shared_ptr<GaussianBayesNet> result = graph.eliminateSequential(EliminateQR, boost::none, varIndex); | ||||
|      *  boost::shared_ptr<GaussianBayesNet> result = graph.eliminateSequential(EliminateQR, varIndex, boost::none); | ||||
|      *  \endcode | ||||
|      *  */ | ||||
|     boost::shared_ptr<BayesNetType> eliminateSequential( | ||||
|       OptionalOrdering ordering = boost::none, | ||||
|       OptionalOrderingType orderingType = boost::none, | ||||
|       const Eliminate& function = EliminationTraitsType::DefaultEliminate, | ||||
|       OptionalVariableIndex variableIndex = boost::none, | ||||
|       OptionalOrderingType orderingType = boost::none) const; | ||||
|       OptionalVariableIndex variableIndex = boost::none) const; | ||||
| 
 | ||||
|     /** Do sequential elimination of all variables to produce a Bayes net.
 | ||||
|      * | ||||
|      *  <b> Example - Full QR elimination in specified order: | ||||
|      *  \code | ||||
|      *  boost::shared_ptr<GaussianBayesNet> result = graph.eliminateSequential(myOrdering, EliminateQR); | ||||
|      *  \endcode | ||||
|      * | ||||
|      *  <b> Example - Reusing an existing VariableIndex to improve performance: </b> | ||||
|      *  \code | ||||
|      *  VariableIndex varIndex(graph); // Build variable index
 | ||||
|      *  Data data = otherFunctionUsingVariableIndex(graph, varIndex); // Other code that uses variable index
 | ||||
|      *  boost::shared_ptr<GaussianBayesNet> result = graph.eliminateSequential(myOrdering, EliminateQR, varIndex, boost::none); | ||||
|      *  \endcode | ||||
|      *  */ | ||||
|     boost::shared_ptr<BayesNetType> eliminateSequential( | ||||
|       const Ordering& ordering, | ||||
|       const Eliminate& function = EliminationTraitsType::DefaultEliminate, | ||||
|       OptionalVariableIndex variableIndex = boost::none) const; | ||||
| 
 | ||||
|     /** Do multifrontal elimination of all variables to produce a Bayes tree.  If an ordering is not
 | ||||
|      *  provided, the ordering will be computed using either COLAMD or METIS, depending on
|  | @ -136,11 +147,6 @@ namespace gtsam { | |||
|      *  boost::shared_ptr<GaussianBayesTree> result = graph.eliminateMultifrontal(EliminateCholesky); | ||||
|      *  \endcode | ||||
|      * | ||||
|      *  <b> Example - Full QR elimination in specified order: | ||||
|      *  \code | ||||
|      *  boost::shared_ptr<GaussianBayesTree> result = graph.eliminateMultifrontal(EliminateQR, myOrdering); | ||||
|      *  \endcode | ||||
|      * | ||||
|      *  <b> Example - Reusing an existing VariableIndex to improve performance, and using COLAMD ordering: </b> | ||||
|      *  \code | ||||
|      *  VariableIndex varIndex(graph); // Build variable index
 | ||||
|  | @ -149,10 +155,23 @@ namespace gtsam { | |||
|      *  \endcode | ||||
|      *  */ | ||||
|     boost::shared_ptr<BayesTreeType> eliminateMultifrontal( | ||||
|       OptionalOrdering ordering = boost::none, | ||||
|       OptionalOrderingType orderingType = boost::none, | ||||
|       const Eliminate& function = EliminationTraitsType::DefaultEliminate, | ||||
|       OptionalVariableIndex variableIndex = boost::none, | ||||
|       OptionalOrderingType orderingType = boost::none) const; | ||||
|       OptionalVariableIndex variableIndex = boost::none) const; | ||||
| 
 | ||||
|     /** Do multifrontal elimination of all variables to produce a Bayes tree.  If an ordering is not
 | ||||
|      *  provided, the ordering will be computed using either COLAMD or METIS, depending on
|      *  the parameter orderingType (Ordering::COLAMD or Ordering::METIS) | ||||
|      * | ||||
|      *  <b> Example - Full QR elimination in specified order: | ||||
|      *  \code | ||||
|      *  boost::shared_ptr<GaussianBayesTree> result = graph.eliminateMultifrontal(EliminateQR, myOrdering); | ||||
|      *  \endcode | ||||
|      *  */ | ||||
|     boost::shared_ptr<BayesTreeType> eliminateMultifrontal( | ||||
|       const Ordering& ordering, | ||||
|       const Eliminate& function = EliminationTraitsType::DefaultEliminate, | ||||
|       OptionalVariableIndex variableIndex = boost::none) const; | ||||
| 
 | ||||
|     /** Do sequential elimination of some variables, in \c ordering provided, to produce a Bayes net
 | ||||
|      *  and a remaining factor graph.  This computes the factorization \f$ p(X) = p(A|B) p(B) \f$, | ||||
|  | @ -194,20 +213,47 @@ namespace gtsam { | |||
|       const Eliminate& function = EliminationTraitsType::DefaultEliminate, | ||||
|       OptionalVariableIndex variableIndex = boost::none) const; | ||||
| 
 | ||||
|     /** Compute the marginal of the requested variables and return the result as a Bayes net.
 | ||||
|     /** Compute the marginal of the requested variables and return the result as a Bayes net.  Uses
 | ||||
|      *  COLAMD marginalization ordering by default | ||||
|      *  @param variables Determines the variables whose marginal to compute, if provided as an | ||||
|      *         Ordering they will be ordered in the returned BayesNet as specified, and if provided | ||||
|      *         as a KeyVector they will be ordered using constrained COLAMD. | ||||
|      *  @param marginalizedVariableOrdering Optional ordering for the variables being marginalized | ||||
|      *         out, i.e. all variables not in \c variables.  If this is boost::none, the ordering | ||||
|      *         will be computed with COLAMD. | ||||
|      *  @param function Optional dense elimination function, if not provided the default will be | ||||
|      *         used. | ||||
|      *  @param variableIndex Optional pre-computed VariableIndex for the factor graph, if not | ||||
|      *         provided one will be computed. */ | ||||
|     boost::shared_ptr<BayesNetType> marginalMultifrontalBayesNet( | ||||
|       boost::variant<const Ordering&, const KeyVector&> variables, | ||||
|       OptionalOrdering marginalizedVariableOrdering = boost::none, | ||||
|       const Eliminate& function = EliminationTraitsType::DefaultEliminate, | ||||
|       OptionalVariableIndex variableIndex = boost::none) const; | ||||
| 
 | ||||
|     /** Compute the marginal of the requested variables and return the result as a Bayes net.
 | ||||
|      *  @param variables Determines the variables whose marginal to compute, if provided as an | ||||
|      *         Ordering they will be ordered in the returned BayesNet as specified, and if provided | ||||
|      *         as a KeyVector they will be ordered using constrained COLAMD. | ||||
|      *  @param marginalizedVariableOrdering Ordering for the variables being marginalized out, | ||||
|      *         i.e. all variables not in \c variables. | ||||
|      *  @param function Optional dense elimination function, if not provided the default will be | ||||
|      *         used. | ||||
|      *  @param variableIndex Optional pre-computed VariableIndex for the factor graph, if not | ||||
|      *         provided one will be computed. */ | ||||
|     boost::shared_ptr<BayesNetType> marginalMultifrontalBayesNet( | ||||
|       boost::variant<const Ordering&, const KeyVector&> variables, | ||||
|       const Ordering& marginalizedVariableOrdering, | ||||
|       const Eliminate& function = EliminationTraitsType::DefaultEliminate, | ||||
|       OptionalVariableIndex variableIndex = boost::none) const; | ||||
| 
 | ||||
|     /** Compute the marginal of the requested variables and return the result as a Bayes tree.  Uses
 | ||||
|      *  COLAMD marginalization order by default | ||||
|      *  @param variables Determines the variables whose marginal to compute, if provided as an | ||||
|      *         Ordering they will be ordered in the returned BayesNet as specified, and if provided | ||||
|      *         as a KeyVector they will be ordered using constrained COLAMD. | ||||
|      *  @param function Optional dense elimination function, if not provided the default will be | ||||
|      *         used. | ||||
|      *  @param variableIndex Optional pre-computed VariableIndex for the factor graph, if not | ||||
|      *         provided one will be computed. */ | ||||
|     boost::shared_ptr<BayesTreeType> marginalMultifrontalBayesTree( | ||||
|       boost::variant<const Ordering&, const KeyVector&> variables, | ||||
|       const Eliminate& function = EliminationTraitsType::DefaultEliminate, | ||||
|       OptionalVariableIndex variableIndex = boost::none) const; | ||||
| 
 | ||||
|  | @ -215,16 +261,15 @@ namespace gtsam { | |||
|      *  @param variables Determines the variables whose marginal to compute, if provided as an | ||||
|      *         Ordering they will be ordered in the returned BayesNet as specified, and if provided | ||||
|      *         as a KeyVector they will be ordered using constrained COLAMD. | ||||
|      *  @param marginalizedVariableOrdering Optional ordering for the variables being marginalized | ||||
|      *         out, i.e. all variables not in \c variables.  If this is boost::none, the ordering | ||||
|      *         will be computed with COLAMD. | ||||
|      *  @param marginalizedVariableOrdering Ordering for the variables being marginalized out, | ||||
|      *         i.e. all variables not in \c variables. | ||||
|      *  @param function Optional dense elimination function, if not provided the default will be | ||||
|      *         used. | ||||
|      *  @param variableIndex Optional pre-computed VariableIndex for the factor graph, if not | ||||
|      *         provided one will be computed. */ | ||||
|     boost::shared_ptr<BayesTreeType> marginalMultifrontalBayesTree( | ||||
|       boost::variant<const Ordering&, const KeyVector&> variables, | ||||
|       OptionalOrdering marginalizedVariableOrdering = boost::none, | ||||
|       const Ordering& marginalizedVariableOrdering, | ||||
|       const Eliminate& function = EliminationTraitsType::DefaultEliminate, | ||||
|       OptionalVariableIndex variableIndex = boost::none) const; | ||||
| 
 | ||||
|  | @ -241,6 +286,59 @@ namespace gtsam { | |||
| 
 | ||||
|     // Access the derived factor graph class
 | ||||
|     FactorGraphType& asDerived() { return static_cast<FactorGraphType&>(*this); } | ||||
| 
 | ||||
|   public: | ||||
|     /** \deprecated ordering and orderingType shouldn't both be specified */ | ||||
|     boost::shared_ptr<BayesNetType> eliminateSequential( | ||||
|       const Ordering& ordering, | ||||
|       const Eliminate& function, | ||||
|       OptionalVariableIndex variableIndex, | ||||
|       OptionalOrderingType orderingType) const { | ||||
|         return eliminateSequential(ordering, function, variableIndex); | ||||
|       } | ||||
|      | ||||
|     /** \deprecated orderingType specified first for consistency */ | ||||
|     boost::shared_ptr<BayesNetType> eliminateSequential( | ||||
|       const Eliminate& function, | ||||
|       OptionalVariableIndex variableIndex = boost::none, | ||||
|       OptionalOrderingType orderingType = boost::none) const { | ||||
|         return eliminateSequential(orderingType, function, variableIndex); | ||||
|       } | ||||
| 
 | ||||
|     /** \deprecated ordering and orderingType shouldn't both be specified */ | ||||
|     boost::shared_ptr<BayesTreeType> eliminateMultifrontal( | ||||
|       const Ordering& ordering, | ||||
|       const Eliminate& function, | ||||
|       OptionalVariableIndex variableIndex, | ||||
|       OptionalOrderingType orderingType) const { | ||||
|         return eliminateMultifrontal(ordering, function, variableIndex); | ||||
|       } | ||||
| 
 | ||||
|     /** \deprecated orderingType specified first for consistency */ | ||||
|     boost::shared_ptr<BayesTreeType> eliminateMultifrontal( | ||||
|       const Eliminate& function, | ||||
|       OptionalVariableIndex variableIndex = boost::none, | ||||
|       OptionalOrderingType orderingType = boost::none) const { | ||||
|         return eliminateMultifrontal(orderingType, function, variableIndex); | ||||
|       } | ||||
| 
 | ||||
|     /** \deprecated */ | ||||
|     boost::shared_ptr<BayesNetType> marginalMultifrontalBayesNet( | ||||
|       boost::variant<const Ordering&, const KeyVector&> variables, | ||||
|       boost::none_t, | ||||
|       const Eliminate& function = EliminationTraitsType::DefaultEliminate, | ||||
|       OptionalVariableIndex variableIndex = boost::none) const { | ||||
|           return marginalMultifrontalBayesNet(variables, function, variableIndex); | ||||
|       } | ||||
| 
 | ||||
|     /** \deprecated */ | ||||
|     boost::shared_ptr<BayesTreeType> marginalMultifrontalBayesTree( | ||||
|       boost::variant<const Ordering&, const KeyVector&> variables, | ||||
|       boost::none_t, | ||||
|       const Eliminate& function = EliminationTraitsType::DefaultEliminate, | ||||
|       OptionalVariableIndex variableIndex = boost::none) const { | ||||
|           return marginalMultifrontalBayesTree(variables, function, variableIndex); | ||||
|       } | ||||
|   }; | ||||
| 
 | ||||
| } | ||||
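Assuming the deprecated shims above behave as written, old call sites keep compiling while new code drops the extra optional arguments; a sketch of the before/after call patterns (the graph and ordering are assumed to exist):

    #include <gtsam/linear/GaussianFactorGraph.h>

    using namespace gtsam;

    void migrationExample(const GaussianFactorGraph& graph, const Ordering& ordering) {
      // Old-style call: trailing orderingType argument, routed through the deprecated shim
      auto bnOld = graph.eliminateSequential(ordering, EliminatePreferCholesky, boost::none, boost::none);

      // New-style call: the orderingType argument is simply dropped
      auto bnNew = graph.eliminateSequential(ordering, EliminatePreferCholesky);

      // Old-style call: explicit boost::none where an optional marginalization ordering used to go
      auto mOld = graph.marginalMultifrontalBayesNet(ordering, boost::none);

      // New-style call: omit it and constrained COLAMD is used
      auto mNew = graph.marginalMultifrontalBayesNet(ordering);
    }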
|  |  | |||
|  | @ -156,16 +156,17 @@ namespace gtsam { | |||
|   } | ||||
| 
 | ||||
|   /* ************************************************************************* */ | ||||
|   pair<Matrix, Vector> GaussianBayesNet::matrix(boost::optional<const Ordering&> ordering) const { | ||||
|     if (ordering) { | ||||
|       // Convert to a GaussianFactorGraph and use its machinery
 | ||||
|       GaussianFactorGraph factorGraph(*this); | ||||
|       return factorGraph.jacobian(ordering); | ||||
|     } else { | ||||
|       // recursively call with default ordering
 | ||||
|       const auto defaultOrdering = this->ordering(); | ||||
|       return matrix(defaultOrdering); | ||||
|     } | ||||
|   pair<Matrix, Vector> GaussianBayesNet::matrix(const Ordering& ordering) const { | ||||
|     // Convert to a GaussianFactorGraph and use its machinery
 | ||||
|     GaussianFactorGraph factorGraph(*this); | ||||
|     return factorGraph.jacobian(ordering); | ||||
|   } | ||||
| 
 | ||||
|   /* ************************************************************************* */ | ||||
|   pair<Matrix, Vector> GaussianBayesNet::matrix() const { | ||||
|     // recursively call with default ordering
 | ||||
|     const auto defaultOrdering = this->ordering(); | ||||
|     return matrix(defaultOrdering); | ||||
|   } | ||||
| 
 | ||||
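A brief sketch of the two matrix() overloads above, assuming a Bayes net obtained from sequential elimination:

    #include <gtsam/linear/GaussianFactorGraph.h>
    #include <gtsam/linear/GaussianBayesNet.h>

    using namespace gtsam;

    void bayesNetMatrixExample(const GaussianFactorGraph& graph) {
      boost::shared_ptr<GaussianBayesNet> bn = graph.eliminateSequential();

      // No argument: uses the Bayes net's own elimination ordering
      std::pair<Matrix, Vector> Rd1 = bn->matrix();

      // Explicit ordering: rows and columns follow the ordering supplied by the caller
      Ordering ordering = bn->ordering();
      std::pair<Matrix, Vector> Rd2 = bn->matrix(ordering);
    }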
|   ///* ************************************************************************* */
 | ||||
|  |  | |||
|  | @ -92,7 +92,14 @@ namespace gtsam { | |||
|      * Will return upper-triangular matrix only when using 'ordering' above. | ||||
|      * In case Bayes net is incomplete zero columns are added to the end. | ||||
|      */ | ||||
|     std::pair<Matrix, Vector> matrix(boost::optional<const Ordering&> ordering = boost::none) const; | ||||
|     std::pair<Matrix, Vector> matrix(const Ordering& ordering) const; | ||||
| 
 | ||||
|     /**
 | ||||
|      * Return (dense) upper-triangular matrix representation | ||||
|      * Will return upper-triangular matrix only when using 'ordering' above. | ||||
|      * In case Bayes net is incomplete zero columns are added to the end. | ||||
|      */ | ||||
|     std::pair<Matrix, Vector> matrix() const; | ||||
| 
 | ||||
|     /**
 | ||||
|      * Optimize along the gradient direction, with a closed-form computation to perform the line | ||||
|  |  | |||
|  | @ -190,33 +190,62 @@ namespace gtsam { | |||
| 
 | ||||
|   /* ************************************************************************* */ | ||||
|   Matrix GaussianFactorGraph::augmentedJacobian( | ||||
|       boost::optional<const Ordering&> optionalOrdering) const { | ||||
|       const Ordering& ordering) const { | ||||
|     // combine all factors
 | ||||
|     JacobianFactor combined(*this, optionalOrdering); | ||||
|     JacobianFactor combined(*this, ordering); | ||||
|     return combined.augmentedJacobian(); | ||||
|   } | ||||
| 
 | ||||
|   /* ************************************************************************* */ | ||||
|   Matrix GaussianFactorGraph::augmentedJacobian() const { | ||||
|     // combine all factors
 | ||||
|     JacobianFactor combined(*this); | ||||
|     return combined.augmentedJacobian(); | ||||
|   } | ||||
| 
 | ||||
|   /* ************************************************************************* */ | ||||
|   pair<Matrix, Vector> GaussianFactorGraph::jacobian( | ||||
|       boost::optional<const Ordering&> optionalOrdering) const { | ||||
|     Matrix augmented = augmentedJacobian(optionalOrdering); | ||||
|       const Ordering& ordering) const { | ||||
|     Matrix augmented = augmentedJacobian(ordering); | ||||
|     return make_pair(augmented.leftCols(augmented.cols() - 1), | ||||
|         augmented.col(augmented.cols() - 1)); | ||||
|   } | ||||
| 
 | ||||
|   /* ************************************************************************* */ | ||||
|   pair<Matrix, Vector> GaussianFactorGraph::jacobian() const { | ||||
|     Matrix augmented = augmentedJacobian(); | ||||
|     return make_pair(augmented.leftCols(augmented.cols() - 1), | ||||
|         augmented.col(augmented.cols() - 1)); | ||||
|   } | ||||
| 
 | ||||
|   /* ************************************************************************* */ | ||||
|   Matrix GaussianFactorGraph::augmentedHessian( | ||||
|       boost::optional<const Ordering&> optionalOrdering) const { | ||||
|       const Ordering& ordering) const { | ||||
|     // combine all factors and get upper-triangular part of Hessian
 | ||||
|     Scatter scatter(*this, optionalOrdering); | ||||
|     Scatter scatter(*this, ordering); | ||||
|     HessianFactor combined(*this, scatter); | ||||
|     return combined.info().selfadjointView();
|   } | ||||
| 
 | ||||
|   /* ************************************************************************* */ | ||||
|   Matrix GaussianFactorGraph::augmentedHessian() const { | ||||
|     // combine all factors and get upper-triangular part of Hessian
 | ||||
|     Scatter scatter(*this); | ||||
|     HessianFactor combined(*this, scatter); | ||||
|     return combined.info().selfadjointView();
|   } | ||||
| 
 | ||||
|   /* ************************************************************************* */ | ||||
|   pair<Matrix, Vector> GaussianFactorGraph::hessian( | ||||
|       boost::optional<const Ordering&> optionalOrdering) const { | ||||
|     Matrix augmented = augmentedHessian(optionalOrdering); | ||||
|       const Ordering& ordering) const { | ||||
|     Matrix augmented = augmentedHessian(ordering); | ||||
|     size_t n = augmented.rows() - 1; | ||||
|     return make_pair(augmented.topLeftCorner(n, n), augmented.topRightCorner(n, 1)); | ||||
|   } | ||||
| 
 | ||||
|   /* ************************************************************************* */ | ||||
|   pair<Matrix, Vector> GaussianFactorGraph::hessian() const { | ||||
|     Matrix augmented = augmentedHessian(); | ||||
|     size_t n = augmented.rows() - 1; | ||||
|     return make_pair(augmented.topLeftCorner(n, n), augmented.topRightCorner(n, 1)); | ||||
|   } | ||||
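A sketch of the split dense-matrix accessors: each now comes in an unordered flavor (columns in sorted-key order) and an ordered flavor (graph and ordering assumed given):

    #include <gtsam/linear/GaussianFactorGraph.h>

    using namespace gtsam;

    void denseViewsExample(const GaussianFactorGraph& graph, const Ordering& ordering) {
      // Columns in the default order (keys sorted numerically by the combine constructor)
      Matrix Ab = graph.augmentedJacobian();
      std::pair<Matrix, Vector> jac = graph.jacobian();

      // Columns arranged to follow a caller-supplied ordering
      Matrix AbOrdered = graph.augmentedJacobian(ordering);
      std::pair<Matrix, Vector> hess = graph.hessian(ordering);
    }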
|  | @ -253,8 +282,13 @@ namespace gtsam { | |||
|   } | ||||
| 
 | ||||
|   /* ************************************************************************* */ | ||||
|   VectorValues GaussianFactorGraph::optimize(OptionalOrdering ordering, const Eliminate& function) const | ||||
|   { | ||||
|   VectorValues GaussianFactorGraph::optimize(const Eliminate& function) const { | ||||
|     gttic(GaussianFactorGraph_optimize); | ||||
|     return BaseEliminateable::eliminateMultifrontal(function)->optimize(); | ||||
|   } | ||||
| 
 | ||||
|   /* ************************************************************************* */ | ||||
|   VectorValues GaussianFactorGraph::optimize(const Ordering& ordering, const Eliminate& function) const { | ||||
|     gttic(GaussianFactorGraph_optimize); | ||||
|     return BaseEliminateable::eliminateMultifrontal(ordering, function)->optimize(); | ||||
|   } | ||||
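And the corresponding sketch for the two optimize() overloads above:

    #include <gtsam/linear/GaussianFactorGraph.h>

    using namespace gtsam;

    void optimizeExample(const GaussianFactorGraph& graph, const Ordering& ordering) {
      // Multifrontal elimination in COLAMD order, then back-substitution
      VectorValues x1 = graph.optimize();

      // Same, but eliminating in the caller-supplied order
      VectorValues x2 = graph.optimize(ordering);
    }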
|  | @ -460,4 +494,11 @@ namespace gtsam { | |||
|     return e; | ||||
|   } | ||||
| 
 | ||||
|   /* ************************************************************************* */ | ||||
|   /** \deprecated */ | ||||
|   VectorValues GaussianFactorGraph::optimize(boost::none_t, | ||||
|     const Eliminate& function) const { | ||||
|       return optimize(function); | ||||
|   } | ||||
| 
 | ||||
| } // namespace gtsam
 | ||||
|  |  | |||
|  | @ -201,7 +201,16 @@ namespace gtsam { | |||
|      * \f$ \frac{1}{2} \Vert Ax-b \Vert^2 \f$.  See also | ||||
|      * GaussianFactorGraph::jacobian and GaussianFactorGraph::sparseJacobian. | ||||
|      */ | ||||
|     Matrix augmentedJacobian(boost::optional<const Ordering&> optionalOrdering = boost::none) const; | ||||
|     Matrix augmentedJacobian(const Ordering& ordering) const; | ||||
|      | ||||
|     /**
 | ||||
|      * Return a dense \f$ [ \;A\;b\; ] \in \mathbb{R}^{m \times n+1} \f$ | ||||
|      * Jacobian matrix, augmented with b with the noise models baked | ||||
|      * into A and b.  The negative log-likelihood is | ||||
|      * \f$ \frac{1}{2} \Vert Ax-b \Vert^2 \f$.  See also | ||||
|      * GaussianFactorGraph::jacobian and GaussianFactorGraph::sparseJacobian. | ||||
|      */ | ||||
|     Matrix augmentedJacobian() const; | ||||
| 
 | ||||
|     /**
 | ||||
|      * Return the dense Jacobian \f$ A \f$ and right-hand-side \f$ b \f$, | ||||
|  | @ -210,7 +219,16 @@ namespace gtsam { | |||
|      * GaussianFactorGraph::augmentedJacobian and | ||||
|      * GaussianFactorGraph::sparseJacobian. | ||||
|      */ | ||||
|     std::pair<Matrix,Vector> jacobian(boost::optional<const Ordering&> optionalOrdering = boost::none) const; | ||||
|     std::pair<Matrix,Vector> jacobian(const Ordering& ordering) const; | ||||
| 
 | ||||
|     /**
 | ||||
|      * Return the dense Jacobian \f$ A \f$ and right-hand-side \f$ b \f$, | ||||
|      * with the noise models baked into A and b. The negative log-likelihood | ||||
|      * is \f$ \frac{1}{2} \Vert Ax-b \Vert^2 \f$.  See also | ||||
|      * GaussianFactorGraph::augmentedJacobian and | ||||
|      * GaussianFactorGraph::sparseJacobian. | ||||
|      */ | ||||
|     std::pair<Matrix,Vector> jacobian() const; | ||||
| 
 | ||||
|     /**
 | ||||
|      * Return a dense \f$ \Lambda \in \mathbb{R}^{n+1 \times n+1} \f$ Hessian | ||||
|  | @ -223,7 +241,20 @@ namespace gtsam { | |||
|      and the negative log-likelihood is | ||||
|      \f$ \frac{1}{2} x^T \Lambda x + \eta^T x + c \f$. | ||||
|      */ | ||||
|     Matrix augmentedHessian(boost::optional<const Ordering&> optionalOrdering = boost::none) const; | ||||
|     Matrix augmentedHessian(const Ordering& ordering) const; | ||||
| 
 | ||||
|     /**
 | ||||
|      * Return a dense \f$ \Lambda \in \mathbb{R}^{n+1 \times n+1} \f$ Hessian | ||||
|      * matrix, augmented with the information vector \f$ \eta \f$.  The | ||||
|      * augmented Hessian is | ||||
|      \f[ \left[ \begin{array}{ccc} | ||||
|      \Lambda & \eta \\ | ||||
|      \eta^T & c | ||||
|      \end{array} \right] \f] | ||||
|      and the negative log-likelihood is | ||||
|      \f$ \frac{1}{2} x^T \Lambda x + \eta^T x + c \f$. | ||||
|      */ | ||||
|     Matrix augmentedHessian() const; | ||||
| 
 | ||||
|     /**
 | ||||
|      * Return the dense Hessian \f$ \Lambda \f$ and information vector | ||||
|  | @ -231,7 +262,15 @@ namespace gtsam { | |||
|      * is \frac{1}{2} x^T \Lambda x + \eta^T x + c.  See also | ||||
|      * GaussianFactorGraph::augmentedHessian. | ||||
|      */ | ||||
|     std::pair<Matrix,Vector> hessian(boost::optional<const Ordering&> optionalOrdering = boost::none) const; | ||||
|     std::pair<Matrix,Vector> hessian(const Ordering& ordering) const; | ||||
| 
 | ||||
|     /**
 | ||||
|      * Return the dense Hessian \f$ \Lambda \f$ and information vector | ||||
|      * \f$ \eta \f$, with the noise models baked in. The negative log-likelihood | ||||
|      * is \frac{1}{2} x^T \Lambda x + \eta^T x + c.  See also | ||||
|      * GaussianFactorGraph::augmentedHessian. | ||||
|      */ | ||||
|     std::pair<Matrix,Vector> hessian() const; | ||||
| 
 | ||||
|     /** Return only the diagonal of the Hessian A'*A, as a VectorValues */ | ||||
|     virtual VectorValues hessianDiagonal() const; | ||||
|  | @ -243,7 +282,14 @@ namespace gtsam { | |||
|      *  the dense elimination function specified in \c function (default EliminatePreferCholesky), | ||||
|      *  followed by back-substitution in the Bayes tree resulting from elimination.  Is equivalent | ||||
|      *  to calling graph.eliminateMultifrontal()->optimize(). */ | ||||
|     VectorValues optimize(OptionalOrdering ordering = boost::none, | ||||
|     VectorValues optimize( | ||||
|       const Eliminate& function = EliminationTraitsType::DefaultEliminate) const; | ||||
| 
 | ||||
|     /** Solve the factor graph by performing multifrontal variable elimination in COLAMD order using
 | ||||
|      *  the dense elimination function specified in \c function (default EliminatePreferCholesky), | ||||
|      *  followed by back-substitution in the Bayes tree resulting from elimination.  Is equivalent | ||||
|      *  to calling graph.eliminateMultifrontal()->optimize(). */ | ||||
|     VectorValues optimize(const Ordering&, | ||||
|       const Eliminate& function = EliminationTraitsType::DefaultEliminate) const; | ||||
| 
 | ||||
|     /**
 | ||||
|  | @ -329,6 +375,12 @@ namespace gtsam { | |||
|       ar & BOOST_SERIALIZATION_BASE_OBJECT_NVP(Base); | ||||
|     } | ||||
| 
 | ||||
|   public: | ||||
| 
 | ||||
|     /** \deprecated */ | ||||
|     VectorValues optimize(boost::none_t, | ||||
|       const Eliminate& function = EliminationTraitsType::DefaultEliminate) const; | ||||
| 
 | ||||
|   }; | ||||
| 
 | ||||
|   /**
 | ||||
|  |  | |||
|  | @ -250,10 +250,10 @@ HessianFactor::HessianFactor(const GaussianFactor& gf) : | |||
| 
 | ||||
| /* ************************************************************************* */ | ||||
| HessianFactor::HessianFactor(const GaussianFactorGraph& factors, | ||||
|     boost::optional<const Scatter&> scatter) { | ||||
|     const Scatter& scatter) { | ||||
|   gttic(HessianFactor_MergeConstructor); | ||||
| 
 | ||||
|   Allocate(scatter ? *scatter : Scatter(factors)); | ||||
|   Allocate(scatter); | ||||
| 
 | ||||
|   // Form A' * A
 | ||||
|   gttic(update); | ||||
|  |  | |||
|  | @ -176,7 +176,11 @@ namespace gtsam { | |||
| 
 | ||||
|     /** Combine a set of factors into a single dense HessianFactor */ | ||||
|     explicit HessianFactor(const GaussianFactorGraph& factors, | ||||
|       boost::optional<const Scatter&> scatter = boost::none); | ||||
|       const Scatter& scatter); | ||||
| 
 | ||||
|     /** Combine a set of factors into a single dense HessianFactor */ | ||||
|     explicit HessianFactor(const GaussianFactorGraph& factors) | ||||
|         : HessianFactor(factors, Scatter(factors)) {} | ||||
| 
 | ||||
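A short usage sketch for the two combine constructors above; delegating the no-Scatter case keeps the merge logic in one place:

    #include <gtsam/linear/GaussianFactorGraph.h>
    #include <gtsam/linear/HessianFactor.h>

    using namespace gtsam;

    void combineHessianExample(const GaussianFactorGraph& graph) {
      // Scatter computed internally by the delegating constructor
      HessianFactor combined1(graph);

      // Reuse a precomputed Scatter, e.g. when it is also needed elsewhere
      Scatter scatter(graph);
      HessianFactor combined2(graph, scatter);
    }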
|     /** Destructor */ | ||||
|     virtual ~HessianFactor() {} | ||||
|  |  | |||
|  | @ -220,60 +220,13 @@ FastVector<JacobianFactor::shared_ptr> _convertOrCastToJacobians( | |||
| } | ||||
| 
 | ||||
| /* ************************************************************************* */ | ||||
| JacobianFactor::JacobianFactor(const GaussianFactorGraph& graph, | ||||
|     boost::optional<const Ordering&> ordering, | ||||
|     boost::optional<const VariableSlots&> p_variableSlots) { | ||||
|   gttic(JacobianFactor_combine_constructor); | ||||
| 
 | ||||
|   // Compute VariableSlots if one was not provided
 | ||||
|   // Binds reference, does not copy VariableSlots
 | ||||
|   const VariableSlots & variableSlots = | ||||
|       p_variableSlots ? p_variableSlots.get() : VariableSlots(graph); | ||||
| void JacobianFactor::JacobianFactorHelper(const GaussianFactorGraph& graph, | ||||
|     const FastVector<VariableSlots::const_iterator>& orderedSlots) { | ||||
| 
 | ||||
|   // Cast or convert to Jacobians
 | ||||
|   FastVector<JacobianFactor::shared_ptr> jacobians = _convertOrCastToJacobians( | ||||
|       graph); | ||||
| 
 | ||||
|   gttic(Order_slots); | ||||
|   // Order variable slots - we maintain the vector of ordered slots, as well as keep a list
 | ||||
|   // 'unorderedSlots' of any variables discovered that are not in the ordering.  Those will then
 | ||||
|   // be added after all of the ordered variables.
 | ||||
|   FastVector<VariableSlots::const_iterator> orderedSlots; | ||||
|   orderedSlots.reserve(variableSlots.size()); | ||||
|   if (ordering) { | ||||
|     // If an ordering is provided, arrange the slots first in that ordering
|     FastList<VariableSlots::const_iterator> unorderedSlots; | ||||
|     size_t nOrderingSlotsUsed = 0; | ||||
|     orderedSlots.resize(ordering->size()); | ||||
|     FastMap<Key, size_t> inverseOrdering = ordering->invert(); | ||||
|     for (VariableSlots::const_iterator item = variableSlots.begin(); | ||||
|         item != variableSlots.end(); ++item) { | ||||
|       FastMap<Key, size_t>::const_iterator orderingPosition = | ||||
|           inverseOrdering.find(item->first); | ||||
|       if (orderingPosition == inverseOrdering.end()) { | ||||
|         unorderedSlots.push_back(item); | ||||
|       } else { | ||||
|         orderedSlots[orderingPosition->second] = item; | ||||
|         ++nOrderingSlotsUsed; | ||||
|       } | ||||
|     } | ||||
|     if (nOrderingSlotsUsed != ordering->size()) | ||||
|       throw std::invalid_argument( | ||||
|           "The ordering provided to the JacobianFactor combine constructor\n" | ||||
|               "contained extra variables that did not appear in the factors to combine."); | ||||
|     // Add the remaining slots
 | ||||
|     for(VariableSlots::const_iterator item: unorderedSlots) { | ||||
|       orderedSlots.push_back(item); | ||||
|     } | ||||
|   } else { | ||||
|     // If no ordering is provided, arrange the slots as they were, which will be sorted
 | ||||
|     // numerically since VariableSlots uses a map sorting on Key.
 | ||||
|     for (VariableSlots::const_iterator item = variableSlots.begin(); | ||||
|         item != variableSlots.end(); ++item) | ||||
|       orderedSlots.push_back(item); | ||||
|   } | ||||
|   gttoc(Order_slots); | ||||
| 
 | ||||
|   // Count dimensions
 | ||||
|   FastVector<DenseIndex> varDims; | ||||
|   DenseIndex m, n; | ||||
|  | @ -344,6 +297,127 @@ JacobianFactor::JacobianFactor(const GaussianFactorGraph& graph, | |||
|     this->setModel(anyConstrained, *sigmas); | ||||
| } | ||||
| 
 | ||||
| /* ************************************************************************* */ | ||||
| // Order variable slots - we maintain the vector of ordered slots, as well as keep a list
 | ||||
| // 'unorderedSlots' of any variables discovered that are not in the ordering.  Those will then
 | ||||
| // be added after all of the ordered variables.
 | ||||
| FastVector<VariableSlots::const_iterator> orderedSlotsHelper( | ||||
|     const Ordering& ordering, | ||||
|     const VariableSlots& variableSlots) { | ||||
|   gttic(Order_slots); | ||||
|    | ||||
|   FastVector<VariableSlots::const_iterator> orderedSlots; | ||||
|   orderedSlots.reserve(variableSlots.size()); | ||||
|    | ||||
|   // Arrange the slots in the given ordering first
 | ||||
|   FastList<VariableSlots::const_iterator> unorderedSlots; | ||||
|   size_t nOrderingSlotsUsed = 0; | ||||
|   orderedSlots.resize(ordering.size()); | ||||
|   FastMap<Key, size_t> inverseOrdering = ordering.invert(); | ||||
|   for (VariableSlots::const_iterator item = variableSlots.begin(); | ||||
|       item != variableSlots.end(); ++item) { | ||||
|     FastMap<Key, size_t>::const_iterator orderingPosition = | ||||
|         inverseOrdering.find(item->first); | ||||
|     if (orderingPosition == inverseOrdering.end()) { | ||||
|       unorderedSlots.push_back(item); | ||||
|     } else { | ||||
|       orderedSlots[orderingPosition->second] = item; | ||||
|       ++nOrderingSlotsUsed; | ||||
|     } | ||||
|   } | ||||
|   if (nOrderingSlotsUsed != ordering.size()) | ||||
|     throw std::invalid_argument( | ||||
|         "The ordering provided to the JacobianFactor combine constructor\n" | ||||
|             "contained extra variables that did not appear in the factors to combine."); | ||||
|   // Add the remaining slots
 | ||||
|   for(VariableSlots::const_iterator item: unorderedSlots) { | ||||
|     orderedSlots.push_back(item); | ||||
|   } | ||||
| 
 | ||||
|   gttoc(Order_slots); | ||||
| 
 | ||||
|   return orderedSlots; | ||||
| } | ||||
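
A hedged sketch of the failure mode this helper enforces (names below are illustrative, not from the diff): an ordering that mentions a key absent from the graph makes the combine constructor throw, while omitting graph keys from the ordering is fine, since those are simply appended afterwards.

    #include <gtsam/linear/GaussianFactorGraph.h>
    #include <gtsam/linear/JacobianFactor.h>
    #include <gtsam/inference/Ordering.h>
    #include <stdexcept>

    void orderingMustMatchGraph(const gtsam::GaussianFactorGraph& gfg) {
      gtsam::Ordering bad;
      bad.push_back(999);  // assume key 999 is not used by any factor in gfg
      try {
        gtsam::JacobianFactor combined(gfg, bad);
      } catch (const std::invalid_argument& e) {
        // "The ordering provided to the JacobianFactor combine constructor ..."
      }
    }
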
| 
 | ||||
| /* ************************************************************************* */ | ||||
| JacobianFactor::JacobianFactor(const GaussianFactorGraph& graph) { | ||||
|   gttic(JacobianFactor_combine_constructor); | ||||
| 
 | ||||
|   // No VariableSlots were provided, so compute them from the graph here; | ||||
|   // the const reference binds to the temporary and extends its lifetime | ||||
|   const VariableSlots & variableSlots = VariableSlots(graph); | ||||
| 
 | ||||
|   gttic(Order_slots); | ||||
|   // Order variable slots - we maintain the vector of ordered slots, as well as keep a list
 | ||||
|   // 'unorderedSlots' of any variables discovered that are not in the ordering.  Those will then
 | ||||
|   // be added after all of the ordered variables.
 | ||||
|   FastVector<VariableSlots::const_iterator> orderedSlots; | ||||
|   orderedSlots.reserve(variableSlots.size()); | ||||
|    | ||||
|   // If no ordering is provided, arrange the slots as they were, which will be sorted
 | ||||
|   // numerically since VariableSlots uses a map sorting on Key.
 | ||||
|   for (VariableSlots::const_iterator item = variableSlots.begin(); | ||||
|       item != variableSlots.end(); ++item) | ||||
|     orderedSlots.push_back(item); | ||||
|   gttoc(Order_slots); | ||||
| 
 | ||||
|   JacobianFactorHelper(graph, orderedSlots); | ||||
| } | ||||
| 
 | ||||
| /* ************************************************************************* */ | ||||
| JacobianFactor::JacobianFactor(const GaussianFactorGraph& graph, | ||||
|     const VariableSlots& p_variableSlots) { | ||||
|   gttic(JacobianFactor_combine_constructor); | ||||
| 
 | ||||
|   // Binds reference, does not copy VariableSlots
 | ||||
|   const VariableSlots & variableSlots = p_variableSlots; | ||||
| 
 | ||||
|   gttic(Order_slots); | ||||
|   // Order variable slots - we maintain the vector of ordered slots, as well as keep a list
 | ||||
|   // 'unorderedSlots' of any variables discovered that are not in the ordering.  Those will then
 | ||||
|   // be added after all of the ordered variables.
 | ||||
|   FastVector<VariableSlots::const_iterator> orderedSlots; | ||||
|   orderedSlots.reserve(variableSlots.size()); | ||||
|    | ||||
|   // If no ordering is provided, arrange the slots as they were, which will be sorted
 | ||||
|   // numerically since VariableSlots uses a map sorting on Key.
 | ||||
|   for (VariableSlots::const_iterator item = variableSlots.begin(); | ||||
|       item != variableSlots.end(); ++item) | ||||
|     orderedSlots.push_back(item); | ||||
|   gttoc(Order_slots); | ||||
| 
 | ||||
|   JacobianFactorHelper(graph, orderedSlots); | ||||
| } | ||||
| 
 | ||||
| /* ************************************************************************* */ | ||||
| JacobianFactor::JacobianFactor(const GaussianFactorGraph& graph, | ||||
|     const Ordering& ordering) { | ||||
|   gttic(JacobianFactor_combine_constructor); | ||||
|    | ||||
|   // No VariableSlots were provided, so compute them from the graph here; | ||||
|   // the const reference binds to the temporary and extends its lifetime | ||||
|   const VariableSlots & variableSlots = VariableSlots(graph); | ||||
| 
 | ||||
|   // Order variable slots
 | ||||
|   FastVector<VariableSlots::const_iterator> orderedSlots = | ||||
|     orderedSlotsHelper(ordering, variableSlots); | ||||
| 
 | ||||
|   JacobianFactorHelper(graph, orderedSlots); | ||||
| } | ||||
| 
 | ||||
| /* ************************************************************************* */ | ||||
| JacobianFactor::JacobianFactor(const GaussianFactorGraph& graph, | ||||
|     const Ordering& ordering, | ||||
|     const VariableSlots& p_variableSlots) { | ||||
|   gttic(JacobianFactor_combine_constructor); | ||||
|    | ||||
|   // Order variable slots
 | ||||
|   FastVector<VariableSlots::const_iterator> orderedSlots = | ||||
|     orderedSlotsHelper(ordering, p_variableSlots); | ||||
| 
 | ||||
|   JacobianFactorHelper(graph, orderedSlots); | ||||
| } | ||||
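
For reference, a sketch of how the four public combine constructors are now selected (illustrative names, assuming a pre-built graph and ordering): the VariableSlots overloads let callers reuse a structure they already computed.

    #include <gtsam/linear/GaussianFactorGraph.h>
    #include <gtsam/linear/JacobianFactor.h>
    #include <gtsam/inference/Ordering.h>
    #include <gtsam/inference/VariableSlots.h>

    void combineSketch(const gtsam::GaussianFactorGraph& gfg, const gtsam::Ordering& ordering) {
      using namespace gtsam;
      VariableSlots slots(gfg);                // computed once, reusable below
      JacobianFactor a(gfg);                   // keys in VariableSlots (numerically sorted) order
      JacobianFactor b(gfg, slots);            // same order, but reuses the precomputed slots
      JacobianFactor c(gfg, ordering);         // caller-supplied ordering, slots computed internally
      JacobianFactor d(gfg, ordering, slots);  // caller-supplied ordering and precomputed slots
    }
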
| 
 | ||||
| /* ************************************************************************* */ | ||||
| void JacobianFactor::print(const string& s, | ||||
|     const KeyFormatter& formatter) const { | ||||
|  |  | |||
|  | @ -22,6 +22,7 @@ | |||
| #include <gtsam/linear/NoiseModel.h> | ||||
| #include <gtsam/base/VerticalBlockMatrix.h> | ||||
| #include <gtsam/global_includes.h> | ||||
| #include <gtsam/inference/VariableSlots.h> | ||||
| 
 | ||||
| #include <boost/make_shared.hpp> | ||||
| 
 | ||||
|  | @ -147,14 +148,37 @@ namespace gtsam { | |||
|     JacobianFactor( | ||||
|       const KEYS& keys, const VerticalBlockMatrix& augmentedMatrix, const SharedDiagonal& sigmas = SharedDiagonal()); | ||||
| 
 | ||||
|     /**
 | ||||
|      * Build a dense joint factor from all the factors in a factor graph.  If a VariableSlots | ||||
|      * structure computed for \c graph is already available, providing it will reduce the amount of | ||||
|      * computation performed. */ | ||||
|     explicit JacobianFactor( | ||||
|       const GaussianFactorGraph& graph); | ||||
| 
 | ||||
|     /**
 | ||||
|      * Build a dense joint factor from all the factors in a factor graph.  If a VariableSlots | ||||
|      * structure computed for \c graph is already available, providing it will reduce the amount of | ||||
|      * computation performed. */ | ||||
|     explicit JacobianFactor( | ||||
|       const GaussianFactorGraph& graph, | ||||
|       boost::optional<const Ordering&> ordering = boost::none, | ||||
|       boost::optional<const VariableSlots&> p_variableSlots = boost::none); | ||||
|       const VariableSlots& p_variableSlots); | ||||
|      | ||||
|     /**
 | ||||
|      * Build a dense joint factor from all the factors in a factor graph.  If a VariableSlots | ||||
|      * structure computed for \c graph is already available, providing it will reduce the amount of | ||||
|      * computation performed. */ | ||||
|     explicit JacobianFactor( | ||||
|       const GaussianFactorGraph& graph, | ||||
|       const Ordering& ordering); | ||||
|      | ||||
|     /**
 | ||||
|      * Build a dense joint factor from all the factors in a factor graph.  If a VariableSlots | ||||
|      * structure computed for \c graph is already available, providing it will reduce the amount of | ||||
|      * computation performed. */ | ||||
|     explicit JacobianFactor( | ||||
|       const GaussianFactorGraph& graph, | ||||
|       const Ordering& ordering, | ||||
|       const VariableSlots& p_variableSlots); | ||||
| 
 | ||||
|     /** Virtual destructor */ | ||||
|     virtual ~JacobianFactor() {} | ||||
|  | @ -356,6 +380,14 @@ namespace gtsam { | |||
| 
 | ||||
|   private: | ||||
| 
 | ||||
|     /**
 | ||||
|      * Helper function for public constructors: | ||||
|      * Build a dense joint factor from all the factors in a factor graph.  Takes in | ||||
|      * ordered variable slots */ | ||||
|     void JacobianFactorHelper( | ||||
|       const GaussianFactorGraph& graph, | ||||
|       const FastVector<VariableSlots::const_iterator>& orderedSlots); | ||||
| 
 | ||||
|     /** Unsafe Constructor that creates an uninitialized Jacobian of right size
 | ||||
|      *  @param keys in some order | ||||
|      *  @param dimensions of the variables, in the same order | ||||
|  |  | |||
|  | @ -77,11 +77,17 @@ public: | |||
| 
 | ||||
|   /// Construct from a GaussianFactorGraph
 | ||||
|   RegularHessianFactor(const GaussianFactorGraph& factors, | ||||
|                        boost::optional<const Scatter&> scatter = boost::none) | ||||
|                        const Scatter& scatter) | ||||
|       : HessianFactor(factors, scatter) { | ||||
|     checkInvariants(); | ||||
|   } | ||||
| 
 | ||||
|   /// Construct from a GaussianFactorGraph
 | ||||
|   RegularHessianFactor(const GaussianFactorGraph& factors) | ||||
|       : HessianFactor(factors) { | ||||
|     checkInvariants(); | ||||
|   } | ||||
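
A short sketch of the mirrored overloads here (illustrative, assuming every variable in the graph has dimension 3, which is what checkInvariants requires for RegularHessianFactor<3>):

    #include <gtsam/linear/GaussianFactorGraph.h>
    #include <gtsam/linear/RegularHessianFactor.h>

    void regularSketch(const gtsam::GaussianFactorGraph& gfg) {
      using namespace gtsam;
      RegularHessianFactor<3> r1(gfg);                // Scatter computed internally
      RegularHessianFactor<3> r2(gfg, Scatter(gfg));  // explicit Scatter, same layout
    }
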
| 
 | ||||
| private: | ||||
| 
 | ||||
|   /// Check invariants after construction
 | ||||
|  |  | |||
|  | @ -33,16 +33,16 @@ string SlotEntry::toString() const { | |||
|   return oss.str(); | ||||
| } | ||||
| 
 | ||||
| Scatter::Scatter(const GaussianFactorGraph& gfg) : Scatter(gfg, Ordering()) {} | ||||
| 
 | ||||
| /* ************************************************************************* */ | ||||
| Scatter::Scatter(const GaussianFactorGraph& gfg, | ||||
|     boost::optional<const Ordering&> ordering) { | ||||
|     const Ordering& ordering) { | ||||
|   gttic(Scatter_Constructor); | ||||
| 
 | ||||
|   // If we have an ordering, pre-fill the ordered variables first
 | ||||
|   if (ordering) { | ||||
|     for (Key key : *ordering) { | ||||
|       add(key, 0); | ||||
|     } | ||||
|   for (Key key : ordering) { | ||||
|     add(key, 0); | ||||
|   } | ||||
| 
 | ||||
|   // Now, find dimensions of variables and/or extend
 | ||||
|  | @ -68,7 +68,7 @@ Scatter::Scatter(const GaussianFactorGraph& gfg, | |||
| 
 | ||||
|   // To keep the same behavior as before, sort the keys after the ordering
 | ||||
|   iterator first = begin(); | ||||
|   if (ordering) first += ordering->size(); | ||||
|   first += ordering.size(); | ||||
|   if (first != end()) std::sort(first, end()); | ||||
| } | ||||
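
A small sketch of the split (illustrative key values, assuming a pre-built graph): the no-ordering constructor now delegates to the ordered one with an empty Ordering, so both keep the previous behaviour of sorting the remaining keys numerically.

    #include <gtsam/linear/GaussianFactorGraph.h>
    #include <gtsam/linear/Scatter.h>
    #include <gtsam/inference/Ordering.h>

    void scatterSketch(const gtsam::GaussianFactorGraph& gfg) {
      using namespace gtsam;
      Scatter s1(gfg);           // same as Scatter(gfg, Ordering()); all keys sorted numerically
      Ordering partial;
      partial.push_back(2);      // assume key 2 appears in gfg
      Scatter s2(gfg, partial);  // key 2 placed first, the remaining keys sorted after it
    }
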
| 
 | ||||
|  |  | |||
|  | @ -23,8 +23,6 @@ | |||
| #include <gtsam/base/FastMap.h> | ||||
| #include <gtsam/dllexport.h> | ||||
| 
 | ||||
| #include <boost/optional.hpp> | ||||
| 
 | ||||
| namespace gtsam { | ||||
| 
 | ||||
| class GaussianFactorGraph; | ||||
|  | @ -53,15 +51,16 @@ class Scatter : public FastVector<SlotEntry> { | |||
|   /// Default Constructor
 | ||||
|   Scatter() {} | ||||
| 
 | ||||
|   /// Construct from gaussian factor graph, with optional (partial or complete) ordering
 | ||||
|   Scatter(const GaussianFactorGraph& gfg, | ||||
|           boost::optional<const Ordering&> ordering = boost::none); | ||||
|   /// Construct from gaussian factor graph, without ordering
 | ||||
|   explicit Scatter(const GaussianFactorGraph& gfg); | ||||
| 
 | ||||
|   /// Construct from gaussian factor graph, with (partial or complete) ordering
 | ||||
|   explicit Scatter(const GaussianFactorGraph& gfg, const Ordering& ordering); | ||||
| 
 | ||||
|   /// Add a key/dim pair
 | ||||
|   void add(Key key, size_t dim); | ||||
| 
 | ||||
|  private: | ||||
| 
 | ||||
|   /// Find the SlotEntry with the right key (linear time worst case)
 | ||||
|   iterator find(Key key); | ||||
| }; | ||||
|  |  | |||
|  | @ -26,8 +26,16 @@ using namespace std; | |||
| namespace gtsam { | ||||
| 
 | ||||
| /* ************************************************************************* */ | ||||
| Marginals::Marginals(const NonlinearFactorGraph& graph, const Values& solution, Factorization factorization, | ||||
|                      EliminateableFactorGraph<GaussianFactorGraph>::OptionalOrdering ordering) | ||||
| Marginals::Marginals(const NonlinearFactorGraph& graph, const Values& solution, Factorization factorization) | ||||
|                      : values_(solution), factorization_(factorization) { | ||||
|   gttic(MarginalsConstructor); | ||||
|   graph_ = *graph.linearize(solution); | ||||
|   computeBayesTree(); | ||||
| } | ||||
| 
 | ||||
| /* ************************************************************************* */ | ||||
| Marginals::Marginals(const NonlinearFactorGraph& graph, const Values& solution, const Ordering& ordering, | ||||
|                      Factorization factorization) | ||||
|                      : values_(solution), factorization_(factorization) { | ||||
|   gttic(MarginalsConstructor); | ||||
|   graph_ = *graph.linearize(solution); | ||||
|  | @ -35,28 +43,52 @@ Marginals::Marginals(const NonlinearFactorGraph& graph, const Values& solution, | |||
| } | ||||
| 
 | ||||
| /* ************************************************************************* */ | ||||
| Marginals::Marginals(const GaussianFactorGraph& graph, const VectorValues& solution, Factorization factorization, | ||||
|                      EliminateableFactorGraph<GaussianFactorGraph>::OptionalOrdering ordering) | ||||
|                      : graph_(graph), factorization_(factorization) { | ||||
| Marginals::Marginals(const GaussianFactorGraph& graph, const Values& solution, Factorization factorization) | ||||
|                      : graph_(graph), values_(solution), factorization_(factorization) { | ||||
|   gttic(MarginalsConstructor); | ||||
|   Values vals; | ||||
|   for (const auto& keyValue: solution) { | ||||
|     vals.insert(keyValue.first, keyValue.second); | ||||
|   } | ||||
|   values_ = vals; | ||||
|   computeBayesTree(ordering); | ||||
|   computeBayesTree(); | ||||
| } | ||||
| 
 | ||||
| /* ************************************************************************* */ | ||||
| Marginals::Marginals(const GaussianFactorGraph& graph, const Values& solution, Factorization factorization, | ||||
|                      EliminateableFactorGraph<GaussianFactorGraph>::OptionalOrdering ordering) | ||||
| Marginals::Marginals(const GaussianFactorGraph& graph, const Values& solution, const Ordering& ordering, | ||||
|                      Factorization factorization) | ||||
|                      : graph_(graph), values_(solution), factorization_(factorization) { | ||||
|   gttic(MarginalsConstructor); | ||||
|   computeBayesTree(ordering); | ||||
| } | ||||
| 
 | ||||
| /* ************************************************************************* */ | ||||
| void Marginals::computeBayesTree(EliminateableFactorGraph<GaussianFactorGraph>::OptionalOrdering ordering) { | ||||
| Marginals::Marginals(const GaussianFactorGraph& graph, const VectorValues& solution, Factorization factorization) | ||||
|                      : graph_(graph), factorization_(factorization) { | ||||
|   gttic(MarginalsConstructor); | ||||
|   for (const auto& keyValue: solution) { | ||||
|     values_.insert(keyValue.first, keyValue.second); | ||||
|   } | ||||
|   computeBayesTree(); | ||||
| } | ||||
| 
 | ||||
| /* ************************************************************************* */ | ||||
| Marginals::Marginals(const GaussianFactorGraph& graph, const VectorValues& solution, const Ordering& ordering, | ||||
|                      Factorization factorization) | ||||
|                      : graph_(graph), factorization_(factorization) { | ||||
|   gttic(MarginalsConstructor); | ||||
|   for (const auto& keyValue: solution) { | ||||
|     values_.insert(keyValue.first, keyValue.second); | ||||
|   } | ||||
|   computeBayesTree(ordering); | ||||
| } | ||||
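
A hedged usage sketch of the new constructor set (illustrative names, assuming graph, solution and ordering are already built): the elimination ordering now comes before the factorization choice.

    #include <gtsam/nonlinear/Marginals.h>

    void marginalsSketch(const gtsam::NonlinearFactorGraph& graph, const gtsam::Values& solution,
                         const gtsam::Ordering& ordering) {
      using namespace gtsam;
      Marginals m1(graph, solution);                           // COLAMD ordering, Cholesky
      Marginals m2(graph, solution, ordering, Marginals::QR);  // explicit ordering, then factorization
      Matrix cov = m2.marginalCovariance(0);                   // assume key 0 exists in the graph
    }
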
| 
 | ||||
| /* ************************************************************************* */ | ||||
| void Marginals::computeBayesTree() { | ||||
|   // Compute BayesTree
 | ||||
|   if(factorization_ == CHOLESKY) | ||||
|     bayesTree_ = *graph_.eliminateMultifrontal(EliminatePreferCholesky); | ||||
|   else if(factorization_ == QR) | ||||
|     bayesTree_ = *graph_.eliminateMultifrontal(EliminateQR); | ||||
| } | ||||
| 
 | ||||
| /* ************************************************************************* */ | ||||
| void Marginals::computeBayesTree(const Ordering& ordering) { | ||||
|   // Compute BayesTree
 | ||||
|   if(factorization_ == CHOLESKY) | ||||
|     bayesTree_ = *graph_.eliminateMultifrontal(ordering, EliminatePreferCholesky); | ||||
|  | @ -128,9 +160,9 @@ JointMarginal Marginals::jointMarginalInformation(const KeyVector& variables) co | |||
|         jointFG = *bayesTree_.joint(variables[0], variables[1], EliminateQR); | ||||
|     } else { | ||||
|       if(factorization_ == CHOLESKY) | ||||
|         jointFG = GaussianFactorGraph(*graph_.marginalMultifrontalBayesTree(variables, boost::none, EliminatePreferCholesky)); | ||||
|         jointFG = GaussianFactorGraph(*graph_.marginalMultifrontalBayesTree(variables, EliminatePreferCholesky)); | ||||
|       else if(factorization_ == QR) | ||||
|         jointFG = GaussianFactorGraph(*graph_.marginalMultifrontalBayesTree(variables, boost::none, EliminateQR)); | ||||
|         jointFG = GaussianFactorGraph(*graph_.marginalMultifrontalBayesTree(variables, EliminateQR)); | ||||
|     } | ||||
| 
 | ||||
|     // Get information matrix
 | ||||
|  |  | |||
|  | @ -55,10 +55,33 @@ public: | |||
|    * @param graph The factor graph defining the full joint density on all variables. | ||||
|    * @param solution The linearization point about which to compute Gaussian marginals (usually the MLE as obtained from a NonlinearOptimizer). | ||||
|    * @param factorization The linear decomposition mode - either Marginals::CHOLESKY (faster and suitable for most problems) or Marginals::QR (slower but more numerically stable for poorly-conditioned problems). | ||||
|    */ | ||||
|   Marginals(const NonlinearFactorGraph& graph, const Values& solution, Factorization factorization = CHOLESKY, | ||||
|             EliminateableFactorGraph<GaussianFactorGraph>::OptionalOrdering ordering = boost::none); | ||||
|   Marginals(const NonlinearFactorGraph& graph, const Values& solution, Factorization factorization = CHOLESKY); | ||||
| 
 | ||||
|   /** Construct a marginals class from a nonlinear factor graph.
 | ||||
|    * @param graph The factor graph defining the full joint density on all variables. | ||||
|    * @param solution The linearization point about which to compute Gaussian marginals (usually the MLE as obtained from a NonlinearOptimizer). | ||||
|    * @param factorization The linear decomposition mode - either Marginals::CHOLESKY (faster and suitable for most problems) or Marginals::QR (slower but more numerically stable for poorly-conditioned problems). | ||||
|    * @param ordering The ordering for elimination. | ||||
|    */ | ||||
|   Marginals(const NonlinearFactorGraph& graph, const Values& solution, const Ordering& ordering, | ||||
|               Factorization factorization = CHOLESKY); | ||||
| 
 | ||||
|   /** Construct a marginals class from a linear factor graph.
 | ||||
|    * @param graph The factor graph defining the full joint density on all variables. | ||||
|    * @param solution The solution point to compute Gaussian marginals. | ||||
|    * @param factorization The linear decomposition mode - either Marginals::CHOLESKY (faster and suitable for most problems) or Marginals::QR (slower but more numerically stable for poorly-conditioned problems). | ||||
|    */           | ||||
|   Marginals(const GaussianFactorGraph& graph, const Values& solution, Factorization factorization = CHOLESKY); | ||||
| 
 | ||||
|   /** Construct a marginals class from a linear factor graph.
 | ||||
|    * @param graph The factor graph defining the full joint density on all variables. | ||||
|    * @param solution The solution point to compute Gaussian marginals. | ||||
|    * @param factorization The linear decomposition mode - either Marginals::CHOLESKY (faster and suitable for most problems) or Marginals::QR (slower but more numerically stable for poorly-conditioned problems). | ||||
|    * @param ordering The ordering for elimination. | ||||
|    */           | ||||
|   Marginals(const GaussianFactorGraph& graph, const Values& solution, const Ordering& ordering, | ||||
|               Factorization factorization = CHOLESKY); | ||||
| 
 | ||||
|   /** Construct a marginals class from a linear factor graph.
 | ||||
|    * @param graph The factor graph defining the full joint density on all variables. | ||||
|  | @ -66,8 +89,7 @@ public: | |||
|    * @param factorization The linear decomposition mode - either Marginals::CHOLESKY (faster and suitable for most problems) or Marginals::QR (slower but more numerically stable for poorly-conditioned problems). | ||||
|    */           | ||||
|   Marginals(const GaussianFactorGraph& graph, const Values& solution, Factorization factorization = CHOLESKY, | ||||
|               EliminateableFactorGraph<GaussianFactorGraph>::OptionalOrdering ordering = boost::none); | ||||
|   Marginals(const GaussianFactorGraph& graph, const VectorValues& solution, Factorization factorization = CHOLESKY); | ||||
| 
 | ||||
|   /** Construct a marginals class from a linear factor graph.
 | ||||
|    * @param graph The factor graph defining the full joint density on all variables. | ||||
|  | @ -75,8 +97,8 @@ public: | |||
|    * @param factorization The linear decomposition mode - either Marginals::CHOLESKY (faster and suitable for most problems) or Marginals::QR (slower but more numerically stable for poorly-conditioned problems). | ||||
|    * @param ordering The variable ordering for elimination. | ||||
|    */           | ||||
|   Marginals(const GaussianFactorGraph& graph, const VectorValues& solution, Factorization factorization = CHOLESKY, | ||||
|               EliminateableFactorGraph<GaussianFactorGraph>::OptionalOrdering ordering = boost::none); | ||||
|   Marginals(const GaussianFactorGraph& graph, const VectorValues& solution, const Ordering& ordering, | ||||
|               Factorization factorization = CHOLESKY); | ||||
| 
 | ||||
|   /** print */ | ||||
|   void print(const std::string& str = "Marginals: ", const KeyFormatter& keyFormatter = DefaultKeyFormatter) const; | ||||
|  | @ -99,11 +121,27 @@ public: | |||
| 
 | ||||
|   /** Optimize the bayes tree */ | ||||
|   VectorValues optimize() const; | ||||
| 
 | ||||
|              | ||||
| protected: | ||||
|    | ||||
|   /** Compute the Bayes Tree as a helper function to the constructor */ | ||||
|   void computeBayesTree(EliminateableFactorGraph<GaussianFactorGraph>::OptionalOrdering ordering); | ||||
|   void computeBayesTree(); | ||||
| 
 | ||||
|   /** Compute the Bayes Tree as a helper function to the constructor */ | ||||
|   void computeBayesTree(const Ordering& ordering); | ||||
| 
 | ||||
| public: | ||||
|   /** \deprecated argument order changed due to removing boost::optional<Ordering> */ | ||||
|   Marginals(const NonlinearFactorGraph& graph, const Values& solution, Factorization factorization, | ||||
|             const Ordering& ordering) : Marginals(graph, solution, ordering, factorization) {} | ||||
| 
 | ||||
|   /** \deprecated argument order changed due to removing boost::optional<Ordering> */ | ||||
|   Marginals(const GaussianFactorGraph& graph, const Values& solution, Factorization factorization, | ||||
|             const Ordering& ordering) : Marginals(graph, solution, ordering, factorization) {} | ||||
| 
 | ||||
|   /** \deprecated argument order changed due to removing boost::optional<Ordering> */ | ||||
|   Marginals(const GaussianFactorGraph& graph, const VectorValues& solution, Factorization factorization, | ||||
|             const Ordering& ordering) : Marginals(graph, solution, ordering, factorization) {} | ||||
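
A one-line sketch of what these forwarding constructors preserve (illustrative, assuming the same pre-built objects as above): call sites written against the old parameter order keep compiling unchanged.

    #include <gtsam/nonlinear/Marginals.h>

    void legacyMarginalsCall(const gtsam::NonlinearFactorGraph& graph, const gtsam::Values& solution,
                             const gtsam::Ordering& ordering) {
      // Old order (factorization before ordering) forwards to the new constructor above
      gtsam::Marginals legacy(graph, solution, gtsam::Marginals::CHOLESKY, ordering);
    }
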
| 
 | ||||
| }; | ||||
| 
 | ||||
|  |  | |||
|  | @ -344,23 +344,31 @@ GaussianFactorGraph::shared_ptr NonlinearFactorGraph::linearize(const Values& li | |||
| } | ||||
| 
 | ||||
| /* ************************************************************************* */ | ||||
| static Scatter scatterFromValues(const Values& values, boost::optional<Ordering&> ordering) { | ||||
| static Scatter scatterFromValues(const Values& values) { | ||||
|   gttic(scatterFromValues); | ||||
| 
 | ||||
|   Scatter scatter; | ||||
|   scatter.reserve(values.size()); | ||||
| 
 | ||||
|   if (!ordering) { | ||||
|     // use "natural" ordering with keys taken from the initial values
 | ||||
|     for (const auto& key_value : values) { | ||||
|       scatter.add(key_value.key, key_value.value.dim()); | ||||
|     } | ||||
|   } else { | ||||
|     // copy ordering into keys and lookup dimension in values, is O(n*log n)
 | ||||
|     for (Key key : *ordering) { | ||||
|       const Value& value = values.at(key); | ||||
|       scatter.add(key, value.dim()); | ||||
|     } | ||||
|   // use "natural" ordering with keys taken from the initial values
 | ||||
|   for (const auto& key_value : values) { | ||||
|     scatter.add(key_value.key, key_value.value.dim()); | ||||
|   } | ||||
| 
 | ||||
|   return scatter; | ||||
| } | ||||
| 
 | ||||
| /* ************************************************************************* */ | ||||
| static Scatter scatterFromValues(const Values& values, const Ordering& ordering) { | ||||
|   gttic(scatterFromValues); | ||||
| 
 | ||||
|   Scatter scatter; | ||||
|   scatter.reserve(values.size()); | ||||
| 
 | ||||
|   // Copy the ordering into keys and look up each dimension in values, which is O(n log n)
 | ||||
|   for (Key key : ordering) { | ||||
|     const Value& value = values.at(key); | ||||
|     scatter.add(key, value.dim()); | ||||
|   } | ||||
| 
 | ||||
|   return scatter; | ||||
|  | @ -368,11 +376,7 @@ static Scatter scatterFromValues(const Values& values, boost::optional<Ordering& | |||
| 
 | ||||
| /* ************************************************************************* */ | ||||
| HessianFactor::shared_ptr NonlinearFactorGraph::linearizeToHessianFactor( | ||||
|     const Values& values, boost::optional<Ordering&> ordering, const Dampen& dampen) const { | ||||
|   gttic(NonlinearFactorGraph_linearizeToHessianFactor); | ||||
| 
 | ||||
|   Scatter scatter = scatterFromValues(values, ordering); | ||||
| 
 | ||||
|     const Values& values, const Scatter& scatter, const Dampen& dampen) const { | ||||
|   // NOTE(frank): we are heavily leaning on friendship below
 | ||||
|   HessianFactor::shared_ptr hessianFactor(new HessianFactor(scatter)); | ||||
| 
 | ||||
|  | @ -393,9 +397,36 @@ HessianFactor::shared_ptr NonlinearFactorGraph::linearizeToHessianFactor( | |||
|   return hessianFactor; | ||||
| } | ||||
| 
 | ||||
| /* ************************************************************************* */ | ||||
| HessianFactor::shared_ptr NonlinearFactorGraph::linearizeToHessianFactor( | ||||
|     const Values& values, const Ordering& order, const Dampen& dampen) const { | ||||
|   gttic(NonlinearFactorGraph_linearizeToHessianFactor); | ||||
| 
 | ||||
|   Scatter scatter = scatterFromValues(values, order); | ||||
|   return linearizeToHessianFactor(values, scatter, dampen); | ||||
| } | ||||
| 
 | ||||
| /* ************************************************************************* */ | ||||
| HessianFactor::shared_ptr NonlinearFactorGraph::linearizeToHessianFactor( | ||||
|     const Values& values, const Dampen& dampen) const { | ||||
|   gttic(NonlinearFactorGraph_linearizeToHessianFactor); | ||||
| 
 | ||||
|   Scatter scatter = scatterFromValues(values); | ||||
|   return linearizeToHessianFactor(values, scatter, dampen); | ||||
| } | ||||
| 
 | ||||
| /* ************************************************************************* */ | ||||
| Values NonlinearFactorGraph::updateCholesky(const Values& values, | ||||
|                                             boost::optional<Ordering&> ordering, | ||||
|                                             const Dampen& dampen) const { | ||||
|   gttic(NonlinearFactorGraph_updateCholesky); | ||||
|   auto hessianFactor = linearizeToHessianFactor(values, dampen); | ||||
|   VectorValues delta = hessianFactor->solve(); | ||||
|   return values.retract(delta); | ||||
| } | ||||
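
A hedged sketch of the public entry points after the split (illustrative names, assuming a pre-built graph, values and ordering):

    #include <gtsam/nonlinear/NonlinearFactorGraph.h>
    #include <gtsam/nonlinear/Values.h>
    #include <gtsam/inference/Ordering.h>

    void denseSolveSketch(const gtsam::NonlinearFactorGraph& fg, const gtsam::Values& values,
                          const gtsam::Ordering& ordering) {
      auto h1 = fg.linearizeToHessianFactor(values);            // "natural" key order from values
      auto h2 = fg.linearizeToHessianFactor(values, ordering);  // caller-chosen Hessian layout
      gtsam::Values updated = fg.updateCholesky(values);        // linearize, solve densely, retract
    }
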
| 
 | ||||
| /* ************************************************************************* */ | ||||
| Values NonlinearFactorGraph::updateCholesky(const Values& values, | ||||
|                                             const Ordering& ordering, | ||||
|                                             const Dampen& dampen) const { | ||||
|   gttic(NonlinearFactorGraph_updateCholesky); | ||||
|   auto hessianFactor = linearizeToHessianFactor(values, ordering, dampen); | ||||
|  |  | |||
|  | @ -149,17 +149,31 @@ namespace gtsam { | |||
|      * Instead of producing a GaussianFactorGraph, pre-allocate and linearize directly | ||||
|      * into a HessianFactor. Avoids the many mallocs and pointer indirection in constructing | ||||
|      * a new graph, and hence useful in case a dense solve is appropriate for your problem. | ||||
|      * An optional ordering can be given that still decides how the Hessian is laid out. | ||||
|      * An optional lambda function can be used to apply damping on the filled Hessian. | ||||
|      * No parallelism is exploited, because all the factors write in the same memory. | ||||
|      */ | ||||
|     boost::shared_ptr<HessianFactor> linearizeToHessianFactor( | ||||
|         const Values& values, boost::optional<Ordering&> ordering = boost::none, | ||||
|         const Dampen& dampen = nullptr) const; | ||||
|         const Values& values, const Dampen& dampen = nullptr) const; | ||||
| 
 | ||||
|     /**
 | ||||
|      * Instead of producing a GaussianFactorGraph, pre-allocate and linearize directly | ||||
|      * into a HessianFactor. Avoids the many mallocs and pointer indirection in constructing | ||||
|      * a new graph, and hence useful in case a dense solve is appropriate for your problem. | ||||
|      * The given ordering decides how the Hessian is laid out. | ||||
|      * An optional lambda function can be used to apply damping on the filled Hessian. | ||||
|      * No parallelism is exploited, because all the factors write in the same memory. | ||||
|      */ | ||||
|     boost::shared_ptr<HessianFactor> linearizeToHessianFactor( | ||||
|         const Values& values, const Ordering& ordering, const Dampen& dampen = nullptr) const; | ||||
| 
 | ||||
|     /// Linearize and solve in one pass.
 | ||||
|     /// Calls linearizeToHessianFactor, densely solves the normal equations, and updates the values.
 | ||||
|     Values updateCholesky(const Values& values, boost::optional<Ordering&> ordering = boost::none, | ||||
|     Values updateCholesky(const Values& values, | ||||
|                           const Dampen& dampen = nullptr) const; | ||||
| 
 | ||||
|     /// Linearize and solve in one pass.
 | ||||
|     /// Calls linearizeToHessianFactor, densely solves the normal equations, and updates the values.
 | ||||
|     Values updateCholesky(const Values& values, const Ordering& ordering, | ||||
|                           const Dampen& dampen = nullptr) const; | ||||
| 
 | ||||
|     /// Clone() performs a deep-copy of the graph, including all of the factors
 | ||||
|  | @ -190,6 +204,13 @@ namespace gtsam { | |||
| 
 | ||||
|   private: | ||||
| 
 | ||||
|     /**
 | ||||
|      * Linearize from Scatter rather than from Ordering.  Made private because | ||||
|      *  it doesn't include gttic. | ||||
|      */ | ||||
|     boost::shared_ptr<HessianFactor> linearizeToHessianFactor( | ||||
|         const Values& values, const Scatter& scatter, const Dampen& dampen = nullptr) const; | ||||
| 
 | ||||
|     /** Serialization function */ | ||||
|     friend class boost::serialization::access; | ||||
|     template<class ARCHIVE> | ||||
|  | @ -197,6 +218,19 @@ namespace gtsam { | |||
|       ar & boost::serialization::make_nvp("NonlinearFactorGraph", | ||||
|                 boost::serialization::base_object<Base>(*this)); | ||||
|     } | ||||
| 
 | ||||
|   public: | ||||
| 
 | ||||
|     /** \deprecated */ | ||||
|     boost::shared_ptr<HessianFactor> linearizeToHessianFactor( | ||||
|         const Values& values, boost::none_t, const Dampen& dampen = nullptr) const | ||||
|       {return linearizeToHessianFactor(values, dampen);} | ||||
| 
 | ||||
|     /** \deprecated */ | ||||
|     Values updateCholesky(const Values& values, boost::none_t, | ||||
|                           const Dampen& dampen = nullptr) const | ||||
|       {return updateCholesky(values, dampen);} | ||||
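
A minimal sketch of what these shims keep compiling (illustrative, assuming the same pre-built graph and values): legacy callers that literally spelled out boost::none still resolve, now to the ordering-free overloads.

    #include <gtsam/nonlinear/NonlinearFactorGraph.h>
    #include <boost/none.hpp>

    void legacyNoneSketch(const gtsam::NonlinearFactorGraph& fg, const gtsam::Values& values) {
      // boost::none selects the deprecated boost::none_t overload, which forwards on
      gtsam::Values updated = fg.updateCholesky(values, boost::none);
    }
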
| 
 | ||||
|   }; | ||||
| 
 | ||||
| /// traits
 | ||||
|  |  | |||
|  | @ -128,18 +128,22 @@ VectorValues NonlinearOptimizer::solve(const GaussianFactorGraph& gfg, | |||
|                                        const NonlinearOptimizerParams& params) const { | ||||
|   // solution of linear solver is an update to the linearization point
 | ||||
|   VectorValues delta; | ||||
|   boost::optional<const Ordering&> optionalOrdering; | ||||
|   if (params.ordering) | ||||
|     optionalOrdering.reset(*params.ordering); | ||||
| 
 | ||||
|   // Check which solver we are using
 | ||||
|   if (params.isMultifrontal()) { | ||||
|     // Multifrontal QR or Cholesky (decided by params.getEliminationFunction())
 | ||||
|     delta = gfg.optimize(optionalOrdering, params.getEliminationFunction()); | ||||
|     if (params.ordering) | ||||
|       delta = gfg.optimize(*params.ordering, params.getEliminationFunction()); | ||||
|     else | ||||
|       delta = gfg.optimize(params.getEliminationFunction()); | ||||
|   } else if (params.isSequential()) { | ||||
|     // Sequential QR or Cholesky (decided by params.getEliminationFunction())
 | ||||
|     delta = gfg.eliminateSequential(optionalOrdering, params.getEliminationFunction(), boost::none, | ||||
|                                     params.orderingType)->optimize(); | ||||
|     if (params.ordering) | ||||
|       delta = gfg.eliminateSequential(*params.ordering, params.getEliminationFunction(), | ||||
|                                       boost::none, params.orderingType)->optimize(); | ||||
|     else | ||||
|       delta = gfg.eliminateSequential(params.getEliminationFunction(), boost::none, | ||||
|                                       params.orderingType)->optimize(); | ||||
|   } else if (params.isIterative()) { | ||||
|     // Conjugate Gradient -> needs params.iterativeParams
 | ||||
|     if (!params.iterativeParams) | ||||
|  |  | |||
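
The dispatch above replaces the old pass-through of an optional ordering; user code is unaffected because the ordering still lives in the params struct. A hedged sketch (illustrative names, assuming a pre-built graph and initial values):

    #include <gtsam/nonlinear/LevenbergMarquardtOptimizer.h>

    void optimizeWithFixedOrdering(const gtsam::NonlinearFactorGraph& graph,
                                   const gtsam::Values& initial,
                                   const gtsam::Ordering& ordering) {
      gtsam::LevenbergMarquardtParams params;
      params.ordering = ordering;  // leave unset to let COLAMD pick the ordering
      gtsam::Values result = gtsam::LevenbergMarquardtOptimizer(graph, initial, params).optimize();
    }
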
|  | @ -82,7 +82,7 @@ public: | |||
|   }; | ||||
| 
 | ||||
|   LinearSolverType linearSolverType; ///< The type of linear solver to use in the nonlinear optimizer
 | ||||
|   boost::optional<Ordering> ordering; ///< The variable elimination ordering, or empty to use COLAMD (default: empty)
 | ||||
|   boost::optional<Ordering> ordering; ///< The optional variable elimination ordering, or empty to use COLAMD (default: empty)
 | ||||
|   IterativeOptimizationParameters::shared_ptr iterativeParams; ///< The container for iterativeOptimization parameters. used in CG Solvers.
 | ||||
| 
 | ||||
|   inline bool isMultifrontal() const { | ||||
|  |  | |||
|  | @ -14,7 +14,14 @@ using namespace std; | |||
| namespace gtsam { | ||||
| 
 | ||||
|   /// Find the best total assignment - can be expensive
 | ||||
|   CSP::sharedValues CSP::optimalAssignment(OptionalOrdering ordering) const { | ||||
|   CSP::sharedValues CSP::optimalAssignment() const { | ||||
|     DiscreteBayesNet::shared_ptr chordal = this->eliminateSequential(); | ||||
|     sharedValues mpe = chordal->optimize(); | ||||
|     return mpe; | ||||
|   } | ||||
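
A hedged sketch of the resulting overload pair (illustrative, assuming a CSP from gtsam_unstable with its constraints already added):

    #include <gtsam_unstable/discrete/CSP.h>

    void solveCSPSketch(const gtsam::CSP& csp, const gtsam::Ordering& ordering) {
      gtsam::CSP::sharedValues best1 = csp.optimalAssignment();          // default elimination order
      gtsam::CSP::sharedValues best2 = csp.optimalAssignment(ordering);  // caller-specified order
    }
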
| 
 | ||||
|   /// Find the best total assignment - can be expensive
 | ||||
|   CSP::sharedValues CSP::optimalAssignment(const Ordering& ordering) const { | ||||
|     DiscreteBayesNet::shared_ptr chordal = this->eliminateSequential(ordering); | ||||
|     sharedValues mpe = chordal->optimize(); | ||||
|     return mpe; | ||||
|  |  | |||
|  | @ -60,7 +60,10 @@ namespace gtsam { | |||
| //    }
 | ||||
| 
 | ||||
|     /// Find the best total assignment - can be expensive
 | ||||
|     sharedValues optimalAssignment(OptionalOrdering ordering = boost::none) const; | ||||
|     sharedValues optimalAssignment() const; | ||||
| 
 | ||||
|     /// Find the best total assignment - can be expensive
 | ||||
|     sharedValues optimalAssignment(const Ordering& ordering) const; | ||||
| 
 | ||||
| //    /*
 | ||||
| //     * Perform loopy belief propagation
 | ||||
|  |  | |||
|  | @ -46,21 +46,26 @@ class NonlinearClusterTree : public ClusterTree<NonlinearFactorGraph> { | |||
|     // linearize local cluster factors straight into hessianFactor, which is returned
 | ||||
|     // If no ordering given, uses colamd
 | ||||
|     HessianFactor::shared_ptr linearizeToHessianFactor( | ||||
|         const Values& values, boost::optional<Ordering> ordering = boost::none, | ||||
|         const Values& values, | ||||
|         const NonlinearFactorGraph::Dampen& dampen = nullptr) const { | ||||
|       if (!ordering) | ||||
|         ordering.reset(Ordering::ColamdConstrainedFirst(factors, orderedFrontalKeys, true)); | ||||
|       return factors.linearizeToHessianFactor(values, *ordering, dampen); | ||||
|       Ordering ordering = Ordering::ColamdConstrainedFirst(factors, orderedFrontalKeys, true); | ||||
|       return factors.linearizeToHessianFactor(values, ordering, dampen); | ||||
|     } | ||||
| 
 | ||||
|     // linearize local cluster factors straight into hessianFactor, which is returned | ||||
|     // Uses the elimination ordering supplied by the caller | ||||
|     HessianFactor::shared_ptr linearizeToHessianFactor( | ||||
|         const Values& values, const Ordering& ordering, | ||||
|         const NonlinearFactorGraph::Dampen& dampen = nullptr) const { | ||||
|       return factors.linearizeToHessianFactor(values, ordering, dampen); | ||||
|     } | ||||
| 
 | ||||
|     // Helper function: recursively eliminate subtree rooted at this Cluster into a Bayes net and factor on parent
 | ||||
|     // TODO(frank): Use TBB to support deep trees and parallelism
 | ||||
|     std::pair<GaussianBayesNet, HessianFactor::shared_ptr> linearizeAndEliminate( | ||||
|         const Values& values, boost::optional<Ordering> ordering = boost::none, | ||||
|         const NonlinearFactorGraph::Dampen& dampen = nullptr) const { | ||||
|       // Linearize and create HessianFactor f(front,separator)
 | ||||
|       HessianFactor::shared_ptr localFactor = linearizeToHessianFactor(values, ordering, dampen); | ||||
| 
 | ||||
|         const Values& values, | ||||
|         const HessianFactor::shared_ptr& localFactor) const { | ||||
|       // Get contributions f(front) from children, as well as p(children|front)
 | ||||
|       GaussianBayesNet bayesNet; | ||||
|       for (const auto& child : children) { | ||||
|  | @ -72,12 +77,45 @@ class NonlinearClusterTree : public ClusterTree<NonlinearFactorGraph> { | |||
|       return {bayesNet, localFactor}; | ||||
|     } | ||||
| 
 | ||||
|     // Recursively eliminate subtree rooted at this Cluster into a Bayes net and factor on parent
 | ||||
|     // TODO(frank): Use TBB to support deep trees and parallelism
 | ||||
|     std::pair<GaussianBayesNet, HessianFactor::shared_ptr> linearizeAndEliminate( | ||||
|         const Values& values, | ||||
|         const NonlinearFactorGraph::Dampen& dampen = nullptr) const { | ||||
|       // Linearize and create HessianFactor f(front,separator)
 | ||||
|       HessianFactor::shared_ptr localFactor = linearizeToHessianFactor(values, dampen); | ||||
|       return linearizeAndEliminate(values, localFactor); | ||||
|     } | ||||
| 
 | ||||
|     // Recursively eliminate subtree rooted at this Cluster into a Bayes net and factor on parent
 | ||||
|     // TODO(frank): Use TBB to support deep trees and parallelism
 | ||||
|     std::pair<GaussianBayesNet, HessianFactor::shared_ptr> linearizeAndEliminate( | ||||
|         const Values& values, const Ordering& ordering, | ||||
|         const NonlinearFactorGraph::Dampen& dampen = nullptr) const { | ||||
|       // Linearize and create HessianFactor f(front,separator)
 | ||||
|       HessianFactor::shared_ptr localFactor = linearizeToHessianFactor(values, ordering, dampen); | ||||
|       return linearizeAndEliminate(values, localFactor); | ||||
|     } | ||||
| 
 | ||||
|     // Recursively eliminate subtree rooted at this Cluster
 | ||||
|     // Version that updates existing Bayes net and returns a new Hessian factor on parent clique
 | ||||
|     // It is possible to pass in a nullptr for the bayesNet if only interested in the new factor
 | ||||
|     HessianFactor::shared_ptr linearizeAndEliminate( | ||||
|         const Values& values, GaussianBayesNet* bayesNet, | ||||
|         boost::optional<Ordering> ordering = boost::none, | ||||
|         const NonlinearFactorGraph::Dampen& dampen = nullptr) const { | ||||
|       auto bayesNet_newFactor_pair = linearizeAndEliminate(values, dampen); | ||||
|       if (bayesNet) { | ||||
|         bayesNet->push_back(bayesNet_newFactor_pair.first); | ||||
|       } | ||||
|       return bayesNet_newFactor_pair.second; | ||||
|     } | ||||
| 
 | ||||
|     // Recursively eliminate subtree rooted at this Cluster
 | ||||
|     // Version that updates existing Bayes net and returns a new Hessian factor on parent clique
 | ||||
|     // It is possible to pass in a nullptr for the bayesNet if only interested in the new factor
 | ||||
|     HessianFactor::shared_ptr linearizeAndEliminate( | ||||
|         const Values& values, GaussianBayesNet* bayesNet, | ||||
|         const Ordering& ordering, | ||||
|         const NonlinearFactorGraph::Dampen& dampen = nullptr) const { | ||||
|       auto bayesNet_newFactor_pair = linearizeAndEliminate(values, ordering, dampen); | ||||
|       if (bayesNet) { | ||||
|  |  | |||
|  | @ -206,7 +206,7 @@ TEST(GaussianFactorGraph, optimize_Cholesky) { | |||
|   GaussianFactorGraph fg = createGaussianFactorGraph(); | ||||
| 
 | ||||
|   // optimize the graph
 | ||||
|   VectorValues actual = fg.optimize(boost::none, EliminateCholesky); | ||||
|   VectorValues actual = fg.optimize(EliminateCholesky); | ||||
| 
 | ||||
|   // verify
 | ||||
|   VectorValues expected = createCorrectDelta(); | ||||
|  | @ -220,7 +220,7 @@ TEST( GaussianFactorGraph, optimize_QR ) | |||
|   GaussianFactorGraph fg = createGaussianFactorGraph(); | ||||
| 
 | ||||
|   // optimize the graph
 | ||||
|   VectorValues actual = fg.optimize(boost::none, EliminateQR); | ||||
|   VectorValues actual = fg.optimize(EliminateQR); | ||||
| 
 | ||||
|   // verify
 | ||||
|   VectorValues expected = createCorrectDelta(); | ||||
|  |  | |||
|  | @ -198,7 +198,7 @@ TEST(NonlinearFactorGraph, UpdateCholesky) { | |||
|       } | ||||
|     } | ||||
|   }; | ||||
|   EXPECT(assert_equal(initial, fg.updateCholesky(initial, boost::none, dampen), 1e-6)); | ||||
|   EXPECT(assert_equal(initial, fg.updateCholesky(initial, dampen), 1e-6)); | ||||
| } | ||||
| 
 | ||||
| /* ************************************************************************* */ | ||||
|  |  | |||