Return fixed values
parent 2af9d2d35a
commit b5ddba9c3c
@@ -57,7 +57,8 @@ Ordering HybridSmoother::getOrdering(const HybridGaussianFactorGraph &factors,
 void HybridSmoother::update(const HybridGaussianFactorGraph &graph,
                             std::optional<size_t> maxNrLeaves,
                             const std::optional<Ordering> given_ordering) {
-  std::cout << "hybridBayesNet_ size before: " << hybridBayesNet_.size() << std::endl;
+  std::cout << "hybridBayesNet_ size before: " << hybridBayesNet_.size()
+            << std::endl;
   std::cout << "newFactors size: " << graph.size() << std::endl;
   HybridGaussianFactorGraph updatedGraph;
   // Add the necessary conditionals from the previous timestep(s).
@@ -65,8 +66,10 @@ void HybridSmoother::update(const HybridGaussianFactorGraph &graph,
   addConditionals(graph, hybridBayesNet_);
   // print size of graph, updatedGraph, hybridBayesNet_
   std::cout << "updatedGraph size: " << updatedGraph.size() << std::endl;
-  std::cout << "hybridBayesNet_ size after: " << hybridBayesNet_.size() << std::endl;
-  std::cout << "total size: " << updatedGraph.size() + hybridBayesNet_.size() << std::endl;
+  std::cout << "hybridBayesNet_ size after: " << hybridBayesNet_.size()
+            << std::endl;
+  std::cout << "total size: " << updatedGraph.size() + hybridBayesNet_.size()
+            << std::endl;
 
   Ordering ordering;
   // If no ordering provided, then we compute one
@@ -86,7 +89,8 @@ void HybridSmoother::update(const HybridGaussianFactorGraph &graph,
 
 #ifdef DEBUG_SMOOTHER
   for (auto conditional : bayesNetFragment) {
-    auto e = std::dynamic_pointer_cast<HybridConditional::BaseConditional>(conditional);
+    auto e = std::dynamic_pointer_cast<HybridConditional::BaseConditional>(
+        conditional);
     GTSAM_PRINT(*e);
   }
 #endif
@@ -101,7 +105,10 @@ void HybridSmoother::update(const HybridGaussianFactorGraph &graph,
   if (maxNrLeaves) {
     // `pruneBayesNet` sets the leaves with 0 in discreteFactor to nullptr in
     // all the conditionals with the same keys in bayesNetFragment.
-    bayesNetFragment = bayesNetFragment.prune(*maxNrLeaves, marginalThreshold_);
+    DiscreteValues newlyFixedValues;
+    bayesNetFragment = bayesNetFragment.prune(*maxNrLeaves, marginalThreshold_,
+                                              &newlyFixedValues);
+    fixedValues_.insert(newlyFixedValues);
   }
 
   // Print discrete keys in the bayesNetFragment:
@@ -113,7 +120,8 @@ void HybridSmoother::update(const HybridGaussianFactorGraph &graph,
 
 #ifdef DEBUG_SMOOTHER
   for (auto conditional : bayesNetFragment) {
-    auto c = std::dynamic_pointer_cast<HybridConditional::BaseConditional>(conditional);
+    auto c = std::dynamic_pointer_cast<HybridConditional::BaseConditional>(
+        conditional);
     GTSAM_PRINT(*c);
   }
 #endif
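A note on the pruning change above: `prune` now reports the discrete assignments it forces through the new `&newlyFixedValues` out-parameter, and the smoother accumulates those in `fixedValues_`. Below is a minimal sketch of that bookkeeping, assuming `fixedValues_` is a `DiscreteValues` as the header change further down implies; the `M(0)` key and the standalone program are illustrative only, not part of this commit.

#include <gtsam/discrete/DiscreteValues.h>
#include <gtsam/inference/Symbol.h>

using namespace gtsam;
using symbol_shorthand::M;  // placeholder key for a discrete mode

int main() {
  DiscreteValues fixedValues;       // stands in for the smoother's fixedValues_
  DiscreteValues newlyFixedValues;  // what prune() would fill in

  // Suppose pruning left only value 1 admissible for mode M(0).
  newlyFixedValues[M(0)] = 1;

  // The smoother remembers it for all later optimize() calls.
  fixedValues.insert(newlyFixedValues);
  return 0;
}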
@@ -106,6 +106,18 @@ class GTSAM_EXPORT HybridSmoother {
 
   /// Return the Bayes Net posterior.
   const HybridBayesNet& hybridBayesNet() const;
+
+  /// Optimize the hybrid Bayes Net, taking into account fixed values.
+  HybridValues optimize() const {
+    // Solve for the MPE
+    DiscreteValues mpe = hybridBayesNet_.mpe();
+
+    // Add fixed values to the MPE.
+    mpe.insert(fixedValues_);
+
+    // Given the MPE, compute the optimal continuous values.
+    return HybridValues(hybridBayesNet_.optimize(mpe), mpe);
+  }
 };
 
 } // namespace gtsam
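Taken together, the change makes pruning record newly fixed discrete values and adds an `optimize()` that folds them back into the MPE. A rough usage sketch of the resulting call pattern follows, assuming the usual `gtsam/hybrid/HybridSmoother.h` include path and a caller-assembled `newFactors` graph; the helper name, the leaf cap of 10, and the explicit `std::nullopt` ordering are illustrative, not part of this commit.

#include <gtsam/hybrid/HybridSmoother.h>

#include <optional>

using namespace gtsam;

// One smoother step: fold in this timestep's factors, prune, then solve.
HybridValues smootherStep(HybridSmoother& smoother,
                          const HybridGaussianFactorGraph& newFactors) {
  // Update and prune to at most 10 leaves; any modes the pruning pins to a
  // single value are remembered internally in fixedValues_.
  smoother.update(newFactors, 10, std::nullopt);

  // New here: optimize() augments the MPE with the fixed values before
  // solving for the continuous variables.
  return smoother.optimize();
}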