Return fixed values
parent 2af9d2d35a
commit b5ddba9c3c
HybridSmoother.cpp

@@ -57,7 +57,8 @@ Ordering HybridSmoother::getOrdering(const HybridGaussianFactorGraph &factors,
 void HybridSmoother::update(const HybridGaussianFactorGraph &graph,
                             std::optional<size_t> maxNrLeaves,
                             const std::optional<Ordering> given_ordering) {
-  std::cout << "hybridBayesNet_ size before: " << hybridBayesNet_.size() << std::endl;
+  std::cout << "hybridBayesNet_ size before: " << hybridBayesNet_.size()
+            << std::endl;
   std::cout << "newFactors size: " << graph.size() << std::endl;
   HybridGaussianFactorGraph updatedGraph;
   // Add the necessary conditionals from the previous timestep(s).
@@ -65,8 +66,10 @@ void HybridSmoother::update(const HybridGaussianFactorGraph &graph,
   addConditionals(graph, hybridBayesNet_);
   // print size of graph, updatedGraph, hybridBayesNet_
   std::cout << "updatedGraph size: " << updatedGraph.size() << std::endl;
-  std::cout << "hybridBayesNet_ size after: " << hybridBayesNet_.size() << std::endl;
-  std::cout << "total size: " << updatedGraph.size() + hybridBayesNet_.size() << std::endl;
+  std::cout << "hybridBayesNet_ size after: " << hybridBayesNet_.size()
+            << std::endl;
+  std::cout << "total size: " << updatedGraph.size() + hybridBayesNet_.size()
+            << std::endl;
 
   Ordering ordering;
   // If no ordering provided, then we compute one
@@ -85,8 +88,9 @@ void HybridSmoother::update(const HybridGaussianFactorGraph &graph,
   HybridBayesNet bayesNetFragment = *updatedGraph.eliminateSequential(ordering);
 
 #ifdef DEBUG_SMOOTHER
-  for (auto conditional: bayesNetFragment) {
-    auto e =std::dynamic_pointer_cast<HybridConditional::BaseConditional>(conditional);
+  for (auto conditional : bayesNetFragment) {
+    auto e = std::dynamic_pointer_cast<HybridConditional::BaseConditional>(
+        conditional);
     GTSAM_PRINT(*e);
   }
 #endif
@@ -101,7 +105,10 @@ void HybridSmoother::update(const HybridGaussianFactorGraph &graph,
   if (maxNrLeaves) {
     // `pruneBayesNet` sets the leaves with 0 in discreteFactor to nullptr in
     // all the conditionals with the same keys in bayesNetFragment.
-    bayesNetFragment = bayesNetFragment.prune(*maxNrLeaves, marginalThreshold_);
+    DiscreteValues newlyFixedValues;
+    bayesNetFragment = bayesNetFragment.prune(*maxNrLeaves, marginalThreshold_,
+                                              &newlyFixedValues);
+    fixedValues_.insert(newlyFixedValues);
   }
 
   // Print discrete keys in the bayesNetFragment:
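Reviewer note on the hunk above: prune() now reports, through the new out-parameter, which discrete assignments were collapsed to a single value, and the smoother merges them into its running fixedValues_ map. A minimal sketch of that bookkeeping follows; the m2 = 1 assignment is made up for illustration and is not part of this change.

#include <gtsam/discrete/DiscreteValues.h>
#include <gtsam/inference/Symbol.h>

int main() {
  // fixedValues stands in for the smoother's fixedValues_: a running map
  // from discrete key to the value it has been fixed to.
  gtsam::DiscreteValues fixedValues;       // accumulated across updates
  gtsam::DiscreteValues newlyFixedValues;  // filled by prune(...) in one update

  // Illustrative only: pretend pruning fixed mode m2 to value 1.
  newlyFixedValues[gtsam::Symbol('m', 2)] = 1;

  // Merge this update's fixed values into the running set, mirroring
  // fixedValues_.insert(newlyFixedValues) in the hunk above.
  fixedValues.insert(newlyFixedValues);
  return 0;
}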
@@ -112,8 +119,9 @@ void HybridSmoother::update(const HybridGaussianFactorGraph &graph,
   std::cout << std::endl << std::endl;
 
 #ifdef DEBUG_SMOOTHER
-  for (auto conditional: bayesNetFragment) {
-    auto c =std::dynamic_pointer_cast<HybridConditional::BaseConditional>(conditional);
+  for (auto conditional : bayesNetFragment) {
+    auto c = std::dynamic_pointer_cast<HybridConditional::BaseConditional>(
+        conditional);
     GTSAM_PRINT(*c);
   }
 #endif
HybridSmoother.h

@@ -106,6 +106,18 @@ class GTSAM_EXPORT HybridSmoother {
 
   /// Return the Bayes Net posterior.
   const HybridBayesNet& hybridBayesNet() const;
 
+  /// Optimize the hybrid Bayes Net, taking into account fixed values.
+  HybridValues optimize() const {
+    // Solve for the MPE
+    DiscreteValues mpe = hybridBayesNet_.mpe();
+
+    // Add fixed values to the MPE.
+    mpe.insert(fixedValues_);
+
+    // Given the MPE, compute the optimal continuous values.
+    return HybridValues(hybridBayesNet_.optimize(mpe), mpe);
+  }
 };
 
 }  // namespace gtsam
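For context, a minimal usage sketch of the new flow, showing where optimize() fits after update(). The smoother construction with defaults and the empty factor graph are illustrative placeholders, not part of this change; a real caller would add hybrid factors for the current timestep before calling update().

#include <gtsam/hybrid/HybridGaussianFactorGraph.h>
#include <gtsam/hybrid/HybridSmoother.h>
#include <gtsam/hybrid/HybridValues.h>

int main() {
  gtsam::HybridSmoother smoother;

  // Factors for the current timestep would be added here.
  gtsam::HybridGaussianFactorGraph newFactors;

  // Incorporate the new factors, pruning to at most 10 discrete leaves.
  smoother.update(newFactors, 10);

  // New in this change: optimize() inserts the discrete values fixed during
  // pruning into the MPE before solving for the continuous variables.
  gtsam::HybridValues result = smoother.optimize();
  return 0;
}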