Product now has scalars

release/4.3a0
Frank Dellaert 2024-10-06 17:50:22 +09:00
parent 92540298e1
commit 584a71fb94
7 changed files with 343 additions and 353 deletions
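In short: the decision-tree leaves of HybridGaussianProductFactor, which used to be plain GaussianFactorGraphs, now pair each graph with a running scalar (accumulated negative-log normalization constants), and adding two products sums those scalars. A minimal sketch of the new leaf type and its addition rule, mirroring HybridGaussianProductFactor.h/.cpp below:

// Sketch only; see HybridGaussianProductFactor.h/.cpp in this commit for the real definitions.
#include <gtsam/linear/GaussianFactorGraph.h>
#include <utility>

using Y = std::pair<gtsam::GaussianFactorGraph, double>;

static Y add(const Y& y1, const Y& y2) {
  gtsam::GaussianFactorGraph graph = y1.first;  // concatenate the factor graphs...
  graph.push_back(y2.first);
  return {graph, y1.second + y2.second};        // ...and sum the scalars
}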

View File

@ -22,8 +22,8 @@
#include <gtsam/discrete/DiscreteValues.h>
#include <gtsam/hybrid/HybridGaussianConditional.h>
#include <gtsam/hybrid/HybridGaussianFactor.h>
#include <gtsam/hybrid/HybridValues.h>
#include <gtsam/hybrid/HybridGaussianProductFactor.h>
#include <gtsam/hybrid/HybridValues.h>
#include <gtsam/inference/Conditional-inst.h>
#include <gtsam/linear/GaussianBayesNet.h>
#include <gtsam/linear/GaussianFactorGraph.h>
@ -44,7 +44,7 @@ struct HybridGaussianConditional::Helper {
/// Construct from a vector of mean and sigma pairs, plus extra args.
template <typename... Args>
explicit Helper(const DiscreteKey &mode, const P &p, Args &&...args) {
explicit Helper(const DiscreteKey& mode, const P& p, Args&&... args) {
nrFrontals = 1;
minNegLogConstant = std::numeric_limits<double>::infinity();
@ -52,9 +52,8 @@ struct HybridGaussianConditional::Helper {
std::vector<GC::shared_ptr> gcs;
fvs.reserve(p.size());
gcs.reserve(p.size());
for (auto &&[mean, sigma] : p) {
auto gaussianConditional =
GC::sharedMeanAndStddev(std::forward<Args>(args)..., mean, sigma);
for (auto&& [mean, sigma] : p) {
auto gaussianConditional = GC::sharedMeanAndStddev(std::forward<Args>(args)..., mean, sigma);
double value = gaussianConditional->negLogConstant();
minNegLogConstant = std::min(minNegLogConstant, value);
fvs.emplace_back(gaussianConditional, value);
@ -66,10 +65,9 @@ struct HybridGaussianConditional::Helper {
}
/// Construct from tree of GaussianConditionals.
explicit Helper(const Conditionals &conditionals)
: conditionals(conditionals),
minNegLogConstant(std::numeric_limits<double>::infinity()) {
auto func = [this](const GC::shared_ptr &c) -> GaussianFactorValuePair {
explicit Helper(const Conditionals& conditionals)
: conditionals(conditionals), minNegLogConstant(std::numeric_limits<double>::infinity()) {
auto func = [this](const GC::shared_ptr& c) -> GaussianFactorValuePair {
double value = 0.0;
if (c) {
if (!nrFrontals.has_value()) {
@ -90,56 +88,61 @@ struct HybridGaussianConditional::Helper {
};
/* *******************************************************************************/
HybridGaussianConditional::HybridGaussianConditional(
const DiscreteKeys &discreteParents, const Helper &helper)
HybridGaussianConditional::HybridGaussianConditional(const DiscreteKeys& discreteParents,
const Helper& helper)
: BaseFactor(discreteParents, helper.pairs),
BaseConditional(*helper.nrFrontals),
conditionals_(helper.conditionals),
negLogConstant_(helper.minNegLogConstant) {}
HybridGaussianConditional::HybridGaussianConditional(
const DiscreteKey &discreteParent,
const std::vector<GaussianConditional::shared_ptr> &conditionals)
const DiscreteKey& discreteParent,
const std::vector<GaussianConditional::shared_ptr>& conditionals)
: HybridGaussianConditional(DiscreteKeys{discreteParent},
Conditionals({discreteParent}, conditionals)) {}
HybridGaussianConditional::HybridGaussianConditional(
const DiscreteKey &discreteParent, Key key, //
const std::vector<std::pair<Vector, double>> &parameters)
const DiscreteKey& discreteParent,
Key key, //
const std::vector<std::pair<Vector, double>>& parameters)
: HybridGaussianConditional(DiscreteKeys{discreteParent},
Helper(discreteParent, parameters, key)) {}
HybridGaussianConditional::HybridGaussianConditional(
const DiscreteKey &discreteParent, Key key, //
const Matrix &A, Key parent,
const std::vector<std::pair<Vector, double>> &parameters)
: HybridGaussianConditional(
DiscreteKeys{discreteParent},
Helper(discreteParent, parameters, key, A, parent)) {}
const DiscreteKey& discreteParent,
Key key, //
const Matrix& A,
Key parent,
const std::vector<std::pair<Vector, double>>& parameters)
: HybridGaussianConditional(DiscreteKeys{discreteParent},
Helper(discreteParent, parameters, key, A, parent)) {}
HybridGaussianConditional::HybridGaussianConditional(
const DiscreteKey &discreteParent, Key key, //
const Matrix &A1, Key parent1, const Matrix &A2, Key parent2,
const std::vector<std::pair<Vector, double>> &parameters)
: HybridGaussianConditional(
DiscreteKeys{discreteParent},
Helper(discreteParent, parameters, key, A1, parent1, A2, parent2)) {}
const DiscreteKey& discreteParent,
Key key, //
const Matrix& A1,
Key parent1,
const Matrix& A2,
Key parent2,
const std::vector<std::pair<Vector, double>>& parameters)
: HybridGaussianConditional(DiscreteKeys{discreteParent},
Helper(discreteParent, parameters, key, A1, parent1, A2, parent2)) {
}
HybridGaussianConditional::HybridGaussianConditional(
const DiscreteKeys &discreteParents,
const HybridGaussianConditional::Conditionals &conditionals)
const DiscreteKeys& discreteParents,
const HybridGaussianConditional::Conditionals& conditionals)
: HybridGaussianConditional(discreteParents, Helper(conditionals)) {}
/* *******************************************************************************/
const HybridGaussianConditional::Conditionals &
HybridGaussianConditional::conditionals() const {
const HybridGaussianConditional::Conditionals& HybridGaussianConditional::conditionals() const {
return conditionals_;
}
/* *******************************************************************************/
HybridGaussianProductFactor HybridGaussianConditional::asProductFactor()
const {
auto wrap = [this](const std::shared_ptr<GaussianConditional> &gc) {
HybridGaussianProductFactor HybridGaussianConditional::asProductFactor() const {
auto wrap = [this](const std::shared_ptr<GaussianConditional>& gc)
-> std::pair<GaussianFactorGraph, double> {
// First check if conditional has not been pruned
if (gc) {
const double Cgm_Kgcm = gc->negLogConstant() - this->negLogConstant_;
@ -151,10 +154,17 @@ HybridGaussianProductFactor HybridGaussianConditional::asProductFactor()
Vector c(1);
c << std::sqrt(2.0 * Cgm_Kgcm);
auto constantFactor = std::make_shared<JacobianFactor>(c);
return GaussianFactorGraph{gc, constantFactor};
return {GaussianFactorGraph{gc, constantFactor}, Cgm_Kgcm};
} else {
// The scalar can be zero.
// TODO(Frank): after hiding is gone, this should be the only case here.
return {GaussianFactorGraph{gc}, Cgm_Kgcm};
}
} else {
// If the conditional is pruned, return an empty GaussianFactorGraph with zero scalar sum
// TODO(Frank): Could we just return an *empty* GaussianFactorGraph?
return {GaussianFactorGraph{nullptr}, 0.0};
}
return GaussianFactorGraph{gc};
};
return {{conditionals_, wrap}};
}
@ -162,7 +172,7 @@ HybridGaussianProductFactor HybridGaussianConditional::asProductFactor()
/* *******************************************************************************/
size_t HybridGaussianConditional::nrComponents() const {
size_t total = 0;
conditionals_.visit([&total](const GaussianFactor::shared_ptr &node) {
conditionals_.visit([&total](const GaussianFactor::shared_ptr& node) {
if (node) total += 1;
});
return total;
@ -170,21 +180,19 @@ size_t HybridGaussianConditional::nrComponents() const {
/* *******************************************************************************/
GaussianConditional::shared_ptr HybridGaussianConditional::choose(
const DiscreteValues &discreteValues) const {
auto &ptr = conditionals_(discreteValues);
const DiscreteValues& discreteValues) const {
auto& ptr = conditionals_(discreteValues);
if (!ptr) return nullptr;
auto conditional = std::dynamic_pointer_cast<GaussianConditional>(ptr);
if (conditional)
return conditional;
else
throw std::logic_error(
"A HybridGaussianConditional unexpectedly contained a non-conditional");
throw std::logic_error("A HybridGaussianConditional unexpectedly contained a non-conditional");
}
/* *******************************************************************************/
bool HybridGaussianConditional::equals(const HybridFactor &lf,
double tol) const {
const This *e = dynamic_cast<const This *>(&lf);
bool HybridGaussianConditional::equals(const HybridFactor& lf, double tol) const {
const This* e = dynamic_cast<const This*>(&lf);
if (e == nullptr) return false;
// This will return false if either conditionals_ is empty or e->conditionals_
@ -193,27 +201,26 @@ bool HybridGaussianConditional::equals(const HybridFactor &lf,
// Check the base and the factors:
return BaseFactor::equals(*e, tol) &&
conditionals_.equals(
e->conditionals_, [tol](const auto &f1, const auto &f2) {
return (!f1 && !f2) || (f1 && f2 && f1->equals(*f2, tol));
});
conditionals_.equals(e->conditionals_, [tol](const auto& f1, const auto& f2) {
return (!f1 && !f2) || (f1 && f2 && f1->equals(*f2, tol));
});
}
/* *******************************************************************************/
void HybridGaussianConditional::print(const std::string &s,
const KeyFormatter &formatter) const {
void HybridGaussianConditional::print(const std::string& s, const KeyFormatter& formatter) const {
std::cout << (s.empty() ? "" : s + "\n");
BaseConditional::print("", formatter);
std::cout << " Discrete Keys = ";
for (auto &dk : discreteKeys()) {
for (auto& dk : discreteKeys()) {
std::cout << "(" << formatter(dk.first) << ", " << dk.second << "), ";
}
std::cout << std::endl
<< " logNormalizationConstant: " << -negLogConstant() << std::endl
<< std::endl;
conditionals_.print(
"", [&](Key k) { return formatter(k); },
[&](const GaussianConditional::shared_ptr &gf) -> std::string {
"",
[&](Key k) { return formatter(k); },
[&](const GaussianConditional::shared_ptr& gf) -> std::string {
RedirectCout rd;
if (gf && !gf->empty()) {
gf->print("", formatter);
@ -230,20 +237,19 @@ KeyVector HybridGaussianConditional::continuousParents() const {
const auto range = parents();
KeyVector continuousParentKeys(range.begin(), range.end());
// Loop over all discrete keys:
for (const auto &discreteKey : discreteKeys()) {
for (const auto& discreteKey : discreteKeys()) {
const Key key = discreteKey.first;
// remove that key from continuousParentKeys:
continuousParentKeys.erase(std::remove(continuousParentKeys.begin(),
continuousParentKeys.end(), key),
continuousParentKeys.end());
continuousParentKeys.erase(
std::remove(continuousParentKeys.begin(), continuousParentKeys.end(), key),
continuousParentKeys.end());
}
return continuousParentKeys;
}
/* ************************************************************************* */
bool HybridGaussianConditional::allFrontalsGiven(
const VectorValues &given) const {
for (auto &&kv : given) {
bool HybridGaussianConditional::allFrontalsGiven(const VectorValues& given) const {
for (auto&& kv : given) {
if (given.find(kv.first) == given.end()) {
return false;
}
@ -253,7 +259,7 @@ bool HybridGaussianConditional::allFrontalsGiven(
/* ************************************************************************* */
std::shared_ptr<HybridGaussianFactor> HybridGaussianConditional::likelihood(
const VectorValues &given) const {
const VectorValues& given) const {
if (!allFrontalsGiven(given)) {
throw std::runtime_error(
"HybridGaussianConditional::likelihood: given values are missing some "
@ -264,8 +270,7 @@ std::shared_ptr<HybridGaussianFactor> HybridGaussianConditional::likelihood(
const KeyVector continuousParentKeys = continuousParents();
const HybridGaussianFactor::FactorValuePairs likelihoods(
conditionals_,
[&](const GaussianConditional::shared_ptr &conditional)
-> GaussianFactorValuePair {
[&](const GaussianConditional::shared_ptr& conditional) -> GaussianFactorValuePair {
const auto likelihood_m = conditional->likelihood(given);
const double Cgm_Kgcm = conditional->negLogConstant() - negLogConstant_;
if (Cgm_Kgcm == 0.0) {
@ -276,26 +281,24 @@ std::shared_ptr<HybridGaussianFactor> HybridGaussianConditional::likelihood(
return {likelihood_m, Cgm_Kgcm};
}
});
return std::make_shared<HybridGaussianFactor>(discreteParentKeys,
likelihoods);
return std::make_shared<HybridGaussianFactor>(discreteParentKeys, likelihoods);
}
/* ************************************************************************* */
std::set<DiscreteKey> DiscreteKeysAsSet(const DiscreteKeys &discreteKeys) {
std::set<DiscreteKey> DiscreteKeysAsSet(const DiscreteKeys& discreteKeys) {
std::set<DiscreteKey> s(discreteKeys.begin(), discreteKeys.end());
return s;
}
/* *******************************************************************************/
HybridGaussianConditional::shared_ptr HybridGaussianConditional::prune(
const DecisionTreeFactor &discreteProbs) const {
const DecisionTreeFactor& discreteProbs) const {
// Find keys in discreteProbs.keys() but not in this->keys():
std::set<Key> mine(this->keys().begin(), this->keys().end());
std::set<Key> theirs(discreteProbs.keys().begin(),
discreteProbs.keys().end());
std::set<Key> theirs(discreteProbs.keys().begin(), discreteProbs.keys().end());
std::vector<Key> diff;
std::set_difference(theirs.begin(), theirs.end(), mine.begin(), mine.end(),
std::back_inserter(diff));
std::set_difference(
theirs.begin(), theirs.end(), mine.begin(), mine.end(), std::back_inserter(diff));
// Find maximum probability value for every combination of our keys.
Ordering keys(diff);
@ -303,26 +306,24 @@ HybridGaussianConditional::shared_ptr HybridGaussianConditional::prune(
// Check the max value for every combination of our keys.
// If the max value is 0.0, we can prune the corresponding conditional.
auto pruner = [&](const Assignment<Key> &choices,
const GaussianConditional::shared_ptr &conditional)
-> GaussianConditional::shared_ptr {
auto pruner =
[&](const Assignment<Key>& choices,
const GaussianConditional::shared_ptr& conditional) -> GaussianConditional::shared_ptr {
return (max->evaluate(choices) == 0.0) ? nullptr : conditional;
};
auto pruned_conditionals = conditionals_.apply(pruner);
return std::make_shared<HybridGaussianConditional>(discreteKeys(),
pruned_conditionals);
return std::make_shared<HybridGaussianConditional>(discreteKeys(), pruned_conditionals);
}
/* *******************************************************************************/
double HybridGaussianConditional::logProbability(
const HybridValues &values) const {
double HybridGaussianConditional::logProbability(const HybridValues& values) const {
auto conditional = conditionals_(values.discrete());
return conditional->logProbability(values.continuous());
}
/* *******************************************************************************/
double HybridGaussianConditional::evaluate(const HybridValues &values) const {
double HybridGaussianConditional::evaluate(const HybridValues& values) const {
auto conditional = conditionals_(values.discrete());
return conditional->evaluate(values.continuous());
}
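For a given mode m, the scalar paired with that component's factor graph in asProductFactor() above is the gap in negative log-normalization constants, scalar_m = negLogConstant(component_m) - negLogConstant_, where negLogConstant_ is the minimum of negLogConstant() over all components (computed in Helper); it is therefore zero for the most compact component and positive for all others.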

View File

@ -32,8 +32,8 @@
namespace gtsam {
/* *******************************************************************************/
HybridGaussianFactor::FactorValuePairs
HybridGaussianFactor::augment(const FactorValuePairs &factors) {
HybridGaussianFactor::FactorValuePairs HybridGaussianFactor::augment(
const FactorValuePairs& factors) {
// Find the minimum value so we can "proselytize" to positive values.
// Done because we can't have sqrt of negative numbers.
DecisionTree<Key, GaussianFactor::shared_ptr> gaussianFactors;
@ -44,18 +44,16 @@ HybridGaussianFactor::augment(const FactorValuePairs &factors) {
double min_value = valueTree.min();
// Finally, update the [A|b] matrices.
auto update = [&min_value](const auto &gfv) -> GaussianFactorValuePair {
auto update = [&min_value](const auto& gfv) -> GaussianFactorValuePair {
auto [gf, value] = gfv;
auto jf = std::dynamic_pointer_cast<JacobianFactor>(gf);
if (!jf)
return {gf, 0.0}; // should this be zero or infinite?
if (!jf) return {gf, 0.0}; // should this be zero or infinite?
double normalized_value = value - min_value;
// If the value is 0, do nothing
if (normalized_value == 0.0)
return {gf, 0.0};
if (normalized_value == 0.0) return {gf, value};
GaussianFactorGraph gfg;
gfg.push_back(jf);
@ -66,40 +64,42 @@ HybridGaussianFactor::augment(const FactorValuePairs &factors) {
auto constantFactor = std::make_shared<JacobianFactor>(c);
gfg.push_back(constantFactor);
return {std::make_shared<JacobianFactor>(gfg), normalized_value};
// NOTE(Frank): we store the actual value, not the normalized value:
return {std::make_shared<JacobianFactor>(gfg), value};
};
return FactorValuePairs(factors, update);
}
/* *******************************************************************************/
struct HybridGaussianFactor::ConstructorHelper {
KeyVector continuousKeys; // Continuous keys extracted from factors
DiscreteKeys discreteKeys; // Discrete keys provided to the constructors
FactorValuePairs pairs; // The decision tree with factors and scalars
KeyVector continuousKeys; // Continuous keys extracted from factors
DiscreteKeys discreteKeys; // Discrete keys provided to the constructors
FactorValuePairs pairs; // The decision tree with factors and scalars
ConstructorHelper(const DiscreteKey &discreteKey,
const std::vector<GaussianFactor::shared_ptr> &factors)
/// Constructor for a single discrete key and a vector of Gaussian factors
ConstructorHelper(const DiscreteKey& discreteKey,
const std::vector<GaussianFactor::shared_ptr>& factors)
: discreteKeys({discreteKey}) {
// Extract continuous keys from the first non-null factor
for (const auto &factor : factors) {
for (const auto& factor : factors) {
if (factor && continuousKeys.empty()) {
continuousKeys = factor->keys();
break;
}
}
// Build the FactorValuePairs DecisionTree
pairs = FactorValuePairs(
DecisionTree<Key, GaussianFactor::shared_ptr>(discreteKeys, factors),
[](const auto &f) {
return std::pair{f, 0.0};
});
pairs = FactorValuePairs(DecisionTree<Key, GaussianFactor::shared_ptr>(discreteKeys, factors),
[](const auto& f) {
return std::pair{f, 0.0};
});
}
ConstructorHelper(const DiscreteKey &discreteKey,
const std::vector<GaussianFactorValuePair> &factorPairs)
/// Constructor for a single discrete key and a vector of GaussianFactorValuePairs
ConstructorHelper(const DiscreteKey& discreteKey,
const std::vector<GaussianFactorValuePair>& factorPairs)
: discreteKeys({discreteKey}) {
// Extract continuous keys from the first non-null factor
for (const auto &pair : factorPairs) {
for (const auto& pair : factorPairs) {
if (pair.first && continuousKeys.empty()) {
continuousKeys = pair.first->keys();
break;
@ -110,12 +110,12 @@ struct HybridGaussianFactor::ConstructorHelper {
pairs = FactorValuePairs(discreteKeys, factorPairs);
}
ConstructorHelper(const DiscreteKeys &discreteKeys,
const FactorValuePairs &factorPairs)
/// Constructor for a vector of discrete keys and a vector of GaussianFactorValuePairs
ConstructorHelper(const DiscreteKeys& discreteKeys, const FactorValuePairs& factorPairs)
: discreteKeys(discreteKeys) {
// Extract continuous keys from the first non-null factor
// TODO: just stop after first non-null factor
factorPairs.visit([&](const GaussianFactorValuePair &pair) {
factorPairs.visit([&](const GaussianFactorValuePair& pair) {
if (pair.first && continuousKeys.empty()) {
continuousKeys = pair.first->keys();
}
@ -127,40 +127,32 @@ struct HybridGaussianFactor::ConstructorHelper {
};
/* *******************************************************************************/
HybridGaussianFactor::HybridGaussianFactor(const ConstructorHelper &helper)
: Base(helper.continuousKeys, helper.discreteKeys),
factors_(augment(helper.pairs)) {}
HybridGaussianFactor::HybridGaussianFactor(const ConstructorHelper& helper)
: Base(helper.continuousKeys, helper.discreteKeys), factors_(augment(helper.pairs)) {}
/* *******************************************************************************/
HybridGaussianFactor::HybridGaussianFactor(
const DiscreteKey &discreteKey,
const std::vector<GaussianFactor::shared_ptr> &factorPairs)
const DiscreteKey& discreteKey, const std::vector<GaussianFactor::shared_ptr>& factorPairs)
: HybridGaussianFactor(ConstructorHelper(discreteKey, factorPairs)) {}
/* *******************************************************************************/
HybridGaussianFactor::HybridGaussianFactor(
const DiscreteKey &discreteKey,
const std::vector<GaussianFactorValuePair> &factorPairs)
HybridGaussianFactor::HybridGaussianFactor(const DiscreteKey& discreteKey,
const std::vector<GaussianFactorValuePair>& factorPairs)
: HybridGaussianFactor(ConstructorHelper(discreteKey, factorPairs)) {}
/* *******************************************************************************/
HybridGaussianFactor::HybridGaussianFactor(const DiscreteKeys &discreteKeys,
const FactorValuePairs &factorPairs)
HybridGaussianFactor::HybridGaussianFactor(const DiscreteKeys& discreteKeys,
const FactorValuePairs& factorPairs)
: HybridGaussianFactor(ConstructorHelper(discreteKeys, factorPairs)) {}
/* *******************************************************************************/
bool HybridGaussianFactor::equals(const HybridFactor &lf, double tol) const {
const This *e = dynamic_cast<const This *>(&lf);
if (e == nullptr)
return false;
bool HybridGaussianFactor::equals(const HybridFactor& lf, double tol) const {
const This* e = dynamic_cast<const This*>(&lf);
if (e == nullptr) return false;
// This will return false if either factors_ is empty or e->factors_ is
// empty, but not if both are empty or both are not empty:
if (factors_.empty() ^ e->factors_.empty())
return false;
if (factors_.empty() ^ e->factors_.empty()) return false;
// Check the base and the factors:
auto compareFunc = [tol](const auto &pair1, const auto &pair2) {
auto compareFunc = [tol](const auto& pair1, const auto& pair2) {
auto f1 = pair1.first, f2 = pair2.first;
bool match = (!f1 && !f2) || (f1 && f2 && f1->equals(*f2, tol));
return match && gtsam::equal(pair1.second, pair2.second, tol);
@ -169,8 +161,7 @@ bool HybridGaussianFactor::equals(const HybridFactor &lf, double tol) const {
}
/* *******************************************************************************/
void HybridGaussianFactor::print(const std::string &s,
const KeyFormatter &formatter) const {
void HybridGaussianFactor::print(const std::string& s, const KeyFormatter& formatter) const {
std::cout << (s.empty() ? "" : s + "\n");
HybridFactor::print("", formatter);
std::cout << "{\n";
@ -178,8 +169,9 @@ void HybridGaussianFactor::print(const std::string &s,
std::cout << " empty" << std::endl;
} else {
factors_.print(
"", [&](Key k) { return formatter(k); },
[&](const auto &pair) -> std::string {
"",
[&](Key k) { return formatter(k); },
[&](const auto& pair) -> std::string {
RedirectCout rd;
std::cout << ":\n";
if (pair.first) {
@ -195,22 +187,25 @@ void HybridGaussianFactor::print(const std::string &s,
}
/* *******************************************************************************/
HybridGaussianFactor::sharedFactor
HybridGaussianFactor::operator()(const DiscreteValues &assignment) const {
HybridGaussianFactor::sharedFactor HybridGaussianFactor::operator()(
const DiscreteValues& assignment) const {
return factors_(assignment).first;
}
/* *******************************************************************************/
HybridGaussianProductFactor HybridGaussianFactor::asProductFactor() const {
return {{factors_,
[](const auto &pair) { return GaussianFactorGraph{pair.first}; }}};
// Implemented by creating a new DecisionTree where:
// - The structure (keys and assignments) is preserved from factors_
// - Each leaf converted to a GaussianFactorGraph with just the factor and its scalar.
return {{factors_, [](const auto& pair) -> std::pair<GaussianFactorGraph, double> {
return {GaussianFactorGraph{pair.first}, pair.second};
}}};
}
/* *******************************************************************************/
/// Helper method to compute the error of a component.
static double
PotentiallyPrunedComponentError(const GaussianFactor::shared_ptr &gf,
const VectorValues &values) {
static double PotentiallyPrunedComponentError(const GaussianFactor::shared_ptr& gf,
const VectorValues& values) {
// Check if valid pointer
if (gf) {
return gf->error(values);
@ -222,10 +217,10 @@ PotentiallyPrunedComponentError(const GaussianFactor::shared_ptr &gf,
}
/* *******************************************************************************/
AlgebraicDecisionTree<Key>
HybridGaussianFactor::errorTree(const VectorValues &continuousValues) const {
AlgebraicDecisionTree<Key> HybridGaussianFactor::errorTree(
const VectorValues& continuousValues) const {
// functor to convert from sharedFactor to double error value.
auto errorFunc = [&continuousValues](const auto &pair) {
auto errorFunc = [&continuousValues](const auto& pair) {
return PotentiallyPrunedComponentError(pair.first, continuousValues);
};
DecisionTree<Key, double> error_tree(factors_, errorFunc);
@ -233,10 +228,10 @@ HybridGaussianFactor::errorTree(const VectorValues &continuousValues) const {
}
/* *******************************************************************************/
double HybridGaussianFactor::error(const HybridValues &values) const {
double HybridGaussianFactor::error(const HybridValues& values) const {
// Directly index to get the component, no need to build the whole tree.
const auto pair = factors_(values.discrete());
return PotentiallyPrunedComponentError(pair.first, values.continuous());
}
} // namespace gtsam
} // namespace gtsam
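For reference, a standalone sketch of the trick augment() uses above to fold a nonnegative scalar c into a factor: append a keyless row with b = sqrt(2c), which contributes exactly c to the factor's 0.5 * ||b||^2 error. The helper name below is hypothetical, not part of this commit:

#include <gtsam/linear/GaussianFactorGraph.h>
#include <gtsam/linear/JacobianFactor.h>
#include <cmath>
#include <memory>

// Fold a nonnegative scalar c into a Jacobian factor (illustration only).
gtsam::JacobianFactor::shared_ptr withConstant(const gtsam::JacobianFactor::shared_ptr& jf,
                                               double c) {
  gtsam::GaussianFactorGraph gfg;
  gfg.push_back(jf);
  gtsam::Vector b(1);
  b << std::sqrt(2.0 * c);                                    // 0.5 * b^2 == c
  gfg.push_back(std::make_shared<gtsam::JacobianFactor>(b));  // keyless constant row
  return std::make_shared<gtsam::JacobianFactor>(gfg);        // combine into one factor
}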

View File

@ -18,7 +18,6 @@
* @date Mar 11, 2022
*/
#include "gtsam/discrete/DiscreteValues.h"
#include <gtsam/base/utilities.h>
#include <gtsam/discrete/Assignment.h>
#include <gtsam/discrete/DiscreteEliminationTree.h>
@ -40,6 +39,7 @@
#include <gtsam/linear/GaussianJunctionTree.h>
#include <gtsam/linear/HessianFactor.h>
#include <gtsam/linear/JacobianFactor.h>
#include "gtsam/discrete/DiscreteValues.h"
#include <cstddef>
#include <iostream>
@ -59,15 +59,14 @@ using std::dynamic_pointer_cast;
/* ************************************************************************ */
// Throw a runtime exception for method specified in string s, and factor f:
static void throwRuntimeError(const std::string &s,
const std::shared_ptr<Factor> &f) {
auto &fr = *f;
throw std::runtime_error(s + " not implemented for factor type " +
demangle(typeid(fr).name()) + ".");
static void throwRuntimeError(const std::string& s, const std::shared_ptr<Factor>& f) {
auto& fr = *f;
throw std::runtime_error(s + " not implemented for factor type " + demangle(typeid(fr).name()) +
".");
}
/* ************************************************************************ */
const Ordering HybridOrdering(const HybridGaussianFactorGraph &graph) {
const Ordering HybridOrdering(const HybridGaussianFactorGraph& graph) {
KeySet discrete_keys = graph.discreteKeySet();
const VariableIndex index(graph);
return Ordering::ColamdConstrainedLast(
@ -75,15 +74,14 @@ const Ordering HybridOrdering(const HybridGaussianFactorGraph &graph) {
}
/* ************************************************************************ */
static void printFactor(const std::shared_ptr<Factor> &factor,
const DiscreteValues &assignment,
const KeyFormatter &keyFormatter) {
static void printFactor(const std::shared_ptr<Factor>& factor,
const DiscreteValues& assignment,
const KeyFormatter& keyFormatter) {
if (auto hgf = std::dynamic_pointer_cast<HybridGaussianFactor>(factor)) {
if (assignment.empty())
hgf->print("HybridGaussianFactor:", keyFormatter);
else
hgf->operator()(assignment)
->print("HybridGaussianFactor, component:", keyFormatter);
hgf->operator()(assignment)->print("HybridGaussianFactor, component:", keyFormatter);
} else if (auto gf = std::dynamic_pointer_cast<GaussianFactor>(factor)) {
factor->print("GaussianFactor:\n", keyFormatter);
@ -98,9 +96,7 @@ static void printFactor(const std::shared_ptr<Factor> &factor,
if (assignment.empty())
hc->print("HybridConditional:", keyFormatter);
else
hc->asHybrid()
->choose(assignment)
->print("HybridConditional, component:\n", keyFormatter);
hc->asHybrid()->choose(assignment)->print("HybridConditional, component:\n", keyFormatter);
}
} else {
factor->print("Unknown factor type\n", keyFormatter);
@ -108,13 +104,13 @@ static void printFactor(const std::shared_ptr<Factor> &factor,
}
/* ************************************************************************ */
void HybridGaussianFactorGraph::print(const std::string &s,
const KeyFormatter &keyFormatter) const {
void HybridGaussianFactorGraph::print(const std::string& s,
const KeyFormatter& keyFormatter) const {
std::cout << (s.empty() ? "" : s + " ") << std::endl;
std::cout << "size: " << size() << std::endl;
for (size_t i = 0; i < factors_.size(); i++) {
auto &&factor = factors_[i];
auto&& factor = factors_[i];
if (factor == nullptr) {
std::cout << "Factor " << i << ": nullptr\n";
continue;
@ -129,15 +125,15 @@ void HybridGaussianFactorGraph::print(const std::string &s,
/* ************************************************************************ */
void HybridGaussianFactorGraph::printErrors(
const HybridValues &values, const std::string &str,
const KeyFormatter &keyFormatter,
const std::function<bool(const Factor * /*factor*/,
double /*whitenedError*/, size_t /*index*/)>
&printCondition) const {
const HybridValues& values,
const std::string& str,
const KeyFormatter& keyFormatter,
const std::function<bool(const Factor* /*factor*/, double /*whitenedError*/, size_t /*index*/)>&
printCondition) const {
std::cout << str << " size: " << size() << std::endl << std::endl;
for (size_t i = 0; i < factors_.size(); i++) {
auto &&factor = factors_[i];
auto&& factor = factors_[i];
if (factor == nullptr) {
std::cout << "Factor " << i << ": nullptr\n";
continue;
@ -157,14 +153,13 @@ void HybridGaussianFactorGraph::printErrors(
/* ************************************************************************ */
// TODO(dellaert): it's probably more efficient to first collect the discrete
// keys, and then loop over all assignments to populate a vector.
HybridGaussianProductFactor
HybridGaussianFactorGraph::collectProductFactor() const {
HybridGaussianProductFactor HybridGaussianFactorGraph::collectProductFactor() const {
HybridGaussianProductFactor result;
for (auto &f : factors_) {
for (auto& f : factors_) {
// TODO(dellaert): can we make this cleaner and less error-prone?
if (auto orphan = dynamic_pointer_cast<OrphanWrapper>(f)) {
continue; // Ignore OrphanWrapper
continue; // Ignore OrphanWrapper
} else if (auto gf = dynamic_pointer_cast<GaussianFactor>(f)) {
result += gf;
} else if (auto gc = dynamic_pointer_cast<GaussianConditional>(f)) {
@ -172,7 +167,7 @@ HybridGaussianFactorGraph::collectProductFactor() const {
} else if (auto gmf = dynamic_pointer_cast<HybridGaussianFactor>(f)) {
result += *gmf;
} else if (auto gm = dynamic_pointer_cast<HybridGaussianConditional>(f)) {
result += *gm; // handled above already?
result += *gm; // handled above already?
} else if (auto hc = dynamic_pointer_cast<HybridConditional>(f)) {
if (auto gm = hc->asHybrid()) {
result += *gm;
@ -198,11 +193,10 @@ HybridGaussianFactorGraph::collectProductFactor() const {
}
/* ************************************************************************ */
static std::pair<HybridConditional::shared_ptr, std::shared_ptr<Factor>>
continuousElimination(const HybridGaussianFactorGraph &factors,
const Ordering &frontalKeys) {
static std::pair<HybridConditional::shared_ptr, std::shared_ptr<Factor>> continuousElimination(
const HybridGaussianFactorGraph& factors, const Ordering& frontalKeys) {
GaussianFactorGraph gfg;
for (auto &f : factors) {
for (auto& f : factors) {
if (auto gf = dynamic_pointer_cast<GaussianFactor>(f)) {
gfg.push_back(gf);
} else if (auto orphan = dynamic_pointer_cast<OrphanWrapper>(f)) {
@ -230,7 +224,7 @@ continuousElimination(const HybridGaussianFactorGraph &factors,
* @return AlgebraicDecisionTree<Key>
*/
static AlgebraicDecisionTree<Key> probabilitiesFromNegativeLogValues(
const AlgebraicDecisionTree<Key> &logValues) {
const AlgebraicDecisionTree<Key>& logValues) {
// Perform normalization
double min_log = logValues.min();
AlgebraicDecisionTree<Key> probabilities = DecisionTree<Key, double>(
@ -241,18 +235,17 @@ static AlgebraicDecisionTree<Key> probabilitiesFromNegativeLogValues(
}
/* ************************************************************************ */
static std::pair<HybridConditional::shared_ptr, std::shared_ptr<Factor>>
discreteElimination(const HybridGaussianFactorGraph &factors,
const Ordering &frontalKeys) {
static std::pair<HybridConditional::shared_ptr, std::shared_ptr<Factor>> discreteElimination(
const HybridGaussianFactorGraph& factors, const Ordering& frontalKeys) {
DiscreteFactorGraph dfg;
for (auto &f : factors) {
for (auto& f : factors) {
if (auto df = dynamic_pointer_cast<DiscreteFactor>(f)) {
dfg.push_back(df);
} else if (auto gmf = dynamic_pointer_cast<HybridGaussianFactor>(f)) {
// Case where we have a HybridGaussianFactor with no continuous keys.
// In this case, compute discrete probabilities.
auto logProbability = [&](const auto &pair) -> double {
auto logProbability = [&](const auto& pair) -> double {
auto [factor, _] = pair;
if (!factor) return 0.0;
return factor->error(VectorValues());
@ -262,8 +255,7 @@ discreteElimination(const HybridGaussianFactorGraph &factors,
AlgebraicDecisionTree<Key> probabilities =
probabilitiesFromNegativeLogValues(logProbabilities);
dfg.emplace_shared<DecisionTreeFactor>(gmf->discreteKeys(),
probabilities);
dfg.emplace_shared<DecisionTreeFactor>(gmf->discreteKeys(), probabilities);
} else if (auto orphan = dynamic_pointer_cast<OrphanWrapper>(f)) {
// Ignore orphaned clique.
@ -284,8 +276,8 @@ discreteElimination(const HybridGaussianFactorGraph &factors,
}
/* ************************************************************************ */
using Result = std::pair<std::shared_ptr<GaussianConditional>,
HybridGaussianFactor::sharedFactor>;
using Result = std::pair<std::shared_ptr<GaussianConditional>, GaussianFactor::shared_ptr>;
using ResultTree = DecisionTree<Key, std::pair<Result, double>>;
/**
* Compute the probability p(μ;m) = exp(-error(μ;m)) * sqrt(det(2π Σ_m))
@ -293,11 +285,10 @@ using Result = std::pair<std::shared_ptr<GaussianConditional>,
* The residual error contains no keys, and only
* depends on the discrete separator if present.
*/
static std::shared_ptr<Factor> createDiscreteFactor(
const DecisionTree<Key, Result> &eliminationResults,
const DiscreteKeys &discreteSeparator) {
auto negLogProbability = [&](const Result &pair) -> double {
const auto &[conditional, factor] = pair;
static std::shared_ptr<Factor> createDiscreteFactor(const ResultTree& eliminationResults,
const DiscreteKeys& discreteSeparator) {
auto negLogProbability = [&](const auto& pair) -> double {
const auto& [conditional, factor] = pair.first;
if (conditional && factor) {
static const VectorValues kEmpty;
// If the factor is not null, it has no keys, just contains the residual.
@ -324,12 +315,11 @@ static std::shared_ptr<Factor> createDiscreteFactor(
// Create HybridGaussianFactor on the separator, taking care to correct
// for conditional constants.
static std::shared_ptr<Factor> createHybridGaussianFactor(
const DecisionTree<Key, Result> &eliminationResults,
const DiscreteKeys &discreteSeparator) {
static std::shared_ptr<Factor> createHybridGaussianFactor(const ResultTree& eliminationResults,
const DiscreteKeys& discreteSeparator) {
// Correct for the normalization constant used up by the conditional
auto correct = [&](const Result &pair) -> GaussianFactorValuePair {
const auto &[conditional, factor] = pair;
auto correct = [&](const auto& pair) -> GaussianFactorValuePair {
const auto& [conditional, factor] = pair.first;
if (conditional && factor) {
auto hf = std::dynamic_pointer_cast<HessianFactor>(factor);
if (!hf) throw std::runtime_error("Expected HessianFactor!");
@ -339,29 +329,27 @@ static std::shared_ptr<Factor> createHybridGaussianFactor(
const double negLogK = conditional->negLogConstant();
hf->constantTerm() += -2.0 * negLogK;
return {factor, negLogK};
} else if (!conditional && !factor){
} else if (!conditional && !factor) {
return {nullptr, 0.0}; // TODO(frank): or should this be infinity?
} else {
throw std::runtime_error("createHybridGaussianFactors has mixed NULLs");
throw std::runtime_error("createHybridGaussianFactors has mixed NULLs");
}
};
DecisionTree<Key, GaussianFactorValuePair> newFactors(eliminationResults,
correct);
DecisionTree<Key, GaussianFactorValuePair> newFactors(eliminationResults, correct);
return std::make_shared<HybridGaussianFactor>(discreteSeparator, newFactors);
}
/* *******************************************************************************/
/// Get the discrete keys from the HybridGaussianFactorGraph as DiscreteKeys.
static auto GetDiscreteKeys =
[](const HybridGaussianFactorGraph &hfg) -> DiscreteKeys {
static auto GetDiscreteKeys = [](const HybridGaussianFactorGraph& hfg) -> DiscreteKeys {
const std::set<DiscreteKey> discreteKeySet = hfg.discreteKeys();
return {discreteKeySet.begin(), discreteKeySet.end()};
};
/* *******************************************************************************/
std::pair<HybridConditional::shared_ptr, std::shared_ptr<Factor>>
HybridGaussianFactorGraph::eliminate(const Ordering &keys) const {
HybridGaussianFactorGraph::eliminate(const Ordering& keys) const {
// Since we eliminate all continuous variables first,
// the discrete separator will be *all* the discrete keys.
DiscreteKeys discreteSeparator = GetDiscreteKeys(*this);
@ -377,9 +365,12 @@ HybridGaussianFactorGraph::eliminate(const Ordering &keys) const {
// This is the elimination method on the leaf nodes
bool someContinuousLeft = false;
auto eliminate = [&](const GaussianFactorGraph &graph) -> Result {
auto eliminate =
[&](const std::pair<GaussianFactorGraph, double>& pair) -> std::pair<Result, double> {
const auto& [graph, scalar] = pair;
if (graph.empty()) {
return {nullptr, nullptr};
return {{nullptr, nullptr}, 0.0};
}
// Expensive elimination of product factor.
@ -388,25 +379,24 @@ HybridGaussianFactorGraph::eliminate(const Ordering &keys) const {
// Record whether there are any continuous variables left
someContinuousLeft |= !result.second->empty();
return result;
return {result, scalar};
};
// Perform elimination!
DecisionTree<Key, Result> eliminationResults(prunedProductFactor, eliminate);
ResultTree eliminationResults(prunedProductFactor, eliminate);
// If there are no more continuous parents we create a DiscreteFactor with the
// error for each discrete choice. Otherwise, create a HybridGaussianFactor
// on the separator, taking care to correct for conditional constants.
auto newFactor =
someContinuousLeft
? createHybridGaussianFactor(eliminationResults, discreteSeparator)
: createDiscreteFactor(eliminationResults, discreteSeparator);
auto newFactor = someContinuousLeft
? createHybridGaussianFactor(eliminationResults, discreteSeparator)
: createDiscreteFactor(eliminationResults, discreteSeparator);
// Create the HybridGaussianConditional from the conditionals
HybridGaussianConditional::Conditionals conditionals(
eliminationResults, [](const Result &pair) { return pair.first; });
auto hybridGaussian = std::make_shared<HybridGaussianConditional>(
discreteSeparator, conditionals);
eliminationResults, [](const auto& pair) { return pair.first.first; });
auto hybridGaussian =
std::make_shared<HybridGaussianConditional>(discreteSeparator, conditionals);
return {std::make_shared<HybridConditional>(hybridGaussian), newFactor};
}
@ -426,8 +416,7 @@ HybridGaussianFactorGraph::eliminate(const Ordering &keys) const {
* be INCORRECT and there will be NO error raised.
*/
std::pair<HybridConditional::shared_ptr, std::shared_ptr<Factor>> //
EliminateHybrid(const HybridGaussianFactorGraph &factors,
const Ordering &keys) {
EliminateHybrid(const HybridGaussianFactorGraph& factors, const Ordering& keys) {
// NOTE: Because we are in the Conditional Gaussian regime there are only
// a few cases:
// 1. continuous variable, make a hybrid Gaussian conditional if there are
@ -478,7 +467,7 @@ EliminateHybrid(const HybridGaussianFactorGraph &factors,
// 3. if not, we do hybrid elimination:
bool only_discrete = true, only_continuous = true;
for (auto &&factor : factors) {
for (auto&& factor : factors) {
if (auto hybrid_factor = std::dynamic_pointer_cast<HybridFactor>(factor)) {
if (hybrid_factor->isDiscrete()) {
only_continuous = false;
@ -489,11 +478,9 @@ EliminateHybrid(const HybridGaussianFactorGraph &factors,
only_discrete = false;
break;
}
} else if (auto cont_factor =
std::dynamic_pointer_cast<GaussianFactor>(factor)) {
} else if (auto cont_factor = std::dynamic_pointer_cast<GaussianFactor>(factor)) {
only_discrete = false;
} else if (auto discrete_factor =
std::dynamic_pointer_cast<DiscreteFactor>(factor)) {
} else if (auto discrete_factor = std::dynamic_pointer_cast<DiscreteFactor>(factor)) {
only_continuous = false;
}
}
@ -514,10 +501,10 @@ EliminateHybrid(const HybridGaussianFactorGraph &factors,
/* ************************************************************************ */
AlgebraicDecisionTree<Key> HybridGaussianFactorGraph::errorTree(
const VectorValues &continuousValues) const {
const VectorValues& continuousValues) const {
AlgebraicDecisionTree<Key> result(0.0);
// Iterate over each factor.
for (auto &factor : factors_) {
for (auto& factor : factors_) {
if (auto hf = std::dynamic_pointer_cast<HybridFactor>(factor)) {
// Add errorTree for hybrid factors, includes HybridGaussianConditionals!
result = result + hf->errorTree(continuousValues);
@ -535,7 +522,7 @@ AlgebraicDecisionTree<Key> HybridGaussianFactorGraph::errorTree(
}
/* ************************************************************************ */
double HybridGaussianFactorGraph::probPrime(const HybridValues &values) const {
double HybridGaussianFactorGraph::probPrime(const HybridValues& values) const {
double error = this->error(values);
// NOTE: The 0.5 term is handled by each factor
return std::exp(-error);
@ -543,7 +530,7 @@ double HybridGaussianFactorGraph::probPrime(const HybridValues &values) const {
/* ************************************************************************ */
AlgebraicDecisionTree<Key> HybridGaussianFactorGraph::discretePosterior(
const VectorValues &continuousValues) const {
const VectorValues& continuousValues) const {
AlgebraicDecisionTree<Key> errors = this->errorTree(continuousValues);
AlgebraicDecisionTree<Key> p = errors.apply([](double error) {
// NOTE: The 0.5 term is handled by each factor
@ -553,10 +540,9 @@ AlgebraicDecisionTree<Key> HybridGaussianFactorGraph::discretePosterior(
}
/* ************************************************************************ */
GaussianFactorGraph HybridGaussianFactorGraph::choose(
const DiscreteValues &assignment) const {
GaussianFactorGraph HybridGaussianFactorGraph::choose(const DiscreteValues& assignment) const {
GaussianFactorGraph gfg;
for (auto &&f : *this) {
for (auto&& f : *this) {
if (auto gf = std::dynamic_pointer_cast<GaussianFactor>(f)) {
gfg.push_back(gf);
} else if (auto gc = std::dynamic_pointer_cast<GaussianConditional>(f)) {

View File

@ -24,66 +24,64 @@
namespace gtsam {
static GaussianFactorGraph add(const GaussianFactorGraph &graph1,
const GaussianFactorGraph &graph2) {
auto result = graph1;
result.push_back(graph2);
return result;
using Y = HybridGaussianProductFactor::Y;
static Y add(const Y& y1, const Y& y2) {
GaussianFactorGraph result = y1.first;
result.push_back(y2.first);
return {result, y1.second + y2.second};
};
HybridGaussianProductFactor operator+(const HybridGaussianProductFactor &a,
const HybridGaussianProductFactor &b) {
HybridGaussianProductFactor operator+(const HybridGaussianProductFactor& a,
const HybridGaussianProductFactor& b) {
return a.empty() ? b : HybridGaussianProductFactor(a.apply(b, add));
}
HybridGaussianProductFactor HybridGaussianProductFactor::operator+(
const HybridGaussianFactor &factor) const {
const HybridGaussianFactor& factor) const {
return *this + factor.asProductFactor();
}
HybridGaussianProductFactor HybridGaussianProductFactor::operator+(
const GaussianFactor::shared_ptr &factor) const {
const GaussianFactor::shared_ptr& factor) const {
return *this + HybridGaussianProductFactor(factor);
}
HybridGaussianProductFactor &HybridGaussianProductFactor::operator+=(
const GaussianFactor::shared_ptr &factor) {
HybridGaussianProductFactor& HybridGaussianProductFactor::operator+=(
const GaussianFactor::shared_ptr& factor) {
*this = *this + factor;
return *this;
}
HybridGaussianProductFactor &
HybridGaussianProductFactor::operator+=(const HybridGaussianFactor &factor) {
HybridGaussianProductFactor& HybridGaussianProductFactor::operator+=(
const HybridGaussianFactor& factor) {
*this = *this + factor;
return *this;
}
void HybridGaussianProductFactor::print(const std::string &s,
const KeyFormatter &formatter) const {
void HybridGaussianProductFactor::print(const std::string& s, const KeyFormatter& formatter) const {
KeySet keys;
auto printer = [&](const Y &graph) {
if (keys.size() == 0)
keys = graph.keys();
return "Graph of size " + std::to_string(graph.size());
auto printer = [&](const Y& y) {
if (keys.empty()) keys = y.first.keys();
return "Graph of size " + std::to_string(y.first.size()) +
", scalar sum: " + std::to_string(y.second);
};
Base::print(s, formatter, printer);
if (keys.size() > 0) {
if (!keys.empty()) {
std::stringstream ss;
ss << s << " Keys:";
for (auto &&key : keys)
ss << " " << formatter(key);
for (auto&& key : keys) ss << " " << formatter(key);
std::cout << ss.str() << "." << std::endl;
}
}
HybridGaussianProductFactor HybridGaussianProductFactor::removeEmpty() const {
auto emptyGaussian = [](const GaussianFactorGraph &graph) {
bool hasNull =
std::any_of(graph.begin(), graph.end(),
[](const GaussianFactor::shared_ptr &ptr) { return !ptr; });
return hasNull ? GaussianFactorGraph() : graph;
auto emptyGaussian = [](const Y& y) {
bool hasNull = std::any_of(
y.first.begin(), y.first.end(), [](const GaussianFactor::shared_ptr& ptr) { return !ptr; });
return hasNull ? Y{GaussianFactorGraph(), 0.0} : y;
};
return {Base(*this, emptyGaussian)};
}
} // namespace gtsam
} // namespace gtsam

View File

@ -26,10 +26,11 @@ namespace gtsam {
class HybridGaussianFactor;
/// Alias for DecisionTree of GaussianFactorGraphs
class HybridGaussianProductFactor : public DecisionTree<Key, GaussianFactorGraph> {
/// Alias for DecisionTree of GaussianFactorGraphs and their scalar sums
class HybridGaussianProductFactor
: public DecisionTree<Key, std::pair<GaussianFactorGraph, double>> {
public:
using Y = GaussianFactorGraph;
using Y = std::pair<GaussianFactorGraph, double>;
using Base = DecisionTree<Key, Y>;
/// @name Constructors
@ -44,7 +45,8 @@ class HybridGaussianProductFactor : public DecisionTree<Key, GaussianFactorGraph
* @param factor Shared pointer to the factor
*/
template <class FACTOR>
HybridGaussianProductFactor(const std::shared_ptr<FACTOR>& factor) : Base(Y{factor}) {}
HybridGaussianProductFactor(const std::shared_ptr<FACTOR>& factor)
: Base(Y{GaussianFactorGraph{factor}, 0.0}) {}
/**
* @brief Construct from DecisionTree
@ -88,7 +90,9 @@ class HybridGaussianProductFactor : public DecisionTree<Key, GaussianFactorGraph
* @return true if equal, false otherwise
*/
bool equals(const HybridGaussianProductFactor& other, double tol = 1e-9) const {
return Base::equals(other, [tol](const Y& a, const Y& b) { return a.equals(b, tol); });
return Base::equals(other, [tol](const Y& a, const Y& b) {
return a.first.equals(b.first, tol) && std::abs(a.second - b.second) < tol;
});
}
/// @}
@ -101,9 +105,9 @@ class HybridGaussianProductFactor : public DecisionTree<Key, GaussianFactorGraph
* @return A new HybridGaussianProductFactor with empty GaussianFactorGraphs removed
*
* If any GaussianFactorGraph in the decision tree contains a nullptr, convert
* that leaf to an empty GaussianFactorGraph. This is needed because the DecisionTree
* will otherwise create a GaussianFactorGraph with a single (null) factor,
* which doesn't register as null.
* that leaf to an empty GaussianFactorGraph with zero scalar sum. This is needed because the
* DecisionTree will otherwise create a GaussianFactorGraph with a single (null) factor, which
* doesn't register as null.
*/
HybridGaussianProductFactor removeEmpty() const;
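A small usage sketch (illustration only; the key X(0) and the Jacobian factor are made up for the example): a product built from a single factor starts with a zero scalar, operator+= concatenates graphs and sums scalars, and indexing a leaf now yields the (graph, scalar) pair:

#include <gtsam/discrete/DiscreteValues.h>
#include <gtsam/hybrid/HybridGaussianProductFactor.h>
#include <gtsam/inference/Symbol.h>
#include <gtsam/linear/JacobianFactor.h>
#include <memory>

void exampleUsage() {
  using namespace gtsam;
  using symbol_shorthand::X;
  auto f = std::make_shared<JacobianFactor>(X(0), I_1x1, Vector1(0.0));
  HybridGaussianProductFactor product(f);  // leaf is {GaussianFactorGraph{f}, 0.0}
  product += f;                            // graphs are concatenated, scalars summed
  const auto& [graph, scalar] = product(DiscreteValues{});  // constant tree: any assignment
  // Here graph.size() == 2 and scalar == 0.0.
}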

View File

@ -46,6 +46,7 @@
#include "Switching.h"
#include "TinyHybridExample.h"
#include "gtsam/linear/GaussianFactorGraph.h"
using namespace std;
using namespace gtsam;
@ -73,8 +74,7 @@ TEST(HybridGaussianFactorGraph, Creation) {
HybridGaussianConditional gm(
m0,
{std::make_shared<GaussianConditional>(X(0), Z_3x1, I_3x3, X(1), I_3x3),
std::make_shared<GaussianConditional>(X(0), Vector3::Ones(), I_3x3, X(1),
I_3x3)});
std::make_shared<GaussianConditional>(X(0), Vector3::Ones(), I_3x3, X(1), I_3x3)});
hfg.add(gm);
EXPECT_LONGS_EQUAL(2, hfg.size());
@ -99,7 +99,7 @@ std::vector<GaussianFactor::shared_ptr> components(Key key) {
return {std::make_shared<JacobianFactor>(key, I_3x3, Z_3x1),
std::make_shared<JacobianFactor>(key, I_3x3, Vector3::Ones())};
}
} // namespace two
} // namespace two
/* ************************************************************************* */
TEST(HybridGaussianFactorGraph, hybridEliminationOneFactor) {
@ -239,16 +239,16 @@ TEST(HybridGaussianFactorGraph, Conditionals) {
Switching switching(4);
HybridGaussianFactorGraph hfg;
hfg.push_back(switching.linearizedFactorGraph.at(0)); // P(X0)
hfg.push_back(switching.linearizedFactorGraph.at(0)); // P(X0)
Ordering ordering;
ordering.push_back(X(0));
HybridBayesNet::shared_ptr bayes_net = hfg.eliminateSequential(ordering);
HybridGaussianFactorGraph hfg2;
hfg2.push_back(*bayes_net); // P(X0)
hfg2.push_back(switching.linearizedFactorGraph.at(1)); // P(X0, X1 | M0)
hfg2.push_back(switching.linearizedFactorGraph.at(2)); // P(X1, X2 | M1)
hfg2.push_back(switching.linearizedFactorGraph.at(5)); // P(M1)
hfg2.push_back(*bayes_net); // P(X0)
hfg2.push_back(switching.linearizedFactorGraph.at(1)); // P(X0, X1 | M0)
hfg2.push_back(switching.linearizedFactorGraph.at(2)); // P(X1, X2 | M1)
hfg2.push_back(switching.linearizedFactorGraph.at(5)); // P(M1)
ordering += X(1), X(2), M(0), M(1);
// Created product of first two factors and check eliminate:
@ -282,8 +282,7 @@ TEST(HybridGaussianFactorGraph, Conditionals) {
expected_continuous.insert<double>(X(1), 1);
expected_continuous.insert<double>(X(2), 2);
expected_continuous.insert<double>(X(3), 4);
Values result_continuous =
switching.linearizationPoint.retract(result.continuous());
Values result_continuous = switching.linearizationPoint.retract(result.continuous());
EXPECT(assert_equal(expected_continuous, result_continuous));
DiscreteValues expected_discrete;
@ -318,7 +317,7 @@ TEST(HybridGaussianFactorGraph, ErrorAndProbPrimeTree) {
// ϕ(x0) ϕ(x0,x1,m0) ϕ(x1,x2,m1) ϕ(x0;z0) ϕ(x1;z1) ϕ(x2;z2) ϕ(m0) ϕ(m0,m1)
Switching s(3);
const HybridGaussianFactorGraph &graph = s.linearizedFactorGraph;
const HybridGaussianFactorGraph& graph = s.linearizedFactorGraph;
const HybridBayesNet::shared_ptr hybridBayesNet = graph.eliminateSequential();
@ -376,19 +375,18 @@ TEST(HybridGaussianFactorGraph, IncrementalErrorTree) {
auto error_tree2 = graph.errorTree(delta.continuous());
// regression
leaves = {0.50985198, 0.0097577296, 0.50009425, 0,
0.52922138, 0.029127133, 0.50985105, 0.0097567964};
leaves = {
0.50985198, 0.0097577296, 0.50009425, 0, 0.52922138, 0.029127133, 0.50985105, 0.0097567964};
AlgebraicDecisionTree<Key> expected_error2(s.modes, leaves);
EXPECT(assert_equal(expected_error2, error_tree2, 1e-7));
}
/* ****************************************************************************/
// Check that assembleGraphTree assembles Gaussian factor graphs for each
// assignment.
// Check that collectProductFactor works correctly.
TEST(HybridGaussianFactorGraph, collectProductFactor) {
const int num_measurements = 1;
auto fg = tiny::createHybridGaussianFactorGraph(
num_measurements, VectorValues{{Z(0), Vector1(5.0)}});
VectorValues vv{{Z(0), Vector1(5.0)}};
auto fg = tiny::createHybridGaussianFactorGraph(num_measurements, vv);
EXPECT_LONGS_EQUAL(3, fg.size());
// Assemble graph tree:
@ -411,23 +409,26 @@ TEST(HybridGaussianFactorGraph, collectProductFactor) {
DiscreteValues d0{{M(0), 0}}, d1{{M(0), 1}};
// Expected decision tree with two factor graphs:
// f(x0;mode=0)P(x0) and f(x0;mode=1)P(x0)
HybridGaussianProductFactor expected{
{M(0), GaussianFactorGraph(std::vector<GF>{(*hybrid)(d0), prior}),
GaussianFactorGraph(std::vector<GF>{(*hybrid)(d1), prior})}};
// f(x0;mode=0)P(x0)
GaussianFactorGraph expectedFG0{(*hybrid)(d0), prior};
EXPECT(assert_equal(expectedFG0, actual(d0).first, 1e-5));
EXPECT(assert_equal(0.0, actual(d0).second, 1e-5));
EXPECT(assert_equal(expected(d0), actual(d0), 1e-5));
EXPECT(assert_equal(expected(d1), actual(d1), 1e-5));
// f(x0;mode=1)P(x0)
GaussianFactorGraph expectedFG1{(*hybrid)(d1), prior};
EXPECT(assert_equal(expectedFG1, actual(d1).first, 1e-5));
EXPECT(assert_equal(1.79176, actual(d1).second, 1e-5));
}
/* ****************************************************************************/
// Check that the factor graph unnormalized probability is proportional to the
// Bayes net probability for the given measurements.
bool
ratioTest(const HybridBayesNet &bn, const VectorValues &measurements,
const HybridGaussianFactorGraph &fg, size_t num_samples = 100) {
auto compute_ratio = [&](HybridValues *sample) -> double {
sample->update(measurements); // update sample with given measurements:
// Bayes net probability for the given measurements.
bool ratioTest(const HybridBayesNet& bn,
const VectorValues& measurements,
const HybridGaussianFactorGraph& fg,
size_t num_samples = 100) {
auto compute_ratio = [&](HybridValues* sample) -> double {
sample->update(measurements); // update sample with given measurements:
return bn.evaluate(*sample) / fg.probPrime(*sample);
};
@ -437,8 +438,7 @@ TEST(HybridGaussianFactorGraph, collectProductFactor) {
// Test ratios for a number of independent samples:
for (size_t i = 0; i < num_samples; i++) {
HybridValues sample = bn.sample(&kRng);
if (std::abs(expected_ratio - compute_ratio(&sample)) > 1e-6)
return false;
if (std::abs(expected_ratio - compute_ratio(&sample)) > 1e-6) return false;
}
return true;
}
@ -446,10 +446,12 @@ TEST(HybridGaussianFactorGraph, collectProductFactor) {
/* ****************************************************************************/
// Check that the bayes net unnormalized probability is proportional to the
// Bayes net probability for the given measurements.
bool ratioTest(const HybridBayesNet &bn, const VectorValues &measurements,
const HybridBayesNet &posterior, size_t num_samples = 100) {
auto compute_ratio = [&](HybridValues *sample) -> double {
sample->update(measurements); // update sample with given measurements:
bool ratioTest(const HybridBayesNet& bn,
const VectorValues& measurements,
const HybridBayesNet& posterior,
size_t num_samples = 100) {
auto compute_ratio = [&](HybridValues* sample) -> double {
sample->update(measurements); // update sample with given measurements:
return bn.evaluate(*sample) / posterior.evaluate(*sample);
};
@ -461,8 +463,7 @@ bool ratioTest(const HybridBayesNet &bn, const VectorValues &measurements,
HybridValues sample = bn.sample(&kRng);
// GTSAM_PRINT(sample);
// std::cout << "ratio: " << compute_ratio(&sample) << std::endl;
if (std::abs(expected_ratio - compute_ratio(&sample)) > 1e-6)
return false;
if (std::abs(expected_ratio - compute_ratio(&sample)) > 1e-6) return false;
}
return true;
}
@ -484,10 +485,10 @@ TEST(HybridGaussianFactorGraph, EliminateTiny1) {
// Create hybrid Gaussian factor on X(0).
using tiny::mode;
// regression, but mean checked to be 5.0 in both cases:
const auto conditional0 = std::make_shared<GaussianConditional>(
X(0), Vector1(14.1421), I_1x1 * 2.82843),
conditional1 = std::make_shared<GaussianConditional>(
X(0), Vector1(10.1379), I_1x1 * 2.02759);
const auto conditional0 =
std::make_shared<GaussianConditional>(X(0), Vector1(14.1421), I_1x1 * 2.82843),
conditional1 =
std::make_shared<GaussianConditional>(X(0), Vector1(10.1379), I_1x1 * 2.02759);
expectedBayesNet.emplace_shared<HybridGaussianConditional>(
mode, std::vector{conditional0, conditional1});
@ -515,8 +516,7 @@ TEST(HybridGaussianFactorGraph, EliminateTiny1Swapped) {
bn.emplace_shared<HybridGaussianConditional>(m1, Z(0), I_1x1, X(0), parms);
// Create prior on X(0).
bn.push_back(
GaussianConditional::sharedMeanAndStddev(X(0), Vector1(5.0), 0.5));
bn.push_back(GaussianConditional::sharedMeanAndStddev(X(0), Vector1(5.0), 0.5));
// Add prior on m1.
bn.emplace_shared<DiscreteConditional>(m1, "1/1");
@ -534,10 +534,10 @@ TEST(HybridGaussianFactorGraph, EliminateTiny1Swapped) {
// Create hybrid Gaussian factor on X(0).
// regression, but mean checked to be 5.0 in both cases:
const auto conditional0 = std::make_shared<GaussianConditional>(
X(0), Vector1(10.1379), I_1x1 * 2.02759),
conditional1 = std::make_shared<GaussianConditional>(
X(0), Vector1(14.1421), I_1x1 * 2.82843);
const auto conditional0 =
std::make_shared<GaussianConditional>(X(0), Vector1(10.1379), I_1x1 * 2.02759),
conditional1 =
std::make_shared<GaussianConditional>(X(0), Vector1(14.1421), I_1x1 * 2.82843);
expectedBayesNet.emplace_shared<HybridGaussianConditional>(
m1, std::vector{conditional0, conditional1});
@ -570,10 +570,10 @@ TEST(HybridGaussianFactorGraph, EliminateTiny2) {
// Create hybrid Gaussian conditional on X(0).
using tiny::mode;
// regression, but mean checked to be 5.0 in both cases:
const auto conditional0 = std::make_shared<GaussianConditional>(
X(0), Vector1(17.3205), I_1x1 * 3.4641),
conditional1 = std::make_shared<GaussianConditional>(
X(0), Vector1(10.274), I_1x1 * 2.0548);
const auto conditional0 =
std::make_shared<GaussianConditional>(X(0), Vector1(17.3205), I_1x1 * 3.4641),
conditional1 =
std::make_shared<GaussianConditional>(X(0), Vector1(10.274), I_1x1 * 2.0548);
expectedBayesNet.emplace_shared<HybridGaussianConditional>(
mode, std::vector{conditional0, conditional1});
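EliminateTiny2 follows the same pattern, now with two measurements per mode (same assumptions as the sketch above):
//   sqrt(1/0.5^2 + 2/0.5^2) = sqrt(12.0)   = 3.4641,  5.0 * 3.4641 = 17.3205
//   sqrt(1/0.5^2 + 2/3.0^2) = sqrt(4.2222) = 2.0548,  5.0 * 2.0548 = 10.274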
@ -617,27 +617,25 @@ TEST(HybridGaussianFactorGraph, EliminateSwitchingNetwork) {
// NOTE: we add in reverse topological order so we can sample from the Bayes net:
// Add measurements:
std::vector<std::pair<Vector, double>> measurementModels{{Z_1x1, 3},
{Z_1x1, 0.5}};
std::vector<std::pair<Vector, double>> measurementModels{{Z_1x1, 3}, {Z_1x1, 0.5}};
for (size_t t : {0, 1, 2}) {
// Create hybrid Gaussian conditional on Z(t) given X(t) and mode N(t):
const auto noise_mode_t = DiscreteKey{N(t), 2};
bn.emplace_shared<HybridGaussianConditional>(noise_mode_t, Z(t), I_1x1,
X(t), measurementModels);
bn.emplace_shared<HybridGaussianConditional>(
noise_mode_t, Z(t), I_1x1, X(t), measurementModels);
// Create prior on discrete mode N(t):
bn.emplace_shared<DiscreteConditional>(noise_mode_t, "20/80");
}
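// In generative terms, each iteration above added (using only the parameters shown):
//   P(N(t) = 0) = 0.2, P(N(t) = 1) = 0.8
//   Z(t) | X(t), N(t) ~ N(X(t), sigma^2), with sigma = 3.0 if N(t) == 0 and 0.5 if N(t) == 1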
// Add motion models. TODO(frank): why are they exactly the same?
std::vector<std::pair<Vector, double>> motionModels{{Z_1x1, 0.2},
{Z_1x1, 0.2}};
std::vector<std::pair<Vector, double>> motionModels{{Z_1x1, 0.2}, {Z_1x1, 0.2}};
for (size_t t : {2, 1}) {
// Create hybrid Gaussian conditional on X(t) given X(t-1)
// and mode M(t):
const auto motion_model_t = DiscreteKey{M(t), 2};
bn.emplace_shared<HybridGaussianConditional>(motion_model_t, X(t), I_1x1,
X(t - 1), motionModels);
bn.emplace_shared<HybridGaussianConditional>(
motion_model_t, X(t), I_1x1, X(t - 1), motionModels);
// Create prior on motion model M(t):
bn.emplace_shared<DiscreteConditional>(motion_model_t, "40/60");
@ -650,8 +648,7 @@ TEST(HybridGaussianFactorGraph, EliminateSwitchingNetwork) {
EXPECT_LONGS_EQUAL(6, bn.sample().continuous().size());
// Create measurements consistent with moving right every time:
const VectorValues measurements{
{Z(0), Vector1(0.0)}, {Z(1), Vector1(1.0)}, {Z(2), Vector1(2.0)}};
const VectorValues measurements{{Z(0), Vector1(0.0)}, {Z(1), Vector1(1.0)}, {Z(2), Vector1(2.0)}};
const HybridGaussianFactorGraph fg = bn.toFactorGraph(measurements);
// Factor graph is:

View File

@ -16,11 +16,11 @@
* @date October 2024
*/
#include "gtsam/inference/Key.h"
#include <gtsam/base/Testable.h>
#include <gtsam/base/TestableAssertions.h>
#include <gtsam/hybrid/HybridGaussianFactor.h>
#include <gtsam/hybrid/HybridGaussianProductFactor.h>
#include <gtsam/inference/Key.h>
#include <gtsam/inference/Symbol.h>
#include <gtsam/linear/GaussianConditional.h>
#include <gtsam/linear/JacobianFactor.h>
@ -39,29 +39,27 @@ using symbol_shorthand::X;
namespace examples {
static const DiscreteKey m1(M(1), 2), m2(M(2), 3);
auto A1 = Matrix::Zero(2, 1);
auto A2 = Matrix::Zero(2, 2);
auto b = Matrix::Zero(2, 1);
const auto A1 = Matrix::Zero(2, 1);
const auto A2 = Matrix::Zero(2, 2);
const auto b = Matrix::Zero(2, 1);
auto f10 = std::make_shared<JacobianFactor>(X(1), A1, X(2), A2, b);
auto f11 = std::make_shared<JacobianFactor>(X(1), A1, X(2), A2, b);
const auto f10 = std::make_shared<JacobianFactor>(X(1), A1, X(2), A2, b);
const auto f11 = std::make_shared<JacobianFactor>(X(1), A1, X(2), A2, b);
const HybridGaussianFactor hybridFactorA(m1, {{f10, 10}, {f11, 11}});
auto A3 = Matrix::Zero(2, 3);
auto f20 = std::make_shared<JacobianFactor>(X(1), A1, X(3), A3, b);
auto f21 = std::make_shared<JacobianFactor>(X(1), A1, X(3), A3, b);
auto f22 = std::make_shared<JacobianFactor>(X(1), A1, X(3), A3, b);
const auto A3 = Matrix::Zero(2, 3);
const auto f20 = std::make_shared<JacobianFactor>(X(1), A1, X(3), A3, b);
const auto f21 = std::make_shared<JacobianFactor>(X(1), A1, X(3), A3, b);
const auto f22 = std::make_shared<JacobianFactor>(X(1), A1, X(3), A3, b);
HybridGaussianFactor hybridFactorA(m1, {f10, f11});
HybridGaussianFactor hybridFactorB(m2, {f20, f21, f22});
const HybridGaussianFactor hybridFactorB(m2, {{f20, 20}, {f21, 21}, {f22, 22}});
// Simulate a pruned hybrid factor, in which the m2==1 branch is nulled out.
HybridGaussianFactor prunedFactorB(m2, {f20, nullptr, f22});
} // namespace examples
const HybridGaussianFactor prunedFactorB(m2, {{f20, 20}, {nullptr, 1000}, {f22, 22}});
} // namespace examples
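The change exercised below is the commit's theme: every decision-tree leaf of a HybridGaussianProductFactor now carries a scalar alongside its Gaussian factors. Sketched with the names used in the tests (`product` and `assignment` are stand-ins, not part of the file):
//   const auto leaf = product(assignment);  // leaf: a {factors, scalar} pair, judging by the assertions
//   leaf.first    // the Gaussian factors selected by `assignment`
//   leaf.second   // their accumulated scalar, e.g. 10 + 20 for {M(1)=0, M(2)=0} in AddTwo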
/* ************************************************************************* */
// Constructor
TEST(HybridGaussianProductFactor, Construct) {
HybridGaussianProductFactor product;
}
TEST(HybridGaussianProductFactor, Construct) { HybridGaussianProductFactor product; }
/* ************************************************************************* */
// Add two Gaussian factors and check only one leaf in tree
@ -80,9 +78,10 @@ TEST(HybridGaussianProductFactor, AddTwoGaussianFactors) {
auto leaf = product(Assignment<Key>());
// Check that the leaf contains both factors
EXPECT_LONGS_EQUAL(2, leaf.size());
EXPECT(leaf.at(0) == f10);
EXPECT(leaf.at(1) == f11);
EXPECT_LONGS_EQUAL(2, leaf.first.size());
EXPECT(leaf.first.at(0) == f10);
EXPECT(leaf.first.at(1) == f11);
EXPECT_DOUBLES_EQUAL(0, leaf.second, 1e-9);
}
/* ************************************************************************* */
@ -107,9 +106,10 @@ TEST(HybridGaussianProductFactor, AddTwoGaussianConditionals) {
auto leaf = product(Assignment<Key>());
// Check that the leaf contains both conditionals
EXPECT_LONGS_EQUAL(2, leaf.size());
EXPECT(leaf.at(0) == gc1);
EXPECT(leaf.at(1) == gc2);
EXPECT_LONGS_EQUAL(2, leaf.first.size());
EXPECT(leaf.first.at(0) == gc1);
EXPECT(leaf.first.at(1) == gc2);
EXPECT_DOUBLES_EQUAL(0, leaf.second, 1e-9);
}
/* ************************************************************************* */
@ -120,9 +120,12 @@ TEST(HybridGaussianProductFactor, AsProductFactor) {
// Let's check that this worked:
Assignment<Key> mode;
mode[m1.first] = 1;
mode[m1.first] = 0;
auto actual = product(mode);
EXPECT(actual.at(0) == f11);
EXPECT(actual.first.at(0) == f10);
EXPECT_DOUBLES_EQUAL(10, actual.second, 1e-9);
// TODO(Frank): once hiding is removed, f11 should also be there
}
/* ************************************************************************* */
@ -134,9 +137,12 @@ TEST(HybridGaussianProductFactor, AddOne) {
// Let's check that this worked:
Assignment<Key> mode;
mode[m1.first] = 1;
mode[m1.first] = 0;
auto actual = product(mode);
EXPECT(actual.at(0) == f11);
EXPECT(actual.first.at(0) == f10);
EXPECT_DOUBLES_EQUAL(10, actual.second, 1e-9);
// TODO(Frank): once hiding is removed, f11 should also be there
}
/* ************************************************************************* */
@ -152,12 +158,15 @@ TEST(HybridGaussianProductFactor, AddTwo) {
// Let's check that this worked:
auto actual00 = product({{M(1), 0}, {M(2), 0}});
EXPECT(actual00.at(0) == f10);
EXPECT(actual00.at(1) == f20);
EXPECT(actual00.first.at(0) == f10);
EXPECT(actual00.first.at(1) == f20);
EXPECT_DOUBLES_EQUAL(10 + 20, actual00.second, 1e-9);
auto actual12 = product({{M(1), 1}, {M(2), 2}});
EXPECT(actual12.at(0) == f11);
EXPECT(actual12.at(1) == f22);
// TODO(Frank): once hiding is removed, these should also hold:
// EXPECT(actual12.first.at(0) == f11);
// EXPECT(actual12.first.at(1) == f22);
EXPECT_DOUBLES_EQUAL(11 + 22, actual12.second, 1e-9);
}
/* ************************************************************************* */