Merge branch 'develop' into hybrid/tests

commit cf46c36862
@@ -340,6 +340,10 @@ class Rot3 {
   gtsam::Point3 rotate(const gtsam::Point3& p) const;
   gtsam::Point3 unrotate(const gtsam::Point3& p) const;
 
+  // Group action on Unit3
+  gtsam::Unit3 rotate(const gtsam::Unit3& p) const;
+  gtsam::Unit3 unrotate(const gtsam::Unit3& p) const;
+
   // Standard Interface
   static gtsam::Rot3 Expmap(Vector v);
   static Vector Logmap(const gtsam::Rot3& p);
@@ -21,6 +21,7 @@
 #include <gtsam/base/utilities.h>
 #include <gtsam/discrete/DiscreteValues.h>
 #include <gtsam/hybrid/GaussianMixture.h>
+#include <gtsam/hybrid/GaussianMixtureFactor.h>
 #include <gtsam/inference/Conditional-inst.h>
 #include <gtsam/linear/GaussianFactorGraph.h>
 
@@ -36,20 +37,17 @@ GaussianMixture::GaussianMixture(
       conditionals_(conditionals) {}
 
 /* *******************************************************************************/
-const GaussianMixture::Conditionals &GaussianMixture::conditionals() {
+const GaussianMixture::Conditionals &GaussianMixture::conditionals() const {
   return conditionals_;
 }
 
 /* *******************************************************************************/
-GaussianMixture GaussianMixture::FromConditionals(
+GaussianMixture::GaussianMixture(
     const KeyVector &continuousFrontals, const KeyVector &continuousParents,
     const DiscreteKeys &discreteParents,
-    const std::vector<GaussianConditional::shared_ptr> &conditionalsList) {
-  Conditionals dt(discreteParents, conditionalsList);
-
-  return GaussianMixture(continuousFrontals, continuousParents, discreteParents,
-                         dt);
-}
+    const std::vector<GaussianConditional::shared_ptr> &conditionalsList)
+    : GaussianMixture(continuousFrontals, continuousParents, discreteParents,
+                      Conditionals(discreteParents, conditionalsList)) {}
 
 /* *******************************************************************************/
 GaussianMixture::Sum GaussianMixture::add(
@@ -128,6 +126,36 @@ void GaussianMixture::print(const std::string &s,
       });
 }
 
+/* ************************************************************************* */
+KeyVector GaussianMixture::continuousParents() const {
+  // Get all parent keys:
+  const auto range = parents();
+  KeyVector continuousParentKeys(range.begin(), range.end());
+  // Loop over all discrete keys:
+  for (const auto &discreteKey : discreteKeys()) {
+    const Key key = discreteKey.first;
+    // remove that key from continuousParentKeys:
+    continuousParentKeys.erase(std::remove(continuousParentKeys.begin(),
+                                           continuousParentKeys.end(), key),
+                               continuousParentKeys.end());
+  }
+  return continuousParentKeys;
+}
+
+/* ************************************************************************* */
+boost::shared_ptr<GaussianMixtureFactor> GaussianMixture::likelihood(
+    const VectorValues &frontals) const {
+  // TODO(dellaert): check that values has all frontals
+  const DiscreteKeys discreteParentKeys = discreteKeys();
+  const KeyVector continuousParentKeys = continuousParents();
+  const GaussianMixtureFactor::Factors likelihoods(
+      conditionals(), [&](const GaussianConditional::shared_ptr &conditional) {
+        return conditional->likelihood(frontals);
+      });
+  return boost::make_shared<GaussianMixtureFactor>(
+      continuousParentKeys, discreteParentKeys, likelihoods);
+}
+
 /* ************************************************************************* */
 std::set<DiscreteKey> DiscreteKeysAsSet(const DiscreteKeys &dkeys) {
   std::set<DiscreteKey> s;
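Review note: the static FromConditionals factory is now a regular constructor that
delegates to the DecisionTree-based one, and the new likelihood() turns a conditional
mixture into a GaussianMixtureFactor on the parents. A minimal usage sketch, mirroring
the new unit tests further down (keys Z(0)/X(0) and the binary mode key are illustrative):

    const DiscreteKey mode(M(0), 2);
    Matrix1 I = Matrix1::Identity();
    const auto c0 = boost::make_shared<GaussianConditional>(
        GaussianConditional::FromMeanAndStddev(Z(0), I, X(0), Vector1(0), 0.5));
    const auto c1 = boost::make_shared<GaussianConditional>(
        GaussianConditional::FromMeanAndStddev(Z(0), I, X(0), Vector1(0), 3));
    // was: auto gm = GaussianMixture::FromConditionals({Z(0)}, {X(0)}, {mode}, {c0, c1});
    GaussianMixture gm({Z(0)}, {X(0)}, {mode}, {c0, c1});

    VectorValues measurements;
    measurements.insert(Z(0), Vector1(0));
    auto factor = gm.likelihood(measurements);  // mixture factor on X(0) and the mode
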
@@ -29,6 +29,8 @@
 
 namespace gtsam {
 
+class GaussianMixtureFactor;
+
 /**
  * @brief A conditional of gaussian mixtures indexed by discrete variables, as
  * part of a Bayes Network. This is the result of the elimination of a
@@ -112,21 +114,11 @@ class GTSAM_EXPORT GaussianMixture
    * @param discreteParents Discrete parents variables
    * @param conditionals List of conditionals
    */
-  static This FromConditionals(
+  GaussianMixture(
       const KeyVector &continuousFrontals, const KeyVector &continuousParents,
       const DiscreteKeys &discreteParents,
       const std::vector<GaussianConditional::shared_ptr> &conditionals);
 
   /// @}
-  /// @name Standard API
-  /// @{
-
-  GaussianConditional::shared_ptr operator()(
-      const DiscreteValues &discreteValues) const;
-
-  /// Returns the total number of continuous components
-  size_t nrComponents() const;
-
-  /// @}
   /// @name Testable
   /// @{
@@ -140,9 +132,25 @@ class GTSAM_EXPORT GaussianMixture
              const KeyFormatter &formatter = DefaultKeyFormatter) const override;
 
   /// @}
+  /// @name Standard API
+  /// @{
+
+  GaussianConditional::shared_ptr operator()(
+      const DiscreteValues &discreteValues) const;
+
+  /// Returns the total number of continuous components
+  size_t nrComponents() const;
+
+  /// Returns the continuous keys among the parents.
+  KeyVector continuousParents() const;
+
+  // Create a likelihood factor for a Gaussian mixture, return a Mixture factor
+  // on the parents.
+  boost::shared_ptr<GaussianMixtureFactor> likelihood(
+      const VectorValues &frontals) const;
 
   /// Getter for the underlying Conditionals DecisionTree
-  const Conditionals &conditionals();
+  const Conditionals &conditionals() const;
 
   /**
   * @brief Compute error of the GaussianMixture as a tree.
@@ -181,6 +189,7 @@ class GTSAM_EXPORT GaussianMixture
   * @return Sum
   */
  Sum add(const Sum &sum) const;
+  /// @}
 };
 
 /// Return the DiscreteKey vector as a set.
@@ -35,16 +35,19 @@ GaussianMixtureFactor::GaussianMixtureFactor(const KeyVector &continuousKeys,
 /* *******************************************************************************/
 bool GaussianMixtureFactor::equals(const HybridFactor &lf, double tol) const {
   const This *e = dynamic_cast<const This *>(&lf);
-  return e != nullptr && Base::equals(*e, tol);
-}
+  if (e == nullptr) return false;
 
-/* *******************************************************************************/
-GaussianMixtureFactor GaussianMixtureFactor::FromFactors(
-    const KeyVector &continuousKeys, const DiscreteKeys &discreteKeys,
-    const std::vector<GaussianFactor::shared_ptr> &factors) {
-  Factors dt(discreteKeys, factors);
+  // This will return false if either factors_ is empty or e->factors_ is empty,
+  // but not if both are empty or both are not empty:
+  if (factors_.empty() ^ e->factors_.empty()) return false;
 
-  return GaussianMixtureFactor(continuousKeys, discreteKeys, dt);
+  // Check the base and the factors:
+  return Base::equals(*e, tol) &&
+         factors_.equals(e->factors_,
+                         [tol](const GaussianFactor::shared_ptr &f1,
+                               const GaussianFactor::shared_ptr &f2) {
+                           return f1->equals(*f2, tol);
+                         });
 }
 
 /* *******************************************************************************/
@@ -52,18 +55,22 @@ void GaussianMixtureFactor::print(const std::string &s,
                                   const KeyFormatter &formatter) const {
   HybridFactor::print(s, formatter);
   std::cout << "{\n";
-  factors_.print(
-      "", [&](Key k) { return formatter(k); },
-      [&](const GaussianFactor::shared_ptr &gf) -> std::string {
-        RedirectCout rd;
-        std::cout << ":\n";
-        if (gf && !gf->empty()) {
-          gf->print("", formatter);
-          return rd.str();
-        } else {
-          return "nullptr";
-        }
-      });
+  if (factors_.empty()) {
+    std::cout << " empty" << std::endl;
+  } else {
+    factors_.print(
+        "", [&](Key k) { return formatter(k); },
+        [&](const GaussianFactor::shared_ptr &gf) -> std::string {
+          RedirectCout rd;
+          std::cout << ":\n";
+          if (gf && !gf->empty()) {
+            gf->print("", formatter);
+            return rd.str();
+          } else {
+            return "nullptr";
+          }
+        });
+  }
   std::cout << "}" << std::endl;
 }
 
@@ -93,19 +93,16 @@ class GTSAM_EXPORT GaussianMixtureFactor : public HybridFactor {
    * @brief Construct a new GaussianMixtureFactor object using a vector of
    * GaussianFactor shared pointers.
    *
-   * @param keys Vector of keys for continuous factors.
+   * @param continuousKeys Vector of keys for continuous factors.
    * @param discreteKeys Vector of discrete keys.
    * @param factors Vector of gaussian factor shared pointers.
    */
-  GaussianMixtureFactor(const KeyVector &keys, const DiscreteKeys &discreteKeys,
+  GaussianMixtureFactor(const KeyVector &continuousKeys,
+                        const DiscreteKeys &discreteKeys,
                         const std::vector<GaussianFactor::shared_ptr> &factors)
-      : GaussianMixtureFactor(keys, discreteKeys,
+      : GaussianMixtureFactor(continuousKeys, discreteKeys,
                               Factors(discreteKeys, factors)) {}
 
-  static This FromFactors(
-      const KeyVector &continuousKeys, const DiscreteKeys &discreteKeys,
-      const std::vector<GaussianFactor::shared_ptr> &factors);
-
   /// @}
   /// @name Testable
   /// @{
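Review note: with FromFactors gone, call sites construct GaussianMixtureFactor directly,
exactly as the updated tests below do; a sketch with illustrative Jacobian factors:

    // was: hfg.add(GaussianMixtureFactor::FromFactors({X(1)}, {{M(1), 2}}, {f0, f1}));
    hfg.add(GaussianMixtureFactor(
        {X(1)}, {{M(1), 2}},
        {boost::make_shared<JacobianFactor>(X(1), I_3x3, Z_3x1),
         boost::make_shared<JacobianFactor>(X(1), I_3x3, Vector3::Ones())}));
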
@@ -36,7 +36,7 @@ DecisionTreeFactor::shared_ptr HybridBayesNet::discreteConditionals() const {
   for (auto &&conditional : *this) {
     if (conditional->isDiscrete()) {
       // Convert to a DecisionTreeFactor and add it to the main factor.
-      DecisionTreeFactor f(*conditional->asDiscreteConditional());
+      DecisionTreeFactor f(*conditional->asDiscrete());
       dtFactor = dtFactor * f;
     }
   }
@@ -109,7 +109,7 @@ void HybridBayesNet::updateDiscreteConditionals(
     HybridConditional::shared_ptr conditional = this->at(i);
     if (conditional->isDiscrete()) {
       // std::cout << demangle(typeid(conditional).name()) << std::endl;
-      auto discrete = conditional->asDiscreteConditional();
+      auto discrete = conditional->asDiscrete();
       KeyVector frontals(discrete->frontals().begin(),
                          discrete->frontals().end());
 
@@ -152,13 +152,10 @@ HybridBayesNet HybridBayesNet::prune(size_t maxNrLeaves) {
   // Go through all the conditionals in the
   // Bayes Net and prune them as per decisionTree.
   for (auto &&conditional : *this) {
-    if (conditional->isHybrid()) {
-      GaussianMixture::shared_ptr gaussianMixture = conditional->asMixture();
-
+    if (auto gm = conditional->asMixture()) {
       // Make a copy of the Gaussian mixture and prune it!
-      auto prunedGaussianMixture =
-          boost::make_shared<GaussianMixture>(*gaussianMixture);
-      prunedGaussianMixture->prune(*decisionTree);
+      auto prunedGaussianMixture = boost::make_shared<GaussianMixture>(*gm);
+      prunedGaussianMixture->prune(*decisionTree);  // imperative :-(
 
       // Type-erase and add to the pruned Bayes Net fragment.
       prunedBayesNetFragment.push_back(
@@ -185,7 +182,7 @@ GaussianConditional::shared_ptr HybridBayesNet::atGaussian(size_t i) const {
 
 /* ************************************************************************* */
 DiscreteConditional::shared_ptr HybridBayesNet::atDiscrete(size_t i) const {
-  return at(i)->asDiscreteConditional();
+  return at(i)->asDiscrete();
 }
 
 /* ************************************************************************* */
@@ -193,16 +190,13 @@ GaussianBayesNet HybridBayesNet::choose(
     const DiscreteValues &assignment) const {
   GaussianBayesNet gbn;
   for (auto &&conditional : *this) {
-    if (conditional->isHybrid()) {
+    if (auto gm = conditional->asMixture()) {
       // If conditional is hybrid, select based on assignment.
-      GaussianMixture gm = *conditional->asMixture();
-      gbn.push_back(gm(assignment));
-
-    } else if (conditional->isContinuous()) {
+      gbn.push_back((*gm)(assignment));
+    } else if (auto gc = conditional->asGaussian()) {
       // If continuous only, add Gaussian conditional.
-      gbn.push_back((conditional->asGaussian()));
-
-    } else if (conditional->isDiscrete()) {
+      gbn.push_back(gc);
+    } else if (auto dc = conditional->asDiscrete()) {
       // If conditional is discrete-only, we simply continue.
       continue;
     }
@@ -217,20 +211,20 @@ HybridValues HybridBayesNet::optimize() const {
   DiscreteBayesNet discrete_bn;
   for (auto &&conditional : *this) {
     if (conditional->isDiscrete()) {
-      discrete_bn.push_back(conditional->asDiscreteConditional());
+      discrete_bn.push_back(conditional->asDiscrete());
     }
   }
 
   DiscreteValues mpe = DiscreteFactorGraph(discrete_bn).optimize();
 
   // Given the MPE, compute the optimal continuous values.
-  GaussianBayesNet gbn = this->choose(mpe);
-  return HybridValues(mpe, gbn.optimize());
+  GaussianBayesNet gbn = choose(mpe);
+  return HybridValues(gbn.optimize(), mpe);
 }
 
 /* ************************************************************************* */
 VectorValues HybridBayesNet::optimize(const DiscreteValues &assignment) const {
-  GaussianBayesNet gbn = this->choose(assignment);
+  GaussianBayesNet gbn = choose(assignment);
 
   // Check if there exists a nullptr in the GaussianBayesNet
   // If yes, return an empty VectorValues
@@ -240,6 +234,30 @@ VectorValues HybridBayesNet::optimize(const DiscreteValues &assignment) const {
   return gbn.optimize();
 }
 
+/* ************************************************************************* */
+double HybridBayesNet::evaluate(const HybridValues &values) const {
+  const DiscreteValues &discreteValues = values.discrete();
+  const VectorValues &continuousValues = values.continuous();
+
+  double logDensity = 0.0, probability = 1.0;
+
+  // Iterate over each conditional.
+  for (auto &&conditional : *this) {
+    if (auto gm = conditional->asMixture()) {
+      const auto component = (*gm)(discreteValues);
+      logDensity += component->logDensity(continuousValues);
+    } else if (auto gc = conditional->asGaussian()) {
+      // If continuous only, evaluate the probability and multiply.
+      logDensity += gc->logDensity(continuousValues);
+    } else if (auto dc = conditional->asDiscrete()) {
+      // Conditional is discrete-only, so return its probability.
+      probability *= dc->operator()(discreteValues);
+    }
+  }
+
+  return probability * exp(logDensity);
+}
+
 /* ************************************************************************* */
 HybridValues HybridBayesNet::sample(const HybridValues &given,
                                     std::mt19937_64 *rng) const {
@@ -247,7 +265,7 @@ HybridValues HybridBayesNet::sample(const HybridValues &given,
   for (auto &&conditional : *this) {
     if (conditional->isDiscrete()) {
       // If conditional is discrete-only, we add to the discrete Bayes net.
-      dbn.push_back(conditional->asDiscreteConditional());
+      dbn.push_back(conditional->asDiscrete());
     }
   }
   // Sample a discrete assignment.
@@ -256,7 +274,7 @@ HybridValues HybridBayesNet::sample(const HybridValues &given,
   GaussianBayesNet gbn = choose(assignment);
   // Sample from the Gaussian Bayes net.
   VectorValues sample = gbn.sample(given.continuous(), rng);
-  return {assignment, sample};
+  return {sample, assignment};
 }
 
 /* ************************************************************************* */
@@ -278,7 +296,7 @@ HybridValues HybridBayesNet::sample() const {
 /* ************************************************************************* */
 double HybridBayesNet::error(const VectorValues &continuousValues,
                              const DiscreteValues &discreteValues) const {
-  GaussianBayesNet gbn = this->choose(discreteValues);
+  GaussianBayesNet gbn = choose(discreteValues);
   return gbn.error(continuousValues);
 }
 
@@ -289,23 +307,20 @@ AlgebraicDecisionTree<Key> HybridBayesNet::error(
 
   // Iterate over each conditional.
   for (auto &&conditional : *this) {
-    if (conditional->isHybrid()) {
+    if (auto gm = conditional->asMixture()) {
       // If conditional is hybrid, select based on assignment and compute error.
-      GaussianMixture::shared_ptr gm = conditional->asMixture();
       AlgebraicDecisionTree<Key> conditional_error =
           gm->error(continuousValues);
 
       error_tree = error_tree + conditional_error;
-
-    } else if (conditional->isContinuous()) {
+    } else if (auto gc = conditional->asGaussian()) {
       // If continuous only, get the (double) error
       // and add it to the error_tree
-      double error = conditional->asGaussian()->error(continuousValues);
+      double error = gc->error(continuousValues);
      // Add the computed error to every leaf of the error tree.
       error_tree = error_tree.apply(
           [error](double leaf_value) { return leaf_value + error; });
-
-    } else if (conditional->isDiscrete()) {
+    } else if (auto dc = conditional->asDiscrete()) {
       // Conditional is discrete-only, we skip.
       continue;
     }
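Review note: for a hybrid assignment (x, m), the new evaluate() computes

    P(x, m) = prod_i P(d_i | m) * exp( sum_j log p_j(x | m) ),

i.e. discrete conditionals contribute probabilities multiplicatively, while Gaussian
conditionals and the selected mixture components are accumulated in log space and
exponentiated once at the end.
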
@@ -69,10 +69,40 @@ class GTSAM_EXPORT HybridBayesNet : public BayesNet<HybridConditional> {
   /// Add HybridConditional to Bayes Net
   using Base::add;
 
+  /// Add a Gaussian Mixture to the Bayes Net.
+  void addMixture(const GaussianMixture::shared_ptr &ptr) {
+    push_back(HybridConditional(ptr));
+  }
+
+  /// Add a Gaussian conditional to the Bayes Net.
+  void addGaussian(const GaussianConditional::shared_ptr &ptr) {
+    push_back(HybridConditional(ptr));
+  }
+
   /// Add a discrete conditional to the Bayes Net.
-  void add(const DiscreteKey &key, const std::string &table) {
-    push_back(
-        HybridConditional(boost::make_shared<DiscreteConditional>(key, table)));
+  void addDiscrete(const DiscreteConditional::shared_ptr &ptr) {
+    push_back(HybridConditional(ptr));
+  }
+
+  /// Add a Gaussian Mixture to the Bayes Net.
+  template <typename... T>
+  void emplaceMixture(T &&...args) {
+    push_back(HybridConditional(
+        boost::make_shared<GaussianMixture>(std::forward<T>(args)...)));
+  }
+
+  /// Add a Gaussian conditional to the Bayes Net.
+  template <typename... T>
+  void emplaceGaussian(T &&...args) {
+    push_back(HybridConditional(
+        boost::make_shared<GaussianConditional>(std::forward<T>(args)...)));
+  }
+
+  /// Add a discrete conditional to the Bayes Net.
+  template <typename... T>
+  void emplaceDiscrete(T &&...args) {
+    push_back(HybridConditional(
+        boost::make_shared<DiscreteConditional>(std::forward<T>(args)...)));
   }
 
   using Base::push_back;
@@ -95,6 +125,14 @@ class GTSAM_EXPORT HybridBayesNet : public BayesNet<HybridConditional> {
   */
  GaussianBayesNet choose(const DiscreteValues &assignment) const;
 
+  /// Evaluate hybrid probability density for given HybridValues.
+  double evaluate(const HybridValues &values) const;
+
+  /// Evaluate hybrid probability density for given HybridValues, sugar.
+  double operator()(const HybridValues &values) const {
+    return evaluate(values);
+  }
+
   /**
   * @brief Solve the HybridBayesNet by first computing the MPE of all the
   * discrete variables and then optimizing the continuous variables based on
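Review note: the string-based add(key, table) overload is replaced by typed
addMixture/addGaussian/addDiscrete taking shared pointers, plus variadic emplaceX
helpers that forward their arguments to boost::make_shared. Usage, mirroring the
updated tests:

    HybridBayesNet bn;
    bn.emplaceDiscrete(Asia, "99/1");           // was: bn.add(Asia, "99/1");
    bn.emplaceGaussian(continuousConditional);  // copies into a fresh GaussianConditional
    bn.emplaceMixture(gm);                      // copies into a fresh GaussianMixture
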
@@ -49,7 +49,7 @@ HybridValues HybridBayesTree::optimize() const {
 
   // The root should be discrete only, we compute the MPE
   if (root_conditional->isDiscrete()) {
-    dbn.push_back(root_conditional->asDiscreteConditional());
+    dbn.push_back(root_conditional->asDiscrete());
     mpe = DiscreteFactorGraph(dbn).optimize();
   } else {
     throw std::runtime_error(
@@ -58,7 +58,7 @@ HybridValues HybridBayesTree::optimize() const {
   }
 
   VectorValues values = optimize(mpe);
-  return HybridValues(mpe, values);
+  return HybridValues(values, mpe);
 }
 
 /* ************************************************************************* */
@@ -162,7 +162,7 @@ VectorValues HybridBayesTree::optimize(const DiscreteValues& assignment) const {
 /* ************************************************************************* */
 void HybridBayesTree::prune(const size_t maxNrLeaves) {
   auto decisionTree =
-      this->roots_.at(0)->conditional()->asDiscreteConditional();
+      this->roots_.at(0)->conditional()->asDiscrete();
 
   DecisionTreeFactor prunedDecisionTree = decisionTree->prune(maxNrLeaves);
   decisionTree->root_ = prunedDecisionTree.root_;
@@ -131,34 +131,29 @@ class GTSAM_EXPORT HybridConditional
 
   /**
   * @brief Return HybridConditional as a GaussianMixture
-   *
-   * @return GaussianMixture::shared_ptr
+   * @return nullptr if not a mixture
+   * @return GaussianMixture::shared_ptr otherwise
   */
  GaussianMixture::shared_ptr asMixture() {
-    if (!isHybrid()) throw std::invalid_argument("Not a mixture");
-    return boost::static_pointer_cast<GaussianMixture>(inner_);
+    return boost::dynamic_pointer_cast<GaussianMixture>(inner_);
  }
 
   /**
   * @brief Return HybridConditional as a GaussianConditional
-   *
-   * @return GaussianConditional::shared_ptr
+   * @return nullptr if not a GaussianConditional
+   * @return GaussianConditional::shared_ptr otherwise
   */
  GaussianConditional::shared_ptr asGaussian() {
-    if (!isContinuous())
-      throw std::invalid_argument("Not a continuous conditional");
-    return boost::static_pointer_cast<GaussianConditional>(inner_);
+    return boost::dynamic_pointer_cast<GaussianConditional>(inner_);
  }
 
   /**
   * @brief Return conditional as a DiscreteConditional
   *
+   * @return nullptr if not a DiscreteConditional
   * @return DiscreteConditional::shared_ptr
   */
-  DiscreteConditional::shared_ptr asDiscreteConditional() {
-    if (!isDiscrete())
-      throw std::invalid_argument("Not a discrete conditional");
-    return boost::static_pointer_cast<DiscreteConditional>(inner_);
+  DiscreteConditional::shared_ptr asDiscrete() {
+    return boost::dynamic_pointer_cast<DiscreteConditional>(inner_);
  }
 
   /// @}
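Review note: the asX() accessors now return nullptr via dynamic_pointer_cast instead of
throwing std::invalid_argument, which enables the if-with-initializer dispatch idiom
adopted throughout HybridBayesNet.cpp above:

    for (auto &&conditional : bayesNet) {
      if (auto gm = conditional->asMixture()) {
        // ... hybrid conditional
      } else if (auto gc = conditional->asGaussian()) {
        // ... continuous-only conditional
      } else if (auto dc = conditional->asDiscrete()) {
        // ... discrete-only conditional
      }
    }

Callers that relied on the old exception must now check for nullptr themselves.
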
@@ -524,6 +524,7 @@ double HybridGaussianFactorGraph::probPrime(
     const VectorValues &continuousValues,
     const DiscreteValues &discreteValues) const {
   double error = this->error(continuousValues, discreteValues);
+  // NOTE: The 0.5 term is handled by each factor
   return std::exp(-error);
 }
 
@@ -531,8 +532,10 @@ double HybridGaussianFactorGraph::probPrime(
 AlgebraicDecisionTree<Key> HybridGaussianFactorGraph::probPrime(
     const VectorValues &continuousValues) const {
   AlgebraicDecisionTree<Key> error_tree = this->error(continuousValues);
-  AlgebraicDecisionTree<Key> prob_tree =
-      error_tree.apply([](double error) { return exp(-error); });
+  AlgebraicDecisionTree<Key> prob_tree = error_tree.apply([](double error) {
+    // NOTE: The 0.5 term is handled by each factor
+    return exp(-error);
+  });
   return prob_tree;
 }
 
@@ -219,94 +219,6 @@ class GTSAM_EXPORT HybridGaussianFactorGraph
   double probPrime(const VectorValues& continuousValues,
                    const DiscreteValues& discreteValues) const;
 
-  /**
-   * @brief Helper method to compute the VectorValues solution for the
-   * continuous variables for each discrete mode.
-   * Used as a helper to compute q(\mu | M, Z) which is used by
-   * both P(X | M, Z) and P(M | Z).
-   *
-   * @tparam BAYES Template on the type of Bayes graph, either a bayes net or a
-   * bayes tree.
-   * @param discrete_keys The discrete keys which form all the modes.
-   * @param continuousBayesNet The Bayes Net/Tree representing the continuous
-   * eliminated variables.
-   * @param assignments List of all discrete assignments to create the final
-   * decision tree.
-   * @return DecisionTree<Key, VectorValues::shared_ptr>
-   */
-  template <typename BAYES>
-  DecisionTree<Key, VectorValues::shared_ptr> continuousDelta(
-      const DiscreteKeys& discrete_keys,
-      const boost::shared_ptr<BAYES>& continuousBayesNet,
-      const std::vector<DiscreteValues>& assignments) const {
-    // Create a decision tree of all the different VectorValues
-    std::vector<VectorValues::shared_ptr> vector_values;
-    for (const DiscreteValues& assignment : assignments) {
-      VectorValues values = continuousBayesNet->optimize(assignment);
-      vector_values.push_back(boost::make_shared<VectorValues>(values));
-    }
-    DecisionTree<Key, VectorValues::shared_ptr> delta_tree(discrete_keys,
-                                                           vector_values);
-
-    return delta_tree;
-  }
-
-  /**
-   * @brief Compute the unnormalized probabilities of the continuous variables
-   * for each of the modes.
-   *
-   * @tparam BAYES Template on the type of Bayes graph, either a bayes net or a
-   * bayes tree.
-   * @param discrete_keys The discrete keys which form all the modes.
-   * @param continuousBayesNet The Bayes Net representing the continuous
-   * eliminated variables.
-   * @return AlgebraicDecisionTree<Key>
-   */
-  template <typename BAYES>
-  AlgebraicDecisionTree<Key> continuousProbPrimes(
-      const DiscreteKeys& discrete_keys,
-      const boost::shared_ptr<BAYES>& continuousBayesNet) const {
-    // Generate all possible assignments.
-    const std::vector<DiscreteValues> assignments =
-        DiscreteValues::CartesianProduct(discrete_keys);
-
-    // Save a copy of the original discrete key ordering
-    DiscreteKeys reversed_discrete_keys(discrete_keys);
-    // Reverse discrete keys order for correct tree construction
-    std::reverse(reversed_discrete_keys.begin(), reversed_discrete_keys.end());
-
-    // Create a decision tree of all the different VectorValues
-    DecisionTree<Key, VectorValues::shared_ptr> delta_tree =
-        this->continuousDelta(reversed_discrete_keys, continuousBayesNet,
-                              assignments);
-
-    // Get the probPrime tree with the correct leaf probabilities
-    std::vector<double> probPrimes;
-    for (const DiscreteValues& assignment : assignments) {
-      VectorValues delta = *delta_tree(assignment);
-
-      // If VectorValues is empty, it means this is a pruned branch.
-      // Set thr probPrime to 0.0.
-      if (delta.size() == 0) {
-        probPrimes.push_back(0.0);
-        continue;
-      }
-
-      // Compute the error given the delta and the assignment.
-      double error = this->error(delta, assignment);
-      probPrimes.push_back(exp(-error));
-    }
-
-    AlgebraicDecisionTree<Key> probPrimeTree(reversed_discrete_keys,
-                                             probPrimes);
-    return probPrimeTree;
-  }
-
-  std::pair<Ordering, Ordering> separateContinuousDiscreteOrdering(
-      const Ordering& ordering) const;
-
-
   /**
   * @brief Return a Colamd constrained ordering where the discrete keys are
   * eliminated after the continuous keys.
@@ -37,12 +37,12 @@ namespace gtsam {
  */
 class GTSAM_EXPORT HybridValues {
  private:
-  // DiscreteValue stored the discrete components of the HybridValues.
-  DiscreteValues discrete_;
-
   // VectorValue stored the continuous components of the HybridValues.
   VectorValues continuous_;
 
+  // DiscreteValue stored the discrete components of the HybridValues.
+  DiscreteValues discrete_;
+
  public:
   /// @name Standard Constructors
   /// @{
@@ -51,8 +51,8 @@ class GTSAM_EXPORT HybridValues {
   HybridValues() = default;
 
   /// Construct from DiscreteValues and VectorValues.
-  HybridValues(const DiscreteValues& dv, const VectorValues& cv)
-      : discrete_(dv), continuous_(cv){};
+  HybridValues(const VectorValues& cv, const DiscreteValues& dv)
+      : continuous_(cv), discrete_(dv){};
 
   /// @}
   /// @name Testable
@@ -62,15 +62,15 @@ class GTSAM_EXPORT HybridValues {
   void print(const std::string& s = "HybridValues",
              const KeyFormatter& keyFormatter = DefaultKeyFormatter) const {
     std::cout << s << ": \n";
-    discrete_.print("  Discrete", keyFormatter);  // print discrete components
     continuous_.print("  Continuous",
                       keyFormatter);  // print continuous components
+    discrete_.print("  Discrete", keyFormatter);  // print discrete components
   };
 
   /// equals required by Testable for unit testing
   bool equals(const HybridValues& other, double tol = 1e-9) const {
-    return discrete_.equals(other.discrete_, tol) &&
-           continuous_.equals(other.continuous_, tol);
+    return continuous_.equals(other.continuous_, tol) &&
+           discrete_.equals(other.discrete_, tol);
   }
 
   /// @}
@@ -78,10 +78,10 @@ class GTSAM_EXPORT HybridValues {
   /// @{
 
   /// Return the discrete MPE assignment
-  DiscreteValues discrete() const { return discrete_; }
+  const DiscreteValues& discrete() const { return discrete_; }
 
   /// Return the delta update for the continuous vectors
-  VectorValues continuous() const { return continuous_; }
+  const VectorValues& continuous() const { return continuous_; }
 
   /// Check whether a variable with key \c j exists in DiscreteValue.
   bool existsDiscrete(Key j) { return (discrete_.find(j) != discrete_.end()); };
@@ -96,7 +96,7 @@ class GTSAM_EXPORT HybridValues {
   * the key \c j is already used.
   * @param value The vector to be inserted.
   * @param j The index with which the value will be associated. */
-  void insert(Key j, int value) { discrete_[j] = value; };
+  void insert(Key j, size_t value) { discrete_[j] = value; };
 
   /** Insert a vector \c value with key \c j. Throws an invalid_argument
   * exception if the key \c j is already used.
@@ -130,8 +130,8 @@ class GTSAM_EXPORT HybridValues {
   std::string html(
       const KeyFormatter& keyFormatter = DefaultKeyFormatter) const {
     std::stringstream ss;
-    ss << this->discrete_.html(keyFormatter);
     ss << this->continuous_.html(keyFormatter);
+    ss << this->discrete_.html(keyFormatter);
     return ss.str();
   };
 
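Review note: HybridValues now consistently puts continuous values first; member order,
constructor, print, html, and equals are all flipped. This is a breaking signature
change, matching the call-site updates in HybridBayesNet/HybridBayesTree above:

    // was: HybridValues hv(discreteValues, vectorValues);
    HybridValues hv(vectorValues, discreteValues);
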
@@ -6,10 +6,11 @@ namespace gtsam {
 
 #include <gtsam/hybrid/HybridValues.h>
 class HybridValues {
-  gtsam::DiscreteValues discrete() const;
   gtsam::VectorValues continuous() const;
+  gtsam::DiscreteValues discrete() const;
 
   HybridValues();
-  HybridValues(const gtsam::DiscreteValues &dv, const gtsam::VectorValues &cv);
+  HybridValues(const gtsam::VectorValues &cv, const gtsam::DiscreteValues &dv);
   void print(string s = "HybridValues",
              const gtsam::KeyFormatter& keyFormatter =
                  gtsam::DefaultKeyFormatter) const;
@@ -54,7 +55,7 @@ virtual class HybridDiscreteFactor {
 
 #include <gtsam/hybrid/GaussianMixtureFactor.h>
 class GaussianMixtureFactor : gtsam::HybridFactor {
-  static GaussianMixtureFactor FromFactors(
+  GaussianMixtureFactor(
       const gtsam::KeyVector& continuousKeys,
       const gtsam::DiscreteKeys& discreteKeys,
      const std::vector<gtsam::GaussianFactor::shared_ptr>& factorsList);
@@ -66,12 +67,13 @@ class GaussianMixtureFactor : gtsam::HybridFactor {
 
 #include <gtsam/hybrid/GaussianMixture.h>
 class GaussianMixture : gtsam::HybridFactor {
-  static GaussianMixture FromConditionals(
-      const gtsam::KeyVector& continuousFrontals,
-      const gtsam::KeyVector& continuousParents,
-      const gtsam::DiscreteKeys& discreteParents,
-      const std::vector<gtsam::GaussianConditional::shared_ptr>&
-          conditionalsList);
+  GaussianMixture(const gtsam::KeyVector& continuousFrontals,
+                  const gtsam::KeyVector& continuousParents,
+                  const gtsam::DiscreteKeys& discreteParents,
+                  const std::vector<gtsam::GaussianConditional::shared_ptr>&
+                      conditionalsList);
+
+  gtsam::GaussianMixtureFactor* likelihood(const gtsam::VectorValues &frontals) const;
 
   void print(string s = "GaussianMixture\n",
              const gtsam::KeyFormatter& keyFormatter =
@@ -87,7 +89,6 @@ class HybridBayesTreeClique {
-  // double evaluate(const gtsam::HybridValues& values) const;
 };
 
 #include <gtsam/hybrid/HybridBayesTree.h>
 class HybridBayesTree {
   HybridBayesTree();
   void print(string s = "HybridBayesTree\n",
@@ -105,14 +106,43 @@ class HybridBayesTree {
                  gtsam::DefaultKeyFormatter) const;
 };
 
 #include <gtsam/hybrid/HybridBayesNet.h>
 class HybridBayesNet {
   HybridBayesNet();
   void add(const gtsam::HybridConditional& s);
+  void addMixture(const gtsam::GaussianMixture* s);
+  void addGaussian(const gtsam::GaussianConditional* s);
+  void addDiscrete(const gtsam::DiscreteConditional* s);
+
+  void emplaceMixture(const gtsam::GaussianMixture& s);
+  void emplaceMixture(const gtsam::KeyVector& continuousFrontals,
+                      const gtsam::KeyVector& continuousParents,
+                      const gtsam::DiscreteKeys& discreteParents,
+                      const std::vector<gtsam::GaussianConditional::shared_ptr>&
+                          conditionalsList);
+  void emplaceGaussian(const gtsam::GaussianConditional& s);
+  void emplaceDiscrete(const gtsam::DiscreteConditional& s);
+  void emplaceDiscrete(const gtsam::DiscreteKey& key, string spec);
+  void emplaceDiscrete(const gtsam::DiscreteKey& key,
+                       const gtsam::DiscreteKeys& parents, string spec);
+  void emplaceDiscrete(const gtsam::DiscreteKey& key,
+                       const std::vector<gtsam::DiscreteKey>& parents,
+                       string spec);
 
   gtsam::GaussianMixture* atMixture(size_t i) const;
   gtsam::GaussianConditional* atGaussian(size_t i) const;
   gtsam::DiscreteConditional* atDiscrete(size_t i) const;
+
+  bool empty() const;
+  size_t size() const;
+  gtsam::KeySet keys() const;
+  const gtsam::HybridConditional* at(size_t i) const;
 
+  double evaluate(const gtsam::HybridValues& x) const;
   gtsam::HybridValues optimize() const;
+  gtsam::HybridValues sample(const gtsam::HybridValues &given) const;
+  gtsam::HybridValues sample() const;
 
   void print(string s = "HybridBayesNet\n",
              const gtsam::KeyFormatter& keyFormatter =
                  gtsam::DefaultKeyFormatter) const;
@@ -139,9 +169,8 @@ class HybridGaussianFactorGraph {
   void push_back(const gtsam::HybridBayesNet& bayesNet);
   void push_back(const gtsam::HybridBayesTree& bayesTree);
   void push_back(const gtsam::GaussianMixtureFactor* gmm);
-
-  void add(gtsam::DecisionTreeFactor* factor);
-  void add(gtsam::JacobianFactor* factor);
+  void push_back(gtsam::DecisionTreeFactor* factor);
+  void push_back(gtsam::JacobianFactor* factor);
 
   bool empty() const;
   void remove(size_t i);
@@ -152,6 +181,12 @@ class HybridGaussianFactorGraph {
   void print(string s = "") const;
   bool equals(const gtsam::HybridGaussianFactorGraph& fg, double tol = 1e-9) const;
 
+  // evaluation
+  double error(const gtsam::VectorValues& continuousValues,
+               const gtsam::DiscreteValues& discreteValues) const;
+  double probPrime(const gtsam::VectorValues& continuousValues,
+                   const gtsam::DiscreteValues& discreteValues) const;
+
   gtsam::HybridBayesNet* eliminateSequential();
   gtsam::HybridBayesNet* eliminateSequential(
       gtsam::Ordering::OrderingType type);
@@ -57,7 +57,7 @@ inline HybridGaussianFactorGraph::shared_ptr makeSwitchingChain(
 
   // keyFunc(1) to keyFunc(n+1)
   for (size_t t = 1; t < n; t++) {
-    hfg.add(GaussianMixtureFactor::FromFactors(
+    hfg.add(GaussianMixtureFactor(
         {keyFunc(t), keyFunc(t + 1)}, {{dKeyFunc(t), 2}},
         {boost::make_shared<JacobianFactor>(keyFunc(t), I_3x3, keyFunc(t + 1),
                                             I_3x3, Z_3x1),
@@ -20,6 +20,8 @@
 
 #include <gtsam/discrete/DiscreteValues.h>
 #include <gtsam/hybrid/GaussianMixture.h>
+#include <gtsam/hybrid/GaussianMixtureFactor.h>
+#include <gtsam/hybrid/HybridValues.h>
 #include <gtsam/inference/Symbol.h>
 #include <gtsam/linear/GaussianConditional.h>
 
@@ -33,6 +35,7 @@ using namespace gtsam;
 using noiseModel::Isotropic;
 using symbol_shorthand::M;
 using symbol_shorthand::X;
+using symbol_shorthand::Z;
 
 /* ************************************************************************* */
 /* Check construction of GaussianMixture P(x1 | x2, m1) as well as accessing a
@@ -127,7 +130,59 @@ TEST(GaussianMixture, Error) {
   assignment[M(1)] = 0;
   EXPECT_DOUBLES_EQUAL(0.5, mixture.error(values, assignment), 1e-8);
   assignment[M(1)] = 1;
-  EXPECT_DOUBLES_EQUAL(4.3252595155709335, mixture.error(values, assignment), 1e-8);
+  EXPECT_DOUBLES_EQUAL(4.3252595155709335, mixture.error(values, assignment),
+                       1e-8);
 }
 
+/* ************************************************************************* */
+// Create mode key: 0 is low-noise, 1 is high-noise.
+static const Key modeKey = M(0);
+static const DiscreteKey mode(modeKey, 2);
+
+// Create a simple GaussianMixture
+static GaussianMixture createSimpleGaussianMixture() {
+  // Create Gaussian mixture Z(0) = X(0) + noise.
+  // TODO(dellaert): making copies below is not ideal !
+  Matrix1 I = Matrix1::Identity();
+  const auto conditional0 = boost::make_shared<GaussianConditional>(
+      GaussianConditional::FromMeanAndStddev(Z(0), I, X(0), Vector1(0), 0.5));
+  const auto conditional1 = boost::make_shared<GaussianConditional>(
+      GaussianConditional::FromMeanAndStddev(Z(0), I, X(0), Vector1(0), 3));
+  return GaussianMixture({Z(0)}, {X(0)}, {mode}, {conditional0, conditional1});
+}
+
+/* ************************************************************************* */
+// Create a test for continuousParents.
+TEST(GaussianMixture, ContinuousParents) {
+  const GaussianMixture gm = createSimpleGaussianMixture();
+  const KeyVector continuousParentKeys = gm.continuousParents();
+  // Check that the continuous parent keys are correct:
+  EXPECT(continuousParentKeys.size() == 1);
+  EXPECT(continuousParentKeys[0] == X(0));
+}
+
+/* ************************************************************************* */
+/// Check that likelihood returns a mixture factor on the parents.
+TEST(GaussianMixture, Likelihood) {
+  const GaussianMixture gm = createSimpleGaussianMixture();
+
+  // Call the likelihood function:
+  VectorValues measurements;
+  measurements.insert(Z(0), Vector1(0));
+  const auto factor = gm.likelihood(measurements);
+
+  // Check that the factor is a mixture factor on the parents.
+  // Loop over all discrete assignments over the discrete parents:
+  const DiscreteKeys discreteParentKeys = gm.discreteKeys();
+
+  // Apply the likelihood function to all conditionals:
+  const GaussianMixtureFactor::Factors factors(
+      gm.conditionals(),
+      [measurements](const GaussianConditional::shared_ptr& conditional) {
+        return conditional->likelihood(measurements);
+      });
+  const GaussianMixtureFactor expected({X(0)}, {mode}, factors);
+  EXPECT(assert_equal(*factor, expected));
+}
+
 /* ************************************************************************* */
@@ -36,36 +36,75 @@ using noiseModel::Isotropic;
 using symbol_shorthand::M;
 using symbol_shorthand::X;
 
-static const DiscreteKey Asia(0, 2);
+static const Key asiaKey = 0;
+static const DiscreteKey Asia(asiaKey, 2);
 
 /* ****************************************************************************/
-// Test creation
+// Test creation of a pure discrete Bayes net.
 TEST(HybridBayesNet, Creation) {
   HybridBayesNet bayesNet;
 
-  bayesNet.add(Asia, "99/1");
+  bayesNet.emplaceDiscrete(Asia, "99/1");
 
   DiscreteConditional expected(Asia, "99/1");
 
   CHECK(bayesNet.atDiscrete(0));
-  auto& df = *bayesNet.atDiscrete(0);
-  EXPECT(df.equals(expected));
+  EXPECT(assert_equal(expected, *bayesNet.atDiscrete(0)));
 }
 
 /* ****************************************************************************/
-// Test adding a bayes net to another one.
+// Test adding a Bayes net to another one.
 TEST(HybridBayesNet, Add) {
   HybridBayesNet bayesNet;
 
-  bayesNet.add(Asia, "99/1");
-
-  DiscreteConditional expected(Asia, "99/1");
+  bayesNet.emplaceDiscrete(Asia, "99/1");
 
   HybridBayesNet other;
   other.push_back(bayesNet);
   EXPECT(bayesNet.equals(other));
 }
 
+/* ****************************************************************************/
+// Test evaluate for a pure discrete Bayes net P(Asia).
+TEST(HybridBayesNet, evaluatePureDiscrete) {
+  HybridBayesNet bayesNet;
+  bayesNet.emplaceDiscrete(Asia, "99/1");
+  HybridValues values;
+  values.insert(asiaKey, 0);
+  EXPECT_DOUBLES_EQUAL(0.99, bayesNet.evaluate(values), 1e-9);
+}
+
+/* ****************************************************************************/
+// Test evaluate for a hybrid Bayes net P(X0|X1) P(X1|Asia) P(Asia).
+TEST(HybridBayesNet, evaluateHybrid) {
+  const auto continuousConditional = GaussianConditional::FromMeanAndStddev(
+      X(0), 2 * I_1x1, X(1), Vector1(-4.0), 5.0);
+
+  const SharedDiagonal model0 = noiseModel::Diagonal::Sigmas(Vector1(2.0)),
+                       model1 = noiseModel::Diagonal::Sigmas(Vector1(3.0));
+
+  const auto conditional0 = boost::make_shared<GaussianConditional>(
+                 X(1), Vector1::Constant(5), I_1x1, model0),
+             conditional1 = boost::make_shared<GaussianConditional>(
+                 X(1), Vector1::Constant(2), I_1x1, model1);
+
+  // Create hybrid Bayes net.
+  HybridBayesNet bayesNet;
+  bayesNet.emplaceGaussian(continuousConditional);
+  GaussianMixture gm({X(1)}, {}, {Asia}, {conditional0, conditional1});
+  bayesNet.emplaceMixture(gm);  // copy :-(
+  bayesNet.emplaceDiscrete(Asia, "99/1");
+
+  // Create values at which to evaluate.
+  HybridValues values;
+  values.insert(asiaKey, 0);
+  values.insert(X(0), Vector1(-6));
+  values.insert(X(1), Vector1(1));
+
+  const double conditionalProbability =
+      continuousConditional.evaluate(values.continuous());
+  const double mixtureProbability = conditional0->evaluate(values.continuous());
+  EXPECT_DOUBLES_EQUAL(conditionalProbability * mixtureProbability * 0.99,
+                       bayesNet.evaluate(values), 1e-9);
+}
+
 /* ****************************************************************************/
 // Test choosing an assignment of conditionals
 TEST(HybridBayesNet, Choose) {
@@ -105,7 +144,7 @@ TEST(HybridBayesNet, Choose) {
 }
 
 /* ****************************************************************************/
-// Test bayes net optimize
+// Test Bayes net optimize
 TEST(HybridBayesNet, OptimizeAssignment) {
   Switching s(4);
 
@@ -139,7 +178,7 @@ TEST(HybridBayesNet, OptimizeAssignment) {
 }
 
 /* ****************************************************************************/
-// Test bayes net optimize
+// Test Bayes net optimize
 TEST(HybridBayesNet, Optimize) {
   Switching s(4);
 
@@ -184,7 +223,7 @@ TEST(HybridBayesNet, Error) {
   // regression
   EXPECT(assert_equal(expected_error, error_tree, 1e-9));
 
-  // Error on pruned bayes net
+  // Error on pruned Bayes net
   auto prunedBayesNet = hybridBayesNet->prune(2);
   auto pruned_error_tree = prunedBayesNet.error(delta.continuous());
 
@@ -219,7 +258,7 @@ TEST(HybridBayesNet, Error) {
 }
 
 /* ****************************************************************************/
-// Test bayes net pruning
+// Test Bayes net pruning
 TEST(HybridBayesNet, Prune) {
   Switching s(4);
 
@@ -237,7 +276,7 @@ TEST(HybridBayesNet, Prune) {
 }
 
 /* ****************************************************************************/
-// Test bayes net updateDiscreteConditionals
+// Test Bayes net updateDiscreteConditionals
 TEST(HybridBayesNet, UpdateDiscreteConditionals) {
   Switching s(4);
 
@@ -254,8 +293,7 @@ TEST(HybridBayesNet, UpdateDiscreteConditionals) {
   EXPECT_LONGS_EQUAL(maxNrLeaves + 2 /*2 zero leaves*/,
                      prunedDecisionTree->nrLeaves());
 
-  auto original_discrete_conditionals =
-      *(hybridBayesNet->at(4)->asDiscreteConditional());
+  auto original_discrete_conditionals = *(hybridBayesNet->at(4)->asDiscrete());
 
   // Prune!
   hybridBayesNet->prune(maxNrLeaves);
@@ -275,8 +313,7 @@ TEST(HybridBayesNet, UpdateDiscreteConditionals) {
   };
 
   // Get the pruned discrete conditionals as an AlgebraicDecisionTree
-  auto pruned_discrete_conditionals =
-      hybridBayesNet->at(4)->asDiscreteConditional();
+  auto pruned_discrete_conditionals = hybridBayesNet->at(4)->asDiscrete();
   auto discrete_conditional_tree =
       boost::dynamic_pointer_cast<DecisionTreeFactor::ADT>(
           pruned_discrete_conditionals);
@@ -339,7 +376,7 @@ TEST(HybridBayesNet, Sampling) {
     // Sample
     HybridValues sample = bn->sample(&gen);
 
-    discrete_samples.push_back(sample.discrete()[M(0)]);
+    discrete_samples.push_back(sample.discrete().at(M(0)));
 
     if (i == 0) {
       average_continuous.insert(sample.continuous());
@@ -133,7 +133,7 @@ TEST(HybridGaussianFactorGraph, eliminateFullSequentialEqualChance) {
   auto result =
       hfg.eliminateSequential(Ordering::ColamdConstrainedLast(hfg, {M(1)}));
 
-  auto dc = result->at(2)->asDiscreteConditional();
+  auto dc = result->at(2)->asDiscrete();
   DiscreteValues dv;
   dv[M(1)] = 0;
   EXPECT_DOUBLES_EQUAL(1, dc->operator()(dv), 1e-3);
@@ -176,7 +176,7 @@ TEST(HybridGaussianFactorGraph, eliminateFullMultifrontalSimple) {
   hfg.add(JacobianFactor(X(0), I_3x3, Z_3x1));
   hfg.add(JacobianFactor(X(0), I_3x3, X(1), -I_3x3, Z_3x1));
 
-  hfg.add(GaussianMixtureFactor::FromFactors(
+  hfg.add(GaussianMixtureFactor(
       {X(1)}, {{M(1), 2}},
       {boost::make_shared<JacobianFactor>(X(1), I_3x3, Z_3x1),
       boost::make_shared<JacobianFactor>(X(1), I_3x3, Vector3::Ones())}));
@@ -237,7 +237,7 @@ TEST(HybridGaussianFactorGraph, eliminateFullMultifrontalTwoClique) {
   hfg.add(JacobianFactor(X(1), I_3x3, X(2), -I_3x3, Z_3x1));
 
   {
-    hfg.add(GaussianMixtureFactor::FromFactors(
+    hfg.add(GaussianMixtureFactor(
         {X(0)}, {{M(0), 2}},
        {boost::make_shared<JacobianFactor>(X(0), I_3x3, Z_3x1),
         boost::make_shared<JacobianFactor>(X(0), I_3x3, Vector3::Ones())}));
@@ -111,8 +111,7 @@ TEST(HybridGaussianElimination, IncrementalInference) {
   // Run update step
   isam.update(graph1);
 
-  auto discreteConditional_m0 =
-      isam[M(0)]->conditional()->asDiscreteConditional();
+  auto discreteConditional_m0 = isam[M(0)]->conditional()->asDiscrete();
   EXPECT(discreteConditional_m0->keys() == KeyVector({M(0)}));
 
   /********************************************************/
@@ -171,10 +170,10 @@ TEST(HybridGaussianElimination, IncrementalInference) {
   DiscreteValues m00;
   m00[M(0)] = 0, m00[M(1)] = 0;
   DiscreteConditional decisionTree =
-      *(*discreteBayesTree)[M(1)]->conditional()->asDiscreteConditional();
+      *(*discreteBayesTree)[M(1)]->conditional()->asDiscrete();
   double m00_prob = decisionTree(m00);
 
-  auto discreteConditional = isam[M(1)]->conditional()->asDiscreteConditional();
+  auto discreteConditional = isam[M(1)]->conditional()->asDiscrete();
 
   // Test the probability values with regression tests.
   DiscreteValues assignment;
@@ -540,7 +539,7 @@ TEST(HybridGaussianISAM, NonTrivial) {
 
   // The final discrete graph should not be empty since we have eliminated
   // all continuous variables.
-  auto discreteTree = inc[M(3)]->conditional()->asDiscreteConditional();
+  auto discreteTree = inc[M(3)]->conditional()->asDiscrete();
   EXPECT_LONGS_EQUAL(3, discreteTree->size());
 
   // Test if the optimal discrete mode assignment is (1, 1, 1).
@@ -124,8 +124,7 @@ TEST(HybridNonlinearISAM, IncrementalInference) {
   isam.update(graph1, initial);
   HybridGaussianISAM bayesTree = isam.bayesTree();
 
-  auto discreteConditional_m0 =
-      bayesTree[M(0)]->conditional()->asDiscreteConditional();
+  auto discreteConditional_m0 = bayesTree[M(0)]->conditional()->asDiscrete();
   EXPECT(discreteConditional_m0->keys() == KeyVector({M(0)}));
 
   /********************************************************/
@@ -189,11 +188,11 @@ TEST(HybridNonlinearISAM, IncrementalInference) {
   DiscreteValues m00;
   m00[M(0)] = 0, m00[M(1)] = 0;
   DiscreteConditional decisionTree =
-      *(*discreteBayesTree)[M(1)]->conditional()->asDiscreteConditional();
+      *(*discreteBayesTree)[M(1)]->conditional()->asDiscrete();
   double m00_prob = decisionTree(m00);
 
   auto discreteConditional =
-      bayesTree[M(1)]->conditional()->asDiscreteConditional();
+      bayesTree[M(1)]->conditional()->asDiscrete();
 
   // Test the probability values with regression tests.
   DiscreteValues assignment;
@@ -564,7 +563,7 @@ TEST(HybridNonlinearISAM, NonTrivial) {
 
   // The final discrete graph should not be empty since we have eliminated
   // all continuous variables.
-  auto discreteTree = bayesTree[M(3)]->conditional()->asDiscreteConditional();
+  auto discreteTree = bayesTree[M(3)]->conditional()->asDiscrete();
   EXPECT_LONGS_EQUAL(3, discreteTree->size());
 
   // Test if the optimal discrete mode assignment is (1, 1, 1).
@@ -46,7 +46,8 @@ namespace gtsam {
     return optimize(solution);
  }
 
-  VectorValues GaussianBayesNet::optimize(VectorValues solution) const {
+  VectorValues GaussianBayesNet::optimize(const VectorValues& given) const {
+    VectorValues solution = given;
    // (R*x)./sigmas = y by solving x=inv(R)*(y.*sigmas)
    // solve each node in reverse topological sort order (parents first)
    for (auto cg : boost::adaptors::reverse(*this)) {
@@ -224,5 +225,19 @@ namespace gtsam {
  }
 
+  /* ************************************************************************* */
+  double GaussianBayesNet::logDensity(const VectorValues& x) const {
+    double sum = 0.0;
+    for (const auto& conditional : *this) {
+      if (conditional) sum += conditional->logDensity(x);
+    }
+    return sum;
+  }
+
+  /* ************************************************************************* */
+  double GaussianBayesNet::evaluate(const VectorValues& x) const {
+    return exp(logDensity(x));
+  }
+
  /* ************************************************************************* */
 
 } // namespace gtsam
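Review note: because a Bayes net factorizes as p(x) = prod_i p(x_i | parents(x_i)), the
new logDensity is simply the sum of the conditionals' log-densities,

    log p(x) = sum_i log p(x_i | parents(x_i)),

and evaluate() exponentiates that sum once at the end.
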
@@ -88,12 +88,32 @@ namespace gtsam {
    /// @name Standard Interface
    /// @{
 
+    /**
+     * Calculate probability density for given values `x`:
+     *   exp(-error(x)) / sqrt((2*pi)^n*det(Sigma))
+     * where x is the vector of values, and Sigma is the covariance matrix.
+     * Note that error(x)=0.5*e'*e includes the 0.5 factor already.
+     */
+    double evaluate(const VectorValues& x) const;
+
+    /// Evaluate probability density, sugar.
+    double operator()(const VectorValues& x) const {
+      return evaluate(x);
+    }
+
+    /**
+     * Calculate log-density for given values `x`:
+     *   -error(x) - 0.5 * n*log(2*pi) - 0.5 * log det(Sigma)
+     * where x is the vector of values, and Sigma is the covariance matrix.
+     */
+    double logDensity(const VectorValues& x) const;
+
    /// Solve the GaussianBayesNet, i.e. return \f$ x = R^{-1}*d \f$, by
    /// back-substitution
    VectorValues optimize() const;
 
    /// Version of optimize for incomplete BayesNet, given missing variables
-    VectorValues optimize(const VectorValues given) const;
+    VectorValues optimize(const VectorValues& given) const;
 
    /**
     * Sample using ancestral sampling
@ -63,13 +63,24 @@ namespace gtsam {
|
|||
: BaseFactor(key, R, parent1, S, parent2, T, d, sigmas),
|
||||
BaseConditional(1) {}
|
||||
|
||||
/* ************************************************************************ */
|
||||
GaussianConditional GaussianConditional::FromMeanAndStddev(Key key,
|
||||
const Vector& mu,
|
||||
double sigma) {
|
||||
// |Rx - d| = |x-(Ay + b)|/sigma
|
||||
const Matrix R = Matrix::Identity(mu.size(), mu.size());
|
||||
const Vector& d = mu;
|
||||
return GaussianConditional(key, d, R,
|
||||
noiseModel::Isotropic::Sigma(mu.size(), sigma));
|
||||
}
|
||||
|
||||
/* ************************************************************************ */
|
||||
GaussianConditional GaussianConditional::FromMeanAndStddev(
|
||||
Key key, const Matrix& A, Key parent, const Vector& b, double sigma) {
|
||||
// |Rx + Sy - d| = |x-(Ay + b)|/sigma
|
||||
const Matrix R = Matrix::Identity(b.size(), b.size());
|
||||
const Matrix S = -A;
|
||||
const Vector d = b;
|
||||
const Vector& d = b;
|
||||
return GaussianConditional(key, d, R, parent, S,
|
||||
noiseModel::Isotropic::Sigma(b.size(), sigma));
|
||||
}
|
||||
|
@ -82,7 +93,7 @@ namespace gtsam {
|
|||
const Matrix R = Matrix::Identity(b.size(), b.size());
|
||||
const Matrix S = -A1;
|
||||
const Matrix T = -A2;
|
||||
const Vector d = b;
|
||||
const Vector& d = b;
|
||||
return GaussianConditional(key, d, R, parent1, S, parent2, T,
|
||||
noiseModel::Isotropic::Sigma(b.size(), sigma));
|
||||
}
|
||||
|
@ -169,6 +180,21 @@ double GaussianConditional::logDeterminant() const {
  return logDet;
}

/* ************************************************************************* */
// density = exp(-error(x)) / sqrt((2*pi)^n*det(Sigma))
// log = -error(x) - 0.5 * n*log(2*pi) - 0.5 * log det(Sigma)
double GaussianConditional::logDensity(const VectorValues& x) const {
  constexpr double log2pi = 1.8378770664093454835606594728112;
  size_t n = d().size();
  // log det(Sigma) = -2.0 * logDeterminant(), so the last term is +logDeterminant():
  return -error(x) - 0.5 * n * log2pi + logDeterminant();
}

/* ************************************************************************* */
double GaussianConditional::evaluate(const VectorValues& x) const {
  return exp(logDensity(x));
}

/* ************************************************************************* */
VectorValues GaussianConditional::solve(const VectorValues& x) const {
  // Concatenate all vector values that correspond to parent variables
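One step worth making explicit: the doc comment says `-0.5 * log det(Sigma)` while the code adds `logDeterminant()`; these agree because R is the upper-triangular square root of the information matrix:

```latex
\Sigma = (R^\top R)^{-1}
\quad\Longrightarrow\quad
-\tfrac{1}{2}\log\det\Sigma
  = \tfrac{1}{2}\log\det(R^\top R)
  = \log\det R,
```

which is exactly the quantity `logDeterminant()` returns.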
@ -84,12 +84,17 @@ namespace gtsam {
        const KEYS& keys, size_t nrFrontals, const VerticalBlockMatrix& augmentedMatrix,
        const SharedDiagonal& sigmas = SharedDiagonal());

    /// Construct from mean A1 p1 + b and standard deviation.
    /// Construct from mean `mu` and standard deviation `sigma`.
    static GaussianConditional FromMeanAndStddev(Key key, const Vector& mu,
                                                 double sigma);

    /// Construct from conditional mean `A1 p1 + b` and standard deviation.
    static GaussianConditional FromMeanAndStddev(Key key, const Matrix& A,
                                                 Key parent, const Vector& b,
                                                 double sigma);

    /// Construct from mean A1 p1 + A2 p2 + b and standard deviation.
    /// Construct from conditional mean `A1 p1 + A2 p2 + b` and standard
    /// deviation `sigma`.
    static GaussianConditional FromMeanAndStddev(Key key,  //
                                                 const Matrix& A1, Key parent1,
                                                 const Matrix& A2, Key parent2,
@ -121,6 +126,26 @@ namespace gtsam {
    /// @name Standard Interface
    /// @{

    /**
     * Calculate probability density for given values `x`:
     *   exp(-error(x)) / sqrt((2*pi)^n*det(Sigma))
     * where x is the vector of values, and Sigma is the covariance matrix.
     * Note that error(x)=0.5*e'*e includes the 0.5 factor already.
     */
    double evaluate(const VectorValues& x) const;

    /// Evaluate probability density, sugar.
    double operator()(const VectorValues& x) const {
      return evaluate(x);
    }

    /**
     * Calculate log-density for given values `x`:
     *   -error(x) - 0.5 * n*log(2*pi) - 0.5 * log det(Sigma)
     * where x is the vector of values, and Sigma is the covariance matrix.
     */
    double logDensity(const VectorValues& x) const;

    /** Return a view of the upper-triangular R block of the conditional */
    constABlock R() const { return Ab_.range(0, nrFrontals()); }

@ -134,27 +159,31 @@ namespace gtsam {
    const constBVector d() const { return BaseFactor::getb(); }

    /**
     * @brief Compute the log determinant of the Gaussian conditional.
     * The determinant is computed using the R matrix, which is upper
     * triangular.
     * For numerical stability, the determinant is computed in log
     * form, so it is a summation rather than a multiplication.
     * @brief Compute the determinant of the R matrix.
     *
     * @return double
     */
    double logDeterminant() const;

    /**
     * @brief Compute the determinant of the conditional from the
     * upper-triangular R matrix.
     * The determinant is computed in log form using logDeterminant for
     * numerical stability and then exponentiated.
     *
     * The determinant is computed in log form (hence summation) for numerical
     * stability and then exponentiated.
     * Note, the covariance matrix \f$ \Sigma = (R^T R)^{-1} \f$, and hence
     * \f$ \det(\Sigma) = 1 / \det(R^T R) = 1 / determinant()^2 \f$.
     *
     * @return double
     */
    double determinant() const { return exp(this->logDeterminant()); }

    /**
     * @brief Compute the log determinant of the R matrix.
     *
     * For numerical stability, the determinant is computed in log
     * form, so it is a summation rather than a multiplication.
     *
     * Note, the covariance matrix \f$ \Sigma = (R^T R)^{-1} \f$, and hence
     * \f$ \log \det(\Sigma) = - \log \det(R^T R) = - 2 logDeterminant() \f$.
     *
     * @return double
     */
    double logDeterminant() const;

    /**
     * Solves a conditional Gaussian and writes the solution into the entries of
     * \c x for each frontal variable of the conditional. The parents are
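A quick numeric cross-check of the identity these doc comments state, sketched against the Python wrapper; here `information()` returning inv(Sigma) = R'R comes from the factor base class, and the conditional and its sigma are arbitrary. At the mean, evaluate() should equal the normalization constant 1/sqrt(det(2*pi*Sigma)):

```python
import numpy as np

from gtsam import GaussianConditional, VectorValues, noiseModel

model = noiseModel.Isotropic.Sigma(1, 3.0)
conditional = GaussianConditional(0, [5.0], np.eye(1), model)

# information() returns inv(Sigma) = R'R, with the noise model applied.
invSigma = conditional.information()
mean = conditional.solve(VectorValues())  # no parents, so this is just the mean

# At the mean, the exponential term is 1, leaving the normalization constant.
expected = np.sqrt(np.linalg.det(invSigma / (2.0 * np.pi)))
assert abs(conditional.evaluate(mean) - expected) < 1e-9
```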
@ -470,6 +470,10 @@ virtual class GaussianConditional : gtsam::JacobianFactor {
                      size_t name2, Matrix T);

  // Named constructors
  static gtsam::GaussianConditional FromMeanAndStddev(gtsam::Key key,
                                                      const Vector& mu,
                                                      double sigma);

  static gtsam::GaussianConditional FromMeanAndStddev(gtsam::Key key,
                                                      const Matrix& A,
                                                      gtsam::Key parent,
@ -490,6 +494,8 @@ virtual class GaussianConditional : gtsam::JacobianFactor {
  bool equals(const gtsam::GaussianConditional& cg, double tol) const;

  // Standard Interface
  double evaluate(const gtsam::VectorValues& x) const;
  double logDensity(const gtsam::VectorValues& x) const;
  gtsam::Key firstFrontalKey() const;
  gtsam::VectorValues solve(const gtsam::VectorValues& parents) const;
  gtsam::JacobianFactor* likelihood(
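The `likelihood` entry (its signature is truncated above; it takes the frontal values as a VectorValues) converts a conditional p(z|x) into a factor on x by fixing z to a measured value, which is how the hybrid Python tests later in this diff use it. A sketch:

```python
import numpy as np

import gtsam
from gtsam import GaussianConditional
from gtsam.symbol_shorthand import X, Z

# p(z0 | x0) with unit Jacobian and sigma = 0.5.
conditional = GaussianConditional.FromMeanAndStddev(Z(0), np.eye(1), X(0),
                                                    [0.0], 0.5)

measurement = gtsam.VectorValues()
measurement.insert(Z(0), np.array([1.2]))  # the observed z0
factor = conditional.likelihood(measurement)  # a JacobianFactor on X(0)
```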
@ -543,17 +549,20 @@ virtual class GaussianBayesNet {
  bool equals(const gtsam::GaussianBayesNet& other, double tol) const;
  size_t size() const;

  // Standard interface
  void push_back(gtsam::GaussianConditional* conditional);
  void push_back(const gtsam::GaussianBayesNet& bayesNet);
  gtsam::GaussianConditional* front() const;
  gtsam::GaussianConditional* back() const;

  // Standard interface
  double evaluate(const gtsam::VectorValues& x) const;
  double logDensity(const gtsam::VectorValues& x) const;

  gtsam::VectorValues optimize() const;
  gtsam::VectorValues optimize(gtsam::VectorValues given) const;
  gtsam::VectorValues optimize(const gtsam::VectorValues& given) const;
  gtsam::VectorValues optimizeGradientSearch() const;

  gtsam::VectorValues sample(gtsam::VectorValues given) const;
  gtsam::VectorValues sample(const gtsam::VectorValues& given) const;
  gtsam::VectorValues sample() const;
  gtsam::VectorValues backSubstitute(const gtsam::VectorValues& gx) const;
  gtsam::VectorValues backSubstituteTranspose(const gtsam::VectorValues& gx) const;
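The two `sample` overloads bound here perform ancestral sampling; a sketch reusing the small net from the first example, where the given-variables overload draws only the remaining variables, conditioned on the given ones:

```python
import numpy as np

import gtsam
from gtsam import GaussianBayesNet, GaussianConditional

_x_, _y_ = 0, 1
I_1x1 = np.eye(1)
bayesNet = GaussianBayesNet()
bayesNet.push_back(GaussianConditional(_x_, [9.0], I_1x1, _y_, I_1x1))
bayesNet.push_back(GaussianConditional(_y_, [5.0], I_1x1))

sample = bayesNet.sample()  # y ~ p(y), then x ~ p(x|y)

given = gtsam.VectorValues()
given.insert(_y_, np.array([4.0]))
conditional_sample = bayesNet.sample(given)  # x ~ p(x | y = 4), y kept at 4
```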
@ -1,6 +1,6 @@
/* ----------------------------------------------------------------------------

 * GTSAM Copyright 2010, Georgia Tech Research Corporation,
 * GTSAM Copyright 2010-2022, Georgia Tech Research Corporation,
 * Atlanta, Georgia 30332-0415
 * All Rights Reserved
 * Authors: Frank Dellaert, et al. (see THANKS for the full author list)
@ -67,6 +67,36 @@ TEST( GaussianBayesNet, Matrix )
  EXPECT(assert_equal(d,d1));
}

/* ************************************************************************* */
// Check that the evaluate function matches direct calculation with R.
TEST(GaussianBayesNet, Evaluate1) {
  // Let's evaluate at the mean
  const VectorValues mean = smallBayesNet.optimize();

  // We get the matrix, which has noise model applied!
  const Matrix R = smallBayesNet.matrix().first;
  const Matrix invSigma = R.transpose() * R;

  // The Bayes net is a Gaussian density ~ exp (-0.5*(Rx-d)'*(Rx-d))
  // which at the mean is 1.0! So, the only thing we need to calculate is
  // the normalization constant 1.0/sqrt((2*pi*Sigma).det()).
  // The information matrix inv(Sigma) = R'*R, so the determinant is
  const double expected = sqrt((invSigma / (2 * M_PI)).determinant());
  const double actual = smallBayesNet.evaluate(mean);
  EXPECT_DOUBLES_EQUAL(expected, actual, 1e-9);
}

// Check the evaluate with non-unit noise.
TEST(GaussianBayesNet, Evaluate2) {
  // See comments in test above.
  const VectorValues mean = noisyBayesNet.optimize();
  const Matrix R = noisyBayesNet.matrix().first;
  const Matrix invSigma = R.transpose() * R;
  const double expected = sqrt((invSigma / (2 * M_PI)).determinant());
  const double actual = noisyBayesNet.evaluate(mean);
  EXPECT_DOUBLES_EQUAL(expected, actual, 1e-9);
}

/* ************************************************************************* */
TEST( GaussianBayesNet, NoisyMatrix )
{
@ -142,14 +172,18 @@ TEST( GaussianBayesNet, optimize3 )
}

/* ************************************************************************* */
TEST(GaussianBayesNet, sample) {
  GaussianBayesNet gbn;
  Matrix A1 = (Matrix(2, 2) << 1., 2., 3., 4.).finished();
  const Vector2 mean(20, 40), b(10, 10);
  const double sigma = 0.01;
namespace sampling {
static Matrix A1 = (Matrix(2, 2) << 1., 2., 3., 4.).finished();
static const Vector2 mean(20, 40), b(10, 10);
static const double sigma = 0.01;
static const GaussianBayesNet gbn =
    list_of(GaussianConditional::FromMeanAndStddev(X(0), A1, X(1), b, sigma))(
        GaussianDensity::FromMeanAndStddev(X(1), mean, sigma));
}  // namespace sampling

  gbn.add(GaussianConditional::FromMeanAndStddev(X(0), A1, X(1), b, sigma));
  gbn.add(GaussianDensity::FromMeanAndStddev(X(1), mean, sigma));
/* ************************************************************************* */
TEST(GaussianBayesNet, sample) {
  using namespace sampling;

  auto actual = gbn.sample();
  EXPECT_LONGS_EQUAL(2, actual.size());
@ -165,6 +199,23 @@ TEST(GaussianBayesNet, sample) {
  // EXPECT(assert_equal(Vector2(110.032083, 230.039811), actual[X(0)], 1e-5));
}

/* ************************************************************************* */
// Do Monte Carlo integration of square deviation, should be equal to 9.0.
TEST(GaussianBayesNet, MonteCarloIntegration) {
  GaussianBayesNet gbn;
  gbn.push_back(noisyBayesNet.at(1));

  double sum = 0.0;
  constexpr size_t N = 1000;
  // loop for N samples:
  for (size_t i = 0; i < N; i++) {
    const auto X_i = gbn.sample();
    sum += pow(X_i[_y_].x() - 5.0, 2.0);
  }
  // Expected is variance = 3*3
  EXPECT_DOUBLES_EQUAL(9.0, sum / N, 0.5);  // Pretty high.
}

/* ************************************************************************* */
TEST(GaussianBayesNet, ordering)
{
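The same Monte Carlo check, sketched in Python against the `sample()` binding: here y ~ N(5, 3^2), so the squared deviation from 5 should average to sigma^2 = 9, with O(1/sqrt(N)) error, which is why the C++ test above uses such a loose tolerance.

```python
import numpy as np

from gtsam import GaussianBayesNet, GaussianConditional, noiseModel

_y_ = 1
gbn = GaussianBayesNet()
gbn.push_back(GaussianConditional(_y_, [5.0], np.eye(1),
                                  noiseModel.Isotropic.Sigma(1, 3.0)))

N = 1000
estimate = sum((gbn.sample().at(_y_)[0] - 5.0) ** 2 for _ in range(N)) / N
# estimate should be close to 9.0, up to Monte Carlo noise.
```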
@ -130,6 +130,75 @@ TEST( GaussianConditional, equals )
  EXPECT( expected.equals(actual) );
}

/* ************************************************************************* */
namespace density {
static const Key key = 77;
static constexpr double sigma = 3.0;
static const auto unitPrior =
                      GaussianConditional(key, Vector1::Constant(5), I_1x1),
                  widerPrior = GaussianConditional(
                      key, Vector1::Constant(5), I_1x1,
                      noiseModel::Isotropic::Sigma(1, sigma));
}  // namespace density

/* ************************************************************************* */
// Check that the evaluate function matches direct calculation with R.
TEST(GaussianConditional, Evaluate1) {
  // Let's evaluate at the mean
  const VectorValues mean = density::unitPrior.solve(VectorValues());

  // We get the Hessian matrix, which has noise model applied!
  const Matrix invSigma = density::unitPrior.information();

  // A Gaussian density ~ exp (-0.5*(Rx-d)'*(Rx-d))
  // which at the mean is 1.0! So, the only thing we need to calculate is
  // the normalization constant 1.0/sqrt((2*pi*Sigma).det()).
  // The information matrix inv(Sigma) = R'*R, so the determinant is
  const double expected = sqrt((invSigma / (2 * M_PI)).determinant());
  const double actual = density::unitPrior.evaluate(mean);
  EXPECT_DOUBLES_EQUAL(expected, actual, 1e-9);

  using density::key;
  using density::sigma;

  // Let's numerically integrate and see that we integrate to 1.0.
  double integral = 0.0;
  // Loop from -5*sigma to 5*sigma in 0.1*sigma steps:
  for (double x = -5.0 * sigma; x <= 5.0 * sigma; x += 0.1 * sigma) {
    VectorValues xValues;
    xValues.insert(key, mean.at(key) + Vector1(x));
    const double density = density::unitPrior.evaluate(xValues);
    integral += 0.1 * sigma * density;
  }
  EXPECT_DOUBLES_EQUAL(1.0, integral, 1e-9);
}

/* ************************************************************************* */
// Check the evaluate with non-unit noise.
TEST(GaussianConditional, Evaluate2) {
  // See comments in test above.
  const VectorValues mean = density::widerPrior.solve(VectorValues());
  const Matrix R = density::widerPrior.R();
  const Matrix invSigma = density::widerPrior.information();
  const double expected = sqrt((invSigma / (2 * M_PI)).determinant());
  const double actual = density::widerPrior.evaluate(mean);
  EXPECT_DOUBLES_EQUAL(expected, actual, 1e-9);

  using density::key;
  using density::sigma;

  // Let's numerically integrate and see that we integrate to 1.0.
  double integral = 0.0;
  // Loop from -5*sigma to 5*sigma in 0.1*sigma steps:
  for (double x = -5.0 * sigma; x <= 5.0 * sigma; x += 0.1 * sigma) {
    VectorValues xValues;
    xValues.insert(key, mean.at(key) + Vector1(x));
    const double density = density::widerPrior.evaluate(xValues);
    integral += 0.1 * sigma * density;
  }
  EXPECT_DOUBLES_EQUAL(1.0, integral, 1e-5);
}

/* ************************************************************************* */
TEST( GaussianConditional, solve )
{
@ -216,7 +216,13 @@ virtual class CombinedImuFactor: gtsam::NonlinearFactor {
#include <gtsam/navigation/AHRSFactor.h>
class PreintegratedAhrsMeasurements {
  // Standard Constructor
  PreintegratedAhrsMeasurements(Vector bias, Matrix measuredOmegaCovariance);
  PreintegratedAhrsMeasurements(const gtsam::PreintegrationParams* params,
                                const Vector& biasHat);
  PreintegratedAhrsMeasurements(const gtsam::PreintegrationParams* p,
                                const Vector& bias_hat, double deltaTij,
                                const gtsam::Rot3& deltaRij,
                                const Matrix& delRdelBiasOmega,
                                const Matrix& preint_meas_cov);
  PreintegratedAhrsMeasurements(const gtsam::PreintegratedAhrsMeasurements& rhs);

  // Testable
@ -144,7 +144,7 @@ public:
        std::cout << e.what() << ": Landmark "<< DefaultKeyFormatter(this->template key<2>()) <<
          " moved behind camera " << DefaultKeyFormatter(this->template key<1>()) << std::endl;
      if (throwCheirality_)
        throw StereoCheiralityException(this->key2());
        throw StereoCheiralityException(this->template key<2>());
    }
    return Vector3::Constant(2.0 * K_->fx());
  }
@ -21,9 +21,7 @@ virtual class gtsam::noiseModel::Isotropic;
virtual class gtsam::imuBias::ConstantBias;
virtual class gtsam::NonlinearFactor;
virtual class gtsam::NoiseModelFactor;
virtual class gtsam::NoiseModelFactor2;
virtual class gtsam::NoiseModelFactor3;
virtual class gtsam::NoiseModelFactor4;
virtual class gtsam::NoiseModelFactorN;
virtual class gtsam::GaussianFactor;
virtual class gtsam::HessianFactor;
virtual class gtsam::JacobianFactor;
@ -430,8 +428,9 @@ virtual class IMUFactor : gtsam::NoiseModelFactor {
  Vector gyro() const;
  Vector accel() const;
  Vector z() const;
  size_t key1() const;
  size_t key2() const;

  template <I = {1, 2}>
  size_t key() const;
};

#include <gtsam_unstable/dynamics/FullIMUFactor.h>
@ -448,8 +447,9 @@ virtual class FullIMUFactor : gtsam::NoiseModelFactor {
  Vector gyro() const;
  Vector accel() const;
  Vector z() const;
  size_t key1() const;
  size_t key2() const;

  template <I = {1, 2}>
  size_t key() const;
};

#include <gtsam_unstable/dynamics/DynamicsPriors.h>
@ -733,14 +733,14 @@ class AHRS {
// Tectonic SAM Factors

#include <gtsam_unstable/slam/TSAMFactors.h>
//typedef gtsam::NoiseModelFactor2<gtsam::Pose2, gtsam::Point2> NLPosePose;
//typedef gtsam::NoiseModelFactorN<gtsam::Pose2, gtsam::Point2> NLPosePose;
virtual class DeltaFactor : gtsam::NoiseModelFactor {
  DeltaFactor(size_t i, size_t j, const gtsam::Point2& measured,
              const gtsam::noiseModel::Base* noiseModel);
  //void print(string s) const;
};

//typedef gtsam::NoiseModelFactor4<gtsam::Pose2, gtsam::Pose2, gtsam::Pose2,
//typedef gtsam::NoiseModelFactorN<gtsam::Pose2, gtsam::Pose2, gtsam::Pose2,
// gtsam::Point2> NLPosePosePosePoint;
virtual class DeltaFactorBase : gtsam::NoiseModelFactor {
  DeltaFactorBase(size_t b1, size_t i, size_t b2, size_t j,
@ -748,7 +748,7 @@ virtual class DeltaFactorBase : gtsam::NoiseModelFactor {
  //void print(string s) const;
};

//typedef gtsam::NoiseModelFactor4<gtsam::Pose2, gtsam::Pose2, gtsam::Pose2,
//typedef gtsam::NoiseModelFactorN<gtsam::Pose2, gtsam::Pose2, gtsam::Pose2,
// gtsam::Pose2> NLPosePosePosePose;
virtual class OdometryFactorBase : gtsam::NoiseModelFactor {
  OdometryFactorBase(size_t b1, size_t i, size_t b2, size_t j,
@ -15,8 +15,8 @@ namespace gtsam {
void LocalOrientedPlane3Factor::print(const string& s,
    const KeyFormatter& keyFormatter) const {
  cout << s << (s == "" ? "" : "\n");
  cout << "LocalOrientedPlane3Factor Factor (" << keyFormatter(key1()) << ", "
       << keyFormatter(key2()) << ", " << keyFormatter(key3()) << ")\n";
  cout << "LocalOrientedPlane3Factor Factor (" << keyFormatter(key<1>()) << ", "
       << keyFormatter(key<2>()) << ", " << keyFormatter(key<3>()) << ")\n";
  measured_p_.print("Measured Plane");
  this->noiseModel_->print("  noise model: ");
}
@ -12,10 +12,9 @@
 */
#include <pybind11/stl.h>

// NOTE: Needed since we are including pybind11/stl.h.
#ifdef GTSAM_ALLOCATOR_TBB
PYBIND11_MAKE_OPAQUE(std::vector<gtsam::Key, tbb::tbb_allocator<gtsam::Key>>);
#else
PYBIND11_MAKE_OPAQUE(std::vector<gtsam::Key>);
#endif

PYBIND11_MAKE_OPAQUE(std::vector<gtsam::GaussianFactor::shared_ptr>);
@ -1,4 +0,0 @@

py::bind_vector<std::vector<gtsam::GaussianFactor::shared_ptr> >(m_, "GaussianFactorVector");

py::implicitly_convertible<py::list, std::vector<gtsam::GaussianFactor::shared_ptr> >();
@ -29,8 +29,7 @@ def smallBayesNet():
    """Create a small Bayes Net for testing"""
    bayesNet = GaussianBayesNet()
    I_1x1 = np.eye(1, dtype=float)
    bayesNet.push_back(GaussianConditional(
        _x_, [9.0], I_1x1, _y_, I_1x1))
    bayesNet.push_back(GaussianConditional(_x_, [9.0], I_1x1, _y_, I_1x1))
    bayesNet.push_back(GaussianConditional(_y_, [5.0], I_1x1))
    return bayesNet
@ -41,13 +40,21 @@ class TestGaussianBayesNet(GtsamTestCase):
    def test_matrix(self):
        """Test matrix method"""
        R, d = smallBayesNet().matrix()  # get matrix and RHS
        R1 = np.array([
            [1.0, 1.0],
            [0.0, 1.0]])
        R1 = np.array([[1.0, 1.0], [0.0, 1.0]])
        d1 = np.array([9.0, 5.0])
        np.testing.assert_equal(R, R1)
        np.testing.assert_equal(d, d1)

    def test_sample(self):
        """Test sample method"""
        bayesNet = smallBayesNet()
        sample = bayesNet.sample()
        self.assertIsInstance(sample, gtsam.VectorValues)

if __name__ == '__main__':
        # standard deviation is 1.0 for both, so we set tolerance to 3*sigma
        mean = bayesNet.optimize()
        self.gtsamAssertEquals(sample, mean, tol=3.0)


if __name__ == "__main__":
    unittest.main()
@ -0,0 +1,68 @@
"""
GTSAM Copyright 2010-2022, Georgia Tech Research Corporation,
Atlanta, Georgia 30332-0415
All Rights Reserved

See LICENSE for the license information

Unit tests for Hybrid Bayes nets.
Author: Frank Dellaert
"""
# pylint: disable=invalid-name, no-name-in-module, no-member

import unittest

import numpy as np
from gtsam.symbol_shorthand import A, X
from gtsam.utils.test_case import GtsamTestCase

import gtsam
from gtsam import (DiscreteKeys, GaussianConditional, GaussianMixture,
                   HybridBayesNet, HybridValues, noiseModel)


class TestHybridBayesNet(GtsamTestCase):
    """Unit tests for HybridBayesNet."""

    def test_evaluate(self):
        """Test evaluate for a hybrid Bayes net P(X0|X1) P(X1|Asia) P(Asia)."""
        asiaKey = A(0)
        Asia = (asiaKey, 2)

        # Create the continuous conditional
        I_1x1 = np.eye(1)
        gc = GaussianConditional.FromMeanAndStddev(X(0), 2 * I_1x1, X(1), [-4],
                                                   5.0)

        # Create the noise models
        model0 = noiseModel.Diagonal.Sigmas([2.0])
        model1 = noiseModel.Diagonal.Sigmas([3.0])

        # Create the conditionals
        conditional0 = GaussianConditional(X(1), [5], I_1x1, model0)
        conditional1 = GaussianConditional(X(1), [2], I_1x1, model1)
        dkeys = DiscreteKeys()
        dkeys.push_back(Asia)
        gm = GaussianMixture([X(1)], [], dkeys, [conditional0, conditional1])

        # Create hybrid Bayes net.
        bayesNet = HybridBayesNet()
        bayesNet.addGaussian(gc)
        bayesNet.addMixture(gm)
        bayesNet.emplaceDiscrete(Asia, "99/1")

        # Create values at which to evaluate.
        values = HybridValues()
        values.insert(asiaKey, 0)
        values.insert(X(0), [-6])
        values.insert(X(1), [1])

        conditionalProbability = gc.evaluate(values.continuous())
        mixtureProbability = conditional0.evaluate(values.continuous())
        self.assertAlmostEqual(conditionalProbability * mixtureProbability *
                               0.99,
                               bayesNet.evaluate(values),
                               places=5)


if __name__ == "__main__":
    unittest.main()
@ -10,79 +10,168 @@ Author: Fan Jiang
"""
# pylint: disable=invalid-name, no-name-in-module, no-member

from __future__ import print_function

import unittest
import math

import numpy as np
from gtsam.symbol_shorthand import C, M, X, Z
from gtsam.utils.test_case import GtsamTestCase

import gtsam
import numpy as np
from gtsam.symbol_shorthand import C, X
from gtsam.utils.test_case import GtsamTestCase
from gtsam import (
    DecisionTreeFactor,
    DiscreteConditional,
    DiscreteKeys,
    GaussianConditional,
    GaussianMixture,
    GaussianMixtureFactor,
    HybridGaussianFactorGraph,
    JacobianFactor,
    Ordering,
    noiseModel,
)


class TestHybridGaussianFactorGraph(GtsamTestCase):
    """Unit tests for HybridGaussianFactorGraph."""

    def test_create(self):
        """Test contruction of hybrid factor graph."""
        noiseModel = gtsam.noiseModel.Unit.Create(3)
        dk = gtsam.DiscreteKeys()
        """Test construction of hybrid factor graph."""
        model = noiseModel.Unit.Create(3)
        dk = DiscreteKeys()
        dk.push_back((C(0), 2))

        jf1 = gtsam.JacobianFactor(X(0), np.eye(3), np.zeros((3, 1)),
                                   noiseModel)
        jf2 = gtsam.JacobianFactor(X(0), np.eye(3), np.ones((3, 1)),
                                   noiseModel)
        jf1 = JacobianFactor(X(0), np.eye(3), np.zeros((3, 1)), model)
        jf2 = JacobianFactor(X(0), np.eye(3), np.ones((3, 1)), model)

        gmf = gtsam.GaussianMixtureFactor.FromFactors([X(0)], dk, [jf1, jf2])
        gmf = GaussianMixtureFactor([X(0)], dk, [jf1, jf2])

        hfg = gtsam.HybridGaussianFactorGraph()
        hfg.add(jf1)
        hfg.add(jf2)
        hfg = HybridGaussianFactorGraph()
        hfg.push_back(jf1)
        hfg.push_back(jf2)
        hfg.push_back(gmf)

        hbn = hfg.eliminateSequential(
            gtsam.Ordering.ColamdConstrainedLastHybridGaussianFactorGraph(
                hfg, [C(0)]))
            Ordering.ColamdConstrainedLastHybridGaussianFactorGraph(hfg, [C(0)])
        )

        # print("hbn = ", hbn)
        self.assertEqual(hbn.size(), 2)

        mixture = hbn.at(0).inner()
        self.assertIsInstance(mixture, gtsam.GaussianMixture)
        self.assertIsInstance(mixture, GaussianMixture)
        self.assertEqual(len(mixture.keys()), 2)

        discrete_conditional = hbn.at(hbn.size() - 1).inner()
        self.assertIsInstance(discrete_conditional, gtsam.DiscreteConditional)
        self.assertIsInstance(discrete_conditional, DiscreteConditional)

    def test_optimize(self):
        """Test contruction of hybrid factor graph."""
        noiseModel = gtsam.noiseModel.Unit.Create(3)
        dk = gtsam.DiscreteKeys()
        """Test construction of hybrid factor graph."""
        model = noiseModel.Unit.Create(3)
        dk = DiscreteKeys()
        dk.push_back((C(0), 2))

        jf1 = gtsam.JacobianFactor(X(0), np.eye(3), np.zeros((3, 1)),
                                   noiseModel)
        jf2 = gtsam.JacobianFactor(X(0), np.eye(3), np.ones((3, 1)),
                                   noiseModel)
        jf1 = JacobianFactor(X(0), np.eye(3), np.zeros((3, 1)), model)
        jf2 = JacobianFactor(X(0), np.eye(3), np.ones((3, 1)), model)

        gmf = gtsam.GaussianMixtureFactor.FromFactors([X(0)], dk, [jf1, jf2])
        gmf = GaussianMixtureFactor([X(0)], dk, [jf1, jf2])

        hfg = gtsam.HybridGaussianFactorGraph()
        hfg.add(jf1)
        hfg.add(jf2)
        hfg = HybridGaussianFactorGraph()
        hfg.push_back(jf1)
        hfg.push_back(jf2)
        hfg.push_back(gmf)

        dtf = gtsam.DecisionTreeFactor([(C(0), 2)],"0 1")
        hfg.add(dtf)
        dtf = gtsam.DecisionTreeFactor([(C(0), 2)], "0 1")
        hfg.push_back(dtf)

        hbn = hfg.eliminateSequential(
            gtsam.Ordering.ColamdConstrainedLastHybridGaussianFactorGraph(
                hfg, [C(0)]))
            Ordering.ColamdConstrainedLastHybridGaussianFactorGraph(hfg, [C(0)])
        )

        # print("hbn = ", hbn)
        hv = hbn.optimize()
        self.assertEqual(hv.atDiscrete(C(0)), 1)

    @staticmethod
    def tiny(num_measurements: int = 1):
        """Create a tiny two variable hybrid model."""
        # Create hybrid Bayes net.
        bayesNet = gtsam.HybridBayesNet()

        # Create mode key: 0 is low-noise, 1 is high-noise.
        modeKey = M(0)
        mode = (modeKey, 2)

        # Create Gaussian mixture Z(0) = X(0) + noise for each measurement.
        I = np.eye(1)
        keys = DiscreteKeys()
        keys.push_back(mode)
        for i in range(num_measurements):
            conditional0 = GaussianConditional.FromMeanAndStddev(
                Z(i), I, X(0), [0], sigma=0.5
            )
            conditional1 = GaussianConditional.FromMeanAndStddev(
                Z(i), I, X(0), [0], sigma=3
            )
            bayesNet.emplaceMixture(
                [Z(i)], [X(0)], keys, [conditional0, conditional1]
            )

        # Create prior on X(0).
        prior_on_x0 = GaussianConditional.FromMeanAndStddev(X(0), [5.0], 5.0)
        bayesNet.addGaussian(prior_on_x0)

        # Add prior on mode.
        bayesNet.emplaceDiscrete(mode, "1/1")

        return bayesNet

    def test_tiny(self):
        """Test a tiny two variable hybrid model."""
        bayesNet = self.tiny()
        sample = bayesNet.sample()
        # print(sample)

        # Create a factor graph from the Bayes net with sampled measurements.
        fg = HybridGaussianFactorGraph()
        conditional = bayesNet.atMixture(0)
        measurement = gtsam.VectorValues()
        measurement.insert(Z(0), sample.at(Z(0)))
        factor = conditional.likelihood(measurement)
        fg.push_back(factor)
        fg.push_back(bayesNet.atGaussian(1))
        fg.push_back(bayesNet.atDiscrete(2))

        self.assertEqual(fg.size(), 3)

    def test_tiny2(self):
        """Test a tiny two variable hybrid model, with 2 measurements."""
        # Create the Bayes net and sample from it.
        bayesNet = self.tiny(num_measurements=2)
        sample = bayesNet.sample()
        # print(sample)

        # Create a factor graph from the Bayes net with sampled measurements.
        fg = HybridGaussianFactorGraph()
        for i in range(2):
            conditional = bayesNet.atMixture(i)
            measurement = gtsam.VectorValues()
            measurement.insert(Z(i), sample.at(Z(i)))
            factor = conditional.likelihood(measurement)
            fg.push_back(factor)
        fg.push_back(bayesNet.atGaussian(2))
        fg.push_back(bayesNet.atDiscrete(3))

        self.assertEqual(fg.size(), 4)
        # Calculate ratio between Bayes net probability and the factor graph:
        continuousValues = gtsam.VectorValues()
        continuousValues.insert(X(0), sample.at(X(0)))
        discreteValues = sample.discrete()
        expected_ratio = bayesNet.evaluate(sample) / fg.probPrime(
            continuousValues, discreteValues
        )
        print(expected_ratio)

        # TODO(dellaert): Change the mode to 0 and calculate the ratio again.


if __name__ == "__main__":
    unittest.main()
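The printed ratio in test_tiny2 is the property this test is probing: every factor pushed onto fg is a conditional of the generating Bayes net with the measurements z fixed, so one would expect

```latex
\frac{P(x, m, z)}{\phi_{\mathrm{fg}}(x, m)} = k(z),
\quad \text{constant in } (x, m),
```

and the pending TODO (flipping the mode to 0 and recomputing) is exactly a check that the ratio does not change with the values plugged in.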
@ -1,5 +1,5 @@
"""
GTSAM Copyright 2010-2019, Georgia Tech Research Corporation,
GTSAM Copyright 2010-2022, Georgia Tech Research Corporation,
Atlanta, Georgia 30332-0415
All Rights Reserved
@ -20,22 +20,23 @@ from gtsam.symbol_shorthand import C, X
from gtsam.utils.test_case import GtsamTestCase


class TestHybridGaussianFactorGraph(GtsamTestCase):
class TestHybridValues(GtsamTestCase):
    """Unit tests for HybridValues."""

    def test_basic(self):
        """Test contruction and basic methods of hybrid values."""
        """Test construction and basic methods of hybrid values."""

        hv1 = gtsam.HybridValues()
        hv1.insert(X(0), np.ones((3,1)))
        hv1.insert(X(0), np.ones((3, 1)))
        hv1.insert(C(0), 2)

        hv2 = gtsam.HybridValues()
        hv2.insert(C(0), 2)
        hv2.insert(X(0), np.ones((3,1)))
        hv2.insert(X(0), np.ones((3, 1)))

        self.assertEqual(hv1.atDiscrete(C(0)), 2)
        self.assertEqual(hv1.at(X(0))[0], np.ones((3,1))[0])
        self.assertEqual(hv1.at(X(0))[0], np.ones((3, 1))[0])


if __name__ == "__main__":
    unittest.main()
@ -13,7 +13,7 @@ import unittest
import numpy as np

import gtsam
from gtsam import Rot3
from gtsam import Point3, Rot3, Unit3
from gtsam.utils.test_case import GtsamTestCase

@ -2032,6 +2032,31 @@ class TestRot3(GtsamTestCase):
        angle_deg = np.rad2deg(angle)
        assert angle_deg < 180

    def test_rotate(self) -> None:
        """Test that rotate() works for both Point3 and Unit3."""
        R = Rot3(np.array([[1, 0, 0], [0, -1, 0], [0, 0, 1]]))
        p = Point3(1., 1., 1.)
        u = Unit3(np.array([1, 1, 1]))
        actual_p = R.rotate(p)
        actual_u = R.rotate(u)
        expected_p = Point3(np.array([1, -1, 1]))
        expected_u = Unit3(np.array([1, -1, 1]))
        np.testing.assert_array_equal(actual_p, expected_p)
        np.testing.assert_array_equal(actual_u.point3(), expected_u.point3())

    def test_unrotate(self) -> None:
        """Test that unrotate() after rotate() yields original Point3/Unit3."""
        wRc = Rot3(np.array(R1_R2_pairs[0][0]))
        c_p = Point3(1., 1., 1.)
        c_u = Unit3(np.array([1, 1, 1]))
        w_p = wRc.rotate(c_p)
        w_u = wRc.rotate(c_u)
        actual_p = wRc.unrotate(w_p)
        actual_u = wRc.unrotate(w_u)

        np.testing.assert_almost_equal(actual_p, c_p, decimal=6)
        np.testing.assert_almost_equal(actual_u.point3(), c_u.point3(), decimal=6)


if __name__ == "__main__":
    unittest.main()