commit 01409fc409

@@ -1,11 +1,18 @@
 #pragma once
 
-#include <string>
-#include <iostream>
-#include <sstream>
-
 #include <gtsam/dllexport.h>
 
+#include <iostream>
+#include <random>
+#include <sstream>
+#include <string>
+
+/**
+ * @brief Global default pseudo-random number generator object.
+ * In wrappers we can access std::mt19937_64 via gtsam.MT19937
+ */
+static std::mt19937_64 kRandomNumberGenerator(42);
+
 namespace gtsam {
 /**
  * For Python __str__().
@@ -28,7 +35,7 @@ private:
   std::streambuf* coutBuffer_;
 };
 
-}
+}  // namespace gtsam
 
 namespace gtsam {
 // Adapted from https://stackoverflow.com/a/32223343/9151520
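The hunks above (apparently gtsam/base/utilities.h) introduce the single shared default generator that the rest of this commit relies on. Below is a minimal illustrative sketch, not part of GTSAM, of the pattern being adopted: an optional generator pointer that falls back to kRandomNumberGenerator when the caller passes nothing (drawStandardNormal is a made-up helper).

#include <random>

// Shared default generator, as declared in the header above.
static std::mt19937_64 kRandomNumberGenerator(42);

// Illustrative helper (not part of GTSAM): callers may omit the generator,
// in which case the shared default is used.
double drawStandardNormal(std::mt19937_64* rng = nullptr) {
  if (rng == nullptr) rng = &kRandomNumberGenerator;  // fall back to default
  std::normal_distribution<double> standardNormal(0.0, 1.0);
  return standardNormal(*rng);
}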
@@ -18,6 +18,7 @@
 
 #include <gtsam/base/Testable.h>
 #include <gtsam/base/debug.h>
+#include <gtsam/base/utilities.h>
 #include <gtsam/discrete/DiscreteConditional.h>
 #include <gtsam/discrete/Ring.h>
 #include <gtsam/discrete/Signature.h>
@@ -27,9 +27,6 @@
 #include <string>
 #include <vector>
 
-// In wrappers we can access std::mt19937_64 via gtsam.MT19937
-static std::mt19937_64 kRandomNumberGenerator(42);
-
 namespace gtsam {
 
 /**
@@ -17,6 +17,7 @@
 
 #include <gtsam/base/Testable.h>
 #include <gtsam/base/debug.h>
+#include <gtsam/base/utilities.h>
 #include <gtsam/discrete/Ring.h>
 #include <gtsam/discrete/Signature.h>
 #include <gtsam/discrete/TableDistribution.h>
@@ -202,16 +202,6 @@ HybridValues HybridBayesNet::sample(std::mt19937_64 *rng) const {
   return sample(given, rng);
 }
 
-/* ************************************************************************* */
-HybridValues HybridBayesNet::sample(const HybridValues &given) const {
-  return sample(given, &kRandomNumberGenerator);
-}
-
-/* ************************************************************************* */
-HybridValues HybridBayesNet::sample() const {
-  return sample(&kRandomNumberGenerator);
-}
-
 /* ************************************************************************* */
 AlgebraicDecisionTree<Key> HybridBayesNet::errorTree(
     const VectorValues &continuousValues) const {
@@ -181,10 +181,11 @@ class GTSAM_EXPORT HybridBayesNet : public BayesNet<HybridConditional> {
    * auto sample = bn.sample(given, &rng);
    *
    * @param given Values of missing variables.
-   * @param rng The pseudo-random number generator.
+   * @param rng The optional pseudo-random number generator.
    * @return HybridValues
    */
-  HybridValues sample(const HybridValues &given, std::mt19937_64 *rng) const;
+  HybridValues sample(const HybridValues &given,
+                      std::mt19937_64 *rng = nullptr) const;
 
   /**
    * @brief Sample using ancestral sampling.
@@ -193,25 +194,10 @@ class GTSAM_EXPORT HybridBayesNet : public BayesNet<HybridConditional> {
    * std::mt19937_64 rng(42);
    * auto sample = bn.sample(&rng);
    *
-   * @param rng The pseudo-random number generator.
+   * @param rng The optional pseudo-random number generator.
    * @return HybridValues
    */
-  HybridValues sample(std::mt19937_64 *rng) const;
+  HybridValues sample(std::mt19937_64 *rng = nullptr) const;
 
-  /**
-   * @brief Sample from an incomplete BayesNet, use default rng.
-   *
-   * @param given Values of missing variables.
-   * @return HybridValues
-   */
-  HybridValues sample(const HybridValues &given) const;
-
-  /**
-   * @brief Sample using ancestral sampling, use default rng.
-   *
-   * @return HybridValues
-   */
-  HybridValues sample() const;
-
   /**
    * @brief Prune the Bayes Net such that we have at most maxNrLeaves leaves.
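With the defaulted rng parameter, the four HybridBayesNet::sample overloads collapse to two. Below is a usage sketch against the new signatures; bn, given, and demoHybridSampling are illustrative assumptions, not code from this commit.

#include <random>

#include <gtsam/hybrid/HybridBayesNet.h>
#include <gtsam/hybrid/HybridValues.h>

// Sketch only: `bn` and `given` are assumed to be built elsewhere.
void demoHybridSampling(const gtsam::HybridBayesNet& bn,
                        const gtsam::HybridValues& given) {
  std::mt19937_64 rng(42);
  const gtsam::HybridValues s1 = bn.sample(&rng);         // explicit, reproducible generator
  const gtsam::HybridValues s2 = bn.sample();             // falls back to the shared default
  const gtsam::HybridValues s3 = bn.sample(given, &rng);  // condition on `given`
  const gtsam::HybridValues s4 = bn.sample(given);        // condition on `given`, default rng
}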
@@ -158,10 +158,8 @@ class HybridBayesNet {
   gtsam::HybridValues optimize() const;
   gtsam::VectorValues optimize(const gtsam::DiscreteValues& assignment) const;
 
-  gtsam::HybridValues sample(const gtsam::HybridValues& given, std::mt19937_64@ rng) const;
-  gtsam::HybridValues sample(std::mt19937_64@ rng) const;
-  gtsam::HybridValues sample(const gtsam::HybridValues& given) const;
-  gtsam::HybridValues sample() const;
+  gtsam::HybridValues sample(const gtsam::HybridValues& given, std::mt19937_64@ rng = nullptr) const;
+  gtsam::HybridValues sample(std::mt19937_64@ rng = nullptr) const;
 
   void print(string s = "HybridBayesNet\n",
              const gtsam::KeyFormatter& keyFormatter =
@@ -26,9 +26,6 @@
 using namespace std;
 using namespace gtsam;
 
-// In Wrappers we have no access to this so have a default ready
-static std::mt19937_64 kRandomNumberGenerator(42);
-
 namespace gtsam {
 
   // Instantiate base class
@@ -76,15 +73,6 @@ namespace gtsam {
     return result;
   }
 
-  /* ************************************************************************ */
-  VectorValues GaussianBayesNet::sample() const {
-    return sample(&kRandomNumberGenerator);
-  }
-
-  VectorValues GaussianBayesNet::sample(const VectorValues& given) const {
-    return sample(given, &kRandomNumberGenerator);
-  }
-
   /* ************************************************************************ */
   VectorValues GaussianBayesNet::optimizeGradientSearch() const
   {
@@ -131,7 +131,7 @@ namespace gtsam {
      * std::mt19937_64 rng(42);
      * auto sample = gbn.sample(&rng);
      */
-    VectorValues sample(std::mt19937_64* rng) const;
+    VectorValues sample(std::mt19937_64* rng = nullptr) const;
 
     /**
      * Sample from an incomplete BayesNet, given missing variables
@@ -140,13 +140,7 @@ namespace gtsam {
      * VectorValues given = ...;
      * auto sample = gbn.sample(given, &rng);
      */
-    VectorValues sample(const VectorValues& given, std::mt19937_64* rng) const;
-
-    /// Sample using ancestral sampling, use default rng
-    VectorValues sample() const;
-
-    /// Sample from an incomplete BayesNet, use default rng
-    VectorValues sample(const VectorValues& given) const;
+    VectorValues sample(const VectorValues& given, std::mt19937_64* rng = nullptr) const;
 
     /**
      * Return ordering corresponding to a topological sort.
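GaussianBayesNet gets the same consolidation. A sketch of call sites under the new declarations; gbn, given, and the demo function are illustrative assumptions only.

#include <random>

#include <gtsam/linear/GaussianBayesNet.h>
#include <gtsam/linear/VectorValues.h>

// Sketch only: `gbn` and `given` are assumed to be built elsewhere.
void demoGaussianBayesNetSampling(const gtsam::GaussianBayesNet& gbn,
                                  const gtsam::VectorValues& given) {
  std::mt19937_64 rng(42);
  const gtsam::VectorValues s1 = gbn.sample(&rng);         // ancestral sampling, explicit rng
  const gtsam::VectorValues s2 = gbn.sample();             // same, using the shared default
  const gtsam::VectorValues s3 = gbn.sample(given, &rng);  // incomplete BayesNet, given values
}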
@@ -15,11 +15,12 @@
  * @author Christian Potthast, Frank Dellaert
  */
 
+#include <gtsam/base/utilities.h>
+#include <gtsam/hybrid/HybridValues.h>
 #include <gtsam/linear/GaussianConditional.h>
 #include <gtsam/linear/Sampler.h>
 #include <gtsam/linear/VectorValues.h>
 #include <gtsam/linear/linearExceptions.h>
-#include <gtsam/hybrid/HybridValues.h>
 
 #ifdef __GNUC__
 #pragma GCC diagnostic push
@@ -34,9 +35,6 @@
 #include <string>
 #include <cmath>
 
-// In wrappers we can access std::mt19937_64 via gtsam.MT19937
-static std::mt19937_64 kRandomNumberGenerator(42);
-
 using namespace std;
 
 namespace gtsam {
@@ -347,6 +345,10 @@ namespace gtsam {
 
     VectorValues solution = solve(parentsValues);
     Key key = firstFrontalKey();
+
+    // Check if rng is nullptr, then assign default
+    rng = (rng == nullptr) ? &kRandomNumberGenerator : rng;
+
     // The vector of sigma values for sampling.
     // If no model, initialize sigmas to 1, else to model sigmas
     const Vector& sigmas = (!model_) ? Vector::Ones(rows()) : model_->sigmas();
@@ -359,16 +361,7 @@ namespace gtsam {
       throw std::invalid_argument(
           "sample() can only be invoked on no-parent prior");
     VectorValues values;
-    return sample(values);
-  }
-
-  /* ************************************************************************ */
-  VectorValues GaussianConditional::sample() const {
-    return sample(&kRandomNumberGenerator);
-  }
-
-  VectorValues GaussianConditional::sample(const VectorValues& given) const {
-    return sample(given, &kRandomNumberGenerator);
+    return sample(values, rng);
   }
 
   /* ************************************************************************ */
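GaussianConditional follows the same pattern: the rng pointer now defaults to nullptr and is swapped for the shared default generator inside the implementation, while the no-argument path still requires a parent-less conditional (it throws otherwise, as in the hunk above). A sketch under those assumptions; prior, conditional, and parents are assumed to be built elsewhere.

#include <random>

#include <gtsam/linear/GaussianConditional.h>
#include <gtsam/linear/VectorValues.h>

// Sketch only: `prior` has no parents, `conditional` has parents whose
// values are supplied in `parents`; all three are built elsewhere.
void demoConditionalSampling(const gtsam::GaussianConditional& prior,
                             const gtsam::GaussianConditional& conditional,
                             const gtsam::VectorValues& parents) {
  std::mt19937_64 rng(42);
  const gtsam::VectorValues a = prior.sample(&rng);           // explicit generator
  const gtsam::VectorValues b = prior.sample();               // shared default generator
  const gtsam::VectorValues c = conditional.sample(parents, &rng);
  const gtsam::VectorValues d = conditional.sample(parents);  // default generator
  // Calling conditional.sample() without parent values would throw
  // std::invalid_argument, per the check in the implementation above.
}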
@@ -217,7 +217,7 @@ namespace gtsam {
      * std::mt19937_64 rng(42);
      * auto sample = gc.sample(&rng);
      */
-    VectorValues sample(std::mt19937_64* rng) const;
+    VectorValues sample(std::mt19937_64* rng = nullptr) const;
 
     /**
      * Sample from conditional, given missing variables
@@ -227,13 +227,7 @@ namespace gtsam {
      * auto sample = gc.sample(given, &rng);
      */
     VectorValues sample(const VectorValues& parentsValues,
-                        std::mt19937_64* rng) const;
-
-    /// Sample, use default rng
-    VectorValues sample() const;
-
-    /// Sample with given values, use default rng
-    VectorValues sample(const VectorValues& parentsValues) const;
+                        std::mt19937_64* rng = nullptr) const;
 
     /// @}
     /// @name Linear algebra.
@@ -560,10 +560,9 @@ virtual class GaussianConditional : gtsam::JacobianFactor {
                              const gtsam::VectorValues& frontalValues) const;
   gtsam::JacobianFactor* likelihood(gtsam::Vector frontal) const;
 
-  gtsam::VectorValues sample(std::mt19937_64@ rng) const;
-  gtsam::VectorValues sample(const gtsam::VectorValues& parents, std::mt19937_64@ rng) const;
-  gtsam::VectorValues sample() const;
-  gtsam::VectorValues sample(const gtsam::VectorValues& parents) const;
+  gtsam::VectorValues sample(std::mt19937_64 @rng = nullptr) const;
+  gtsam::VectorValues sample(const gtsam::VectorValues& parents,
+                             std::mt19937_64 @rng = nullptr) const;
 
   // Advanced Interface
   gtsam::VectorValues solveOtherRHS(const gtsam::VectorValues& parents,
@@ -629,9 +628,10 @@ virtual class GaussianBayesNet {
   gtsam::VectorValues optimize() const;
   gtsam::VectorValues optimize(const gtsam::VectorValues& given) const;
   gtsam::VectorValues optimizeGradientSearch() const;
 
-  gtsam::VectorValues sample(const gtsam::VectorValues& given) const;
-  gtsam::VectorValues sample() const;
+  gtsam::VectorValues sample(const gtsam::VectorValues& given,
+                             std::mt19937_64 @rng = nullptr) const;
+  gtsam::VectorValues sample(std::mt19937_64 @rng = nullptr) const;
   gtsam::VectorValues backSubstitute(const gtsam::VectorValues& gx) const;
   gtsam::VectorValues backSubstituteTranspose(const gtsam::VectorValues& gx) const;
 
@@ -441,7 +441,7 @@ TEST(GaussianConditional, likelihood) {
 
 /* ************************************************************************* */
 // Test sampling
-TEST(GaussianConditional, sample) {
+TEST(GaussianConditional, Sample) {
   Matrix A1 = (Matrix(2, 2) << 1., 2., 3., 4.).finished();
   const Vector2 b(20, 40), x1(3, 4);
   const double sigma = 0.01;
@@ -465,8 +465,10 @@ TEST(GaussianConditional, sample) {
   auto actual3 = conditional.sample(given, &rng);
   EXPECT_LONGS_EQUAL(1, actual2.size());
   // regressions
-#if __APPLE__ || _WIN32
+#if __APPLE__
   EXPECT(assert_equal(Vector2(31.0111856, 64.9850775), actual2[X(0)], 1e-5));
+#elif _WIN32
+  EXPECT(assert_equal(Vector2(30.995317, 64.9943165), actual2[X(0)], 1e-5));
 #elif __linux__
   EXPECT(assert_equal(Vector2(30.9809331, 64.9927588), actual2[X(0)], 1e-5));
 #endif
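A likely reason for splitting the regression values per platform in the hunk above: std::mt19937_64 is fully specified by the C++ standard, but std::normal_distribution is not, so the same seed can yield different normal variates under libstdc++, libc++, and MSVC's STL. A small standalone check:

#include <iostream>
#include <random>

int main() {
  std::mt19937_64 rng(42);
  std::normal_distribution<double> normal(0.0, 1.0);
  // The printed value differs between standard-library implementations,
  // which is why the test above keys its expected values on the platform.
  std::cout << normal(rng) << std::endl;
  return 0;
}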
@@ -42,7 +42,7 @@
 namespace gtsam {
 
 // In Wrappers we have no access to this so have a default ready
-static std::mt19937 kRandomNumberGenerator(42);
+static std::mt19937 kPRNG(42);
 
 using Sparse = Eigen::SparseMatrix<double>;
 
@@ -869,7 +869,7 @@ Values ShonanAveraging<d>::initializeRandomly(std::mt19937 &rng) const {
 /* ************************************************************************* */
 template <size_t d>
 Values ShonanAveraging<d>::initializeRandomly() const {
-  return initializeRandomly(kRandomNumberGenerator);
+  return initializeRandomly(kPRNG);
 }
 
 /* ************************************************************************* */
@@ -883,7 +883,7 @@ Values ShonanAveraging<d>::initializeRandomlyAt(size_t p,
 /* ************************************************************************* */
 template <size_t d>
 Values ShonanAveraging<d>::initializeRandomlyAt(size_t p) const {
-  return initializeRandomlyAt(p, kRandomNumberGenerator);
+  return initializeRandomlyAt(p, kPRNG);
 }
 
 /* ************************************************************************* */
@@ -39,7 +39,7 @@ using namespace gtsam;
 using namespace std;
 
 // In Wrappers we have no access to this so have a default ready.
-static std::mt19937 kRandomNumberGenerator(42);
+static std::mt19937 kPRNG(42);
 
 // Some relative translations may be zero. We treat nodes that have a zero
 // relativeTranslation as a single node.
@@ -185,7 +185,7 @@ Values TranslationRecovery::initializeRandomly(
     const std::vector<BinaryMeasurement<Point3>> &betweenTranslations,
     const Values &initialValues) const {
   return initializeRandomly(relativeTranslations, betweenTranslations,
-                            &kRandomNumberGenerator, initialValues);
+                            &kPRNG, initialValues);
 }
 
 Values TranslationRecovery::run(
@@ -45,7 +45,7 @@ ShonanAveraging3 fromExampleName(
 
 static const ShonanAveraging3 kShonan = fromExampleName("toyExample.g2o");
 
-static std::mt19937 kRandomNumberGenerator(42);
+static std::mt19937 kPRNG(42);
 
 /* ************************************************************************* */
 TEST(ShonanAveraging3, checkConstructor) {
@@ -78,7 +78,7 @@ TEST(ShonanAveraging3, buildGraphAt) {
 
 /* ************************************************************************* */
 TEST(ShonanAveraging3, checkOptimality) {
-  const Values randomRotations = kShonan.initializeRandomly(kRandomNumberGenerator);
+  const Values randomRotations = kShonan.initializeRandomly(kPRNG);
   Values random = ShonanAveraging3::LiftTo<Rot3>(4, randomRotations);  // lift to 4!
   auto Lambda = kShonan.computeLambda(random);
   EXPECT_LONGS_EQUAL(15, Lambda.rows());
@@ -106,7 +106,7 @@ TEST(ShonanAveraging3, checkSubgraph) {
 
   // Create initial random estimation
   Values initial;
-  initial = subgraphShonan.initializeRandomly(kRandomNumberGenerator);
+  initial = subgraphShonan.initializeRandomly(kPRNG);
 
   // Run Shonan with SUBGRAPH solver
   auto result = subgraphShonan.run(initial, 3, 3);
@@ -115,7 +115,7 @@ TEST(ShonanAveraging3, checkSubgraph) {
 
 /* ************************************************************************* */
 TEST(ShonanAveraging3, tryOptimizingAt3) {
-  const Values randomRotations = kShonan.initializeRandomly(kRandomNumberGenerator);
+  const Values randomRotations = kShonan.initializeRandomly(kPRNG);
   Values initial = ShonanAveraging3::LiftTo<Rot3>(3, randomRotations);  // convert to SOn
   EXPECT(!kShonan.checkOptimality(initial));
   const Values result = kShonan.tryOptimizingAt(3, initial);
@@ -130,7 +130,7 @@ TEST(ShonanAveraging3, tryOptimizingAt3) {
 
 /* ************************************************************************* */
 TEST(ShonanAveraging3, tryOptimizingAt4) {
-  const Values randomRotations = kShonan.initializeRandomly(kRandomNumberGenerator);
+  const Values randomRotations = kShonan.initializeRandomly(kPRNG);
   Values random = ShonanAveraging3::LiftTo<Rot3>(4, randomRotations);  // lift to 4!
   const Values result = kShonan.tryOptimizingAt(4, random);
   EXPECT(kShonan.checkOptimality(result));
@@ -228,7 +228,7 @@ TEST(ShonanAveraging3, CheckWithEigen) {
 
 /* ************************************************************************* */
 TEST(ShonanAveraging3, initializeWithDescent) {
-  const Values randomRotations = kShonan.initializeRandomly(kRandomNumberGenerator);
+  const Values randomRotations = kShonan.initializeRandomly(kPRNG);
   Values random = ShonanAveraging3::LiftTo<Rot3>(3, randomRotations);
   const Values Qstar3 = kShonan.tryOptimizingAt(3, random);
   Vector minEigenVector;
@@ -240,7 +240,7 @@ TEST(ShonanAveraging3, initializeWithDescent) {
 
 /* ************************************************************************* */
 TEST(ShonanAveraging3, run) {
-  auto initial = kShonan.initializeRandomly(kRandomNumberGenerator);
+  auto initial = kShonan.initializeRandomly(kPRNG);
   auto result = kShonan.run(initial, 5);
   EXPECT_DOUBLES_EQUAL(0, kShonan.cost(result.first), 1e-3);
   EXPECT_DOUBLES_EQUAL(-5.427688831332745e-07, result.second,
@@ -295,7 +295,7 @@ TEST(ShonanAveraging3, runKlaus) {
   EXPECT(assert_equal(R02, wR0.between(wR2), 0.1));
 
   // Run Shonan (with prior on first rotation)
-  auto initial = shonan.initializeRandomly(kRandomNumberGenerator);
+  auto initial = shonan.initializeRandomly(kPRNG);
   auto result = shonan.run(initial, 5);
   EXPECT_DOUBLES_EQUAL(0, shonan.cost(result.first), 1e-2);
   EXPECT_DOUBLES_EQUAL(-9.2259161494467889e-05, result.second,
@@ -323,7 +323,7 @@ TEST(ShonanAveraging3, runKlausKarcher) {
   static const ShonanAveraging3 shonan = fromExampleName("Klaus3.g2o");
 
   // Run Shonan (with Karcher mean prior)
-  auto initial = shonan.initializeRandomly(kRandomNumberGenerator);
+  auto initial = shonan.initializeRandomly(kPRNG);
   auto result = shonan.run(initial, 5);
   EXPECT_DOUBLES_EQUAL(0, shonan.cost(result.first), 1e-2);
   EXPECT_DOUBLES_EQUAL(-1.361402670507772e-05, result.second,
@@ -353,7 +353,7 @@ TEST(ShonanAveraging2, noisyToyGraph) {
   // Check graph building
   NonlinearFactorGraph graph = shonan.buildGraphAt(2);
   EXPECT_LONGS_EQUAL(6, graph.size());
-  auto initial = shonan.initializeRandomly(kRandomNumberGenerator);
+  auto initial = shonan.initializeRandomly(kPRNG);
   auto result = shonan.run(initial, 2);
   EXPECT_DOUBLES_EQUAL(0.0008211, shonan.cost(result.first), 1e-6);
   EXPECT_DOUBLES_EQUAL(0, result.second, 1e-10);  // certificate!
@@ -391,7 +391,7 @@ TEST(ShonanAveraging2, noisyToyGraphWithHuber) {
   }
 
   // test result
-  auto initial = shonan.initializeRandomly(kRandomNumberGenerator);
+  auto initial = shonan.initializeRandomly(kPRNG);
   auto result = shonan.run(initial, 2,2);
   EXPECT_DOUBLES_EQUAL(0.0008211, shonan.cost(result.first), 1e-6);
   EXPECT_DOUBLES_EQUAL(0, result.second, 1e-10);  // certificate!