Merge pull request #2137 from borglab/update-rng

Update `sample` methods
release/4.3a0
Frank Dellaert 2025-05-18 22:50:47 -04:00 committed by GitHub
commit 01409fc409
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
16 changed files with 60 additions and 109 deletions

View File

@@ -1,11 +1,18 @@
#pragma once
#include <string>
#include <iostream>
#include <sstream>
#include <gtsam/dllexport.h>
#include <iostream>
#include <random>
#include <sstream>
#include <string>
/**
* @brief Global default pseudo-random number generator object.
* In wrappers we can access std::mt19937_64 via gtsam.MT19937
*/
static std::mt19937_64 kRandomNumberGenerator(42);
namespace gtsam {
/**
* For Python __str__().
@@ -28,7 +35,7 @@ private:
std::streambuf* coutBuffer_;
};
}
} // namespace gtsam
namespace gtsam {
// Adapted from https://stackoverflow.com/a/32223343/9151520

View File

@@ -18,6 +18,7 @@
#include <gtsam/base/Testable.h>
#include <gtsam/base/debug.h>
#include <gtsam/base/utilities.h>
#include <gtsam/discrete/DiscreteConditional.h>
#include <gtsam/discrete/Ring.h>
#include <gtsam/discrete/Signature.h>

View File

@@ -27,9 +27,6 @@
#include <string>
#include <vector>
// In wrappers we can access std::mt19937_64 via gtsam.MT19937
static std::mt19937_64 kRandomNumberGenerator(42);
namespace gtsam {
/**

View File

@@ -17,6 +17,7 @@
#include <gtsam/base/Testable.h>
#include <gtsam/base/debug.h>
#include <gtsam/base/utilities.h>
#include <gtsam/discrete/Ring.h>
#include <gtsam/discrete/Signature.h>
#include <gtsam/discrete/TableDistribution.h>

View File

@@ -202,16 +202,6 @@ HybridValues HybridBayesNet::sample(std::mt19937_64 *rng) const {
return sample(given, rng);
}
/* ************************************************************************* */
HybridValues HybridBayesNet::sample(const HybridValues &given) const {
return sample(given, &kRandomNumberGenerator);
}
/* ************************************************************************* */
HybridValues HybridBayesNet::sample() const {
return sample(&kRandomNumberGenerator);
}
/* ************************************************************************* */
AlgebraicDecisionTree<Key> HybridBayesNet::errorTree(
const VectorValues &continuousValues) const {

View File

@@ -181,10 +181,11 @@ class GTSAM_EXPORT HybridBayesNet : public BayesNet<HybridConditional> {
* auto sample = bn.sample(given, &rng);
*
* @param given Values of missing variables.
* @param rng The pseudo-random number generator.
* @param rng The optional pseudo-random number generator.
* @return HybridValues
*/
HybridValues sample(const HybridValues &given, std::mt19937_64 *rng) const;
HybridValues sample(const HybridValues &given,
std::mt19937_64 *rng = nullptr) const;
/**
* @brief Sample using ancestral sampling.
@@ -193,25 +194,10 @@ class GTSAM_EXPORT HybridBayesNet : public BayesNet<HybridConditional> {
* std::mt19937_64 rng(42);
* auto sample = bn.sample(&rng);
*
* @param rng The pseudo-random number generator.
* @param rng The optional pseudo-random number generator.
* @return HybridValues
*/
HybridValues sample(std::mt19937_64 *rng) const;
/**
* @brief Sample from an incomplete BayesNet, use default rng.
*
* @param given Values of missing variables.
* @return HybridValues
*/
HybridValues sample(const HybridValues &given) const;
/**
* @brief Sample using ancestral sampling, use default rng.
*
* @return HybridValues
*/
HybridValues sample() const;
HybridValues sample(std::mt19937_64 *rng = nullptr) const;
/**
* @brief Prune the Bayes Net such that we have at most maxNrLeaves leaves.

View File

@@ -158,10 +158,8 @@ class HybridBayesNet {
gtsam::HybridValues optimize() const;
gtsam::VectorValues optimize(const gtsam::DiscreteValues& assignment) const;
gtsam::HybridValues sample(const gtsam::HybridValues& given, std::mt19937_64@ rng) const;
gtsam::HybridValues sample(std::mt19937_64@ rng) const;
gtsam::HybridValues sample(const gtsam::HybridValues& given) const;
gtsam::HybridValues sample() const;
gtsam::HybridValues sample(const gtsam::HybridValues& given, std::mt19937_64@ rng = nullptr) const;
gtsam::HybridValues sample(std::mt19937_64@ rng = nullptr) const;
void print(string s = "HybridBayesNet\n",
const gtsam::KeyFormatter& keyFormatter =

View File

@@ -26,9 +26,6 @@
using namespace std;
using namespace gtsam;
// In Wrappers we have no access to this so have a default ready
static std::mt19937_64 kRandomNumberGenerator(42);
namespace gtsam {
// Instantiate base class
@@ -76,15 +73,6 @@ namespace gtsam {
return result;
}
/* ************************************************************************ */
VectorValues GaussianBayesNet::sample() const {
return sample(&kRandomNumberGenerator);
}
VectorValues GaussianBayesNet::sample(const VectorValues& given) const {
return sample(given, &kRandomNumberGenerator);
}
/* ************************************************************************ */
VectorValues GaussianBayesNet::optimizeGradientSearch() const
{

View File

@@ -131,7 +131,7 @@ namespace gtsam {
* std::mt19937_64 rng(42);
* auto sample = gbn.sample(&rng);
*/
VectorValues sample(std::mt19937_64* rng) const;
VectorValues sample(std::mt19937_64* rng = nullptr) const;
/**
* Sample from an incomplete BayesNet, given missing variables
@@ -140,13 +140,7 @@ namespace gtsam {
* VectorValues given = ...;
* auto sample = gbn.sample(given, &rng);
*/
VectorValues sample(const VectorValues& given, std::mt19937_64* rng) const;
/// Sample using ancestral sampling, use default rng
VectorValues sample() const;
/// Sample from an incomplete BayesNet, use default rng
VectorValues sample(const VectorValues& given) const;
VectorValues sample(const VectorValues& given, std::mt19937_64* rng = nullptr) const;
/**
* Return ordering corresponding to a topological sort.

View File

@@ -15,11 +15,12 @@
* @author Christian Potthast, Frank Dellaert
*/
#include <gtsam/base/utilities.h>
#include <gtsam/hybrid/HybridValues.h>
#include <gtsam/linear/GaussianConditional.h>
#include <gtsam/linear/Sampler.h>
#include <gtsam/linear/VectorValues.h>
#include <gtsam/linear/linearExceptions.h>
#include <gtsam/hybrid/HybridValues.h>
#ifdef __GNUC__
#pragma GCC diagnostic push
@@ -34,9 +35,6 @@
#include <string>
#include <cmath>
// In wrappers we can access std::mt19937_64 via gtsam.MT19937
static std::mt19937_64 kRandomNumberGenerator(42);
using namespace std;
namespace gtsam {
@@ -347,6 +345,10 @@ namespace gtsam {
VectorValues solution = solve(parentsValues);
Key key = firstFrontalKey();
// Check if rng is nullptr, then assign default
rng = (rng == nullptr) ? &kRandomNumberGenerator : rng;
// The vector of sigma values for sampling.
// If no model, initialize sigmas to 1, else to model sigmas
const Vector& sigmas = (!model_) ? Vector::Ones(rows()) : model_->sigmas();
@@ -359,16 +361,7 @@ namespace gtsam {
throw std::invalid_argument(
"sample() can only be invoked on no-parent prior");
VectorValues values;
return sample(values);
}
/* ************************************************************************ */
VectorValues GaussianConditional::sample() const {
return sample(&kRandomNumberGenerator);
}
VectorValues GaussianConditional::sample(const VectorValues& given) const {
return sample(given, &kRandomNumberGenerator);
return sample(values, rng);
}
/* ************************************************************************ */

View File

@@ -217,7 +217,7 @@ namespace gtsam {
* std::mt19937_64 rng(42);
* auto sample = gc.sample(&rng);
*/
VectorValues sample(std::mt19937_64* rng) const;
VectorValues sample(std::mt19937_64* rng = nullptr) const;
/**
* Sample from conditional, given missing variables
@@ -227,13 +227,7 @@ namespace gtsam {
* auto sample = gc.sample(given, &rng);
*/
VectorValues sample(const VectorValues& parentsValues,
std::mt19937_64* rng) const;
/// Sample, use default rng
VectorValues sample() const;
/// Sample with given values, use default rng
VectorValues sample(const VectorValues& parentsValues) const;
std::mt19937_64* rng = nullptr) const;
/// @}
/// @name Linear algebra.

View File

@@ -560,10 +560,9 @@ virtual class GaussianConditional : gtsam::JacobianFactor {
const gtsam::VectorValues& frontalValues) const;
gtsam::JacobianFactor* likelihood(gtsam::Vector frontal) const;
gtsam::VectorValues sample(std::mt19937_64@ rng) const;
gtsam::VectorValues sample(const gtsam::VectorValues& parents, std::mt19937_64@ rng) const;
gtsam::VectorValues sample() const;
gtsam::VectorValues sample(const gtsam::VectorValues& parents) const;
gtsam::VectorValues sample(std::mt19937_64 @rng = nullptr) const;
gtsam::VectorValues sample(const gtsam::VectorValues& parents,
std::mt19937_64 @rng = nullptr) const;
// Advanced Interface
gtsam::VectorValues solveOtherRHS(const gtsam::VectorValues& parents,
@@ -630,8 +629,9 @@ virtual class GaussianBayesNet {
gtsam::VectorValues optimize(const gtsam::VectorValues& given) const;
gtsam::VectorValues optimizeGradientSearch() const;
gtsam::VectorValues sample(const gtsam::VectorValues& given) const;
gtsam::VectorValues sample() const;
gtsam::VectorValues sample(const gtsam::VectorValues& given,
std::mt19937_64 @rng = nullptr) const;
gtsam::VectorValues sample(std::mt19937_64 @rng = nullptr) const;
gtsam::VectorValues backSubstitute(const gtsam::VectorValues& gx) const;
gtsam::VectorValues backSubstituteTranspose(const gtsam::VectorValues& gx) const;

View File

@@ -441,7 +441,7 @@ TEST(GaussianConditional, likelihood) {
/* ************************************************************************* */
// Test sampling
TEST(GaussianConditional, sample) {
TEST(GaussianConditional, Sample) {
Matrix A1 = (Matrix(2, 2) << 1., 2., 3., 4.).finished();
const Vector2 b(20, 40), x1(3, 4);
const double sigma = 0.01;
@@ -465,8 +465,10 @@ TEST(GaussianConditional, sample) {
auto actual3 = conditional.sample(given, &rng);
EXPECT_LONGS_EQUAL(1, actual2.size());
// regressions
#if __APPLE__ || _WIN32
EXPECT(assert_equal(Vector2(31.0111856, 64.9850775), actual2[X(0)], 1e-5));
#if __APPLE__
EXPECT(assert_equal(Vector2(31.0111856, 64.9850775), actual2[X(0)], 1e-5));
#elif _WIN32
EXPECT(assert_equal(Vector2(30.995317, 64.9943165), actual2[X(0)], 1e-5));
#elif __linux__
EXPECT(assert_equal(Vector2(30.9809331, 64.9927588), actual2[X(0)], 1e-5));
#endif

View File

@@ -42,7 +42,7 @@
namespace gtsam {
// In Wrappers we have no access to this so have a default ready
static std::mt19937 kRandomNumberGenerator(42);
static std::mt19937 kPRNG(42);
using Sparse = Eigen::SparseMatrix<double>;
@@ -869,7 +869,7 @@ Values ShonanAveraging<d>::initializeRandomly(std::mt19937 &rng) const {
/* ************************************************************************* */
template <size_t d>
Values ShonanAveraging<d>::initializeRandomly() const {
return initializeRandomly(kRandomNumberGenerator);
return initializeRandomly(kPRNG);
}
/* ************************************************************************* */
@@ -883,7 +883,7 @@ Values ShonanAveraging<d>::initializeRandomlyAt(size_t p,
/* ************************************************************************* */
template <size_t d>
Values ShonanAveraging<d>::initializeRandomlyAt(size_t p) const {
return initializeRandomlyAt(p, kRandomNumberGenerator);
return initializeRandomlyAt(p, kPRNG);
}
/* ************************************************************************* */

View File

@@ -39,7 +39,7 @@ using namespace gtsam;
using namespace std;
// In Wrappers we have no access to this so have a default ready.
static std::mt19937 kRandomNumberGenerator(42);
static std::mt19937 kPRNG(42);
// Some relative translations may be zero. We treat nodes that have a zero
// relativeTranslation as a single node.
@@ -185,7 +185,7 @@ Values TranslationRecovery::initializeRandomly(
const std::vector<BinaryMeasurement<Point3>> &betweenTranslations,
const Values &initialValues) const {
return initializeRandomly(relativeTranslations, betweenTranslations,
&kRandomNumberGenerator, initialValues);
&kPRNG, initialValues);
}
Values TranslationRecovery::run(

View File

@@ -45,7 +45,7 @@ ShonanAveraging3 fromExampleName(
static const ShonanAveraging3 kShonan = fromExampleName("toyExample.g2o");
static std::mt19937 kRandomNumberGenerator(42);
static std::mt19937 kPRNG(42);
/* ************************************************************************* */
TEST(ShonanAveraging3, checkConstructor) {
@@ -78,7 +78,7 @@ TEST(ShonanAveraging3, buildGraphAt) {
/* ************************************************************************* */
TEST(ShonanAveraging3, checkOptimality) {
const Values randomRotations = kShonan.initializeRandomly(kRandomNumberGenerator);
const Values randomRotations = kShonan.initializeRandomly(kPRNG);
Values random = ShonanAveraging3::LiftTo<Rot3>(4, randomRotations); // lift to 4!
auto Lambda = kShonan.computeLambda(random);
EXPECT_LONGS_EQUAL(15, Lambda.rows());
@@ -106,7 +106,7 @@ TEST(ShonanAveraging3, checkSubgraph) {
// Create initial random estimation
Values initial;
initial = subgraphShonan.initializeRandomly(kRandomNumberGenerator);
initial = subgraphShonan.initializeRandomly(kPRNG);
// Run Shonan with SUBGRAPH solver
auto result = subgraphShonan.run(initial, 3, 3);
@@ -115,7 +115,7 @@ TEST(ShonanAveraging3, checkSubgraph) {
/* ************************************************************************* */
TEST(ShonanAveraging3, tryOptimizingAt3) {
const Values randomRotations = kShonan.initializeRandomly(kRandomNumberGenerator);
const Values randomRotations = kShonan.initializeRandomly(kPRNG);
Values initial = ShonanAveraging3::LiftTo<Rot3>(3, randomRotations); // convert to SOn
EXPECT(!kShonan.checkOptimality(initial));
const Values result = kShonan.tryOptimizingAt(3, initial);
@@ -130,7 +130,7 @@ TEST(ShonanAveraging3, tryOptimizingAt3) {
/* ************************************************************************* */
TEST(ShonanAveraging3, tryOptimizingAt4) {
const Values randomRotations = kShonan.initializeRandomly(kRandomNumberGenerator);
const Values randomRotations = kShonan.initializeRandomly(kPRNG);
Values random = ShonanAveraging3::LiftTo<Rot3>(4, randomRotations); // lift to 4!
const Values result = kShonan.tryOptimizingAt(4, random);
EXPECT(kShonan.checkOptimality(result));
@@ -228,7 +228,7 @@ TEST(ShonanAveraging3, CheckWithEigen) {
/* ************************************************************************* */
TEST(ShonanAveraging3, initializeWithDescent) {
const Values randomRotations = kShonan.initializeRandomly(kRandomNumberGenerator);
const Values randomRotations = kShonan.initializeRandomly(kPRNG);
Values random = ShonanAveraging3::LiftTo<Rot3>(3, randomRotations);
const Values Qstar3 = kShonan.tryOptimizingAt(3, random);
Vector minEigenVector;
@@ -240,7 +240,7 @@ TEST(ShonanAveraging3, initializeWithDescent) {
/* ************************************************************************* */
TEST(ShonanAveraging3, run) {
auto initial = kShonan.initializeRandomly(kRandomNumberGenerator);
auto initial = kShonan.initializeRandomly(kPRNG);
auto result = kShonan.run(initial, 5);
EXPECT_DOUBLES_EQUAL(0, kShonan.cost(result.first), 1e-3);
EXPECT_DOUBLES_EQUAL(-5.427688831332745e-07, result.second,
@@ -295,7 +295,7 @@ TEST(ShonanAveraging3, runKlaus) {
EXPECT(assert_equal(R02, wR0.between(wR2), 0.1));
// Run Shonan (with prior on first rotation)
auto initial = shonan.initializeRandomly(kRandomNumberGenerator);
auto initial = shonan.initializeRandomly(kPRNG);
auto result = shonan.run(initial, 5);
EXPECT_DOUBLES_EQUAL(0, shonan.cost(result.first), 1e-2);
EXPECT_DOUBLES_EQUAL(-9.2259161494467889e-05, result.second,
@@ -323,7 +323,7 @@ TEST(ShonanAveraging3, runKlausKarcher) {
static const ShonanAveraging3 shonan = fromExampleName("Klaus3.g2o");
// Run Shonan (with Karcher mean prior)
auto initial = shonan.initializeRandomly(kRandomNumberGenerator);
auto initial = shonan.initializeRandomly(kPRNG);
auto result = shonan.run(initial, 5);
EXPECT_DOUBLES_EQUAL(0, shonan.cost(result.first), 1e-2);
EXPECT_DOUBLES_EQUAL(-1.361402670507772e-05, result.second,
@@ -353,7 +353,7 @@ TEST(ShonanAveraging2, noisyToyGraph) {
// Check graph building
NonlinearFactorGraph graph = shonan.buildGraphAt(2);
EXPECT_LONGS_EQUAL(6, graph.size());
auto initial = shonan.initializeRandomly(kRandomNumberGenerator);
auto initial = shonan.initializeRandomly(kPRNG);
auto result = shonan.run(initial, 2);
EXPECT_DOUBLES_EQUAL(0.0008211, shonan.cost(result.first), 1e-6);
EXPECT_DOUBLES_EQUAL(0, result.second, 1e-10); // certificate!
@@ -391,7 +391,7 @@ TEST(ShonanAveraging2, noisyToyGraphWithHuber) {
}
// test result
auto initial = shonan.initializeRandomly(kRandomNumberGenerator);
auto initial = shonan.initializeRandomly(kPRNG);
auto result = shonan.run(initial, 2,2);
EXPECT_DOUBLES_EQUAL(0.0008211, shonan.cost(result.first), 1e-6);
EXPECT_DOUBLES_EQUAL(0, result.second, 1e-10); // certificate!