asProductFactor from base class!

release/4.3a0
Frank Dellaert 2024-10-08 16:34:50 +09:00
parent bcd94e32a3
commit 0f48efb0a9
3 changed files with 45 additions and 78 deletions

View File

@@ -150,36 +150,6 @@ const HybridGaussianConditional::Conditionals& HybridGaussianConditional::condit
return conditionals_;
}
/* *******************************************************************************/
HybridGaussianProductFactor HybridGaussianConditional::asProductFactor() const {
auto wrap = [this](const std::shared_ptr<GaussianConditional>& gc)
-> std::pair<GaussianFactorGraph, double> {
// First check if conditional has not been pruned
if (gc) {
const double Cgm_Kgcm = gc->negLogConstant() - this->negLogConstant_;
// If there is a difference in the covariances, we need to account for
// that since the error is dependent on the mode.
if (Cgm_Kgcm > 0.0) {
// We add a constant factor which will be used when computing
// the probability of the discrete variables.
Vector c(1);
c << std::sqrt(2.0 * Cgm_Kgcm);
auto constantFactor = std::make_shared<JacobianFactor>(c);
return {GaussianFactorGraph{gc, constantFactor}, Cgm_Kgcm};
} else {
// The scalar can be zero.
// TODO(Frank): after hiding is gone, this should be only case here.
return {GaussianFactorGraph{gc}, Cgm_Kgcm};
}
} else {
// If the conditional is pruned, return an empty GaussianFactorGraph with zero scalar sum
// TODO(Frank): Could we just return an *empty* GaussianFactorGraph?
return {GaussianFactorGraph{nullptr}, 0.0};
}
};
return {{conditionals_, wrap}};
}
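For intuition on the constant factor in the branch above: a JacobianFactor built from just a right-hand-side vector contributes 0.5 * ||b||^2 to the factor graph error, so choosing b = sqrt(2 * Cgm_Kgcm) adds exactly the negative-log-constant difference Cgm_Kgcm for that mode. A minimal standalone sketch of that identity (the value of Cgm_Kgcm is made up for illustration):

#include <gtsam/linear/JacobianFactor.h>
#include <gtsam/linear/VectorValues.h>
#include <cassert>
#include <cmath>

int main() {
  const double Cgm_Kgcm = 0.25;  // hypothetical difference in negative log constants
  gtsam::Vector c(1);
  c << std::sqrt(2.0 * Cgm_Kgcm);
  gtsam::JacobianFactor constantFactor(c);  // no keys, only a right-hand side
  // With no variables attached, the error is 0.5 * ||c||^2 = Cgm_Kgcm.
  assert(std::fabs(constantFactor.error(gtsam::VectorValues()) - Cgm_Kgcm) < 1e-12);
  return 0;
}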
/* *******************************************************************************/
size_t HybridGaussianConditional::nrComponents() const {
size_t total = 0;

View File

@@ -222,9 +222,6 @@ class GTSAM_EXPORT HybridGaussianConditional
HybridGaussianConditional::shared_ptr prune(
const DecisionTreeFactor &discreteProbs) const;
/// Convert to a DecisionTree of Gaussian factor graphs.
HybridGaussianProductFactor asProductFactor() const override;
/// @}
private:

View File

@@ -46,25 +46,25 @@ using symbol_shorthand::Z;
DiscreteKey m1(M(1), 2);
void addMeasurement(HybridBayesNet &hbn, Key z_key, Key x_key, double sigma) {
void addMeasurement(HybridBayesNet& hbn, Key z_key, Key x_key, double sigma) {
auto measurement_model = noiseModel::Isotropic::Sigma(1, sigma);
hbn.emplace_shared<GaussianConditional>(z_key, Vector1(0.0), I_1x1, x_key,
-I_1x1, measurement_model);
hbn.emplace_shared<GaussianConditional>(
z_key, Vector1(0.0), I_1x1, x_key, -I_1x1, measurement_model);
}
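For reference, in the document's own notation: assuming the usual GaussianConditional convention R * frontal + S * parent = d + noise, the arguments R = I_1x1, S = -I_1x1, d = Vector1(0.0) above encode

  z = x + v,  v ~ N(0, sigma^2),  i.e.  p(z | x) = N(z; x, sigma^2),

which is the scalar measurement model this helper adds to the Bayes net.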
/// Create hybrid motion model p(x1 | x0, m1)
static HybridGaussianConditional::shared_ptr CreateHybridMotionModel(
double mu0, double mu1, double sigma0, double sigma1) {
static HybridGaussianConditional::shared_ptr CreateHybridMotionModel(double mu0,
double mu1,
double sigma0,
double sigma1) {
std::vector<std::pair<Vector, double>> motionModels{{Vector1(mu0), sigma0},
{Vector1(mu1), sigma1}};
return std::make_shared<HybridGaussianConditional>(m1, X(1), I_1x1, X(0),
motionModels);
return std::make_shared<HybridGaussianConditional>(m1, X(1), I_1x1, X(0), motionModels);
}
/// Create a two-state Bayes network with one or two measurement models
HybridBayesNet CreateBayesNet(
const HybridGaussianConditional::shared_ptr &hybridMotionModel,
bool add_second_measurement = false) {
HybridBayesNet CreateBayesNet(const HybridGaussianConditional::shared_ptr& hybridMotionModel,
bool add_second_measurement = false) {
HybridBayesNet hbn;
// Add measurement model p(z0 | x0)
@@ -86,15 +86,16 @@ HybridBayesNet CreateBayesNet(
/// Approximate the discrete marginal P(m1) using importance sampling
std::pair<double, double> approximateDiscreteMarginal(
const HybridBayesNet &hbn,
const HybridGaussianConditional::shared_ptr &hybridMotionModel,
const VectorValues &given, size_t N = 100000) {
const HybridBayesNet& hbn,
const HybridGaussianConditional::shared_ptr& hybridMotionModel,
const VectorValues& given,
size_t N = 100000) {
/// Create importance sampling network q(x0,x1,m1) = p(x1|x0,m1) q(x0) P(m1),
/// using q(x0) = N(z0, sigmaQ) to sample x0.
HybridBayesNet q;
q.push_back(hybridMotionModel); // Add hybrid motion model
q.emplace_shared<GaussianConditional>(GaussianConditional::FromMeanAndStddev(
X(0), given.at(Z(0)), /* sigmaQ = */ 3.0)); // Add proposal q(x0) for x0
X(0), given.at(Z(0)), /* sigmaQ = */ 3.0)); // Add proposal q(x0) for x0
q.emplace_shared<DiscreteConditional>(m1, "50/50"); // Discrete prior.
// Do importance sampling
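The rest of the sampler (elided from this hunk) weights each draw by the ratio of the target density under hbn to the proposal density under q, then normalizes over the two modes. A hedged sketch of how such a loop can be written, assuming sample(), evaluate(), insert() and atDiscrete() behave as their names suggest (illustrative, not the exact elided code):

  std::mt19937_64 rng(42);        // requires <random>
  double w0 = 0.0, w1 = 0.0;      // accumulated weights for m1 = 0 and m1 = 1
  for (size_t i = 0; i < N; i++) {
    HybridValues sample = q.sample(&rng);  // draw (x0, x1, m1) from the proposal q
    sample.insert(given);                  // fix the measurements z to their given values
    double weight = hbn.evaluate(sample) / q.evaluate(sample);
    if (sample.atDiscrete(M(1)) == 0)
      w0 += weight;
    else
      w1 += weight;
  }
  return {w0 / (w0 + w1), w1 / (w0 + w1)};  // estimates of P(m1 = 0), P(m1 = 1)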
@@ -192,24 +193,25 @@ TEST(HybridGaussianFactor, TwoStateModel2) {
HybridBayesNet hbn = CreateBayesNet(hybridMotionModel);
HybridGaussianFactorGraph gfg = hbn.toFactorGraph(given);
// Check that ratio of Bayes net and factor graph for different modes is
// equal for several values of {x0,x1}.
for (VectorValues vv :
{VectorValues{{X(0), Vector1(0.0)}, {X(1), Vector1(1.0)}},
VectorValues{{X(0), Vector1(0.5)}, {X(1), Vector1(3.0)}}}) {
vv.insert(given); // add measurements for HBN
HybridValues hv0(vv, {{M(1), 0}}), hv1(vv, {{M(1), 1}});
EXPECT_DOUBLES_EQUAL(gfg.error(hv0) / hbn.error(hv0),
gfg.error(hv1) / hbn.error(hv1), 1e-9);
}
HybridBayesNet::shared_ptr eliminated = gfg.eliminateSequential();
HybridBayesNet::shared_ptr bn = gfg.eliminateSequential();
for (VectorValues vv : {VectorValues{{X(0), Vector1(0.0)}, {X(1), Vector1(1.0)}},
VectorValues{{X(0), Vector1(0.5)}, {X(1), Vector1(3.0)}}}) {
vv.insert(given); // add measurements for HBN
const auto& expectedDiscretePosterior = hbn.discretePosterior(vv);
// Equality of posteriors asserts that the factor graph is correct (same ratios for all modes)
EXPECT(assert_equal(expectedDiscretePosterior, gfg.discretePosterior(vv)));
// This one asserts that HBN resulting from elimination is correct.
EXPECT(assert_equal(expectedDiscretePosterior, eliminated->discretePosterior(vv)));
}
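Why posterior equality is the right assertion here: at fixed continuous values vv, discretePosterior normalizes the unnormalized probabilities exp(-error) over the discrete modes,

  P(m1 | x0, x1) = exp(-E(vv, m1)) / sum_m exp(-E(vv, m)),

so any contribution to the error that does not depend on m1 cancels out. Matching hbn.discretePosterior(vv) therefore checks that toFactorGraph (first EXPECT) and sequential elimination (second EXPECT) change the error only by mode-independent terms, which is the "same ratios for all modes" property the comment refers to.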
// Importance sampling run with 100k samples gives 50.095/49.905
// approximateDiscreteMarginal(hbn, hybridMotionModel, given);
// Since there is no measurement on x1, we get a 50/50 probability
auto p_m = bn->at(2)->asDiscrete();
auto p_m = eliminated->at(2)->asDiscrete();
EXPECT_DOUBLES_EQUAL(0.5, p_m->operator()({{M(1), 0}}), 1e-9);
EXPECT_DOUBLES_EQUAL(0.5, p_m->operator()({{M(1), 1}}), 1e-9);
}
@@ -221,24 +223,26 @@ TEST(HybridGaussianFactor, TwoStateModel2) {
HybridBayesNet hbn = CreateBayesNet(hybridMotionModel, true);
HybridGaussianFactorGraph gfg = hbn.toFactorGraph(given);
HybridBayesNet::shared_ptr eliminated = gfg.eliminateSequential();
// Check that ratio of Bayes net and factor graph for different modes is
// equal for several values of {x0,x1}.
for (VectorValues vv :
{VectorValues{{X(0), Vector1(0.0)}, {X(1), Vector1(1.0)}},
VectorValues{{X(0), Vector1(0.5)}, {X(1), Vector1(3.0)}}}) {
for (VectorValues vv : {VectorValues{{X(0), Vector1(0.0)}, {X(1), Vector1(1.0)}},
VectorValues{{X(0), Vector1(0.5)}, {X(1), Vector1(3.0)}}}) {
vv.insert(given); // add measurements for HBN
HybridValues hv0(vv, {{M(1), 0}}), hv1(vv, {{M(1), 1}});
EXPECT_DOUBLES_EQUAL(gfg.error(hv0) / hbn.error(hv0),
gfg.error(hv1) / hbn.error(hv1), 1e-9);
}
const auto& expectedDiscretePosterior = hbn.discretePosterior(vv);
HybridBayesNet::shared_ptr bn = gfg.eliminateSequential();
// Equality of posteriors asserts that the factor graph is correct (same ratios for all modes)
EXPECT(assert_equal(expectedDiscretePosterior, gfg.discretePosterior(vv)));
// This one asserts that HBN resulting from elimination is correct.
EXPECT(assert_equal(expectedDiscretePosterior, eliminated->discretePosterior(vv)));
}
// Values taken from an importance sampling run with 100k samples:
// approximateDiscreteMarginal(hbn, hybridMotionModel, given);
DiscreteConditional expected(m1, "48.3158/51.6842");
EXPECT(assert_equal(expected, *(bn->at(2)->asDiscrete()), 0.002));
EXPECT(assert_equal(expected, *(eliminated->at(2)->asDiscrete()), 0.002));
}
{
@@ -286,13 +290,11 @@ TEST(HybridGaussianFactor, TwoStateModel3) {
// Check that ratio of Bayes net and factor graph for different modes is
// equal for several values of {x0,x1}.
for (VectorValues vv :
{VectorValues{{X(0), Vector1(0.0)}, {X(1), Vector1(1.0)}},
VectorValues{{X(0), Vector1(0.5)}, {X(1), Vector1(3.0)}}}) {
for (VectorValues vv : {VectorValues{{X(0), Vector1(0.0)}, {X(1), Vector1(1.0)}},
VectorValues{{X(0), Vector1(0.5)}, {X(1), Vector1(3.0)}}}) {
vv.insert(given); // add measurements for HBN
HybridValues hv0(vv, {{M(1), 0}}), hv1(vv, {{M(1), 1}});
EXPECT_DOUBLES_EQUAL(gfg.error(hv0) / hbn.error(hv0),
gfg.error(hv1) / hbn.error(hv1), 1e-9);
EXPECT_DOUBLES_EQUAL(gfg.error(hv0) / hbn.error(hv0), gfg.error(hv1) / hbn.error(hv1), 1e-9);
}
HybridBayesNet::shared_ptr bn = gfg.eliminateSequential();
@@ -316,13 +318,11 @@ TEST(HybridGaussianFactor, TwoStateModel3) {
// Check that ratio of Bayes net and factor graph for different modes is
// equal for several values of {x0,x1}.
for (VectorValues vv :
{VectorValues{{X(0), Vector1(0.0)}, {X(1), Vector1(1.0)}},
VectorValues{{X(0), Vector1(0.5)}, {X(1), Vector1(3.0)}}}) {
for (VectorValues vv : {VectorValues{{X(0), Vector1(0.0)}, {X(1), Vector1(1.0)}},
VectorValues{{X(0), Vector1(0.5)}, {X(1), Vector1(3.0)}}}) {
vv.insert(given); // add measurements for HBN
HybridValues hv0(vv, {{M(1), 0}}), hv1(vv, {{M(1), 1}});
EXPECT_DOUBLES_EQUAL(gfg.error(hv0) / hbn.error(hv0),
gfg.error(hv1) / hbn.error(hv1), 1e-9);
EXPECT_DOUBLES_EQUAL(gfg.error(hv0) / hbn.error(hv0), gfg.error(hv1) / hbn.error(hv1), 1e-9);
}
HybridBayesNet::shared_ptr bn = gfg.eliminateSequential();