Merge remote-tracking branch 'upstream/develop' into develop

release/4.3a0
senselessDev 2022-01-26 20:35:31 +01:00
commit bd4effb99d
12 changed files with 148 additions and 31 deletions

View File

@ -1188,7 +1188,7 @@ USE_MATHJAX = YES
# MathJax, but it is strongly recommended to install a local copy of MathJax
# before deployment.
MATHJAX_RELPATH = https://cdn.mathjax.org/mathjax/latest
# MATHJAX_RELPATH = https://cdn.mathjax.org/mathjax/latest
# The MATHJAX_EXTENSIONS tag can be used to specify one or more MathJax extension
# names that should be enabled during MathJax rendering.

View File

@ -1,5 +1,5 @@
#LyX 2.2 created this file. For more info see http://www.lyx.org/
\lyxformat 508
#LyX 2.3 created this file. For more info see http://www.lyx.org/
\lyxformat 544
\begin_document
\begin_header
\save_transient_properties true
@ -62,6 +62,8 @@
\font_osf false
\font_sf_scale 100 100
\font_tt_scale 100 100
\use_microtype false
\use_dash_ligatures true
\graphics default
\default_output_format default
\output_sync 0
@ -91,6 +93,7 @@
\suppress_date false
\justification true
\use_refstyle 0
\use_minted 0
\index Index
\shortcut idx
\color #008000
@ -105,7 +108,10 @@
\tocdepth 3
\paragraph_separation indent
\paragraph_indentation default
\quotes_language english
\is_math_indent 0
\math_numbering_side default
\quotes_style english
\dynamic_quotes 0
\papercolumns 1
\papersides 1
\paperpagestyle default
@ -168,6 +174,7 @@ Factor graphs
\begin_inset CommandInset citation
LatexCommand citep
key "Koller09book"
literal "true"
\end_inset
@ -270,6 +277,7 @@ Let us start with a one-page primer on factor graphs, which in no way replaces
\begin_inset CommandInset citation
LatexCommand citet
key "Kschischang01it"
literal "true"
\end_inset
@ -277,6 +285,7 @@ key "Kschischang01it"
\begin_inset CommandInset citation
LatexCommand citet
key "Loeliger04spm"
literal "true"
\end_inset
@ -1321,6 +1330,7 @@ r in a pre-existing map, or indeed the presence of absence of ceiling lights
\begin_inset CommandInset citation
LatexCommand citet
key "Dellaert99b"
literal "true"
\end_inset
@ -1542,6 +1552,7 @@ which is done on line 12.
\begin_inset CommandInset citation
LatexCommand citealt
key "Dellaert06ijrr"
literal "true"
\end_inset
@ -1936,8 +1947,8 @@ reference "fig:CompareMarginals"
\end_inset
, where I show the marginals on position as covariance ellipses that contain
68.26% of all probability mass.
, where I show the marginals on position as 5-sigma covariance ellipses
that contain 99.9996% of all probability mass.
For the odometry marginals, it is immediately apparent from the figure
that (1) the uncertainty on pose keeps growing, and (2) the uncertainty
on angular odometry translates into increasing uncertainty on y.
@ -1992,6 +2003,7 @@ PoseSLAM
\begin_inset CommandInset citation
LatexCommand citep
key "DurrantWhyte06ram"
literal "true"
\end_inset
@ -2190,9 +2202,9 @@ reference "fig:example"
\end_inset
, along with covariance ellipses shown in green.
These covariance ellipses in 2D indicate the marginal over position, over
all possible orientations, and show the area which contain 68.26% of the
probability mass (in 1D this would correspond to one standard deviation).
These 5-sigma covariance ellipses in 2D indicate the marginal over position,
over all possible orientations, and show the area which contains 99.9996%
of the probability mass.
The graph shows in a clear manner that the uncertainty on pose
\begin_inset Formula $x_{5}$
\end_inset
@ -3076,6 +3088,7 @@ reference "fig:Victoria-1"
\begin_inset CommandInset citation
LatexCommand citep
key "Kaess09ras"
literal "true"
\end_inset
@ -3088,6 +3101,7 @@ key "Kaess09ras"
\begin_inset CommandInset citation
LatexCommand citep
key "Kaess08tro"
literal "true"
\end_inset
@ -3355,6 +3369,7 @@ iSAM
\begin_inset CommandInset citation
LatexCommand citet
key "Kaess08tro,Kaess12ijrr"
literal "true"
\end_inset
@ -3606,6 +3621,7 @@ subgraph preconditioning
\begin_inset CommandInset citation
LatexCommand citet
key "Dellaert10iros,Jian11iccv"
literal "true"
\end_inset
@ -3638,6 +3654,7 @@ Visual Odometry
\begin_inset CommandInset citation
LatexCommand citet
key "Nister04cvpr2"
literal "true"
\end_inset
@ -3661,6 +3678,7 @@ Visual SLAM
\begin_inset CommandInset citation
LatexCommand citet
key "Davison03iccv"
literal "true"
\end_inset
@ -3711,6 +3729,7 @@ Filtering
\begin_inset CommandInset citation
LatexCommand citep
key "Smith87b"
literal "true"
\end_inset

Binary file not shown.

View File

@ -2668,7 +2668,7 @@ reference "eq:pushforward"
\begin{eqnarray*}
\varphi(a)e^{\yhat} & = & \varphi(ae^{\xhat})\\
a^{-1}e^{\yhat} & = & \left(ae^{\xhat}\right)^{-1}\\
e^{\yhat} & = & -ae^{\xhat}a^{-1}\\
e^{\yhat} & = & ae^{-\xhat}a^{-1}\\
\yhat & = & -\Ad a\xhat
\end{eqnarray*}
@ -3003,8 +3003,8 @@ between
\begin_inset Formula
\begin{align}
\varphi(g,h)e^{\yhat} & =\varphi(ge^{\xhat},h)\nonumber \\
g^{-1}he^{\yhat} & =\left(ge^{\xhat}\right)^{-1}h=-e^{\xhat}g^{-1}h\nonumber \\
e^{\yhat} & =-\left(h^{-1}g\right)e^{\xhat}\left(h^{-1}g\right)^{-1}=-\exp\Ad{\left(h^{-1}g\right)}\xhat\nonumber \\
g^{-1}he^{\yhat} & =\left(ge^{\xhat}\right)^{-1}h=e^{-\xhat}g^{-1}h\nonumber \\
e^{\yhat} & =\left(h^{-1}g\right)e^{-\xhat}\left(h^{-1}g\right)^{-1}=\exp\Ad{\left(h^{-1}g\right)}(-\xhat)\nonumber \\
\yhat & =-\Ad{\left(h^{-1}g\right)}\xhat=-\Ad{\varphi\left(h,g\right)}\xhat\label{eq:Dbetween1}
\end{align}
@ -6674,7 +6674,7 @@ One representation of a line is through 2 vectors
\begin_inset Formula $d$
\end_inset
points from the orgin to the closest point on the line.
points from the origin to the closest point on the line.
\end_layout
\begin_layout Standard

Binary file not shown.

View File

@ -144,6 +144,22 @@ namespace gtsam {
boost::dynamic_pointer_cast<DiscreteConditional>(lookup), max);
}
/* ************************************************************************ */
// sumProduct is simply a thin alias over the regular eliminateSequential:
// eliminating every variable yields the full posterior Bayes net P(X|Z).
DiscreteBayesNet DiscreteFactorGraph::sumProduct(
    OptionalOrderingType orderingType) const {
  gttic(DiscreteFactorGraph_sumProduct);
  // Dereference the shared_ptr returned by elimination and return by value.
  return *eliminateSequential(orderingType);
}
// Overload taking an explicit elimination ordering; same alias semantics.
DiscreteBayesNet DiscreteFactorGraph::sumProduct(
    const Ordering& ordering) const {
  gttic(DiscreteFactorGraph_sumProduct);
  // Dereference the shared_ptr returned by elimination and return by value.
  return *eliminateSequential(ordering);
}
/* ************************************************************************ */
// The max-product solution below is a bit clunky: the elimination machinery
// does not allow for differently *typed* versions of elimination, so we
@ -153,16 +169,14 @@ namespace gtsam {
// Max-product: eliminate with the MPE (max) rule, then repackage the result
// as a DAG of lookup tables from which argmax can be read off directly.
//
// NOTE: the rendered diff left both the old and new initialization of
// `bayesNet` in place, declaring the variable twice — a compile error.
// Only the new single-line form is kept here.
DiscreteLookupDAG DiscreteFactorGraph::maxProduct(
    OptionalOrderingType orderingType) const {
  gttic(DiscreteFactorGraph_maxProduct);
  auto bayesNet = eliminateSequential(orderingType, EliminateForMPE);
  return DiscreteLookupDAG::FromBayesNet(*bayesNet);
}
// Overload taking an explicit elimination ordering; same MPE machinery.
//
// NOTE: as in the other overload, the diff rendering duplicated the
// `bayesNet` declaration (old two-line form plus new one-line form),
// which would not compile. Only the new form is retained.
DiscreteLookupDAG DiscreteFactorGraph::maxProduct(
    const Ordering& ordering) const {
  gttic(DiscreteFactorGraph_maxProduct);
  auto bayesNet = eliminateSequential(ordering, EliminateForMPE);
  return DiscreteLookupDAG::FromBayesNet(*bayesNet);
}

View File

@ -132,11 +132,28 @@ class GTSAM_EXPORT DiscreteFactorGraph
const std::string& s = "DiscreteFactorGraph",
const KeyFormatter& formatter = DefaultKeyFormatter) const override;
/**
* @brief Implement the sum-product algorithm
*
* @param orderingType : one of COLAMD, METIS, NATURAL, CUSTOM
* @return DiscreteBayesNet encoding posterior P(X|Z)
*/
DiscreteBayesNet sumProduct(
OptionalOrderingType orderingType = boost::none) const;
/**
* @brief Implement the sum-product algorithm
*
* @param ordering
* @return DiscreteBayesNet encoding posterior P(X|Z)
*/
DiscreteBayesNet sumProduct(const Ordering& ordering) const;
/**
* @brief Implement the max-product algorithm
*
* @param orderingType : one of COLAMD, METIS, NATURAL, CUSTOM
* @return DiscreteLookupDAG::shared_ptr DAG with lookup tables
* @return DiscreteLookupDAG DAG with lookup tables
*/
DiscreteLookupDAG maxProduct(
OptionalOrderingType orderingType = boost::none) const;
@ -145,7 +162,7 @@ class GTSAM_EXPORT DiscreteFactorGraph
* @brief Implement the max-product algorithm
*
* @param ordering
* @return DiscreteLookupDAG::shared_ptr `DAG with lookup tables
* @return DiscreteLookupDAG DAG with lookup tables
*/
DiscreteLookupDAG maxProduct(const Ordering& ordering) const;

View File

@ -277,7 +277,12 @@ class DiscreteFactorGraph {
double operator()(const gtsam::DiscreteValues& values) const;
gtsam::DiscreteValues optimize() const;
gtsam::DiscreteBayesNet sumProduct();
gtsam::DiscreteBayesNet sumProduct(gtsam::Ordering::OrderingType type);
gtsam::DiscreteBayesNet sumProduct(const gtsam::Ordering& ordering);
gtsam::DiscreteLookupDAG maxProduct();
gtsam::DiscreteLookupDAG maxProduct(gtsam::Ordering::OrderingType type);
gtsam::DiscreteLookupDAG maxProduct(const gtsam::Ordering& ordering);
gtsam::DiscreteBayesNet eliminateSequential();

View File

@ -154,6 +154,21 @@ TEST(DiscreteFactorGraph, test) {
auto actualMPE = graph.optimize();
EXPECT(assert_equal(mpe, actualMPE));
EXPECT_DOUBLES_EQUAL(9, graph(mpe), 1e-5); // regression
// Test sumProduct alias with all orderings:
auto mpeProbability = expectedBayesNet(mpe);
EXPECT_DOUBLES_EQUAL(0.28125, mpeProbability, 1e-5); // regression
// Using custom ordering
DiscreteBayesNet bayesNet = graph.sumProduct(ordering);
EXPECT_DOUBLES_EQUAL(mpeProbability, bayesNet(mpe), 1e-5);
for (Ordering::OrderingType orderingType :
{Ordering::COLAMD, Ordering::METIS, Ordering::NATURAL,
Ordering::CUSTOM}) {
auto bayesNet = graph.sumProduct(orderingType);
EXPECT_DOUBLES_EQUAL(mpeProbability, bayesNet(mpe), 1e-5);
}
}
/* ************************************************************************* */

View File

@ -111,6 +111,11 @@ size_t mrsymbolIndex(size_t key);
#include <gtsam/inference/Ordering.h>
class Ordering {
/// Type of ordering to use
enum OrderingType {
COLAMD, METIS, NATURAL, CUSTOM
};
// Standard Constructors and Named Constructors
Ordering();
Ordering(const gtsam::Ordering& other);

View File

@ -13,9 +13,11 @@ Author: Frank Dellaert
import unittest
from gtsam import DiscreteFactorGraph, DiscreteKeys, DiscreteValues
from gtsam import DiscreteFactorGraph, DiscreteKeys, DiscreteValues, Ordering
from gtsam.utils.test_case import GtsamTestCase
OrderingType = Ordering.OrderingType
class TestDiscreteFactorGraph(GtsamTestCase):
"""Tests for Discrete Factor Graphs."""
@ -108,14 +110,50 @@ class TestDiscreteFactorGraph(GtsamTestCase):
graph.add([C, A], "0.2 0.8 0.3 0.7")
graph.add([C, B], "0.1 0.9 0.4 0.6")
actualMPE = graph.optimize()
# We know MPE
mpe = DiscreteValues()
mpe[0] = 0
mpe[1] = 1
mpe[2] = 1
expectedMPE = DiscreteValues()
expectedMPE[0] = 0
expectedMPE[1] = 1
expectedMPE[2] = 1
# Use maxProduct
dag = graph.maxProduct(OrderingType.COLAMD)
actualMPE = dag.argmax()
self.assertEqual(list(actualMPE.items()),
list(expectedMPE.items()))
list(mpe.items()))
# All in one
actualMPE2 = graph.optimize()
self.assertEqual(list(actualMPE2.items()),
list(mpe.items()))
def test_sumProduct(self):
    """Exercise the sumProduct alias with default and explicit orderings."""
    # Three binary variables: (key, cardinality) pairs.
    C, A, B = (0, 2), (1, 2), (2, 2)

    # Build a small factor graph over C, A, and B.
    graph = DiscreteFactorGraph()
    graph.add([C, A], "0.2 0.8 0.3 0.7")
    graph.add([C, B], "0.1 0.9 0.4 0.6")

    # The known most-probable assignment.
    mpe = DiscreteValues()
    for key, value in [(0, 0), (1, 1), (2, 1)]:
        mpe[key] = value

    # Default sumProduct: evaluate the posterior at the MPE.
    posterior = graph.sumProduct()
    reference = posterior(mpe)
    self.assertAlmostEqual(reference, 0.36)  # regression

    # Every explicit ordering type must produce the same posterior value.
    for ordering_type in (OrderingType.COLAMD, OrderingType.METIS,
                          OrderingType.NATURAL, OrderingType.CUSTOM):
        self.assertEqual(graph.sumProduct(ordering_type)(mpe), reference)
if __name__ == "__main__":

View File

@ -12,6 +12,13 @@ from mpl_toolkits.mplot3d import Axes3D # pylint: disable=unused-import
import gtsam
from gtsam import Marginals, Point2, Point3, Pose2, Pose3, Values
# For future reference: following
# https://www.xarg.org/2018/04/how-to-plot-a-covariance-error-ellipse/
# we have, in 2D:
# def kk(p): return math.sqrt(-2*math.log(1-p)) # k to get p probability mass
# def pp(k): return 1-math.exp(-float(k**2)/2.0) # p as a function of k
# Some values:
# k = 5 => p = 99.9996 %
def set_axes_equal(fignum: int) -> None:
"""
@ -125,10 +132,7 @@ def plot_point2_on_axes(axes,
if P is not None:
w, v = np.linalg.eig(P)
# "Sigma" value for drawing the uncertainty ellipse. 5 sigma corresponds
# to a 99.9999% confidence, i.e. assuming the estimation has been
# computed properly, there is a 99.999% chance that the true position
# of the point will lie within the uncertainty ellipse.
# 5 sigma corresponds to 99.9996%, see note above
k = 5.0
angle = np.arctan2(v[1, 0], v[0, 0])
@ -205,7 +209,7 @@ def plot_pose2_on_axes(axes,
w, v = np.linalg.eig(gPp)
# k = 2.296
# 5 sigma corresponds to 99.9996%, see note above
k = 5.0
angle = np.arctan2(v[1, 0], v[0, 0])