Merge branch 'develop' into hybrid/error

release/4.3a0
Varun Agrawal 2022-11-03 11:24:01 -04:00
commit 11e4c1ed57
414 changed files with 15314 additions and 9984 deletions

View File

@ -101,8 +101,6 @@ if(GTSAM_BUILD_PYTHON OR GTSAM_INSTALL_MATLAB_TOOLBOX)
# Copy matlab.h to the correct folder.
configure_file(${PROJECT_SOURCE_DIR}/wrap/matlab.h
${PROJECT_BINARY_DIR}/wrap/matlab.h COPYONLY)
# Add the include directories so that matlab.h can be found
include_directories("${PROJECT_BINARY_DIR}" "${GTSAM_EIGEN_INCLUDE_FOR_BUILD}")
add_subdirectory(wrap)
list(APPEND CMAKE_MODULE_PATH "${CMAKE_CURRENT_LIST_DIR}/wrap/cmake")

View File

@ -50,7 +50,7 @@ will run up to 10x faster in Release mode! See the end of this document for
additional debugging tips.
3. GTSAM has Doxygen documentation. To generate, run 'make doc' from your
build directory.
build directory after setting the `GTSAM_BUILD_DOCS` and `GTSAM_BUILD_[HTML|LATEX]` cmake flags.
4. The instructions below install the library to the default system install path and
build all components. From a terminal, starting in the root library folder,

View File

@ -21,6 +21,10 @@ else()
find_dependency(Boost @BOOST_FIND_MINIMUM_VERSION@ COMPONENTS @BOOST_FIND_MINIMUM_COMPONENTS@)
endif()
if(@GTSAM_USE_SYSTEM_EIGEN@)
find_dependency(Eigen3 REQUIRED)
endif()
# Load exports
include(${OUR_CMAKE_DIR}/@PACKAGE_NAME@-exports.cmake)

View File

@ -1,81 +0,0 @@
# - Try to find Eigen3 lib
#
# This module supports requiring a minimum version, e.g. you can do
# find_package(Eigen3 3.1.2)
# to require version 3.1.2 or newer of Eigen3.
#
# Once done this will define
#
# EIGEN3_FOUND - system has eigen lib with correct version
# EIGEN3_INCLUDE_DIR - the eigen include directory
# EIGEN3_VERSION - eigen version
# Copyright (c) 2006, 2007 Montel Laurent, <montel@kde.org>
# Copyright (c) 2008, 2009 Gael Guennebaud, <g.gael@free.fr>
# Copyright (c) 2009 Benoit Jacob <jacob.benoit.1@gmail.com>
# Redistribution and use is allowed according to the terms of the 2-clause BSD license.
if(NOT Eigen3_FIND_VERSION)
if(NOT Eigen3_FIND_VERSION_MAJOR)
set(Eigen3_FIND_VERSION_MAJOR 2)
endif(NOT Eigen3_FIND_VERSION_MAJOR)
if(NOT Eigen3_FIND_VERSION_MINOR)
set(Eigen3_FIND_VERSION_MINOR 91)
endif(NOT Eigen3_FIND_VERSION_MINOR)
if(NOT Eigen3_FIND_VERSION_PATCH)
set(Eigen3_FIND_VERSION_PATCH 0)
endif(NOT Eigen3_FIND_VERSION_PATCH)
set(Eigen3_FIND_VERSION "${Eigen3_FIND_VERSION_MAJOR}.${Eigen3_FIND_VERSION_MINOR}.${Eigen3_FIND_VERSION_PATCH}")
endif(NOT Eigen3_FIND_VERSION)
macro(_eigen3_check_version)
file(READ "${EIGEN3_INCLUDE_DIR}/Eigen/src/Core/util/Macros.h" _eigen3_version_header)
string(REGEX MATCH "define[ \t]+EIGEN_WORLD_VERSION[ \t]+([0-9]+)" _eigen3_world_version_match "${_eigen3_version_header}")
set(EIGEN3_WORLD_VERSION "${CMAKE_MATCH_1}")
string(REGEX MATCH "define[ \t]+EIGEN_MAJOR_VERSION[ \t]+([0-9]+)" _eigen3_major_version_match "${_eigen3_version_header}")
set(EIGEN3_MAJOR_VERSION "${CMAKE_MATCH_1}")
string(REGEX MATCH "define[ \t]+EIGEN_MINOR_VERSION[ \t]+([0-9]+)" _eigen3_minor_version_match "${_eigen3_version_header}")
set(EIGEN3_MINOR_VERSION "${CMAKE_MATCH_1}")
set(EIGEN3_VERSION ${EIGEN3_WORLD_VERSION}.${EIGEN3_MAJOR_VERSION}.${EIGEN3_MINOR_VERSION})
if(${EIGEN3_VERSION} VERSION_LESS ${Eigen3_FIND_VERSION})
set(EIGEN3_VERSION_OK FALSE)
else(${EIGEN3_VERSION} VERSION_LESS ${Eigen3_FIND_VERSION})
set(EIGEN3_VERSION_OK TRUE)
endif(${EIGEN3_VERSION} VERSION_LESS ${Eigen3_FIND_VERSION})
if(NOT EIGEN3_VERSION_OK)
message(STATUS "Eigen3 version ${EIGEN3_VERSION} found in ${EIGEN3_INCLUDE_DIR}, "
"but at least version ${Eigen3_FIND_VERSION} is required")
endif(NOT EIGEN3_VERSION_OK)
endmacro(_eigen3_check_version)
if (EIGEN3_INCLUDE_DIR)
# in cache already
_eigen3_check_version()
set(EIGEN3_FOUND ${EIGEN3_VERSION_OK})
else (EIGEN3_INCLUDE_DIR)
find_path(EIGEN3_INCLUDE_DIR NAMES signature_of_eigen3_matrix_library
PATHS
${CMAKE_INSTALL_PREFIX}/include
${KDE4_INCLUDE_DIR}
PATH_SUFFIXES eigen3 eigen
)
if(EIGEN3_INCLUDE_DIR)
_eigen3_check_version()
endif(EIGEN3_INCLUDE_DIR)
include(FindPackageHandleStandardArgs)
find_package_handle_standard_args(Eigen3 DEFAULT_MSG EIGEN3_INCLUDE_DIR EIGEN3_VERSION_OK)
mark_as_advanced(EIGEN3_INCLUDE_DIR)
endif(EIGEN3_INCLUDE_DIR)

View File

@ -1,10 +1,6 @@
###############################################################################
# Option for using system Eigen or GTSAM-bundled Eigen
# Default: Use system's Eigen if found automatically:
find_package(Eigen3 QUIET)
set(USE_SYSTEM_EIGEN_INITIAL_VALUE ${Eigen3_FOUND})
option(GTSAM_USE_SYSTEM_EIGEN "Find and use system-installed Eigen. If 'off', use the one bundled with GTSAM" ${USE_SYSTEM_EIGEN_INITIAL_VALUE})
unset(USE_SYSTEM_EIGEN_INITIAL_VALUE)
option(GTSAM_USE_SYSTEM_EIGEN "Find and use system-installed Eigen. If 'off', use the one bundled with GTSAM" OFF)
if(NOT GTSAM_USE_SYSTEM_EIGEN)
# This option only makes sense if using the embedded copy of Eigen, it is
@ -14,10 +10,14 @@ endif()
# Switch for using system Eigen or GTSAM-bundled Eigen
if(GTSAM_USE_SYSTEM_EIGEN)
find_package(Eigen3 REQUIRED) # need to find again as REQUIRED
# Since Eigen 3.3.0 an Eigen3Config.cmake is available, so use it.
find_package(Eigen3 CONFIG REQUIRED) # need to find again as REQUIRED
# Use generic Eigen include paths e.g. <Eigen/Core>
set(GTSAM_EIGEN_INCLUDE_FOR_INSTALL "${EIGEN3_INCLUDE_DIR}")
# The actual include directory (for BUILD cmake target interface):
# Note: EIGEN3_INCLUDE_DIR points to some random location on some eigen
# versions. So here I use the target itself to get the proper include
# directory (it is generated by cmake, thus has the correct path)
get_target_property(GTSAM_EIGEN_INCLUDE_FOR_BUILD Eigen3::Eigen INTERFACE_INCLUDE_DIRECTORIES)
# check if MKL is also enabled - can have one or the other, but not both!
# Note: Eigen >= v3.2.5 includes our patches
@ -30,9 +30,6 @@ if(GTSAM_USE_SYSTEM_EIGEN)
if(EIGEN_USE_MKL_ALL AND (EIGEN3_VERSION VERSION_EQUAL 3.3.4))
message(FATAL_ERROR "MKL does not work with Eigen 3.3.4 because of a bug in Eigen. See http://eigen.tuxfamily.org/bz/show_bug.cgi?id=1527. Disable GTSAM_USE_SYSTEM_EIGEN to use GTSAM's copy of Eigen, disable GTSAM_WITH_EIGEN_MKL, or upgrade/patch your installation of Eigen.")
endif()
# The actual include directory (for BUILD cmake target interface):
set(GTSAM_EIGEN_INCLUDE_FOR_BUILD "${EIGEN3_INCLUDE_DIR}")
else()
# Use bundled Eigen include path.
# Clear any variables set by FindEigen3
@ -45,7 +42,20 @@ else()
set(GTSAM_EIGEN_INCLUDE_FOR_INSTALL "include/gtsam/3rdparty/Eigen/")
# The actual include directory (for BUILD cmake target interface):
set(GTSAM_EIGEN_INCLUDE_FOR_BUILD "${GTSAM_SOURCE_DIR}/gtsam/3rdparty/Eigen/")
set(GTSAM_EIGEN_INCLUDE_FOR_BUILD "${GTSAM_SOURCE_DIR}/gtsam/3rdparty/Eigen")
add_library(gtsam_eigen3 INTERFACE)
target_include_directories(gtsam_eigen3 INTERFACE
$<BUILD_INTERFACE:${GTSAM_EIGEN_INCLUDE_FOR_BUILD}>
$<INSTALL_INTERFACE:${GTSAM_EIGEN_INCLUDE_FOR_INSTALL}>
)
add_library(Eigen3::Eigen ALIAS gtsam_eigen3)
install(TARGETS gtsam_eigen3 EXPORT GTSAM-exports PUBLIC_HEADER DESTINATION ${CMAKE_INSTALL_INCLUDEDIR})
list(APPEND GTSAM_EXPORTED_TARGETS gtsam_eigen3)
set(GTSAM_EXPORTED_TARGETS "${GTSAM_EXPORTED_TARGETS}")
endif()
# Detect Eigen version:

View File

@ -26,6 +26,7 @@ if (GTSAM_BUILD_DOCS)
gtsam/basis
gtsam/discrete
gtsam/geometry
gtsam/hybrid
gtsam/inference
gtsam/linear
gtsam/navigation
@ -33,7 +34,6 @@ if (GTSAM_BUILD_DOCS)
gtsam/sam
gtsam/sfm
gtsam/slam
gtsam/smart
gtsam/symbolic
gtsam
)
@ -42,10 +42,12 @@ if (GTSAM_BUILD_DOCS)
set(gtsam_unstable_doc_subdirs
gtsam_unstable/base
gtsam_unstable/discrete
gtsam_unstable/dynamics
gtsam_unstable/geometry
gtsam_unstable/linear
gtsam_unstable/nonlinear
gtsam_unstable/slam
gtsam_unstable/dynamics
gtsam_unstable/nonlinear
gtsam_unstable/partition
gtsam_unstable/slam
gtsam_unstable
)

File diff suppressed because it is too large

View File

@ -18,7 +18,6 @@
<tab type="files" visible="yes" title="" intro=""/>
<tab type="globals" visible="yes" title="" intro=""/>
</tab>
<tab type="dirs" visible="yes" title="" intro=""/>
<tab type="examples" visible="yes" title="" intro=""/>
</navindex>

View File

@ -117,12 +117,10 @@ set_target_properties(gtsam PROPERTIES
VERSION ${gtsam_version}
SOVERSION ${gtsam_soversion})
# Append Eigen include path, set in top-level CMakeLists.txt to either
# Append Eigen include path to either
# system-eigen, or GTSAM eigen path
target_include_directories(gtsam PUBLIC
$<BUILD_INTERFACE:${GTSAM_EIGEN_INCLUDE_FOR_BUILD}>
$<INSTALL_INTERFACE:${GTSAM_EIGEN_INCLUDE_FOR_INSTALL}>
)
target_link_libraries(gtsam PUBLIC Eigen3::Eigen)
# MKL include dir:
if (GTSAM_USE_EIGEN_MKL)
target_include_directories(gtsam PUBLIC ${MKL_INCLUDE_DIR})

View File

@ -62,7 +62,7 @@ namespace gtsam {
* convenience to avoid having lengthy types in the code. Through timing,
* we've seen that the fast_pool_allocator can lead to speedups of several
* percent.
* @addtogroup base
* @ingroup base
*/
template<typename KEY, typename VALUE>
class ConcurrentMap : public ConcurrentMapBase<KEY,VALUE> {

View File

@ -28,7 +28,7 @@ namespace gtsam {
/**
* Disjoint set forest using an STL map data structure underneath
* Uses rank compression and union by rank, iterator version
* @addtogroup base
* @ingroup base
*/
template <class KEY>
class DSFMap {

View File

@ -33,7 +33,7 @@ namespace gtsam {
* A fast implementation of disjoint set forests that uses a vector as the underlying data structure.
* This is the absolute minimal DSF data structure, and only allows size_t keys
* Uses rank compression but not union by rank :-(
* @addtogroup base
* @ingroup base
*/
class GTSAM_EXPORT DSFBase {
@ -59,7 +59,7 @@ public:
/**
* DSFVector additionally keeps a vector of keys to support more expensive operations
* @addtogroup base
* @ingroup base
*/
class GTSAM_EXPORT DSFVector: public DSFBase {

View File

@ -34,7 +34,7 @@ namespace gtsam {
* convenience to avoid having lengthy types in the code. Through timing,
* we've seen that the fast_pool_allocator can lead to speedups of several
* percent.
* @addtogroup base
* @ingroup base
*/
template<typename VALUE>
class FastList: public std::list<VALUE, typename internal::FastDefaultAllocator<VALUE>::type> {

View File

@ -31,7 +31,7 @@ namespace gtsam {
* convenience to avoid having lengthy types in the code. Through timing,
* we've seen that the fast_pool_allocator can lead to speedups of several
* percent.
* @addtogroup base
* @ingroup base
*/
template<typename KEY, typename VALUE>
class FastMap : public std::map<KEY, VALUE, std::less<KEY>,

View File

@ -43,7 +43,7 @@ namespace gtsam {
* fast_pool_allocator instead of the default STL allocator. This is just a
* convenience to avoid having lengthy types in the code. Through timing,
* we've seen that the fast_pool_allocator can lead to speedups of several %.
* @addtogroup base
* @ingroup base
*/
template<typename VALUE>
class FastSet: public std::set<VALUE, std::less<VALUE>,

View File

@ -27,7 +27,7 @@ namespace gtsam {
/**
* FastVector is a type alias to a std::vector with a custom memory allocator.
* The particular allocator depends on GTSAM's cmake configuration.
* @addtogroup base
* @ingroup base
*/
template <typename T>
using FastVector =

View File

@ -47,7 +47,7 @@ namespace gtsam {
* matrix view. firstBlock() determines the block that appears to have index 0 for all operations
* (except re-setting firstBlock()).
*
* @addtogroup base */
* @ingroup base */
class GTSAM_EXPORT SymmetricBlockMatrix
{
public:

View File

@ -51,7 +51,7 @@ namespace gtsam {
* tests and in generic algorithms.
*
* See macros for details on using this structure
* @addtogroup base
* @ingroup base
* @tparam T is the object type this constrains to be testable - assumes print() and equals()
*/
template <class T>

View File

@ -14,7 +14,7 @@
* @brief Base exception type that uses tbb_allocator if GTSAM is compiled with TBB
* @author Richard Roberts
* @date Aug 21, 2010
* @addtogroup base
* @ingroup base
*/
#pragma once

View File

@ -38,7 +38,7 @@ namespace gtsam {
* row for all operations. To include all rows, rowEnd() should be set to the number of rows in
* the matrix (i.e. one after the last true row index).
*
* @addtogroup base */
* @ingroup base */
class GTSAM_EXPORT VerticalBlockMatrix
{
public:

View File

@ -26,7 +26,7 @@
#include <type_traits>
namespace gtsam {
/// A shorthand alias for accessing the ::type inside std::enable_if that can be used in a template directly
/// A shorthand alias for accessing the \::type inside std::enable_if that can be used in a template directly
template<bool B, class T = void>
using enable_if_t = typename std::enable_if<B, T>::type;
}

View File

@ -147,7 +147,7 @@ namespace gtsam {
size_t id_;
size_t t_;
size_t tWall_;
double t2_ ; ///< cache the \sum t_i^2
double t2_ ; ///< cache the \f$ \sum t_i^2 \f$
size_t tIt_;
size_t tMax_;
size_t tMin_;

View File

@ -14,7 +14,7 @@
* @brief Functions for handling type information
* @author Varun Agrawal
* @date May 18, 2020
* @addtogroup base
* @ingroup base
*/
#include <gtsam/base/types.h>

View File

@ -14,7 +14,7 @@
* @brief Typedefs for easier changing of types
* @author Richard Roberts
* @date Aug 21, 2010
* @addtogroup base
* @ingroup base
*/
#pragma once

View File

@ -78,6 +78,8 @@ using Weights = Eigen::Matrix<double, 1, -1>; /* 1xN vector */
* @tparam M Size of the identity matrix.
* @param w The weights of the polynomial.
* @return Mx(M*N) kronecker product [w(0)*I, w(1)*I, ..., w(N-1)*I]
*
* @ingroup basis
*/
template <size_t M>
Matrix kroneckerProductIdentity(const Weights& w) {
@ -90,7 +92,10 @@ Matrix kroneckerProductIdentity(const Weights& w) {
return result;
}
/// CRTP Base class for function bases
/**
* CRTP Base class for function bases
* @ingroup basis
*/
template <typename DERIVED>
class Basis {
public:
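For reference, the block structure documented for kroneckerProductIdentity above is easiest to see on a tiny case. A minimal sketch, not part of this commit, assuming only the gtsam/basis/Basis.h header shown in this diff:

#include <gtsam/basis/Basis.h>

#include <iostream>

int main() {
  gtsam::Weights w(3);  // N = 3 weights
  w << 1.0, 2.0, 3.0;
  // For M = 2 the result is the 2x6 block row [1*I2, 2*I2, 3*I2]:
  //   1 0 2 0 3 0
  //   0 1 0 2 0 3
  gtsam::Matrix H = gtsam::kroneckerProductIdentity<2>(w);
  std::cout << H << std::endl;
  return 0;
}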

View File

@ -32,6 +32,8 @@ namespace gtsam {
*
* Example, degree 8 Chebyshev polynomial measured at x=0.5:
* EvaluationFactor<Chebyshev2> factor(key, measured, model, 8, 0.5);
*
* @ingroup basis
*/
template <class BASIS>
class EvaluationFactor : public FunctorizedFactor<double, Vector> {
@ -86,6 +88,8 @@ class EvaluationFactor : public FunctorizedFactor<double, Vector> {
*
* @param BASIS: The basis class to use e.g. Chebyshev2
* @param M: Size of the evaluated state vector.
*
* @ingroup basis
*/
template <class BASIS, int M>
class VectorEvaluationFactor
@ -149,6 +153,8 @@ class VectorEvaluationFactor
* VectorComponentFactor<BASIS, P> controlPrior(key, measured, model,
* N, i, t, a, b);
* where N is the degree and i is the component index.
*
* @ingroup basis
*/
template <class BASIS, size_t P>
class VectorComponentFactor

View File

@ -10,7 +10,7 @@
* -------------------------------------------------------------------------- */
/**
* @file ParamaterMatrix.h
* @file ParameterMatrix.h
* @brief Define ParameterMatrix class which is used to store values at
* interpolation points.
* @author Varun Agrawal, Frank Dellaert

View File

@ -31,6 +31,8 @@ namespace gtsam {
* Algebraic Decision Trees fix the range to double
* Just has some nice constructors and some syntactic sugar
* TODO: consider eliminating this class altogether?
*
* @ingroup discrete
*/
template <typename L>
class GTSAM_EXPORT AlgebraicDecisionTree : public DecisionTree<L, double> {

View File

@ -31,6 +31,7 @@ namespace gtsam {
* An assignment from labels to value index (size_t).
* Assigns to each label a value. Implemented as a simple map.
* A discrete factor takes an Assignment and returns a value.
* @ingroup discrete
*/
template <class L>
class Assignment : public std::map<L, size_t> {

View File

@ -38,6 +38,8 @@ namespace gtsam {
* Decision Tree
* L = label for variables
* Y = function range (any algebra), e.g., bool, int, double
*
* @ingroup discrete
*/
template<typename L, typename Y>
class DecisionTree {
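To make the L/Y template parameters concrete, here is a minimal sketch, not part of this commit, assuming DecisionTree's two-leaf constructor and Assignment-based evaluation from gtsam/discrete:

#include <gtsam/discrete/Assignment.h>
#include <gtsam/discrete/DecisionTree.h>

#include <iostream>

int main() {
  // One binary label 'm': value 0.2 on the m=0 branch, 0.8 on the m=1 branch.
  gtsam::DecisionTree<char, double> tree('m', 0.2, 0.8);
  gtsam::Assignment<char> values;
  values['m'] = 1;
  std::cout << tree(values) << std::endl;  // prints 0.8
  return 0;
}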

View File

@ -36,7 +36,9 @@ namespace gtsam {
class DiscreteConditional;
/**
* A discrete probabilistic factor
* A discrete probabilistic factor.
*
* @ingroup discrete
*/
class GTSAM_EXPORT DecisionTreeFactor : public DiscreteFactor,
public AlgebraicDecisionTree<Key> {

View File

@ -33,7 +33,7 @@ namespace gtsam {
/**
* A Bayes net made from discrete conditional distributions.
* @addtogroup discrete
* @ingroup discrete
*/
class GTSAM_EXPORT DiscreteBayesNet: public BayesNet<DiscreteConditional> {
public:

View File

@ -62,7 +62,10 @@ class GTSAM_EXPORT DiscreteBayesTreeClique
};
/* ************************************************************************* */
/** A Bayes tree representing a Discrete density */
/**
* @brief A Bayes tree representing a Discrete density.
* @ingroup discrete
*/
class GTSAM_EXPORT DiscreteBayesTree
: public BayesTree<DiscreteBayesTreeClique> {
private:

View File

@ -32,6 +32,8 @@ namespace gtsam {
/**
* Discrete Conditional Density
* Derives from DecisionTreeFactor
*
* @ingroup discrete
*/
class GTSAM_EXPORT DiscreteConditional
: public DecisionTreeFactor,

View File

@ -27,6 +27,8 @@ namespace gtsam {
/**
* A prior probability on a set of discrete variables.
* Derives from DiscreteConditional
*
* @ingroup discrete
*/
class GTSAM_EXPORT DiscreteDistribution : public DiscreteConditional {
public:

View File

@ -24,6 +24,10 @@
namespace gtsam {
/**
* @brief Elimination tree for discrete factors.
* @ingroup discrete
*/
class GTSAM_EXPORT DiscreteEliminationTree :
public EliminationTree<DiscreteBayesNet, DiscreteFactorGraph>
{

View File

@ -31,6 +31,8 @@ class DiscreteConditional;
/**
* Base class for discrete probabilistic factors
* The most general one is the derived DecisionTreeFactor
*
* @ingroup discrete
*/
class GTSAM_EXPORT DiscreteFactor: public Factor {

View File

@ -40,7 +40,14 @@ class DiscreteEliminationTree;
class DiscreteBayesTree;
class DiscreteJunctionTree;
/** Main elimination function for DiscreteFactorGraph */
/**
* @brief Main elimination function for DiscreteFactorGraph.
*
* @param factors The factor graph to eliminate.
* @param keys The elimination ordering.
* @return The conditional on the ordering keys and the remaining factor.
* @ingroup discrete
*/
GTSAM_EXPORT std::pair<boost::shared_ptr<DiscreteConditional>, DecisionTreeFactor::shared_ptr>
EliminateDiscrete(const DiscreteFactorGraph& factors, const Ordering& keys);
@ -64,6 +71,7 @@ template<> struct EliminationTraits<DiscreteFactorGraph>
/**
* A Discrete Factor Graph is a factor graph where all factors are Discrete, i.e.
* Factor == DiscreteFactor
* @ingroup discrete
*/
class GTSAM_EXPORT DiscreteFactorGraph
: public FactorGraph<DiscreteFactor>,

View File

@ -44,7 +44,8 @@ namespace gtsam {
* The tree structure and elimination method are exactly analogous to the EliminationTree,
* except that in the JunctionTree, at each node multiple variables are eliminated at a time.
*
* \addtogroup Multifrontal
* \ingroup Multifrontal
* @ingroup discrete
* \nosubgrouping
*/
class GTSAM_EXPORT DiscreteJunctionTree :

View File

@ -31,6 +31,7 @@ namespace gtsam {
/**
* Key type for discrete variables.
* Includes Key and cardinality.
* @ingroup discrete
*/
using DiscreteKey = std::pair<Key,size_t>;

View File

@ -32,6 +32,7 @@ class DiscreteBayesNet;
/**
* @brief DiscreteLookupTable: a lookup table for max-product
* @ingroup discrete
*
* Inherits from discrete conditional for convenience, but is not normalized.
* Is used in the max-product algorithm.

View File

@ -28,6 +28,7 @@ namespace gtsam {
/**
* A class for computing marginals of variables in a DiscreteFactorGraph
* @ingroup discrete
*/
class DiscreteMarginals {

View File

@ -36,6 +36,7 @@ namespace gtsam {
* Another good thing is we don't need to have the special DiscreteKey which
* stores cardinality of a Discrete variable. It should be handled naturally in
* the new class DiscreteValue, as the variable's type (domain)
* @ingroup discrete
*/
class GTSAM_EXPORT DiscreteValues : public Assignment<Key> {
public:

View File

@ -48,6 +48,8 @@ namespace gtsam {
* (E|T,L) = "F F F 1"
* X|E = "95/5 2/98"
* (D|E,B) = "9/1 2/8 3/7 1/9"
*
* @ingroup discrete
*/
class GTSAM_EXPORT Signature {

View File

@ -16,7 +16,7 @@
*/
/**
* @addtogroup geometry
* @ingroup geometry
*/
#pragma once
@ -63,7 +63,7 @@ void calibrateJacobians(const Cal& calibration, const Point2& pn,
/**
* @brief Common base class for all calibration models.
* @addtogroup geometry
* @ingroup geometry
* \nosubgrouping
*/
class GTSAM_EXPORT Cal3 {

View File

@ -26,7 +26,7 @@ namespace gtsam {
/**
* @brief Calibration used by Bundler
* @addtogroup geometry
* @ingroup geometry
* \nosubgrouping
*/
class GTSAM_EXPORT Cal3Bundler : public Cal3 {

View File

@ -29,7 +29,7 @@ namespace gtsam {
* @brief Calibration of a camera with radial distortion that also supports
* Lie-group behaviors for optimization.
* \sa Cal3DS2_Base
* @addtogroup geometry
* @ingroup geometry
* \nosubgrouping
*/
class GTSAM_EXPORT Cal3DS2 : public Cal3DS2_Base {

View File

@ -27,7 +27,7 @@ namespace gtsam {
/**
* @brief Calibration of a camera with radial distortion
* @addtogroup geometry
* @ingroup geometry
* \nosubgrouping
*
* Uses the same distortion model as OpenCV, with

View File

@ -30,7 +30,7 @@ namespace gtsam {
/**
* @brief Calibration of a fisheye camera
* @addtogroup geometry
* @ingroup geometry
* \nosubgrouping
*
* Uses the same distortion model as OpenCV, with

View File

@ -18,7 +18,7 @@
*/
/**
* @addtogroup geometry
* @ingroup geometry
*/
#pragma once
@ -30,7 +30,7 @@ namespace gtsam {
/**
* @brief Calibration of a omni-directional camera with mirror + lens radial
* distortion
* @addtogroup geometry
* @ingroup geometry
* \nosubgrouping
*
* Similar to Cal3DS2, does distortion but has additional mirror parameter xi

View File

@ -16,7 +16,7 @@
*/
/**
* @addtogroup geometry
* @ingroup geometry
*/
#pragma once
@ -28,7 +28,7 @@ namespace gtsam {
/**
* @brief The most common 5DOF 3D->2D calibration
* @addtogroup geometry
* @ingroup geometry
* \nosubgrouping
*/
class GTSAM_EXPORT Cal3_S2 : public Cal3 {

View File

@ -24,7 +24,7 @@ namespace gtsam {
/**
* @brief The most common 5DOF 3D->2D calibration, stereo version
* @addtogroup geometry
* @ingroup geometry
* \nosubgrouping
*/
class GTSAM_EXPORT Cal3_S2Stereo : public Cal3_S2 {
@ -38,7 +38,7 @@ class GTSAM_EXPORT Cal3_S2Stereo : public Cal3_S2 {
using shared_ptr = boost::shared_ptr<Cal3_S2Stereo>;
/// @name Standard Constructors
/// @
/// @{
/// default calibration leaves coordinates unchanged
Cal3_S2Stereo() = default;
@ -55,6 +55,8 @@ class GTSAM_EXPORT Cal3_S2Stereo : public Cal3_S2 {
Cal3_S2Stereo(double fov, int w, int h, double b)
: Cal3_S2(fov, w, h), b_(b) {}
/// @}
/**
* Convert intrinsic coordinates xy to image coordinates uv, fixed derivatives
* @param p point in intrinsic coordinates
@ -82,7 +84,6 @@ class GTSAM_EXPORT Cal3_S2Stereo : public Cal3_S2 {
*/
Vector3 calibrate(const Vector3& p) const { return Cal3_S2::calibrate(p); }
/// @}
/// @name Testable
/// @{

View File

@ -46,7 +46,7 @@ private:
/**
* A pinhole camera class that has a Pose3, functions as base class for all pinhole cameras
* @addtogroup geometry
* @ingroup geometry
* \nosubgrouping
*/
class GTSAM_EXPORT PinholeBase {
@ -241,7 +241,7 @@ private:
* A Calibrated camera class [R|-R't], calibration K=I.
* If calibration is known, it is more computationally efficient
* to calibrate the measurements rather than try to predict in pixels.
* @addtogroup geometry
* @ingroup geometry
* \nosubgrouping
*/
class GTSAM_EXPORT CalibratedCamera: public PinholeBase {

View File

@ -38,7 +38,7 @@ GTSAM_EXPORT Line3 transformTo(const Pose3 &wTc, const Line3 &wL,
/**
* A 3D line (R,a,b) : (Rot3,Scalar,Scalar)
* @addtogroup geometry
* @ingroup geometry
* \nosubgrouping
*/
class GTSAM_EXPORT Line3 {

View File

@ -133,8 +133,6 @@ public:
inline double distance() const {
return d_;
}
/// @}
};
template<> struct traits<OrientedPlane3> : public internal::Manifold<

View File

@ -26,7 +26,7 @@ namespace gtsam {
/**
* A pinhole camera class that has a Pose3 and a Calibration.
* Use PinholePose if you will not be optimizing for Calibration
* @addtogroup geometry
* @ingroup geometry
* \nosubgrouping
*/
template<typename Calibration>

View File

@ -27,7 +27,7 @@ namespace gtsam {
/**
* A pinhole camera class that has a Pose3 and a *fixed* Calibration.
* @addtogroup geometry
* @ingroup geometry
* \nosubgrouping
*/
template<typename CALIBRATION>
@ -236,7 +236,7 @@ public:
* A pinhole camera class that has a Pose3 and a *fixed* Calibration.
* Instead of using this class, one might consider calibrating the measurements
* and using CalibratedCamera, which would then be faster.
* @addtogroup geometry
* @ingroup geometry
* \nosubgrouping
*/
template<typename CALIBRATION>

View File

@ -30,7 +30,7 @@ namespace gtsam {
/**
* A 2D pose (Point2,Rot2)
* @addtogroup geometry
* @ingroup geometry
* \nosubgrouping
*/
class Pose2: public LieGroup<Pose2, 3> {

View File

@ -27,7 +27,6 @@
namespace gtsam {
using std::vector;
using Point3Pairs = vector<Point3Pair>;
/** instantiate concept checks */
GTSAM_CONCEPT_POSE_INST(Pose3)

View File

@ -31,7 +31,7 @@ class Pose2;
/**
* A 3D pose (R,t) : (Rot3,Point3)
* @addtogroup geometry
* @ingroup geometry
* \nosubgrouping
*/
class GTSAM_EXPORT Pose3: public LieGroup<Pose3, 6> {
@ -83,7 +83,7 @@ public:
* A pose aTb is estimated between pairs (a_point, b_point) such that a_point = aTb * b_point
* Note this allows for noise on the points but in that case the mapping will not be exact.
*/
static boost::optional<Pose3> Align(const std::vector<Point3Pair>& abPointPairs);
static boost::optional<Pose3> Align(const Point3Pairs& abPointPairs);
// Version of Pose3::Align that takes 2 matrices.
static boost::optional<Pose3> Align(const Matrix& a, const Matrix& b);

View File

@ -30,7 +30,7 @@ namespace gtsam {
/**
* Rotation matrix
* NOTE: the angle theta is in radians unless explicitly stated
* @addtogroup geometry
* @ingroup geometry
* \nosubgrouping
*/
class GTSAM_EXPORT Rot2 : public LieGroup<Rot2, 1> {
@ -86,7 +86,7 @@ namespace gtsam {
static Rot2 atan2(double y, double x);
/**
* Random, generates random angle \in [-p,pi]
* Random, generates random angle \f$\in\f$ [-pi,pi]
* Example:
* std::mt19937 engine(42);
* Unit3 unit = Unit3::Random(engine);

View File

@ -53,7 +53,7 @@ namespace gtsam {
* @brief Rot3 is a 3D rotation represented as a rotation matrix if the
* preprocessor symbol GTSAM_USE_QUATERNIONS is not defined, or as a quaternion
* if it is defined.
* @addtogroup geometry
* @ingroup geometry
*/
class GTSAM_EXPORT Rot3 : public LieGroup<Rot3, 3> {
private:
@ -129,7 +129,7 @@ class GTSAM_EXPORT Rot3 : public LieGroup<Rot3, 3> {
Rot3(double w, double x, double y, double z) : Rot3(Quaternion(w, x, y, z)) {}
/**
* Random, generates a random axis, then random angle \in [-p,pi]
* Random, generates a random axis, then random angle \f$\in\f$ [-pi,pi]
* Example:
* std::mt19937 engine(42);
* Unit3 unit = Unit3::Random(engine);

View File

@ -74,7 +74,7 @@ GTSAM_EXPORT Matrix3 topLeft(const SO4 &Q, OptionalJacobian<9, 6> H = boost::non
/**
* Project to Stiefel manifold of 4*3 orthonormal 3-frames in R^4, i.e., pi(Q)
* -> S \in St(3,4).
* -> \f$ S \in St(3,4) \f$.
*/
GTSAM_EXPORT Matrix43 stiefel(const SO4 &Q, OptionalJacobian<12, 6> H = boost::none);

View File

@ -121,7 +121,8 @@ class SO : public LieGroup<SO<N>, internal::DimensionSO(N)> {
/// currently only defined for SO3.
static SO ClosestTo(const MatrixNN& M);
/// Named constructor that finds chordal mean = argmin_R \sum sqr(|R-R_i|_F),
/// Named constructor that finds chordal mean
/// \f$ mu = argmin_R \sum sqr(|R-R_i|_F) \f$,
/// currently only defined for SO3.
static SO ChordalMean(const std::vector<SO>& rotations);

View File

@ -132,7 +132,7 @@ class GTSAM_EXPORT Similarity2 : public LieGroup<Similarity2, 4> {
* using the algorithm described here:
* http://www5.informatik.uni-erlangen.de/Forschung/Publikationen/2005/Zinsser05-PSR.pdf
*/
static Similarity2 Align(const std::vector<Pose2Pair>& abPosePairs);
static Similarity2 Align(const Pose2Pairs& abPosePairs);
/// @}
/// @name Lie Group

View File

@ -120,7 +120,7 @@ class GTSAM_EXPORT Similarity3 : public LieGroup<Similarity3, 7> {
/**
* Create Similarity3 by aligning at least three point pairs
*/
static Similarity3 Align(const std::vector<Point3Pair>& abPointPairs);
static Similarity3 Align(const Point3Pairs& abPointPairs);
/**
* Create the Similarity3 object that aligns at least two pose pairs.

View File

@ -34,7 +34,7 @@ namespace gtsam {
* Empty calibration. Only needed to play well with other cameras
* (e.g., when templating functions wrt cameras), since other cameras
* have constructors in the form camera(pose,calibration)
* @addtogroup geometry
* @ingroup geometry
* \nosubgrouping
*/
class GTSAM_EXPORT EmptyCal {
@ -64,7 +64,7 @@ class GTSAM_EXPORT EmptyCal {
/**
* A spherical camera class that has a Pose3 and measures bearing vectors.
* The camera has an Empty calibration and the only 6 dof are the pose
* @addtogroup geometry
* @ingroup geometry
* \nosubgrouping
*/
class GTSAM_EXPORT SphericalCamera {
@ -82,7 +82,6 @@ class GTSAM_EXPORT SphericalCamera {
EmptyCal::shared_ptr emptyCal_;
public:
/// @}
/// @name Standard Constructors
/// @{

View File

@ -42,7 +42,7 @@ private:
/**
* A stereo camera class, parameterize by left camera pose and stereo calibration
* @addtogroup geometry
* @ingroup geometry
*/
class GTSAM_EXPORT StereoCamera {

View File

@ -26,7 +26,7 @@ namespace gtsam {
/**
* A 2D stereo point, v will be the same for rectified images
* @addtogroup geometry
* @ingroup geometry
* \nosubgrouping
*/
class GTSAM_EXPORT StereoPoint2 {

View File

@ -1123,7 +1123,7 @@ class StereoCamera {
#include <gtsam/geometry/triangulation.h>
class TriangulationResult {
enum Status { VALID, DEGENERATE, BEHIND_CAMERA, OUTLIER, FAR_POINT };
Status status;
gtsam::TriangulationResult::Status status;
TriangulationResult(const gtsam::Point3& p);
const gtsam::Point3& get() const;
static gtsam::TriangulationResult Degenerate();
@ -1142,7 +1142,7 @@ class TriangulationParameters {
bool enableEPI;
double landmarkDistanceThreshold;
double dynamicOutlierRejectionThreshold;
SharedNoiseModel noiseModel;
gtsam::SharedNoiseModel noiseModel;
TriangulationParameters(const double rankTolerance = 1.0,
const bool enableEPI = false,
double landmarkDistanceThreshold = -1,

View File

@ -13,7 +13,7 @@
* @file global_includes.h
* @brief Included from all GTSAM files
* @author Richard Roberts
* @addtogroup base
* @ingroup base
*/
#pragma once

View File

@ -1,7 +1,7 @@
/**
\defgroup LinearSolving Solving of sparse linear systems with least-squares
@{
@{ @}
\defgroup Multifrontal Solving by multifrontal variable elimination (QR and Cholesky)
@{ @}
@ -9,9 +9,48 @@
\defgroup Sequential Solving by sequential variable elimination (QR and Cholesky)
@{ @}
@}
\defgroup SLAM Useful SLAM components
\defgroup base Base
@{ @}
*/
\defgroup basis Basis
@{ @}
\defgroup discrete Discrete
@{ @}
\defgroup geometry Geometry
@{
Geometric primitives.
@}
\defgroup hybrid Hybrid
@{ @}
\defgroup inference Inference
@{ @}
\defgroup linear Linear
@{ @}
\defgroup navigation Navigation
@{ @}
\defgroup nonlinear Nonlinear
@{ @}
\defgroup sam SAM
@{ @}
\defgroup sfm SFM
@{ @}
\defgroup slam SLAM
@{ Useful SLAM components @}
\defgroup symbolic Symbolic
@{ @}
\defgroup isam2 ISAM2
@{ @}
*/
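These \defgroup declarations are the targets for the many @addtogroup-to-@ingroup edits in this commit: each module group is now defined exactly once here, and individual classes merely tag themselves as members instead of reopening the group. A hypothetical sketch of the resulting pattern (the class is for illustration only):

namespace gtsam {

/**
 * An example class documented into the hybrid module.
 * @ingroup hybrid
 */
class ExampleHybridThing {};

}  // namespace gtsam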

View File

@ -129,7 +129,6 @@ void GaussianMixture::print(const std::string &s,
}
/* ************************************************************************* */
/// Return the DiscreteKey vector as a set.
std::set<DiscreteKey> DiscreteKeysAsSet(const DiscreteKeys &dkeys) {
std::set<DiscreteKey> s;
s.insert(dkeys.begin(), dkeys.end());

View File

@ -45,6 +45,7 @@ namespace gtsam {
* where i indexes the components and k_i is a component-wise normalization
* constant.
*
* @ingroup hybrid
*/
class GTSAM_EXPORT GaussianMixture
: public HybridFactor,
@ -182,6 +183,9 @@ class GTSAM_EXPORT GaussianMixture
Sum add(const Sum &sum) const;
};
/// Return the DiscreteKey vector as a set.
std::set<DiscreteKey> DiscreteKeysAsSet(const DiscreteKeys &dkeys);
// traits
template <>
struct traits<GaussianMixture> : public Testable<GaussianMixture> {};

View File

@ -44,6 +44,7 @@ using GaussianFactorVector = std::vector<gtsam::GaussianFactor::shared_ptr>;
* Represents the underlying Gaussian Mixture as a Decision Tree, where the set
* of discrete variables indexes to the continuous gaussian distribution.
*
* @ingroup hybrid
*/
class GTSAM_EXPORT GaussianMixtureFactor : public HybridFactor {
public:

View File

@ -42,13 +42,100 @@ DecisionTreeFactor::shared_ptr HybridBayesNet::discreteConditionals() const {
}
/* ************************************************************************* */
HybridBayesNet HybridBayesNet::prune(size_t maxNrLeaves) const {
/**
* @brief Helper function to get the pruner functional.
*
* @param decisionTree The probability decision tree of only discrete keys.
* @param conditional The hybrid conditional whose discrete keys drive the pruning.
* @return std::function<double(const Assignment<Key> &, double)>
*/
std::function<double(const Assignment<Key> &, double)> prunerFunc(
const DecisionTreeFactor &decisionTree,
const HybridConditional &conditional) {
// Get the discrete keys as sets for the decision tree
// and the gaussian mixture.
auto decisionTreeKeySet = DiscreteKeysAsSet(decisionTree.discreteKeys());
auto conditionalKeySet = DiscreteKeysAsSet(conditional.discreteKeys());
auto pruner = [decisionTree, decisionTreeKeySet, conditionalKeySet](
const Assignment<Key> &choices,
double probability) -> double {
// typecast so we can use this to get probability value
DiscreteValues values(choices);
// Case where the gaussian mixture has the same
// discrete keys as the decision tree.
if (conditionalKeySet == decisionTreeKeySet) {
if (decisionTree(values) == 0) {
return 0.0;
} else {
return probability;
}
} else {
std::vector<DiscreteKey> set_diff;
std::set_difference(decisionTreeKeySet.begin(), decisionTreeKeySet.end(),
conditionalKeySet.begin(), conditionalKeySet.end(),
std::back_inserter(set_diff));
const std::vector<DiscreteValues> assignments =
DiscreteValues::CartesianProduct(set_diff);
for (const DiscreteValues &assignment : assignments) {
DiscreteValues augmented_values(values);
augmented_values.insert(assignment.begin(), assignment.end());
// If any one of the sub-branches are non-zero,
// we need this probability.
if (decisionTree(augmented_values) > 0.0) {
return probability;
}
}
// If we are here, it means that all the sub-branches are 0,
// so we prune.
return 0.0;
}
};
return pruner;
}
/* ************************************************************************* */
void HybridBayesNet::updateDiscreteConditionals(
const DecisionTreeFactor::shared_ptr &prunedDecisionTree) {
KeyVector prunedTreeKeys = prunedDecisionTree->keys();
for (size_t i = 0; i < this->size(); i++) {
HybridConditional::shared_ptr conditional = this->at(i);
if (conditional->isDiscrete()) {
// std::cout << demangle(typeid(conditional).name()) << std::endl;
auto discrete = conditional->asDiscreteConditional();
KeyVector frontals(discrete->frontals().begin(),
discrete->frontals().end());
// Apply prunerFunc to the underlying AlgebraicDecisionTree
auto discreteTree =
boost::dynamic_pointer_cast<DecisionTreeFactor::ADT>(discrete);
DecisionTreeFactor::ADT prunedDiscreteTree =
discreteTree->apply(prunerFunc(*prunedDecisionTree, *conditional));
// Create the new (hybrid) conditional
auto prunedDiscrete = boost::make_shared<DiscreteLookupTable>(
frontals.size(), conditional->discreteKeys(), prunedDiscreteTree);
conditional = boost::make_shared<HybridConditional>(prunedDiscrete);
// Add it back to the BayesNet
this->at(i) = conditional;
}
}
}
/* ************************************************************************* */
HybridBayesNet HybridBayesNet::prune(size_t maxNrLeaves) {
// Get the decision tree of only the discrete keys
auto discreteConditionals = this->discreteConditionals();
const DecisionTreeFactor::shared_ptr discreteFactor =
const DecisionTreeFactor::shared_ptr decisionTree =
boost::make_shared<DecisionTreeFactor>(
discreteConditionals->prune(maxNrLeaves));
this->updateDiscreteConditionals(decisionTree);
/* To Prune, we visitWith every leaf in the GaussianMixture.
* For each leaf, using the assignment we can check the discrete decision tree
* for 0.0 probability, then just set the leaf to a nullptr.
@ -59,7 +146,7 @@ HybridBayesNet HybridBayesNet::prune(size_t maxNrLeaves) const {
HybridBayesNet prunedBayesNetFragment;
// Go through all the conditionals in the
// Bayes Net and prune them as per discreteFactor.
// Bayes Net and prune them as per decisionTree.
for (size_t i = 0; i < this->size(); i++) {
HybridConditional::shared_ptr conditional = this->at(i);
@ -69,7 +156,7 @@ HybridBayesNet HybridBayesNet::prune(size_t maxNrLeaves) const {
// Make a copy of the gaussian mixture and prune it!
auto prunedGaussianMixture =
boost::make_shared<GaussianMixture>(*gaussianMixture);
prunedGaussianMixture->prune(*discreteFactor);
prunedGaussianMixture->prune(*decisionTree);
// Type-erase and add to the pruned Bayes Net fragment.
prunedBayesNetFragment.push_back(

View File

@ -29,6 +29,8 @@ namespace gtsam {
/**
* A hybrid Bayes net is a collection of HybridConditionals, which can have
* discrete conditionals, Gaussian mixtures, or pure Gaussian conditionals.
*
* @ingroup hybrid
*/
class GTSAM_EXPORT HybridBayesNet : public BayesNet<HybridConditional> {
public:
@ -111,7 +113,6 @@ class GTSAM_EXPORT HybridBayesNet : public BayesNet<HybridConditional> {
*/
VectorValues optimize(const DiscreteValues &assignment) const;
protected:
/**
* @brief Get all the discrete conditionals as a decision tree factor.
*
@ -121,7 +122,7 @@ class GTSAM_EXPORT HybridBayesNet : public BayesNet<HybridConditional> {
public:
/// Prune the Hybrid Bayes Net such that we have at most maxNrLeaves leaves.
HybridBayesNet prune(size_t maxNrLeaves) const;
HybridBayesNet prune(size_t maxNrLeaves);
/**
* @brief 0.5 * sum of squared Mahalanobis distances
@ -146,6 +147,14 @@ class GTSAM_EXPORT HybridBayesNet : public BayesNet<HybridConditional> {
/// @}
private:
/**
* @brief Update the discrete conditionals with the pruned versions.
*
* @param prunedDecisionTree The pruned decision tree of discrete keys.
*/
void updateDiscreteConditionals(
const DecisionTreeFactor::shared_ptr &prunedDecisionTree);
/** Serialization function */
friend class boost::serialization::access;
template <class ARCHIVE>

View File

@ -146,8 +146,8 @@ VectorValues HybridBayesTree::optimize(const DiscreteValues& assignment) const {
/* ************************************************************************* */
void HybridBayesTree::prune(const size_t maxNrLeaves) {
auto decisionTree = boost::dynamic_pointer_cast<DecisionTreeFactor>(
this->roots_.at(0)->conditional()->inner());
auto decisionTree =
this->roots_.at(0)->conditional()->asDiscreteConditional();
DecisionTreeFactor prunedDecisionTree = decisionTree->prune(maxNrLeaves);
decisionTree->root_ = prunedDecisionTree.root_;

View File

@ -34,8 +34,11 @@ class HybridConditional;
class VectorValues;
/* ************************************************************************* */
/** A clique in a HybridBayesTree
/**
* @brief A clique in a HybridBayesTree
* which is a HybridConditional internally.
*
* @ingroup hybrid
*/
class GTSAM_EXPORT HybridBayesTreeClique
: public BayesTreeCliqueBase<HybridBayesTreeClique,

View File

@ -54,6 +54,8 @@ namespace gtsam {
*
* A great reference to the type-erasure pattern is Eduardo Madrid's CppCon
* talk (https://www.youtube.com/watch?v=s082Qmd_nHs).
*
* @ingroup hybrid
*/
class GTSAM_EXPORT HybridConditional
: public HybridFactor,

View File

@ -28,6 +28,8 @@ namespace gtsam {
* A HybridDiscreteFactor is a thin container for DiscreteFactor, which allows
* us to hide the implementation of DiscreteFactor and thus avoid diamond
* inheritance.
*
* @ingroup hybrid
*/
class GTSAM_EXPORT HybridDiscreteFactor : public HybridFactor {
private:

View File

@ -25,6 +25,8 @@ namespace gtsam {
/**
* Elimination Tree type for Hybrid
*
* @ingroup hybrid
*/
class GTSAM_EXPORT HybridEliminationTree
: public EliminationTree<HybridBayesNet, HybridGaussianFactorGraph> {

View File

@ -40,6 +40,8 @@ DiscreteKeys CollectDiscreteKeys(const DiscreteKeys &key1,
* - HybridDiscreteFactor
* - GaussianMixtureFactor
* - GaussianMixture
*
* @ingroup hybrid
*/
class GTSAM_EXPORT HybridFactor : public Factor {
private:

View File

@ -27,6 +27,8 @@ namespace gtsam {
* A HybridGaussianFactor is a layer over GaussianFactor so that we do not have
* a diamond inheritance i.e. an extra factor type that inherits from both
* HybridFactor and GaussianFactor.
*
* @ingroup hybrid
*/
class GTSAM_EXPORT HybridGaussianFactor : public HybridFactor {
private:

View File

@ -39,7 +39,14 @@ class DecisionTreeFactor;
class JacobianFactor;
/** Main elimination function for HybridGaussianFactorGraph */
/**
* @brief Main elimination function for HybridGaussianFactorGraph.
*
* @param factors The factor graph to eliminate.
* @param keys The elimination ordering.
* @return The conditional on the ordering keys and the remaining factors.
* @ingroup hybrid
*/
GTSAM_EXPORT
std::pair<boost::shared_ptr<HybridConditional>, HybridFactor::shared_ptr>
EliminateHybrid(const HybridGaussianFactorGraph& factors, const Ordering& keys);
@ -68,10 +75,12 @@ struct EliminationTraits<HybridGaussianFactorGraph> {
};
/**
* Gaussian Hybrid Factor Graph
* Hybrid Gaussian Factor Graph
* -----------------------
* This is the linearized version of a hybrid factor graph.
* Everything inside needs to be hybrid factor or hybrid conditional.
*
* @ingroup hybrid
*/
class GTSAM_EXPORT HybridGaussianFactorGraph
: public HybridFactorGraph,
@ -120,13 +129,13 @@ class GTSAM_EXPORT HybridGaussianFactorGraph
void add(JacobianFactor&& factor);
/// Add a Jacobian factor as a shared ptr.
void add(boost::shared_ptr<JacobianFactor> factor);
void add(JacobianFactor::shared_ptr factor);
/// Add a DecisionTreeFactor to the factor graph.
void add(DecisionTreeFactor&& factor);
/// Add a DecisionTreeFactor as a shared ptr.
void add(boost::shared_ptr<DecisionTreeFactor> factor);
void add(DecisionTreeFactor::shared_ptr factor);
/**
* Add a gaussian factor *pointer* to the internal gaussian factor graph

View File

@ -38,6 +38,35 @@ HybridGaussianISAM::HybridGaussianISAM() {}
HybridGaussianISAM::HybridGaussianISAM(const HybridBayesTree& bayesTree)
: Base(bayesTree) {}
/* ************************************************************************* */
Ordering HybridGaussianISAM::GetOrdering(
HybridGaussianFactorGraph& factors,
const HybridGaussianFactorGraph& newFactors) {
// Get all the discrete keys from the factors
KeySet allDiscrete = factors.discreteKeys();
// Create KeyVector with continuous keys followed by discrete keys.
KeyVector newKeysDiscreteLast;
const KeySet newFactorKeys = newFactors.keys();
// Insert continuous keys first.
for (auto& k : newFactorKeys) {
if (!allDiscrete.exists(k)) {
newKeysDiscreteLast.push_back(k);
}
}
// Insert discrete keys at the end
std::copy(allDiscrete.begin(), allDiscrete.end(),
std::back_inserter(newKeysDiscreteLast));
const VariableIndex index(factors);
// Get an ordering where the new keys are eliminated last
Ordering ordering = Ordering::ColamdConstrainedLast(
index, KeyVector(newKeysDiscreteLast.begin(), newKeysDiscreteLast.end()),
true);
return ordering;
}
/* ************************************************************************* */
void HybridGaussianISAM::updateInternal(
const HybridGaussianFactorGraph& newFactors,
@ -54,7 +83,7 @@ void HybridGaussianISAM::updateInternal(
}
// Add the removed top and the new factors
FactorGraphType factors;
HybridGaussianFactorGraph factors;
factors += bn;
factors += newFactors;
@ -63,32 +92,12 @@ void HybridGaussianISAM::updateInternal(
factors += boost::make_shared<BayesTreeOrphanWrapper<Node>>(orphan);
}
// Get all the discrete keys from the factors
KeySet allDiscrete = factors.discreteKeys();
// Create KeyVector with continuous keys followed by discrete keys.
KeyVector newKeysDiscreteLast;
// Insert continuous keys first.
for (auto& k : newFactorKeys) {
if (!allDiscrete.exists(k)) {
newKeysDiscreteLast.push_back(k);
}
}
// Insert discrete keys at the end
std::copy(allDiscrete.begin(), allDiscrete.end(),
std::back_inserter(newKeysDiscreteLast));
// Get an ordering where the new keys are eliminated last
const VariableIndex index(factors);
Ordering elimination_ordering;
if (ordering) {
elimination_ordering = *ordering;
} else {
elimination_ordering = Ordering::ColamdConstrainedLast(
index,
KeyVector(newKeysDiscreteLast.begin(), newKeysDiscreteLast.end()),
true);
elimination_ordering = GetOrdering(factors, newFactors);
}
// eliminate all factors (top, added, orphans) into a new Bayes tree

View File

@ -26,6 +26,11 @@
namespace gtsam {
/**
* @brief Incremental Smoothing and Mapping (ISAM) for hybrid Gaussian factor graphs.
*
* @ingroup hybrid
*/
class GTSAM_EXPORT HybridGaussianISAM : public ISAM<HybridBayesTree> {
public:
typedef ISAM<HybridBayesTree> Base;
@ -67,6 +72,17 @@ class GTSAM_EXPORT HybridGaussianISAM : public ISAM<HybridBayesTree> {
const boost::optional<Ordering>& ordering = boost::none,
const HybridBayesTree::Eliminate& function =
HybridBayesTree::EliminationTraitsType::DefaultEliminate);
/**
* @brief Helper method to get an ordering given the existing factors and any
* new factors added.
*
* @param factors The existing factors in the BayesTree.
* @param newFactors New factors added during the update step.
* @return Ordering where the new keys are eliminated last.
*/
static Ordering GetOrdering(HybridGaussianFactorGraph& factors,
const HybridGaussianFactorGraph& newFactors);
};
/// traits

View File

@ -45,7 +45,8 @@ class HybridEliminationTree;
* EliminationTree, except that in the JunctionTree, at each node multiple
* variables are eliminated at a time.
*
* \addtogroup Multifrontal
* \ingroup Multifrontal
* \ingroup hybrid
* \nosubgrouping
*/
class GTSAM_EXPORT HybridJunctionTree

View File

@ -0,0 +1,112 @@
/* ----------------------------------------------------------------------------
* GTSAM Copyright 2010, Georgia Tech Research Corporation,
* Atlanta, Georgia 30332-0415
* All Rights Reserved
* Authors: Frank Dellaert, et al. (see THANKS for the full author list)
* See LICENSE for the license information
* -------------------------------------------------------------------------- */
/**
* @file HybridSmoother.cpp
* @brief An incremental smoother for hybrid factor graphs
* @author Varun Agrawal
* @date October 2022
*/
#include <gtsam/hybrid/HybridSmoother.h>
#include <algorithm>
#include <unordered_set>
namespace gtsam {
/* ************************************************************************* */
void HybridSmoother::update(HybridGaussianFactorGraph graph,
const Ordering &ordering,
boost::optional<size_t> maxNrLeaves) {
// Add the necessary conditionals from the previous timestep(s).
std::tie(graph, hybridBayesNet_) =
addConditionals(graph, hybridBayesNet_, ordering);
// Eliminate.
auto bayesNetFragment = graph.eliminateSequential(ordering);
/// Prune
if (maxNrLeaves) {
// `pruneBayesNet` sets the leaves with 0 in discreteFactor to nullptr in
// all the conditionals with the same keys in bayesNetFragment.
HybridBayesNet prunedBayesNetFragment =
bayesNetFragment->prune(*maxNrLeaves);
// Set the bayes net fragment to the pruned version
bayesNetFragment =
boost::make_shared<HybridBayesNet>(prunedBayesNetFragment);
}
// Add the partial bayes net to the posterior bayes net.
hybridBayesNet_.push_back<HybridBayesNet>(*bayesNetFragment);
}
/* ************************************************************************* */
std::pair<HybridGaussianFactorGraph, HybridBayesNet>
HybridSmoother::addConditionals(const HybridGaussianFactorGraph &originalGraph,
const HybridBayesNet &originalHybridBayesNet,
const Ordering &ordering) const {
HybridGaussianFactorGraph graph(originalGraph);
HybridBayesNet hybridBayesNet(originalHybridBayesNet);
// If we are not at the first iteration, it means we have conditionals to add.
if (!hybridBayesNet.empty()) {
// We add all relevant conditional mixtures on the last continuous variable
// in the previous `hybridBayesNet` to the graph
// Conditionals to remove from the bayes net
// since the conditional will be updated.
std::vector<HybridConditional::shared_ptr> conditionals_to_erase;
// New conditionals to add to the graph
gtsam::HybridBayesNet newConditionals;
// NOTE(Varun) Using a for-range loop doesn't work since some of the
// conditionals are invalid pointers
for (size_t i = 0; i < hybridBayesNet.size(); i++) {
auto conditional = hybridBayesNet.at(i);
for (auto &key : conditional->frontals()) {
if (std::find(ordering.begin(), ordering.end(), key) !=
ordering.end()) {
newConditionals.push_back(conditional);
conditionals_to_erase.push_back(conditional);
break;
}
}
}
// Remove conditionals at the end so we don't affect the order in the
// original bayes net.
for (auto &&conditional : conditionals_to_erase) {
auto it = find(hybridBayesNet.begin(), hybridBayesNet.end(), conditional);
hybridBayesNet.erase(it);
}
graph.push_back(newConditionals);
// newConditionals.print("\n\n\nNew Conditionals to add back");
}
return {graph, hybridBayesNet};
}
/* ************************************************************************* */
GaussianMixture::shared_ptr HybridSmoother::gaussianMixture(
size_t index) const {
return boost::dynamic_pointer_cast<GaussianMixture>(
hybridBayesNet_.at(index));
}
/* ************************************************************************* */
const HybridBayesNet &HybridSmoother::hybridBayesNet() const {
return hybridBayesNet_;
}
} // namespace gtsam

View File

@ -0,0 +1,73 @@
/* ----------------------------------------------------------------------------
* GTSAM Copyright 2010, Georgia Tech Research Corporation,
* Atlanta, Georgia 30332-0415
* All Rights Reserved
* Authors: Frank Dellaert, et al. (see THANKS for the full author list)
* See LICENSE for the license information
* -------------------------------------------------------------------------- */
/**
* @file HybridSmoother.h
* @brief An incremental smoother for hybrid factor graphs
* @author Varun Agrawal
* @date October 2022
*/
#include <gtsam/discrete/DiscreteFactorGraph.h>
#include <gtsam/hybrid/HybridBayesNet.h>
#include <gtsam/hybrid/HybridGaussianFactorGraph.h>
namespace gtsam {
class HybridSmoother {
private:
HybridBayesNet hybridBayesNet_;
HybridGaussianFactorGraph remainingFactorGraph_;
public:
/**
* Given new factors, perform an incremental update.
* The relevant densities in the `hybridBayesNet` will be added to the input
* graph (fragment), and then eliminated according to the `ordering`
* presented. The remaining factor graph contains Gaussian mixture factors
* that are not connected to the variables in the ordering, or a single
* discrete factor on all discrete keys, plus all discrete factors in the
* original graph.
*
* \note If maxNrLeaves is given, we look at the discrete factor resulting
* from this elimination, and prune it and the Gaussian components
* corresponding to the pruned choices.
*
* @param graph The new factors, should be linear only
* @param ordering The ordering for elimination, only continuous vars are
* allowed
* @param maxNrLeaves The maximum number of leaves in the new discrete factor,
* if applicable
*/
void update(HybridGaussianFactorGraph graph, const Ordering& ordering,
boost::optional<size_t> maxNrLeaves = boost::none);
/**
* @brief Add conditionals from previous timestep as part of liquefication.
*
* @param graph The new factor graph for the current time step.
* @param hybridBayesNet The hybrid bayes net containing all conditionals so
* far.
* @param ordering The elimination ordering.
* @return The graph with the relevant conditionals added, and the Bayes net
* with those conditionals removed.
*/
std::pair<HybridGaussianFactorGraph, HybridBayesNet> addConditionals(
const HybridGaussianFactorGraph& graph,
const HybridBayesNet& hybridBayesNet, const Ordering& ordering) const;
/// Get the Gaussian Mixture from the Bayes Net posterior at `index`.
GaussianMixture::shared_ptr gaussianMixture(size_t index) const;
/// Return the Bayes Net posterior.
const HybridBayesNet& hybridBayesNet() const;
};
}; // namespace gtsam
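A minimal usage sketch for the new smoother, not part of this commit; the factor source, ordering, and leaf budget below are placeholder assumptions:

#include <gtsam/hybrid/HybridSmoother.h>

void smoothStep(gtsam::HybridSmoother& smoother,
                const gtsam::HybridGaussianFactorGraph& newFactors,
                const gtsam::Ordering& ordering) {
  // Fold the new linearized factors into the posterior, pruning the
  // discrete part down to at most 10 leaves.
  smoother.update(newFactors, ordering, 10);

  // The accumulated posterior; individual components are available via
  // smoother.gaussianMixture(i).
  const gtsam::HybridBayesNet& posterior = smoother.hybridBayesNet();
  (void)posterior;
}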

View File

@ -131,21 +131,29 @@ struct Switching {
* @param between_sigma The stddev between poses.
* @param prior_sigma The stddev on priors (also used for measurements).
*/
Switching(size_t K, double between_sigma = 1.0, double prior_sigma = 0.1)
Switching(size_t K, double between_sigma = 1.0, double prior_sigma = 0.1,
std::vector<double> measurements = {})
: K(K) {
// Create DiscreteKeys for binary K modes, modes[0] will not be used.
for (size_t k = 0; k <= K; k++) {
// Create DiscreteKeys for binary K modes.
for (size_t k = 0; k < K; k++) {
modes.emplace_back(M(k), 2);
}
// If measurements are not provided, we just have the robot moving forward.
if (measurements.size() == 0) {
for (size_t k = 0; k < K; k++) {
measurements.push_back(k);
}
}
// Create hybrid factor graph.
// Add a prior on X(1).
auto prior = boost::make_shared<PriorFactor<double>>(
X(1), 0, noiseModel::Isotropic::Sigma(1, prior_sigma));
X(0), measurements.at(0), noiseModel::Isotropic::Sigma(1, prior_sigma));
nonlinearFactorGraph.push_nonlinear(prior);
// Add "motion models".
for (size_t k = 1; k < K; k++) {
for (size_t k = 0; k < K - 1; k++) {
KeyVector keys = {X(k), X(k + 1)};
auto motion_models = motionModels(k, between_sigma);
std::vector<NonlinearFactor::shared_ptr> components;
@ -158,17 +166,17 @@ struct Switching {
// Add measurement factors
auto measurement_noise = noiseModel::Isotropic::Sigma(1, prior_sigma);
for (size_t k = 2; k <= K; k++) {
for (size_t k = 1; k < K; k++) {
nonlinearFactorGraph.emplace_nonlinear<PriorFactor<double>>(
X(k), 1.0 * (k - 1), measurement_noise);
X(k), measurements.at(k), measurement_noise);
}
// Add "mode chain"
addModeChain(&nonlinearFactorGraph);
// Create the linearization point.
for (size_t k = 1; k <= K; k++) {
linearizationPoint.insert<double>(X(k), static_cast<double>(k));
for (size_t k = 0; k < K; k++) {
linearizationPoint.insert<double>(X(k), static_cast<double>(k + 1));
}
// The ground truth is robot moving forward
@ -187,11 +195,16 @@ struct Switching {
return {still, moving};
}
// Add "mode chain" to HybridNonlinearFactorGraph
/**
* @brief Add "mode chain" to HybridNonlinearFactorGraph from M(0) to M(K-2).
* E.g. if K=4, we want M0, M1 and M2.
*
* @param fg The nonlinear factor graph to which the mode chain is added.
*/
void addModeChain(HybridNonlinearFactorGraph *fg) {
auto prior = boost::make_shared<DiscreteDistribution>(modes[1], "1/1");
auto prior = boost::make_shared<DiscreteDistribution>(modes[0], "1/1");
fg->push_discrete(prior);
for (size_t k = 1; k < K - 1; k++) {
for (size_t k = 0; k < K - 2; k++) {
auto parents = {modes[k]};
auto conditional = boost::make_shared<DiscreteConditional>(
modes[k + 1], parents, "1/2 3/2");
@ -199,11 +212,16 @@ struct Switching {
}
}
// Add "mode chain" to HybridGaussianFactorGraph
/**
* @brief Add "mode chain" to HybridGaussianFactorGraph from M(0) to M(K-2).
* E.g. if K=4, we want M0, M1 and M2.
*
* @param fg The gaussian factor graph to which the mode chain is added.
*/
void addModeChain(HybridGaussianFactorGraph *fg) {
auto prior = boost::make_shared<DiscreteDistribution>(modes[1], "1/1");
auto prior = boost::make_shared<DiscreteDistribution>(modes[0], "1/1");
fg->push_discrete(prior);
for (size_t k = 1; k < K - 1; k++) {
for (size_t k = 0; k < K - 2; k++) {
auto parents = {modes[k]};
auto conditional = boost::make_shared<DiscreteConditional>(
modes[k + 1], parents, "1/2 3/2");
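With the re-indexing above, the fixture is used as in the tests that follow; a minimal sketch, with the include path and gtsam namespace assumed:

#include "Switching.h"  // test fixture from this diff (path assumed)

void example() {
  // K = 4: continuous keys X(0)..X(3), binary modes M(0)..M(2).
  gtsam::Switching s(4);
  auto& nonlinear = s.nonlinearFactorGraph;  // hybrid nonlinear graph
  auto& linear = s.linearizedFactorGraph;    // linearized at s.linearizationPoint
  (void)nonlinear;
  (void)linear;
}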

View File

@ -82,9 +82,9 @@ TEST(HybridBayesNet, Choose) {
s.linearizedFactorGraph.eliminatePartialSequential(ordering);
DiscreteValues assignment;
assignment[M(0)] = 1;
assignment[M(1)] = 1;
assignment[M(2)] = 1;
assignment[M(3)] = 0;
assignment[M(2)] = 0;
GaussianBayesNet gbn = hybridBayesNet->choose(assignment);
@ -120,20 +120,20 @@ TEST(HybridBayesNet, OptimizeAssignment) {
s.linearizedFactorGraph.eliminatePartialSequential(ordering);
DiscreteValues assignment;
assignment[M(0)] = 1;
assignment[M(1)] = 1;
assignment[M(2)] = 1;
assignment[M(3)] = 1;
VectorValues delta = hybridBayesNet->optimize(assignment);
// The linearization point has the same value as the key index,
// e.g. X(1) = 1, X(2) = 2,
// e.g. X(0) = 1, X(1) = 2,
// but the factors specify X(k) = k-1, so delta should be -1.
VectorValues expected_delta;
expected_delta.insert(make_pair(X(0), -Vector1::Ones()));
expected_delta.insert(make_pair(X(1), -Vector1::Ones()));
expected_delta.insert(make_pair(X(2), -Vector1::Ones()));
expected_delta.insert(make_pair(X(3), -Vector1::Ones()));
expected_delta.insert(make_pair(X(4), -Vector1::Ones()));
EXPECT(assert_equal(expected_delta, delta));
}
@ -150,16 +150,16 @@ TEST(HybridBayesNet, Optimize) {
HybridValues delta = hybridBayesNet->optimize();
DiscreteValues expectedAssignment;
expectedAssignment[M(1)] = 1;
expectedAssignment[M(2)] = 0;
expectedAssignment[M(3)] = 1;
expectedAssignment[M(0)] = 1;
expectedAssignment[M(1)] = 0;
expectedAssignment[M(2)] = 1;
EXPECT(assert_equal(expectedAssignment, delta.discrete()));
VectorValues expectedValues;
expectedValues.insert(X(1), -0.999904 * Vector1::Ones());
expectedValues.insert(X(2), -0.99029 * Vector1::Ones());
expectedValues.insert(X(3), -1.00971 * Vector1::Ones());
expectedValues.insert(X(4), -1.0001 * Vector1::Ones());
expectedValues.insert(X(0), -0.999904 * Vector1::Ones());
expectedValues.insert(X(1), -0.99029 * Vector1::Ones());
expectedValues.insert(X(2), -1.00971 * Vector1::Ones());
expectedValues.insert(X(3), -1.0001 * Vector1::Ones());
EXPECT(assert_equal(expectedValues, delta.continuous(), 1e-5));
}
@ -175,10 +175,10 @@ TEST(HybridBayesNet, OptimizeMultifrontal) {
HybridValues delta = hybridBayesTree->optimize();
VectorValues expectedValues;
expectedValues.insert(X(1), -0.999904 * Vector1::Ones());
expectedValues.insert(X(2), -0.99029 * Vector1::Ones());
expectedValues.insert(X(3), -1.00971 * Vector1::Ones());
expectedValues.insert(X(4), -1.0001 * Vector1::Ones());
expectedValues.insert(X(0), -0.999904 * Vector1::Ones());
expectedValues.insert(X(1), -0.99029 * Vector1::Ones());
expectedValues.insert(X(2), -1.00971 * Vector1::Ones());
expectedValues.insert(X(3), -1.0001 * Vector1::Ones());
EXPECT(assert_equal(expectedValues, delta.continuous(), 1e-5));
}
@ -256,6 +256,55 @@ TEST(HybridBayesNet, Prune) {
EXPECT(assert_equal(delta.continuous(), pruned_delta.continuous()));
}
/* ****************************************************************************/
// Test Bayes net updateDiscreteConditionals
TEST(HybridBayesNet, UpdateDiscreteConditionals) {
Switching s(4);
Ordering hybridOrdering = s.linearizedFactorGraph.getHybridOrdering();
HybridBayesNet::shared_ptr hybridBayesNet =
s.linearizedFactorGraph.eliminateSequential(hybridOrdering);
size_t maxNrLeaves = 3;
auto discreteConditionals = hybridBayesNet->discreteConditionals();
const DecisionTreeFactor::shared_ptr prunedDecisionTree =
boost::make_shared<DecisionTreeFactor>(
discreteConditionals->prune(maxNrLeaves));
EXPECT_LONGS_EQUAL(maxNrLeaves + 2 /*2 zero leaves*/,
prunedDecisionTree->nrLeaves());
auto original_discrete_conditionals =
*(hybridBayesNet->at(4)->asDiscreteConditional());
// Prune!
hybridBayesNet->prune(maxNrLeaves);
// Functor to verify values against the original_discrete_conditionals
auto checker = [&](const Assignment<Key>& assignment,
double probability) -> double {
// Typecast so we can use this to get the probability value
DiscreteValues choices(assignment);
if (prunedDecisionTree->operator()(choices) == 0) {
EXPECT_DOUBLES_EQUAL(0.0, probability, 1e-9);
} else {
EXPECT_DOUBLES_EQUAL(original_discrete_conditionals(choices), probability,
1e-9);
}
return 0.0;
};
// Get the pruned discrete conditionals as an AlgebraicDecisionTree
auto pruned_discrete_conditionals =
hybridBayesNet->at(4)->asDiscreteConditional();
auto discrete_conditional_tree =
boost::dynamic_pointer_cast<DecisionTreeFactor::ADT>(
pruned_discrete_conditionals);
// The checker functor verifies the values for us.
discrete_conditional_tree->apply(checker);
}
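The checker above leans on one property of prune(maxNrLeaves): the most probable leaves keep their original values and every pruned assignment reads back as probability 0. A standalone sketch of that semantics, inferred from the assertions rather than taken from GTSAM's implementation:

#include <algorithm>
#include <functional>
#include <vector>

// Keep the maxNrLeaves largest probabilities, zero out the rest (ties may
// keep extra leaves; maxNrLeaves >= 1 assumed).
std::vector<double> pruneLeaves(std::vector<double> leaves,
                                size_t maxNrLeaves) {
  if (leaves.size() <= maxNrLeaves) return leaves;
  std::vector<double> sorted(leaves);
  std::nth_element(sorted.begin(), sorted.begin() + maxNrLeaves - 1,
                   sorted.end(), std::greater<double>());
  const double threshold = sorted[maxNrLeaves - 1];
  for (double& p : leaves)
    if (p < threshold) p = 0.0;  // pruned: reads back as probability 0
  return leaves;
}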
/* ****************************************************************************/
// Test HybridBayesNet serialization.
TEST(HybridBayesNet, Serialization) {


@ -60,9 +60,9 @@ TEST(HybridBayesTree, OptimizeAssignment) {
isam.update(graph1);
DiscreteValues assignment;
assignment[M(0)] = 1;
assignment[M(1)] = 1;
assignment[M(2)] = 1;
assignment[M(3)] = 1;
VectorValues delta = isam.optimize(assignment);
@ -70,16 +70,16 @@ TEST(HybridBayesTree, OptimizeAssignment) {
// e.g. X(0) = 1, X(1) = 2,
// but the factors specify X(k) = k, so delta should be -1.
VectorValues expected_delta;
expected_delta.insert(make_pair(X(0), -Vector1::Ones()));
expected_delta.insert(make_pair(X(1), -Vector1::Ones()));
expected_delta.insert(make_pair(X(2), -Vector1::Ones()));
expected_delta.insert(make_pair(X(3), -Vector1::Ones()));
expected_delta.insert(make_pair(X(4), -Vector1::Ones()));
EXPECT(assert_equal(expected_delta, delta));
// Create ordering.
Ordering ordering;
for (size_t k = 1; k <= s.K; k++) ordering += X(k);
for (size_t k = 0; k < s.K; k++) ordering += X(k);
HybridBayesNet::shared_ptr hybridBayesNet;
HybridGaussianFactorGraph::shared_ptr remainingFactorGraph;
@ -123,7 +123,7 @@ TEST(HybridBayesTree, Optimize) {
// Create ordering.
Ordering ordering;
for (size_t k = 1; k <= s.K; k++) ordering += X(k);
for (size_t k = 0; k < s.K; k++) ordering += X(k);
HybridBayesNet::shared_ptr hybridBayesNet;
HybridGaussianFactorGraph::shared_ptr remainingFactorGraph;


@ -0,0 +1,130 @@
/* ----------------------------------------------------------------------------
* GTSAM Copyright 2010, Georgia Tech Research Corporation,
* Atlanta, Georgia 30332-0415
* All Rights Reserved
* Authors: Frank Dellaert, et al. (see THANKS for the full author list)
* See LICENSE for the license information
* -------------------------------------------------------------------------- */
/**
* @file testHybridEstimation.cpp
* @brief Unit tests for end-to-end Hybrid Estimation
* @author Varun Agrawal
*/
#include <gtsam/geometry/Pose2.h>
#include <gtsam/hybrid/HybridBayesNet.h>
#include <gtsam/hybrid/HybridNonlinearFactorGraph.h>
#include <gtsam/hybrid/HybridNonlinearISAM.h>
#include <gtsam/hybrid/HybridSmoother.h>
#include <gtsam/hybrid/MixtureFactor.h>
#include <gtsam/inference/Symbol.h>
#include <gtsam/linear/GaussianBayesNet.h>
#include <gtsam/linear/GaussianFactorGraph.h>
#include <gtsam/linear/JacobianFactor.h>
#include <gtsam/linear/NoiseModel.h>
#include <gtsam/nonlinear/NonlinearFactorGraph.h>
#include <gtsam/nonlinear/PriorFactor.h>
#include <gtsam/slam/BetweenFactor.h>
// Include for test suite
#include <CppUnitLite/TestHarness.h>
#include "Switching.h"
using namespace std;
using namespace gtsam;
using symbol_shorthand::X;
Ordering getOrdering(HybridGaussianFactorGraph& factors,
const HybridGaussianFactorGraph& newFactors) {
factors += newFactors;
// Get all the discrete keys from the factors
KeySet allDiscrete = factors.discreteKeys();
// Create KeyVector with continuous keys followed by discrete keys.
KeyVector newKeysDiscreteLast;
const KeySet newFactorKeys = newFactors.keys();
// Insert continuous keys first.
for (auto& k : newFactorKeys) {
if (!allDiscrete.exists(k)) {
newKeysDiscreteLast.push_back(k);
}
}
// Insert discrete keys at the end
std::copy(allDiscrete.begin(), allDiscrete.end(),
std::back_inserter(newKeysDiscreteLast));
const VariableIndex index(factors);
// Get an ordering where the new keys are eliminated last
Ordering ordering = Ordering::ColamdConstrainedLast(
index, KeyVector(newKeysDiscreteLast.begin(), newKeysDiscreteLast.end()),
true);
return ordering;
}
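A short usage sketch for the helper above (hypothetical graphs; note that getOrdering appends newFactors to factors as a side effect):

HybridGaussianFactorGraph accumulated;  // e.g. the smoother's current graph
HybridGaussianFactorGraph incoming;     // e.g. *graph.linearize(initial)
// ... populate both graphs ...
Ordering ordering = getOrdering(accumulated, incoming);
// The continuous keys of `incoming` and all discrete keys are constrained to
// be eliminated last, with the discrete keys at the very end.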
/****************************************************************************/
// Test approximate inference with an additional pruning step.
TEST(HybridNonlinearISAM, Incremental) {
size_t K = 10;
std::vector<double> measurements = {0, 1, 2, 2, 2, 2, 3, 4, 5, 6, 6};
// Ground truth discrete seq
std::vector<size_t> discrete_seq = {1, 1, 0, 0, 0, 1, 1, 1, 1, 0};
Switching switching(K, 1.0, 0.1, measurements);
// HybridNonlinearISAM smoother;
HybridSmoother smoother;
HybridNonlinearFactorGraph graph;
Values initial;
// Add the X(0) prior
graph.push_back(switching.nonlinearFactorGraph.at(0));
initial.insert(X(0), switching.linearizationPoint.at<double>(X(0)));
HybridGaussianFactorGraph linearized;
HybridGaussianFactorGraph bayesNet;
for (size_t k = 1; k < K; k++) {
std::cout << ">>>>>>>>>>>>>>>>>>> k=" << k << std::endl;
// Motion Model
graph.push_back(switching.nonlinearFactorGraph.at(k));
// Measurement
graph.push_back(switching.nonlinearFactorGraph.at(k + K - 1));
initial.insert(X(k), switching.linearizationPoint.at<double>(X(k)));
bayesNet = smoother.hybridBayesNet();
linearized = *graph.linearize(initial);
Ordering ordering = getOrdering(bayesNet, linearized);
smoother.update(linearized, ordering, 3);
graph.resize(0);
}
HybridValues delta = smoother.hybridBayesNet().optimize();
Values result = initial.retract(delta.continuous());
DiscreteValues expected_discrete;
for (size_t k = 0; k < K - 1; k++) {
expected_discrete[M(k)] = discrete_seq[k];
}
EXPECT(assert_equal(expected_discrete, delta.discrete()));
Values expected_continuous;
for (size_t k = 0; k < K; k++) {
expected_continuous.insert(X(k), measurements[k]);
}
EXPECT(assert_equal(expected_continuous, result));
}
/* ************************************************************************* */
int main() {
TestResult tr;
return TestRegistry::runAllTests(tr);
}
/* ************************************************************************* */


@ -531,34 +531,34 @@ TEST(HybridGaussianFactorGraph, Conditionals) {
hfg.push_back(switching.linearizedFactorGraph.at(0)); // P(X1)
Ordering ordering;
ordering.push_back(X(1));
ordering.push_back(X(0));
HybridBayesNet::shared_ptr bayes_net = hfg.eliminateSequential(ordering);
hfg.push_back(switching.linearizedFactorGraph.at(1)); // P(X1, X2 | M1)
hfg.push_back(*bayes_net);
hfg.push_back(switching.linearizedFactorGraph.at(2)); // P(X2, X3 | M2)
hfg.push_back(switching.linearizedFactorGraph.at(5)); // P(M1)
ordering.push_back(X(1));
ordering.push_back(X(2));
ordering.push_back(X(3));
ordering.push_back(M(0));
ordering.push_back(M(1));
ordering.push_back(M(2));
bayes_net = hfg.eliminateSequential(ordering);
HybridValues result = bayes_net->optimize();
Values expected_continuous;
expected_continuous.insert<double>(X(1), 0);
expected_continuous.insert<double>(X(2), 1);
expected_continuous.insert<double>(X(3), 2);
expected_continuous.insert<double>(X(4), 4);
expected_continuous.insert<double>(X(0), 0);
expected_continuous.insert<double>(X(1), 1);
expected_continuous.insert<double>(X(2), 2);
expected_continuous.insert<double>(X(3), 4);
Values result_continuous =
switching.linearizationPoint.retract(result.continuous());
EXPECT(assert_equal(expected_continuous, result_continuous));
DiscreteValues expected_discrete;
expected_discrete[M(0)] = 1;
expected_discrete[M(1)] = 1;
expected_discrete[M(2)] = 1;
EXPECT(assert_equal(expected_discrete, result.discrete()));
}


@ -54,40 +54,40 @@ TEST(HybridGaussianElimination, IncrementalElimination) {
// Create initial factor graph
// * * *
// | | |
// X1 -*- X2 -*- X3
// \*-M1-*/
graph1.push_back(switching.linearizedFactorGraph.at(0)); // P(X1)
graph1.push_back(switching.linearizedFactorGraph.at(1)); // P(X1, X2 | M1)
graph1.push_back(switching.linearizedFactorGraph.at(2)); // P(X2, X3 | M2)
graph1.push_back(switching.linearizedFactorGraph.at(5)); // P(M1)
// X0 -*- X1 -*- X2
// \*-M0-*/
graph1.push_back(switching.linearizedFactorGraph.at(0)); // P(X0)
graph1.push_back(switching.linearizedFactorGraph.at(1)); // P(X0, X1 | M0)
graph1.push_back(switching.linearizedFactorGraph.at(2)); // P(X1, X2 | M1)
graph1.push_back(switching.linearizedFactorGraph.at(5)); // P(M0)
// Run update step
isam.update(graph1);
// Check that after update we have 3 hybrid Bayes net nodes:
// P(X1 | X2, M1) and P(X2, X3 | M1, M2), P(M1, M2)
// Check that after update we have 3 hybrid Bayes net nodes:
// P(X0 | X1, M0), P(X1, X2 | M0, M1), and P(M0, M1)
EXPECT_LONGS_EQUAL(3, isam.size());
EXPECT(isam[X(1)]->conditional()->frontals() == KeyVector{X(1)});
EXPECT(isam[X(1)]->conditional()->parents() == KeyVector({X(2), M(1)}));
EXPECT(isam[X(2)]->conditional()->frontals() == KeyVector({X(2), X(3)}));
EXPECT(isam[X(2)]->conditional()->parents() == KeyVector({M(1), M(2)}));
EXPECT(isam[X(0)]->conditional()->frontals() == KeyVector{X(0)});
EXPECT(isam[X(0)]->conditional()->parents() == KeyVector({X(1), M(0)}));
EXPECT(isam[X(1)]->conditional()->frontals() == KeyVector({X(1), X(2)}));
EXPECT(isam[X(1)]->conditional()->parents() == KeyVector({M(0), M(1)}));
/********************************************************/
// New factor graph for incremental update.
HybridGaussianFactorGraph graph2;
graph1.push_back(switching.linearizedFactorGraph.at(3)); // P(X2)
graph2.push_back(switching.linearizedFactorGraph.at(4)); // P(X3)
graph2.push_back(switching.linearizedFactorGraph.at(6)); // P(M1, M2)
graph1.push_back(switching.linearizedFactorGraph.at(3)); // P(X1)
graph2.push_back(switching.linearizedFactorGraph.at(4)); // P(X2)
graph2.push_back(switching.linearizedFactorGraph.at(6)); // P(M0, M1)
isam.update(graph2);
// Check that after the second update we have
// 1 additional hybrid Bayes net node:
// P(X2, X3 | M1, M2)
// P(X1, X2 | M0, M1)
EXPECT_LONGS_EQUAL(3, isam.size());
EXPECT(isam[X(3)]->conditional()->frontals() == KeyVector({X(2), X(3)}));
EXPECT(isam[X(3)]->conditional()->parents() == KeyVector({M(1), M(2)}));
EXPECT(isam[X(2)]->conditional()->frontals() == KeyVector({X(1), X(2)}));
EXPECT(isam[X(2)]->conditional()->parents() == KeyVector({M(0), M(1)}));
}
/* ****************************************************************************/
@ -100,104 +100,104 @@ TEST(HybridGaussianElimination, IncrementalInference) {
// Create initial factor graph
// * * *
// | | |
// X1 -*- X2 -*- X3
// X0 -*- X1 -*- X2
// | |
// *-M1 - * - M2
graph1.push_back(switching.linearizedFactorGraph.at(0)); // P(X1)
graph1.push_back(switching.linearizedFactorGraph.at(1)); // P(X1, X2 | M1)
graph1.push_back(switching.linearizedFactorGraph.at(3)); // P(X2)
graph1.push_back(switching.linearizedFactorGraph.at(5)); // P(M1)
// *-M0 - * - M1
graph1.push_back(switching.linearizedFactorGraph.at(0)); // P(X0)
graph1.push_back(switching.linearizedFactorGraph.at(1)); // P(X0, X1 | M0)
graph1.push_back(switching.linearizedFactorGraph.at(3)); // P(X1)
graph1.push_back(switching.linearizedFactorGraph.at(5)); // P(M0)
// Run update step
isam.update(graph1);
auto discreteConditional_m1 =
isam[M(1)]->conditional()->asDiscreteConditional();
EXPECT(discreteConditional_m1->keys() == KeyVector({M(1)}));
auto discreteConditional_m0 =
isam[M(0)]->conditional()->asDiscreteConditional();
EXPECT(discreteConditional_m0->keys() == KeyVector({M(0)}));
/********************************************************/
// New factor graph for incremental update.
HybridGaussianFactorGraph graph2;
graph2.push_back(switching.linearizedFactorGraph.at(2)); // P(X2, X3 | M2)
graph2.push_back(switching.linearizedFactorGraph.at(4)); // P(X3)
graph2.push_back(switching.linearizedFactorGraph.at(6)); // P(M1, M2)
graph2.push_back(switching.linearizedFactorGraph.at(2)); // P(X1, X2 | M1)
graph2.push_back(switching.linearizedFactorGraph.at(4)); // P(X2)
graph2.push_back(switching.linearizedFactorGraph.at(6)); // P(M0, M1)
isam.update(graph2);
/********************************************************/
// Run batch elimination so we can compare results.
Ordering ordering;
ordering += X(0);
ordering += X(1);
ordering += X(2);
ordering += X(3);
// Now we calculate the actual factors using full elimination
// Now we calculate the expected factors using full elimination
HybridBayesTree::shared_ptr expectedHybridBayesTree;
HybridGaussianFactorGraph::shared_ptr expectedRemainingGraph;
std::tie(expectedHybridBayesTree, expectedRemainingGraph) =
switching.linearizedFactorGraph.eliminatePartialMultifrontal(ordering);
// The densities on X(0) should be the same
auto x0_conditional =
dynamic_pointer_cast<GaussianMixture>(isam[X(0)]->conditional()->inner());
auto expected_x0_conditional = dynamic_pointer_cast<GaussianMixture>(
(*expectedHybridBayesTree)[X(0)]->conditional()->inner());
EXPECT(assert_equal(*x0_conditional, *expected_x0_conditional));
// The densities on X(1) should be the same
auto x1_conditional =
dynamic_pointer_cast<GaussianMixture>(isam[X(1)]->conditional()->inner());
auto actual_x1_conditional = dynamic_pointer_cast<GaussianMixture>(
auto expected_x1_conditional = dynamic_pointer_cast<GaussianMixture>(
(*expectedHybridBayesTree)[X(1)]->conditional()->inner());
EXPECT(assert_equal(*x1_conditional, *actual_x1_conditional));
EXPECT(assert_equal(*x1_conditional, *expected_x1_conditional));
// The densities on X(2) should be the same
auto x2_conditional =
dynamic_pointer_cast<GaussianMixture>(isam[X(2)]->conditional()->inner());
auto actual_x2_conditional = dynamic_pointer_cast<GaussianMixture>(
auto expected_x2_conditional = dynamic_pointer_cast<GaussianMixture>(
(*expectedHybridBayesTree)[X(2)]->conditional()->inner());
EXPECT(assert_equal(*x2_conditional, *actual_x2_conditional));
// The densities on X(3) should be the same
auto x3_conditional =
dynamic_pointer_cast<GaussianMixture>(isam[X(3)]->conditional()->inner());
auto actual_x3_conditional = dynamic_pointer_cast<GaussianMixture>(
(*expectedHybridBayesTree)[X(2)]->conditional()->inner());
EXPECT(assert_equal(*x3_conditional, *actual_x3_conditional));
EXPECT(assert_equal(*x2_conditional, *expected_x2_conditional));
// We only perform manual continuous elimination for 0,0.
// The other discrete probabilities on M(1) are calculated the same way
Ordering discrete_ordering;
discrete_ordering += M(0);
discrete_ordering += M(1);
discrete_ordering += M(2);
HybridBayesTree::shared_ptr discreteBayesTree =
expectedRemainingGraph->eliminateMultifrontal(discrete_ordering);
DiscreteValues m00;
m00[M(1)] = 0, m00[M(2)] = 0;
m00[M(0)] = 0, m00[M(1)] = 0;
DiscreteConditional decisionTree =
*(*discreteBayesTree)[M(2)]->conditional()->asDiscreteConditional();
*(*discreteBayesTree)[M(1)]->conditional()->asDiscreteConditional();
double m00_prob = decisionTree(m00);
auto discreteConditional = isam[M(2)]->conditional()->asDiscreteConditional();
auto discreteConditional = isam[M(1)]->conditional()->asDiscreteConditional();
// Test if the probability values are as expected with regression tests.
DiscreteValues assignment;
EXPECT(assert_equal(m00_prob, 0.0619233, 1e-5));
assignment[M(0)] = 0;
assignment[M(1)] = 0;
assignment[M(2)] = 0;
EXPECT(assert_equal(m00_prob, (*discreteConditional)(assignment), 1e-5));
assignment[M(1)] = 1;
assignment[M(2)] = 0;
EXPECT(assert_equal(0.183743, (*discreteConditional)(assignment), 1e-5));
assignment[M(0)] = 1;
assignment[M(1)] = 0;
assignment[M(2)] = 1;
EXPECT(assert_equal(0.204159, (*discreteConditional)(assignment), 1e-5));
EXPECT(assert_equal(0.183743, (*discreteConditional)(assignment), 1e-5));
assignment[M(0)] = 0;
assignment[M(1)] = 1;
EXPECT(assert_equal(0.204159, (*discreteConditional)(assignment), 1e-5));
assignment[M(0)] = 1;
assignment[M(1)] = 1;
assignment[M(2)] = 1;
EXPECT(assert_equal(0.2, (*discreteConditional)(assignment), 1e-5));
// Check if the clique conditional generated from incremental elimination
// matches that of batch elimination.
auto expectedChordal = expectedRemainingGraph->eliminateMultifrontal();
auto expectedConditional = dynamic_pointer_cast<DecisionTreeFactor>(
(*expectedChordal)[M(2)]->conditional()->inner());
(*expectedChordal)[M(1)]->conditional()->inner());
auto actualConditional = dynamic_pointer_cast<DecisionTreeFactor>(
isam[M(2)]->conditional()->inner());
isam[M(1)]->conditional()->inner());
EXPECT(assert_equal(*actualConditional, *expectedConditional, 1e-6));
}
@ -208,13 +208,13 @@ TEST(HybridGaussianElimination, Approx_inference) {
HybridGaussianISAM incrementalHybrid;
HybridGaussianFactorGraph graph1;
// Add the 3 hybrid factors, x1-x2, x2-x3, x3-x4
// Add the 3 hybrid factors, x0-x1, x1-x2, x2-x3
for (size_t i = 1; i < 4; i++) {
graph1.push_back(switching.linearizedFactorGraph.at(i));
}
// Add the Gaussian factors, 1 prior on X(1),
// 3 measurements on X(2), X(3), X(4)
// Add the Gaussian factors, 1 prior on X(0),
// 3 measurements on X(1), X(2), X(3)
graph1.push_back(switching.linearizedFactorGraph.at(0));
for (size_t i = 4; i <= 7; i++) {
graph1.push_back(switching.linearizedFactorGraph.at(i));
@ -222,7 +222,7 @@ TEST(HybridGaussianElimination, Approx_inference) {
// Create ordering.
Ordering ordering;
for (size_t j = 1; j <= 4; j++) {
for (size_t j = 0; j < 4; j++) {
ordering += X(j);
}
@ -271,26 +271,26 @@ TEST(HybridGaussianElimination, Approx_inference) {
1 1 1 Leaf 0.5
*/
auto discreteConditional_m1 = *dynamic_pointer_cast<DiscreteConditional>(
incrementalHybrid[M(1)]->conditional()->inner());
EXPECT(discreteConditional_m1.keys() == KeyVector({M(1), M(2), M(3)}));
auto discreteConditional_m0 = *dynamic_pointer_cast<DiscreteConditional>(
incrementalHybrid[M(0)]->conditional()->inner());
EXPECT(discreteConditional_m0.keys() == KeyVector({M(0), M(1), M(2)}));
// Get the number of elements which are greater than 0.
auto count = [](const double &value, int count) {
return value > 0 ? count + 1 : count;
};
// Check that the number of leaves after pruning is 5.
EXPECT_LONGS_EQUAL(5, discreteConditional_m1.fold(count, 0));
EXPECT_LONGS_EQUAL(5, discreteConditional_m0.fold(count, 0));
// Check that the hybrid nodes of the Bayes net match those of the pre-pruning
// Bayes net, at the same positions.
auto &unprunedLastDensity = *dynamic_pointer_cast<GaussianMixture>(
unprunedHybridBayesTree->clique(X(4))->conditional()->inner());
unprunedHybridBayesTree->clique(X(3))->conditional()->inner());
auto &lastDensity = *dynamic_pointer_cast<GaussianMixture>(
incrementalHybrid[X(4)]->conditional()->inner());
incrementalHybrid[X(3)]->conditional()->inner());
std::vector<std::pair<DiscreteValues, double>> assignments =
discreteConditional_m1.enumerate();
discreteConditional_m0.enumerate();
// Loop over all assignments and check the pruned components
for (auto &&av : assignments) {
const DiscreteValues &assignment = av.first;
@ -314,13 +314,13 @@ TEST(HybridGaussianElimination, Incremental_approximate) {
HybridGaussianFactorGraph graph1;
/***** Run Round 1 *****/
// Add the 3 hybrid factors, x1-x2, x2-x3, x3-x4
// Add the 3 hybrid factors, x0-x1, x1-x2, x2-x3
for (size_t i = 1; i < 4; i++) {
graph1.push_back(switching.linearizedFactorGraph.at(i));
}
// Add the Gaussian factors, 1 prior on X(1),
// 3 measurements on X(2), X(3), X(4)
// Add the Gaussian factors, 1 prior on X(0),
// 3 measurements on X(1), X(2), X(3)
graph1.push_back(switching.linearizedFactorGraph.at(0));
for (size_t i = 5; i <= 7; i++) {
graph1.push_back(switching.linearizedFactorGraph.at(i));
@ -335,13 +335,13 @@ TEST(HybridGaussianElimination, Incremental_approximate) {
// each with 2, 4, 8, and 5 (pruned) leaves respectively.
EXPECT_LONGS_EQUAL(4, incrementalHybrid.size());
EXPECT_LONGS_EQUAL(
2, incrementalHybrid[X(1)]->conditional()->asMixture()->nrComponents());
2, incrementalHybrid[X(0)]->conditional()->asMixture()->nrComponents());
EXPECT_LONGS_EQUAL(
3, incrementalHybrid[X(2)]->conditional()->asMixture()->nrComponents());
3, incrementalHybrid[X(1)]->conditional()->asMixture()->nrComponents());
EXPECT_LONGS_EQUAL(
5, incrementalHybrid[X(2)]->conditional()->asMixture()->nrComponents());
EXPECT_LONGS_EQUAL(
5, incrementalHybrid[X(3)]->conditional()->asMixture()->nrComponents());
EXPECT_LONGS_EQUAL(
5, incrementalHybrid[X(4)]->conditional()->asMixture()->nrComponents());
/***** Run Round 2 *****/
HybridGaussianFactorGraph graph2;
@ -356,9 +356,9 @@ TEST(HybridGaussianElimination, Incremental_approximate) {
// with 5 (pruned) leaves.
CHECK_EQUAL(5, incrementalHybrid.size());
EXPECT_LONGS_EQUAL(
5, incrementalHybrid[X(4)]->conditional()->asMixture()->nrComponents());
5, incrementalHybrid[X(3)]->conditional()->asMixture()->nrComponents());
EXPECT_LONGS_EQUAL(
5, incrementalHybrid[X(5)]->conditional()->asMixture()->nrComponents());
5, incrementalHybrid[X(4)]->conditional()->asMixture()->nrComponents());
}
/* ************************************************************************/
@ -370,7 +370,7 @@ TEST(HybridGaussianISAM, NonTrivial) {
/*************** Run Round 1 ***************/
HybridNonlinearFactorGraph fg;
// Add a prior on pose x1 at the origin.
// Add a prior on pose x0 at the origin.
// A prior factor consists of a mean and
// a noise model (covariance matrix)
Pose2 prior(0.0, 0.0, 0.0); // prior mean is at origin


@ -263,7 +263,7 @@ TEST(HybridFactorGraph, EliminationTree) {
// Create ordering.
Ordering ordering;
for (size_t k = 1; k <= self.K; k++) ordering += X(k);
for (size_t k = 0; k < self.K; k++) ordering += X(k);
// Create elimination tree.
HybridEliminationTree etree(self.linearizedFactorGraph, ordering);
@ -271,9 +271,9 @@ TEST(HybridFactorGraph, EliminationTree) {
}
/****************************************************************************
*Test elimination function by eliminating x1 in *-x1-*-x2 graph.
*Test elimination function by eliminating x0 in *-x0-*-x1 graph.
*/
TEST(GaussianElimination, Eliminate_x1) {
TEST(GaussianElimination, Eliminate_x0) {
Switching self(3);
// Gather factors on x0: a simple Gaussian and a mixture factor.
@ -283,9 +283,9 @@ TEST(GaussianElimination, Eliminate_x1) {
// Add first hybrid factor
factors.push_back(self.linearizedFactorGraph[1]);
// Eliminate x1
// Eliminate x0
Ordering ordering;
ordering += X(1);
ordering += X(0);
auto result = EliminateHybrid(factors, ordering);
CHECK(result.first);
@ -296,20 +296,20 @@ TEST(GaussianElimination, Eliminate_x1) {
}
/****************************************************************************
* Test elimination function by eliminating x2 in x1-*-x2-*-x3 chain.
* m1/ \m2
* Test elimination function by eliminating x1 in x0-*-x1-*-x2 chain.
* m0/ \m1
*/
TEST(HybridsGaussianElimination, Eliminate_x2) {
TEST(HybridsGaussianElimination, Eliminate_x1) {
Switching self(3);
// Gather factors on x2, will be two mixture factors (with x1 and x3, resp.).
// Gather factors on x1, will be two mixture factors (with x0 and x2, resp.).
HybridGaussianFactorGraph factors;
factors.push_back(self.linearizedFactorGraph[1]); // involves m1
factors.push_back(self.linearizedFactorGraph[2]); // involves m2
factors.push_back(self.linearizedFactorGraph[1]); // involves m0
factors.push_back(self.linearizedFactorGraph[2]); // involves m1
// Eliminate x2
// Eliminate x1
Ordering ordering;
ordering += X(2);
ordering += X(1);
std::pair<HybridConditional::shared_ptr, HybridFactor::shared_ptr> result =
EliminateHybrid(factors, ordering);
@ -326,28 +326,28 @@ TEST(HybridsGaussianElimination, Eliminate_x2) {
GaussianFactorGraph::shared_ptr batchGFG(double between,
Values linearizationPoint) {
NonlinearFactorGraph graph;
graph.addPrior<double>(X(1), 0, Isotropic::Sigma(1, 0.1));
graph.addPrior<double>(X(0), 0, Isotropic::Sigma(1, 0.1));
auto between_x1_x2 = boost::make_shared<MotionModel>(
X(1), X(2), between, Isotropic::Sigma(1, 1.0));
auto between_x0_x1 = boost::make_shared<MotionModel>(
X(0), X(1), between, Isotropic::Sigma(1, 1.0));
graph.push_back(between_x1_x2);
graph.push_back(between_x0_x1);
return graph.linearize(linearizationPoint);
}
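A usage sketch for batchGFG (hypothetical linearization point, assuming the X shorthand and MotionModel alias used in this file):

Values lp;
lp.insert<double>(X(0), 0.0);
lp.insert<double>(X(1), 1.0);
// Linearize the prior + between chain about lp.
GaussianFactorGraph::shared_ptr gfg = batchGFG(/*between=*/1.0, lp);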
/****************************************************************************
* Test elimination function by eliminating x1 and x2 in graph.
* Test elimination function by eliminating x0 and x1 in graph.
*/
TEST(HybridGaussianElimination, EliminateHybrid_2_Variable) {
Switching self(2, 1.0, 0.1);
auto factors = self.linearizedFactorGraph;
// Eliminate x1
// Eliminate x0
Ordering ordering;
ordering += X(0);
ordering += X(1);
ordering += X(2);
HybridConditional::shared_ptr hybridConditionalMixture;
HybridFactor::shared_ptr factorOnModes;
@ -359,7 +359,7 @@ TEST(HybridGaussianElimination, EliminateHybrid_2_Variable) {
dynamic_pointer_cast<GaussianMixture>(hybridConditionalMixture->inner());
CHECK(gaussianConditionalMixture);
// Frontals = [x1, x2]
// Frontals = [x0, x1]
EXPECT_LONGS_EQUAL(2, gaussianConditionalMixture->nrFrontals());
// 1 parent, which is the mode
EXPECT_LONGS_EQUAL(1, gaussianConditionalMixture->nrParents());
@ -387,7 +387,7 @@ TEST(HybridFactorGraph, Partial_Elimination) {
// Create ordering.
Ordering ordering;
for (size_t k = 1; k <= self.K; k++) ordering += X(k);
for (size_t k = 0; k < self.K; k++) ordering += X(k);
// Eliminate partially.
HybridBayesNet::shared_ptr hybridBayesNet;
@ -397,18 +397,18 @@ TEST(HybridFactorGraph, Partial_Elimination) {
CHECK(hybridBayesNet);
EXPECT_LONGS_EQUAL(3, hybridBayesNet->size());
EXPECT(hybridBayesNet->at(0)->frontals() == KeyVector{X(1)});
EXPECT(hybridBayesNet->at(0)->parents() == KeyVector({X(2), M(1)}));
EXPECT(hybridBayesNet->at(1)->frontals() == KeyVector{X(2)});
EXPECT(hybridBayesNet->at(1)->parents() == KeyVector({X(3), M(1), M(2)}));
EXPECT(hybridBayesNet->at(2)->frontals() == KeyVector{X(3)});
EXPECT(hybridBayesNet->at(2)->parents() == KeyVector({M(1), M(2)}));
EXPECT(hybridBayesNet->at(0)->frontals() == KeyVector{X(0)});
EXPECT(hybridBayesNet->at(0)->parents() == KeyVector({X(1), M(0)}));
EXPECT(hybridBayesNet->at(1)->frontals() == KeyVector{X(1)});
EXPECT(hybridBayesNet->at(1)->parents() == KeyVector({X(2), M(0), M(1)}));
EXPECT(hybridBayesNet->at(2)->frontals() == KeyVector{X(2)});
EXPECT(hybridBayesNet->at(2)->parents() == KeyVector({M(0), M(1)}));
CHECK(remainingFactorGraph);
EXPECT_LONGS_EQUAL(3, remainingFactorGraph->size());
EXPECT(remainingFactorGraph->at(0)->keys() == KeyVector({M(1)}));
EXPECT(remainingFactorGraph->at(1)->keys() == KeyVector({M(2), M(1)}));
EXPECT(remainingFactorGraph->at(2)->keys() == KeyVector({M(1), M(2)}));
EXPECT(remainingFactorGraph->at(0)->keys() == KeyVector({M(0)}));
EXPECT(remainingFactorGraph->at(1)->keys() == KeyVector({M(1), M(0)}));
EXPECT(remainingFactorGraph->at(2)->keys() == KeyVector({M(0), M(1)}));
}
/****************************************************************************
@ -427,7 +427,7 @@ TEST(HybridFactorGraph, Full_Elimination) {
{
// Create ordering.
Ordering ordering;
for (size_t k = 1; k <= self.K; k++) ordering += X(k);
for (size_t k = 0; k < self.K; k++) ordering += X(k);
// Eliminate partially.
std::tie(hybridBayesNet_partial, remainingFactorGraph_partial) =
@ -440,15 +440,15 @@ TEST(HybridFactorGraph, Full_Elimination) {
discrete_fg.push_back(df->inner());
}
ordering.clear();
for (size_t k = 1; k < self.K; k++) ordering += M(k);
for (size_t k = 0; k < self.K - 1; k++) ordering += M(k);
discreteBayesNet =
*discrete_fg.eliminateSequential(ordering, EliminateForMPE);
}
// Create ordering.
Ordering ordering;
for (size_t k = 1; k <= self.K; k++) ordering += X(k);
for (size_t k = 1; k < self.K; k++) ordering += M(k);
for (size_t k = 0; k < self.K; k++) ordering += X(k);
for (size_t k = 0; k < self.K - 1; k++) ordering += M(k);
// Eliminate partially.
HybridBayesNet::shared_ptr hybridBayesNet =
@ -456,23 +456,23 @@ TEST(HybridFactorGraph, Full_Elimination) {
CHECK(hybridBayesNet);
EXPECT_LONGS_EQUAL(5, hybridBayesNet->size());
// p(x1 | x2, m1)
EXPECT(hybridBayesNet->at(0)->frontals() == KeyVector{X(1)});
EXPECT(hybridBayesNet->at(0)->parents() == KeyVector({X(2), M(1)}));
// p(x2 | x3, m1, m2)
EXPECT(hybridBayesNet->at(1)->frontals() == KeyVector{X(2)});
EXPECT(hybridBayesNet->at(1)->parents() == KeyVector({X(3), M(1), M(2)}));
// p(x3 | m1, m2)
EXPECT(hybridBayesNet->at(2)->frontals() == KeyVector{X(3)});
EXPECT(hybridBayesNet->at(2)->parents() == KeyVector({M(1), M(2)}));
// P(m1 | m2)
EXPECT(hybridBayesNet->at(3)->frontals() == KeyVector{M(1)});
EXPECT(hybridBayesNet->at(3)->parents() == KeyVector({M(2)}));
// p(x0 | x1, m0)
EXPECT(hybridBayesNet->at(0)->frontals() == KeyVector{X(0)});
EXPECT(hybridBayesNet->at(0)->parents() == KeyVector({X(1), M(0)}));
// p(x1 | x2, m0, m1)
EXPECT(hybridBayesNet->at(1)->frontals() == KeyVector{X(1)});
EXPECT(hybridBayesNet->at(1)->parents() == KeyVector({X(2), M(0), M(1)}));
// p(x2 | m0, m1)
EXPECT(hybridBayesNet->at(2)->frontals() == KeyVector{X(2)});
EXPECT(hybridBayesNet->at(2)->parents() == KeyVector({M(0), M(1)}));
// P(m0 | m1)
EXPECT(hybridBayesNet->at(3)->frontals() == KeyVector{M(0)});
EXPECT(hybridBayesNet->at(3)->parents() == KeyVector({M(1)}));
EXPECT(
dynamic_pointer_cast<DiscreteConditional>(hybridBayesNet->at(3)->inner())
->equals(*discreteBayesNet.at(0)));
// P(m2)
EXPECT(hybridBayesNet->at(4)->frontals() == KeyVector{M(2)});
// P(m1)
EXPECT(hybridBayesNet->at(4)->frontals() == KeyVector{M(1)});
EXPECT_LONGS_EQUAL(0, hybridBayesNet->at(4)->nrParents());
EXPECT(
dynamic_pointer_cast<DiscreteConditional>(hybridBayesNet->at(4)->inner())
@ -489,7 +489,7 @@ TEST(HybridFactorGraph, Printing) {
// Create ordering.
Ordering ordering;
for (size_t k = 1; k <= self.K; k++) ordering += X(k);
for (size_t k = 0; k < self.K; k++) ordering += X(k);
// Eliminate partially.
HybridBayesNet::shared_ptr hybridBayesNet;
@ -499,14 +499,37 @@ TEST(HybridFactorGraph, Printing) {
string expected_hybridFactorGraph = R"(
size: 7
factor 0: Continuous [x1]
factor 0: Continuous [x0]
A[x1] = [
A[x0] = [
10
]
b = [ -10 ]
No noise model
factor 1: Hybrid [x1 x2; m1]{
factor 1: Hybrid [x0 x1; m0]{
Choice(m0)
0 Leaf :
A[x0] = [
-1
]
A[x1] = [
1
]
b = [ -1 ]
No noise model
1 Leaf :
A[x0] = [
-1
]
A[x1] = [
1
]
b = [ -0 ]
No noise model
}
factor 2: Hybrid [x1 x2; m1]{
Choice(m1)
0 Leaf :
A[x1] = [
@ -529,54 +552,31 @@ factor 1: Hybrid [x1 x2; m1]{
No noise model
}
factor 2: Hybrid [x2 x3; m2]{
Choice(m2)
0 Leaf :
A[x2] = [
-1
]
A[x3] = [
1
]
b = [ -1 ]
No noise model
factor 3: Continuous [x1]
1 Leaf :
A[x2] = [
-1
A[x1] = [
10
]
A[x3] = [
1
]
b = [ -0 ]
b = [ -10 ]
No noise model
}
factor 3: Continuous [x2]
factor 4: Continuous [x2]
A[x2] = [
10
]
b = [ -10 ]
No noise model
factor 4: Continuous [x3]
A[x3] = [
10
]
b = [ -10 ]
No noise model
factor 5: Discrete [m1]
P( m1 ):
factor 5: Discrete [m0]
P( m0 ):
Leaf 0.5
factor 6: Discrete [m2 m1]
P( m2 | m1 ):
Choice(m2)
0 Choice(m1)
factor 6: Discrete [m1 m0]
P( m1 | m0 ):
Choice(m1)
0 Choice(m0)
0 0 Leaf 0.33333333
0 1 Leaf 0.6
1 Choice(m1)
1 Choice(m0)
1 0 Leaf 0.66666667
1 1 Leaf 0.4
@ -586,71 +586,71 @@ factor 6: Discrete [m2 m1]
// Expected output for hybridBayesNet.
string expected_hybridBayesNet = R"(
size: 3
factor 0: Hybrid P( x1 | x2 m1)
Discrete Keys = (m1, 2),
Choice(m1)
0 Leaf p(x1 | x2)
factor 0: Hybrid P( x0 | x1 m0)
Discrete Keys = (m0, 2),
Choice(m0)
0 Leaf p(x0 | x1)
R = [ 10.0499 ]
S[x2] = [ -0.0995037 ]
S[x1] = [ -0.0995037 ]
d = [ -9.85087 ]
No noise model
1 Leaf p(x1 | x2)
1 Leaf p(x0 | x1)
R = [ 10.0499 ]
S[x2] = [ -0.0995037 ]
S[x1] = [ -0.0995037 ]
d = [ -9.95037 ]
No noise model
factor 1: Hybrid P( x2 | x3 m1 m2)
Discrete Keys = (m1, 2), (m2, 2),
Choice(m2)
0 Choice(m1)
0 0 Leaf p(x2 | x3)
factor 1: Hybrid P( x1 | x2 m0 m1)
Discrete Keys = (m0, 2), (m1, 2),
Choice(m1)
0 Choice(m0)
0 0 Leaf p(x1 | x2)
R = [ 10.099 ]
S[x3] = [ -0.0990196 ]
S[x2] = [ -0.0990196 ]
d = [ -9.99901 ]
No noise model
0 1 Leaf p(x2 | x3)
0 1 Leaf p(x1 | x2)
R = [ 10.099 ]
S[x3] = [ -0.0990196 ]
S[x2] = [ -0.0990196 ]
d = [ -9.90098 ]
No noise model
1 Choice(m1)
1 0 Leaf p(x2 | x3)
1 Choice(m0)
1 0 Leaf p(x1 | x2)
R = [ 10.099 ]
S[x3] = [ -0.0990196 ]
S[x2] = [ -0.0990196 ]
d = [ -10.098 ]
No noise model
1 1 Leaf p(x2 | x3)
1 1 Leaf p(x1 | x2)
R = [ 10.099 ]
S[x3] = [ -0.0990196 ]
S[x2] = [ -0.0990196 ]
d = [ -10 ]
No noise model
factor 2: Hybrid P( x3 | m1 m2)
Discrete Keys = (m1, 2), (m2, 2),
Choice(m2)
0 Choice(m1)
0 0 Leaf p(x3)
factor 2: Hybrid P( x2 | m0 m1)
Discrete Keys = (m0, 2), (m1, 2),
Choice(m1)
0 Choice(m0)
0 0 Leaf p(x2)
R = [ 10.0494 ]
d = [ -10.1489 ]
No noise model
0 1 Leaf p(x3)
0 1 Leaf p(x2)
R = [ 10.0494 ]
d = [ -10.1479 ]
No noise model
1 Choice(m1)
1 0 Leaf p(x3)
1 Choice(m0)
1 0 Leaf p(x2)
R = [ 10.0494 ]
d = [ -10.0504 ]
No noise model
1 1 Leaf p(x3)
1 1 Leaf p(x2)
R = [ 10.0494 ]
d = [ -10.0494 ]
No noise model
@ -669,7 +669,7 @@ factor 2: Hybrid P( x3 | m1 m2)
TEST(HybridFactorGraph, DefaultDecisionTree) {
HybridNonlinearFactorGraph fg;
// Add a prior on pose x1 at the origin.
// Add a prior on pose x0 at the origin.
// A prior factor consists of a mean and a noise model (covariance matrix)
Pose2 prior(0.0, 0.0, 0.0); // prior mean is at origin
auto priorNoise = noiseModel::Diagonal::Sigmas(


@ -55,47 +55,47 @@ TEST(HybridNonlinearISAM, IncrementalElimination) {
// Create initial factor graph
// * * *
// | | |
// X1 -*- X2 -*- X3
// \*-M1-*/
graph1.push_back(switching.nonlinearFactorGraph.at(0)); // P(X1)
graph1.push_back(switching.nonlinearFactorGraph.at(1)); // P(X1, X2 | M1)
graph1.push_back(switching.nonlinearFactorGraph.at(2)); // P(X2, X3 | M2)
graph1.push_back(switching.nonlinearFactorGraph.at(5)); // P(M1)
// X0 -*- X1 -*- X2
// \*-M0-*/
graph1.push_back(switching.nonlinearFactorGraph.at(0)); // P(X0)
graph1.push_back(switching.nonlinearFactorGraph.at(1)); // P(X0, X1 | M0)
graph1.push_back(switching.nonlinearFactorGraph.at(2)); // P(X1, X2 | M1)
graph1.push_back(switching.nonlinearFactorGraph.at(5)); // P(M0)
initial.insert<double>(X(1), 1);
initial.insert<double>(X(2), 2);
initial.insert<double>(X(3), 3);
initial.insert<double>(X(0), 1);
initial.insert<double>(X(1), 2);
initial.insert<double>(X(2), 3);
// Run update step
isam.update(graph1, initial);
// Check that after update we have 3 hybrid Bayes net nodes:
// P(X1 | X2, M1) and P(X2, X3 | M1, M2), P(M1, M2)
// P(X0 | X1, M0) and P(X1, X2 | M0, M1), P(M0, M1)
HybridGaussianISAM bayesTree = isam.bayesTree();
EXPECT_LONGS_EQUAL(3, bayesTree.size());
EXPECT(bayesTree[X(1)]->conditional()->frontals() == KeyVector{X(1)});
EXPECT(bayesTree[X(1)]->conditional()->parents() == KeyVector({X(2), M(1)}));
EXPECT(bayesTree[X(2)]->conditional()->frontals() == KeyVector({X(2), X(3)}));
EXPECT(bayesTree[X(2)]->conditional()->parents() == KeyVector({M(1), M(2)}));
EXPECT(bayesTree[X(0)]->conditional()->frontals() == KeyVector{X(0)});
EXPECT(bayesTree[X(0)]->conditional()->parents() == KeyVector({X(1), M(0)}));
EXPECT(bayesTree[X(1)]->conditional()->frontals() == KeyVector({X(1), X(2)}));
EXPECT(bayesTree[X(1)]->conditional()->parents() == KeyVector({M(0), M(1)}));
/********************************************************/
// New factor graph for incremental update.
HybridNonlinearFactorGraph graph2;
initial = Values();
graph1.push_back(switching.nonlinearFactorGraph.at(3)); // P(X2)
graph2.push_back(switching.nonlinearFactorGraph.at(4)); // P(X3)
graph2.push_back(switching.nonlinearFactorGraph.at(6)); // P(M1, M2)
graph1.push_back(switching.nonlinearFactorGraph.at(3)); // P(X1)
graph2.push_back(switching.nonlinearFactorGraph.at(4)); // P(X2)
graph2.push_back(switching.nonlinearFactorGraph.at(6)); // P(M0, M1)
isam.update(graph2, initial);
bayesTree = isam.bayesTree();
// Check that after the second update we have
// 1 additional hybrid Bayes net node:
// P(X2, X3 | M1, M2)
// P(X1, X2 | M0, M1)
EXPECT_LONGS_EQUAL(3, bayesTree.size());
EXPECT(bayesTree[X(3)]->conditional()->frontals() == KeyVector({X(2), X(3)}));
EXPECT(bayesTree[X(3)]->conditional()->parents() == KeyVector({M(1), M(2)}));
EXPECT(bayesTree[X(2)]->conditional()->frontals() == KeyVector({X(1), X(2)}));
EXPECT(bayesTree[X(2)]->conditional()->parents() == KeyVector({M(0), M(1)}));
}
/* ****************************************************************************/
@ -109,35 +109,35 @@ TEST(HybridNonlinearISAM, IncrementalInference) {
// Create initial factor graph
// * * *
// | | |
// X1 -*- X2 -*- X3
// X0 -*- X1 -*- X2
// | |
// *-M1 - * - M2
graph1.push_back(switching.nonlinearFactorGraph.at(0)); // P(X1)
graph1.push_back(switching.nonlinearFactorGraph.at(1)); // P(X1, X2 | M1)
graph1.push_back(switching.nonlinearFactorGraph.at(3)); // P(X2)
graph1.push_back(switching.nonlinearFactorGraph.at(5)); // P(M1)
// *-M0 - * - M1
graph1.push_back(switching.nonlinearFactorGraph.at(0)); // P(X0)
graph1.push_back(switching.nonlinearFactorGraph.at(1)); // P(X0, X1 | M0)
graph1.push_back(switching.nonlinearFactorGraph.at(3)); // P(X1)
graph1.push_back(switching.nonlinearFactorGraph.at(5)); // P(M0)
initial.insert<double>(X(1), 1);
initial.insert<double>(X(2), 2);
initial.insert<double>(X(0), 1);
initial.insert<double>(X(1), 2);
// Run update step
isam.update(graph1, initial);
HybridGaussianISAM bayesTree = isam.bayesTree();
auto discreteConditional_m1 =
bayesTree[M(1)]->conditional()->asDiscreteConditional();
EXPECT(discreteConditional_m1->keys() == KeyVector({M(1)}));
auto discreteConditional_m0 =
bayesTree[M(0)]->conditional()->asDiscreteConditional();
EXPECT(discreteConditional_m0->keys() == KeyVector({M(0)}));
/********************************************************/
// New factor graph for incremental update.
HybridNonlinearFactorGraph graph2;
initial = Values();
initial.insert<double>(X(3), 3);
initial.insert<double>(X(2), 3);
graph2.push_back(switching.nonlinearFactorGraph.at(2)); // P(X2, X3 | M2)
graph2.push_back(switching.nonlinearFactorGraph.at(4)); // P(X3)
graph2.push_back(switching.nonlinearFactorGraph.at(6)); // P(M1, M2)
graph2.push_back(switching.nonlinearFactorGraph.at(2)); // P(X1, X2 | M1)
graph2.push_back(switching.nonlinearFactorGraph.at(4)); // P(X2)
graph2.push_back(switching.nonlinearFactorGraph.at(6)); // P(M0, M1)
isam.update(graph2, initial);
bayesTree = isam.bayesTree();
@ -145,9 +145,9 @@ TEST(HybridNonlinearISAM, IncrementalInference) {
/********************************************************/
// Run batch elimination so we can compare results.
Ordering ordering;
ordering += X(0);
ordering += X(1);
ordering += X(2);
ordering += X(3);
// Now we calculate the expected factors using full elimination
HybridBayesTree::shared_ptr expectedHybridBayesTree;
@ -155,67 +155,67 @@ TEST(HybridNonlinearISAM, IncrementalInference) {
std::tie(expectedHybridBayesTree, expectedRemainingGraph) =
switching.linearizedFactorGraph.eliminatePartialMultifrontal(ordering);
// The densities on X(0) should be the same
auto x0_conditional = dynamic_pointer_cast<GaussianMixture>(
bayesTree[X(0)]->conditional()->inner());
auto expected_x0_conditional = dynamic_pointer_cast<GaussianMixture>(
(*expectedHybridBayesTree)[X(0)]->conditional()->inner());
EXPECT(assert_equal(*x0_conditional, *expected_x0_conditional));
// The densities on X(1) should be the same
auto x1_conditional = dynamic_pointer_cast<GaussianMixture>(
bayesTree[X(1)]->conditional()->inner());
auto actual_x1_conditional = dynamic_pointer_cast<GaussianMixture>(
auto expected_x1_conditional = dynamic_pointer_cast<GaussianMixture>(
(*expectedHybridBayesTree)[X(1)]->conditional()->inner());
EXPECT(assert_equal(*x1_conditional, *actual_x1_conditional));
EXPECT(assert_equal(*x1_conditional, *expected_x1_conditional));
// The densities on X(2) should be the same
auto x2_conditional = dynamic_pointer_cast<GaussianMixture>(
bayesTree[X(2)]->conditional()->inner());
auto actual_x2_conditional = dynamic_pointer_cast<GaussianMixture>(
auto expected_x2_conditional = dynamic_pointer_cast<GaussianMixture>(
(*expectedHybridBayesTree)[X(2)]->conditional()->inner());
EXPECT(assert_equal(*x2_conditional, *actual_x2_conditional));
// The densities on X(3) should be the same
auto x3_conditional = dynamic_pointer_cast<GaussianMixture>(
bayesTree[X(3)]->conditional()->inner());
auto actual_x3_conditional = dynamic_pointer_cast<GaussianMixture>(
(*expectedHybridBayesTree)[X(2)]->conditional()->inner());
EXPECT(assert_equal(*x3_conditional, *actual_x3_conditional));
EXPECT(assert_equal(*x2_conditional, *expected_x2_conditional));
// We only perform manual continuous elimination for 0,0.
// The other discrete probabilities on M(2) are calculated the same way
// The other discrete probabilities on M(1) are calculated the same way
Ordering discrete_ordering;
discrete_ordering += M(0);
discrete_ordering += M(1);
discrete_ordering += M(2);
HybridBayesTree::shared_ptr discreteBayesTree =
expectedRemainingGraph->eliminateMultifrontal(discrete_ordering);
DiscreteValues m00;
m00[M(1)] = 0, m00[M(2)] = 0;
m00[M(0)] = 0, m00[M(1)] = 0;
DiscreteConditional decisionTree =
*(*discreteBayesTree)[M(2)]->conditional()->asDiscreteConditional();
*(*discreteBayesTree)[M(1)]->conditional()->asDiscreteConditional();
double m00_prob = decisionTree(m00);
auto discreteConditional =
bayesTree[M(2)]->conditional()->asDiscreteConditional();
bayesTree[M(1)]->conditional()->asDiscreteConditional();
// Test if the probability values are as expected with regression tests.
DiscreteValues assignment;
EXPECT(assert_equal(m00_prob, 0.0619233, 1e-5));
assignment[M(0)] = 0;
assignment[M(1)] = 0;
assignment[M(2)] = 0;
EXPECT(assert_equal(m00_prob, (*discreteConditional)(assignment), 1e-5));
assignment[M(1)] = 1;
assignment[M(2)] = 0;
EXPECT(assert_equal(0.183743, (*discreteConditional)(assignment), 1e-5));
assignment[M(0)] = 1;
assignment[M(1)] = 0;
assignment[M(2)] = 1;
EXPECT(assert_equal(0.204159, (*discreteConditional)(assignment), 1e-5));
EXPECT(assert_equal(0.183743, (*discreteConditional)(assignment), 1e-5));
assignment[M(0)] = 0;
assignment[M(1)] = 1;
EXPECT(assert_equal(0.204159, (*discreteConditional)(assignment), 1e-5));
assignment[M(0)] = 1;
assignment[M(1)] = 1;
assignment[M(2)] = 1;
EXPECT(assert_equal(0.2, (*discreteConditional)(assignment), 1e-5));
// Check if the clique conditional generated from incremental elimination
// matches that of batch elimination.
auto expectedChordal = expectedRemainingGraph->eliminateMultifrontal();
auto expectedConditional = dynamic_pointer_cast<DecisionTreeFactor>(
(*expectedChordal)[M(2)]->conditional()->inner());
(*expectedChordal)[M(1)]->conditional()->inner());
auto actualConditional = dynamic_pointer_cast<DecisionTreeFactor>(
bayesTree[M(2)]->conditional()->inner());
bayesTree[M(1)]->conditional()->inner());
EXPECT(assert_equal(*actualConditional, *expectedConditional, 1e-6));
}
@ -227,22 +227,22 @@ TEST(HybridNonlinearISAM, Approx_inference) {
HybridNonlinearFactorGraph graph1;
Values initial;
// Add the 3 hybrid factors, x1-x2, x2-x3, x3-x4
// Add the 3 hybrid factors, x0-x1, x1-x2, x2-x3
for (size_t i = 1; i < 4; i++) {
graph1.push_back(switching.nonlinearFactorGraph.at(i));
}
// Add the Gaussian factors, 1 prior on X(1),
// 3 measurements on X(2), X(3), X(4)
// Add the Gaussian factors, 1 prior on X(0),
// 3 measurements on X(1), X(2), X(3)
graph1.push_back(switching.nonlinearFactorGraph.at(0));
for (size_t i = 4; i <= 7; i++) {
graph1.push_back(switching.nonlinearFactorGraph.at(i));
initial.insert<double>(X(i - 3), i - 3);
initial.insert<double>(X(i - 4), i - 3);
}
// Create ordering.
Ordering ordering;
for (size_t j = 1; j <= 4; j++) {
for (size_t j = 0; j < 4; j++) {
ordering += X(j);
}
@ -292,26 +292,26 @@ TEST(HybridNonlinearISAM, Approx_inference) {
1 1 1 Leaf 0.5
*/
auto discreteConditional_m1 = *dynamic_pointer_cast<DiscreteConditional>(
bayesTree[M(1)]->conditional()->inner());
EXPECT(discreteConditional_m1.keys() == KeyVector({M(1), M(2), M(3)}));
auto discreteConditional_m0 = *dynamic_pointer_cast<DiscreteConditional>(
bayesTree[M(0)]->conditional()->inner());
EXPECT(discreteConditional_m0.keys() == KeyVector({M(0), M(1), M(2)}));
// Get the number of elements which are greater than 0.
auto count = [](const double &value, int count) {
return value > 0 ? count + 1 : count;
};
// Check that the number of leaves after pruning is 5.
EXPECT_LONGS_EQUAL(5, discreteConditional_m1.fold(count, 0));
EXPECT_LONGS_EQUAL(5, discreteConditional_m0.fold(count, 0));
// Check that the hybrid nodes of the Bayes net match those of the pre-pruning
// Bayes net, at the same positions.
auto &unprunedLastDensity = *dynamic_pointer_cast<GaussianMixture>(
unprunedHybridBayesTree->clique(X(4))->conditional()->inner());
unprunedHybridBayesTree->clique(X(3))->conditional()->inner());
auto &lastDensity = *dynamic_pointer_cast<GaussianMixture>(
bayesTree[X(4)]->conditional()->inner());
bayesTree[X(3)]->conditional()->inner());
std::vector<std::pair<DiscreteValues, double>> assignments =
discreteConditional_m1.enumerate();
discreteConditional_m0.enumerate();
// Loop over all assignments and check the pruned components
for (auto &&av : assignments) {
const DiscreteValues &assignment = av.first;
@ -336,18 +336,18 @@ TEST(HybridNonlinearISAM, Incremental_approximate) {
Values initial;
/***** Run Round 1 *****/
// Add the 3 hybrid factors, x1-x2, x2-x3, x3-x4
// Add the 3 hybrid factors, x0-x1, x1-x2, x2-x3
for (size_t i = 1; i < 4; i++) {
graph1.push_back(switching.nonlinearFactorGraph.at(i));
}
// Add the Gaussian factors, 1 prior on X(1),
// 3 measurements on X(2), X(3), X(4)
// Add the Gaussian factors, 1 prior on X(0),
// 3 measurements on X(1), X(2), X(3)
graph1.push_back(switching.nonlinearFactorGraph.at(0));
initial.insert<double>(X(1), 1);
initial.insert<double>(X(0), 1);
for (size_t i = 5; i <= 7; i++) {
graph1.push_back(switching.nonlinearFactorGraph.at(i));
initial.insert<double>(X(i - 3), i - 3);
initial.insert<double>(X(i - 4), i - 3);
}
// Run update with pruning
@ -361,20 +361,20 @@ TEST(HybridNonlinearISAM, Incremental_approximate) {
// each with 2, 4, 8, and 5 (pruned) leaves respectively.
EXPECT_LONGS_EQUAL(4, bayesTree.size());
EXPECT_LONGS_EQUAL(
2, bayesTree[X(1)]->conditional()->asMixture()->nrComponents());
2, bayesTree[X(0)]->conditional()->asMixture()->nrComponents());
EXPECT_LONGS_EQUAL(
3, bayesTree[X(2)]->conditional()->asMixture()->nrComponents());
3, bayesTree[X(1)]->conditional()->asMixture()->nrComponents());
EXPECT_LONGS_EQUAL(
5, bayesTree[X(2)]->conditional()->asMixture()->nrComponents());
EXPECT_LONGS_EQUAL(
5, bayesTree[X(3)]->conditional()->asMixture()->nrComponents());
EXPECT_LONGS_EQUAL(
5, bayesTree[X(4)]->conditional()->asMixture()->nrComponents());
/***** Run Round 2 *****/
HybridGaussianFactorGraph graph2;
graph2.push_back(switching.nonlinearFactorGraph.at(4)); // x4-x5
graph2.push_back(switching.nonlinearFactorGraph.at(8)); // x5 measurement
graph2.push_back(switching.nonlinearFactorGraph.at(4)); // x3-x4
graph2.push_back(switching.nonlinearFactorGraph.at(8)); // x4 measurement
initial = Values();
initial.insert<double>(X(5), 5);
initial.insert<double>(X(4), 5);
// Run update with pruning a second time.
incrementalHybrid.update(graph2, initial);
@ -386,9 +386,9 @@ TEST(HybridNonlinearISAM, Incremental_approximate) {
// with 5 (pruned) leaves.
CHECK_EQUAL(5, bayesTree.size());
EXPECT_LONGS_EQUAL(
5, bayesTree[X(4)]->conditional()->asMixture()->nrComponents());
5, bayesTree[X(3)]->conditional()->asMixture()->nrComponents());
EXPECT_LONGS_EQUAL(
5, bayesTree[X(5)]->conditional()->asMixture()->nrComponents());
5, bayesTree[X(4)]->conditional()->asMixture()->nrComponents());
}
/* ************************************************************************/
@ -401,7 +401,7 @@ TEST(HybridNonlinearISAM, NonTrivial) {
HybridNonlinearFactorGraph fg;
HybridNonlinearISAM inc;
// Add a prior on pose x1 at the origin.
// Add a prior on pose x0 at the origin.
// A prior factor consists of a mean and
// a noise model (covariance matrix)
Pose2 prior(0.0, 0.0, 0.0); // prior mean is at origin


@ -27,7 +27,7 @@ namespace gtsam {
/**
* A BayesNet is a tree of conditionals, stored in elimination order.
* @addtogroup inference
* @ingroup inference
*/
template <class CONDITIONAL>
class BayesNet : public FactorGraph<CONDITIONAL> {


@ -10,7 +10,7 @@
* -------------------------------------------------------------------------- */
/**
* @file BayesTree-inl.h
* @file BayesTree-inst.h
* @brief Bayes Tree is a tree of cliques of a Bayes Chain
* @author Frank Dellaert
* @author Michael Kaess
