diff --git a/CMakeLists.txt b/CMakeLists.txt index b1678a5c2..876363f9d 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -100,10 +100,15 @@ endif() ############################################################################### # Find boost +# To change the path for boost, you will need to set: +# BOOST_ROOT: path to install prefix for boost +# Boost_NO_SYSTEM_PATHS: set to true to keep the find script from ignoring BOOST_ROOT + # If using Boost shared libs, set up auto linking for shared libs if(MSVC AND NOT Boost_USE_STATIC_LIBS) add_definitions(-DBOOST_ALL_DYN_LINK) endif() +add_definitions(-DBOOST_ALL_NO_LIB) find_package(Boost 1.43 COMPONENTS serialization system filesystem thread program_options date_time regex timer chrono) @@ -115,7 +120,9 @@ endif() option(GTSAM_DISABLE_NEW_TIMERS "Disables using Boost.chrono for timing" OFF) # Allow for not using the timer libraries on boost < 1.48 (GTSAM timing code falls back to old timer library) -set(GTSAM_BOOST_LIBRARIES ${Boost_SERIALIZATION_LIBRARY} ${Boost_SYSTEM_LIBRARY} ${Boost_FILESYSTEM_LIBRARY} ${Boost_THREAD_LIBRARY} ${Boost_DATE_TIME_LIBRARY} ${Boost_REGEX_LIBRARY}) +set(GTSAM_BOOST_LIBRARIES + ${Boost_SERIALIZATION_LIBRARY} ${Boost_SYSTEM_LIBRARY} ${Boost_FILESYSTEM_LIBRARY} + ${Boost_THREAD_LIBRARY} ${Boost_DATE_TIME_LIBRARY} ${Boost_REGEX_LIBRARY}) if (GTSAM_DISABLE_NEW_TIMERS) message("WARNING: GTSAM timing instrumentation manually disabled") add_definitions(-DGTSAM_DISABLE_NEW_TIMERS) @@ -123,7 +130,7 @@ else() if(Boost_TIMER_LIBRARY) list(APPEND GTSAM_BOOST_LIBRARIES ${Boost_TIMER_LIBRARY} ${Boost_CHRONO_LIBRARY}) else() - message("WARNING: Boost older than 1.48 was found, GTSAM timing instrumentation will use the older, less accurate, Boost timer library.") + message("WARNING: GTSAM timing instrumentation will use the older, less accurate, Boost timer library because boost older than 1.48 was found.") endif() endif() @@ -156,12 +163,32 @@ configure_file(gtsam/3rdparty/gtsam_eigen_includes.h.in gtsam/3rdparty/gtsam_eig install(FILES ${CMAKE_BINARY_DIR}/gtsam/3rdparty/gtsam_eigen_includes.h DESTINATION include/gtsam/3rdparty) +############################################################################### +# Find TBB +set(ENV{TBB_ARCH_PLATFORM} "intel64/vc11") +set(ENV{TBB_BIN_DIR} "C:/Program Files/Intel/TBB/bin") +set(ENV{TBB_TARGET_ARCH} intel64) +set(ENV{TBB_TARGET_VS} vc11) +find_package(TBB REQUIRED) +set(TBB_LIBS "") +if(TBB_DEBUG_LIBRARIES) + foreach(lib ${TBB_LIBRARIES}) + list(APPEND TBB_LIBS optimized "${lib}") + endforeach() + foreach(lib ${TBB_DEBUG_LIBRARIES}) + list(APPEND TBB_LIBS debug "${lib}") + endforeach() +else() + set(TBB_LIBS ${TBB_LIBRARIES}) +endif() + + ############################################################################### # Global compile options # Include boost - use 'BEFORE' so that a specific boost specified to CMake # takes precedence over a system-installed one. -include_directories(BEFORE ${Boost_INCLUDE_DIR}) +include_directories(BEFORE ${TBB_INCLUDE_DIRS} ${Boost_INCLUDE_DIR}) # Add includes for source directories 'BEFORE' boost and any system include # paths so that the compiler uses GTSAM headers in our source directory instead @@ -175,7 +202,9 @@ include_directories(BEFORE if(MSVC) add_definitions(-D_CRT_SECURE_NO_WARNINGS -D_SCL_SECURE_NO_WARNINGS) - add_definitions(/wd4251 /wd4275 /wd4251) # Disable non-DLL-exported base class warnings + add_definitions(/wd4251 /wd4275 /wd4251 /wd4661) # Disable non-DLL-exported base class and other warnings +else() + list(APPEND CMAKE_CXX_FLAGS "-std=c++11") # FIXME: we should probably avoid these extensions endif() if(GTSAM_ENABLE_CONSISTENCY_CHECKS) diff --git a/doc/CodingGuidelines.docx b/doc/CodingGuidelines.docx new file mode 100644 index 000000000..d466b1ed4 Binary files /dev/null and b/doc/CodingGuidelines.docx differ diff --git a/doc/CodingGuidelines.lyx b/doc/CodingGuidelines.lyx new file mode 100644 index 000000000..6d0b6eb1e --- /dev/null +++ b/doc/CodingGuidelines.lyx @@ -0,0 +1,481 @@ +#LyX 2.0 created this file. For more info see http://www.lyx.org/ +\lyxformat 413 +\begin_document +\begin_header +\textclass article +\begin_preamble +\usepackage{color} + +\definecolor{mygreen}{rgb}{0,0.6,0} +\definecolor{mygray}{rgb}{0.5,0.5,0.5} +\definecolor{mymauve}{rgb}{0.58,0,0.82} + +\lstset{ % + backgroundcolor=\color{white}, % choose the background color; you must add \usepackage{color} or \usepackage{xcolor} + basicstyle=\footnotesize, % the size of the fonts that are used for the code + breakatwhitespace=false, % sets if automatic breaks should only happen at whitespace + breaklines=true, % sets automatic line breaking + captionpos=b, % sets the caption-position to bottom + commentstyle=\color{mygreen}, % comment style +% deletekeywords={...}, % if you want to delete keywords from the given language + escapeinside={\%*}{*)}, % if you want to add LaTeX within your code + extendedchars=true, % lets you use non-ASCII characters; for 8-bits encodings only, does not work with UTF-8 + frame=single, % adds a frame around the code + keepspaces=true, % keeps spaces in text, useful for keeping indentation of code (possibly needs columns=flexible) + keywordstyle=\color{blue}, % keyword style + language=C++, % the language of the code + morekeywords={*,...}, % if you want to add more keywords to the set + numbers=left, % where to put the line-numbers; possible values are (none, left, right) + numbersep=5pt, % how far the line-numbers are from the code + numberstyle=\tiny\color{mygray}, % the style that is used for the line-numbers + rulecolor=\color{black}, % if not set, the frame-color may be changed on line-breaks within not-black text (e.g. comments (green here)) + showspaces=false, % show spaces everywhere adding particular underscores; it overrides 'showstringspaces' + showstringspaces=false, % underline spaces within strings only + showtabs=false, % show tabs within strings adding particular underscores + stepnumber=2, % the step between two line-numbers. If it's 1, each line will be numbered + stringstyle=\color{mymauve}, % string literal style + tabsize=2, % sets default tabsize to 2 spaces + title=\lstname % show the filename of files included with \lstinputlisting; also try caption instead of title +} +\end_preamble +\use_default_options true +\maintain_unincluded_children false +\language english +\language_package default +\inputencoding auto +\fontencoding global +\font_roman lmodern +\font_sans lmss +\font_typewriter lmtt +\font_default_family default +\use_non_tex_fonts false +\font_sc false +\font_osf false +\font_sf_scale 100 +\font_tt_scale 100 + +\graphics default +\default_output_format default +\output_sync 0 +\bibtex_command default +\index_command default +\paperfontsize default +\spacing single +\use_hyperref false +\papersize default +\use_geometry false +\use_amsmath 1 +\use_esint 1 +\use_mhchem 1 +\use_mathdots 1 +\cite_engine basic +\use_bibtopic false +\use_indices false +\paperorientation portrait +\suppress_date false +\use_refstyle 1 +\index Index +\shortcut idx +\color #008000 +\end_index +\secnumdepth 3 +\tocdepth 3 +\paragraph_separation indent +\paragraph_indentation default +\quotes_language english +\papercolumns 1 +\papersides 1 +\paperpagestyle default +\tracking_changes false +\output_changes false +\html_math_output 0 +\html_css_as_file 0 +\html_be_strict false +\end_header + +\begin_body + +\begin_layout Section +Template Classes +\end_layout + +\begin_layout Standard +Templated classes are great for writing generic code for multiple types + (e.g. + the same elimination algorithm code for symbolic, discrete, and Gaussian + elimination) without the drawbacks of virtual inheritance (which include + rigid class interfaces, downcasting from returned base class pointers, + and additional runtime overhead). + Depending on how they're used, though, templates can result in very slow + compile times, large binary files, and hard-to-use code. + This section describes the +\begin_inset Quotes eld +\end_inset + +best practices +\begin_inset Quotes erd +\end_inset + + we have developed for gaining the benefits of templates without the drawbacks. +\end_layout + +\begin_layout Standard +If you need to write generic code or classes, here are several programming + patterns we have found to work very well: +\end_layout + +\begin_layout Subsection +The +\begin_inset Quotes eld +\end_inset + +Templated Base, Specialized Derived +\begin_inset Quotes erd +\end_inset + + Pattern +\end_layout + +\begin_layout Standard +This pattern is for when you have a generic class containing algorithm or + data structure code that will be specialized to several types. + The templated base class should never be used directly, instead only the + specializations should be used. + Some specialized types can be pre-compiled into the library, but the option + remains to specialize new types in external libraries or projects. +\end_layout + +\begin_layout Subsubsection +Basic Class Structure +\end_layout + +\begin_layout Standard +We'll use +\family typewriter +FactorGraph +\family default + as an example. + It is templated on the factor type stored in it and has several specializations. + The templated base class +\family typewriter +FactorGraph +\family default + is divided into a header file ( +\family typewriter +.h +\family default +) and an +\begin_inset Quotes eld +\end_inset + +instantiation +\begin_inset Quotes erd +\end_inset + + file ( +\family typewriter +-inst.h +\family default +). + The basic class structure is as follows. +\begin_inset listings +lstparams "basicstyle={\scriptsize\ttfamily},language={C++}" +inline false +status open + +\begin_layout Plain Layout + +// File FactorGraph.h +\end_layout + +\begin_layout Plain Layout + +\end_layout + +\begin_layout Plain Layout + +%* +\backslash +bfseries{ +\backslash +emph{ +\backslash +color{red}{// Include a minimal set of headers. + Do not include any '-inst.h' files (this is the key to fast compiles).}}}*) +\end_layout + +\begin_layout Plain Layout + +#include +\end_layout + +\begin_layout Plain Layout + +... +\end_layout + +\begin_layout Plain Layout + +\end_layout + +\begin_layout Plain Layout + +namespace gtsam { +\end_layout + +\begin_layout Plain Layout + + /** Class description */ +\end_layout + +\begin_layout Plain Layout + + template +\end_layout + +\begin_layout Plain Layout + + class FactorGraph +\end_layout + +\begin_layout Plain Layout + + { +\end_layout + +\begin_layout Plain Layout + + %* +\backslash +bfseries{ +\backslash +emph{ +\backslash +color{red}{// Make 'private' any typedefs that must be redefined in derived + classes. + E.g. + 'This' in the context of the derived class should refer to the derived + class. + These typedefs will be used only by the generic code in this base class.}}}*) +\end_layout + +\begin_layout Plain Layout + + private: +\end_layout + +\begin_layout Plain Layout + + typedef FactorGraph This; ///< Typedef for this class +\end_layout + +\begin_layout Plain Layout + + typedef boost::shared_ptr shared_ptr; ///< Shared pointer to + this +\end_layout + +\begin_layout Plain Layout + + +\end_layout + +\begin_layout Plain Layout + + %* +\backslash +bfseries{ +\backslash +emph{ +\backslash +color{red}{// Make 'public' the typedefs that will be valid in the derived + class.}}}*) +\end_layout + +\begin_layout Plain Layout + + public: +\end_layout + +\begin_layout Plain Layout + + typedef FACTOR FactorType; ///< Factor type stored in this graph +\end_layout + +\begin_layout Plain Layout + + typedef boost::shared_ptr sharedFactor; ///< Shared pointer + to a factor +\end_layout + +\begin_layout Plain Layout + + ... +\end_layout + +\begin_layout Plain Layout + +\end_layout + +\begin_layout Plain Layout + + %* +\backslash +bfseries{ +\backslash +emph{ +\backslash +color{red}{// Normally, data is 'protected' so the derived class can access + it.}}}*) +\end_layout + +\begin_layout Plain Layout + + protected: +\end_layout + +\begin_layout Plain Layout + + /** Collection of factors */ +\end_layout + +\begin_layout Plain Layout + + std::vector factors_; +\end_layout + +\begin_layout Plain Layout + +\end_layout + +\begin_layout Plain Layout + + %* +\backslash +bfseries{ +\backslash +emph{ +\backslash +color{red}{// Make 'protected' all constructors, named constructors, or + methods returning the base class type. + These are not public - the derived class will call them and properly convert + returned base classes to the derived class.}}}*) +\end_layout + +\begin_layout Plain Layout + + /// @name Standard Constructors +\end_layout + +\begin_layout Plain Layout + + /// @{ +\end_layout + +\begin_layout Plain Layout + +\end_layout + +\begin_layout Plain Layout + + /** Default constructor */ +\end_layout + +\begin_layout Plain Layout + + FactorGraphUnordered() {} +\end_layout + +\begin_layout Plain Layout + +\end_layout + +\begin_layout Plain Layout + + /** Named constructor from iterator over factors */ +\end_layout + +\begin_layout Plain Layout + + template +\end_layout + +\begin_layout Plain Layout + + static This FromIterator(ITERATOR firstFactor, ITERATOR lastFactor); +\end_layout + +\begin_layout Plain Layout + + /// @} +\end_layout + +\begin_layout Plain Layout + +\end_layout + +\begin_layout Plain Layout + + %* +\backslash +bfseries{ +\backslash +emph{ +\backslash +color{red}{// Make 'public' standard methods that will be available in the + derived class's API.}}}*) +\end_layout + +\begin_layout Plain Layout + + public: +\end_layout + +\begin_layout Plain Layout + + /// @name Adding Factors +\end_layout + +\begin_layout Plain Layout + + /// @{ +\end_layout + +\begin_layout Plain Layout + + /** ... + */ +\end_layout + +\begin_layout Plain Layout + + void reserve(size_t size); +\end_layout + +\begin_layout Plain Layout + + ... +\end_layout + +\begin_layout Plain Layout + + /// @} +\end_layout + +\begin_layout Plain Layout + + }; +\end_layout + +\begin_layout Plain Layout + +} +\end_layout + +\end_inset + + +\end_layout + +\end_body +\end_document diff --git a/examples/LocalizationExample.cpp b/examples/LocalizationExample.cpp index dfe3d1e99..035139a5d 100644 --- a/examples/LocalizationExample.cpp +++ b/examples/LocalizationExample.cpp @@ -27,7 +27,7 @@ #include // We will use simple integer Keys to refer to the robot poses. -#include +#include // As in OdometryExample.cpp, we use a BetweenFactor to model odometry measurements. #include diff --git a/examples/Pose2SLAMExample.cpp b/examples/Pose2SLAMExample.cpp index cbe97ae12..1220481f5 100644 --- a/examples/Pose2SLAMExample.cpp +++ b/examples/Pose2SLAMExample.cpp @@ -29,7 +29,7 @@ #include // We will use simple integer Keys to refer to the robot poses. -#include +#include // In GTSAM, measurement functions are represented as 'factors'. Several common factors // have been provided with the library for solving robotics/SLAM/Bundle Adjustment problems. diff --git a/examples/Pose2SLAMwSPCG.cpp b/examples/Pose2SLAMwSPCG.cpp index 36f2552a1..09af6351d 100644 --- a/examples/Pose2SLAMwSPCG.cpp +++ b/examples/Pose2SLAMwSPCG.cpp @@ -33,7 +33,7 @@ // Each variable in the system (poses) must be identified with a unique key. // We can either use simple integer keys (1, 2, 3, ...) or symbols (X1, X2, L1). // Here we will use simple integer keys -#include +#include // In GTSAM, measurement functions are represented as 'factors'. Several common factors // have been provided with the library for solving robotics/SLAM/Bundle Adjustment problems. diff --git a/examples/SolverComparer.cpp b/examples/SolverComparer.cpp index 19281ce0f..b328ef66c 100644 --- a/examples/SolverComparer.cpp +++ b/examples/SolverComparer.cpp @@ -26,6 +26,7 @@ #include #include +#include #include #include #include @@ -489,15 +490,14 @@ void runPerturb() // Perturb values VectorValues noise; - Ordering ordering = *initial.orderingArbitrary(); BOOST_FOREACH(const Values::KeyValuePair& key_val, initial) { Vector noisev(key_val.value.dim()); for(Vector::Index i = 0; i < noisev.size(); ++i) noisev(i) = normal(rng); - noise.insert(ordering[key_val.key], noisev); + noise.insert(key_val.key, noisev); } - Values perturbed = initial.retract(noise, ordering); + Values perturbed = initial.retract(noise); // Write results try { diff --git a/examples/elaboratePoint2KalmanFilter.cpp b/examples/elaboratePoint2KalmanFilter.cpp index 3c18b220a..52cf83c93 100644 --- a/examples/elaboratePoint2KalmanFilter.cpp +++ b/examples/elaboratePoint2KalmanFilter.cpp @@ -24,7 +24,6 @@ #include #include #include -#include #include #include #include diff --git a/gtsam.h b/gtsam.h index c8bb81aae..468590ae7 100644 --- a/gtsam.h +++ b/gtsam.h @@ -795,203 +795,47 @@ virtual class StereoCamera : gtsam::Value { }; //************************************************************************* -// inference +// Symbolic //************************************************************************* -#include -class Permutation { + +#include +virtual class SymbolicFactor { // Standard Constructors and Named Constructors - Permutation(); - Permutation(size_t nVars); - static gtsam::Permutation Identity(size_t nVars); - - // Testable - void print(string s) const; - bool equals(const gtsam::Permutation& rhs, double tol) const; - - // Standard interface - size_t at(size_t variable) const; - size_t size() const; - bool empty() const; - void resize(size_t newSize); - gtsam::Permutation* permute(const gtsam::Permutation& permutation) const; - gtsam::Permutation* inverse() const; - // FIXME: Cannot currently wrap std::vector - //static gtsam::Permutation PullToFront(const vector& toFront, size_t size, bool filterDuplicates); - //static gtsam::Permutation PushToBack(const vector& toBack, size_t size, bool filterDuplicates = false); -}; - -class IndexFactor { - // Standard Constructors and Named Constructors - IndexFactor(const gtsam::IndexFactor& f); - IndexFactor(const gtsam::IndexConditional& c); - IndexFactor(); - IndexFactor(size_t j); - IndexFactor(size_t j1, size_t j2); - IndexFactor(size_t j1, size_t j2, size_t j3); - IndexFactor(size_t j1, size_t j2, size_t j3, size_t j4); - IndexFactor(size_t j1, size_t j2, size_t j3, size_t j4, size_t j5); - IndexFactor(size_t j1, size_t j2, size_t j3, size_t j4, size_t j5, size_t j6); - // FIXME: Must wrap std::set for this to work - //IndexFactor(const std::set& js); - void permuteWithInverse(const gtsam::Permutation& inversePermutation); + SymbolicFactor(const gtsam::SymbolicFactor& f); + SymbolicFactor(); + SymbolicFactor(size_t j); + SymbolicFactor(size_t j1, size_t j2); + SymbolicFactor(size_t j1, size_t j2, size_t j3); + SymbolicFactor(size_t j1, size_t j2, size_t j3, size_t j4); + SymbolicFactor(size_t j1, size_t j2, size_t j3, size_t j4, size_t j5); + SymbolicFactor(size_t j1, size_t j2, size_t j3, size_t j4, size_t j5, size_t j6); + static gtsam::SymbolicFactor FromKeys(const gtsam::KeyVector& js); // From Factor size_t size() const; void print(string s) const; - bool equals(const gtsam::IndexFactor& other, double tol) const; - // FIXME: Need to wrap std::vector - //std::vector& keys(); -}; - -class IndexConditional { - // Standard Constructors and Named Constructors - IndexConditional(); - IndexConditional(size_t key); - IndexConditional(size_t key, size_t parent); - IndexConditional(size_t key, size_t parent1, size_t parent2); - IndexConditional(size_t key, size_t parent1, size_t parent2, size_t parent3); - // FIXME: Must wrap std::vector for this to work - //IndexFactor(size_t key, const std::vector& parents); - //IndexConditional(const std::vector& keys, size_t nrFrontals); - //template static shared_ptr FromKeys(const KEYS& keys, size_t nrFrontals); - - // Testable - void print(string s) const; - bool equals(const gtsam::IndexConditional& other, double tol) const; - - // Standard interface - size_t nrFrontals() const; - size_t nrParents() const; - gtsam::IndexFactor* toFactor() const; - - //Advanced interface - void permuteWithInverse(const gtsam::Permutation& inversePermutation); -}; - -#include -template -virtual class BayesNet { - // Testable - void print(string s) const; - bool equals(const This& other, double tol) const; - - // Standard interface - size_t size() const; - void printStats(string s) const; - void saveGraph(string s) const; - CONDITIONAL* front() const; - CONDITIONAL* back() const; - void push_back(CONDITIONAL* conditional); - void push_front(CONDITIONAL* conditional); - void push_back(This& conditional); - void push_front(This& conditional); - void pop_front(); - void permuteWithInverse(const gtsam::Permutation& inversePermutation); -}; - -#include -template -virtual class BayesTree { - - //Constructors - BayesTree(); - - // Testable - void print(string s); - bool equals(const This& other, double tol) const; - - //Standard Interface - //size_t findParentClique(const gtsam::IndexVector& parents) const; - size_t size(); - size_t nrNodes() const; - void saveGraph(string s) const; - CLIQUE* root() const; - void clear(); - void deleteCachedShortcuts(); - void insert(const CLIQUE* subtree); - size_t numCachedSeparatorMarginals() const; - CLIQUE* clique(size_t j) const; -}; - -template -virtual class BayesTreeClique { - BayesTreeClique(); - BayesTreeClique(CONDITIONAL* conditional); -// BayesTreeClique(const std::pair& result) : Base(result) {} - - bool equals(const This& other, double tol) const; - void print(string s) const; - void printTree() const; // Default indent of "" - void printTree(string indent) const; - size_t numCachedSeparatorMarginals() const; - - CONDITIONAL* conditional() const; - bool isRoot() const; - size_t treeSize() const; -// const std::list& children() const { return children_; } -// derived_ptr parent() const { return parent_.lock(); } - - void permuteWithInverse(const gtsam::Permutation& inversePermutation); - - // FIXME: need wrapped versions graphs, BayesNet -// BayesNet shortcut(derived_ptr root, Eliminate function) const; -// FactorGraph marginal(derived_ptr root, Eliminate function) const; -// FactorGraph joint(derived_ptr C2, derived_ptr root, Eliminate function) const; - - void deleteCachedShortcuts(); -}; - -#include -typedef gtsam::BayesNet SymbolicBayesNetBase; - -virtual class SymbolicBayesNet : gtsam::SymbolicBayesNetBase { - // Standard Constructors and Named Constructors - SymbolicBayesNet(); - SymbolicBayesNet(const gtsam::SymbolicBayesNet& bn); - SymbolicBayesNet(const gtsam::IndexConditional* conditional); - - // Standard interface - //TODO:Throws parse error - //void push_back(const gtsam::SymbolicBayesNet bn); - //TODO:Throws parse error - //void push_front(const gtsam::SymbolicBayesNet bn); - - //Advanced Interface - void pop_front(); - void permuteWithInverse(const gtsam::Permutation& inversePermutation); -}; - -typedef gtsam::BayesTreeClique SymbolicBayesTreeClique; -typedef gtsam::BayesTree SymbolicBayesTreeBase; -virtual class SymbolicBayesTree : gtsam::SymbolicBayesTreeBase { - // Standard Constructors and Named Constructors - SymbolicBayesTree(); - SymbolicBayesTree(const gtsam::SymbolicBayesNet& bn); - SymbolicBayesTree(const gtsam::SymbolicBayesTree& other); - // FIXME: wrap needs to understand std::list - //SymbolicBayesTree(const gtsam::SymbolicBayesNet& bayesNet, std::list subtrees); - - // Standard interface - size_t findParentClique(const gtsam::IndexConditional& parents) const; - // TODO: There are many other BayesTree member functions which might be of use + bool equals(const gtsam::SymbolicFactor& other, double tol) const; + gtsam::KeyVector keys(); }; +#include class SymbolicFactorGraph { - // Standard Constructors and Named Constructors SymbolicFactorGraph(); SymbolicFactorGraph(const gtsam::SymbolicBayesNet& bayesNet); SymbolicFactorGraph(const gtsam::SymbolicBayesTree& bayesTree); // From FactorGraph - void push_back(gtsam::IndexFactor* factor); + void push_back(gtsam::SymbolicFactor* factor); void print(string s) const; bool equals(const gtsam::SymbolicFactorGraph& rhs, double tol) const; size_t size() const; - bool exists(size_t i) const; // Standard interface - // FIXME: Must wrap FastSet for this to work - //FastSet keys() const; + gtsam::KeySet keys() const; + void push_back(gtsam::SymbolicFactor* factor); + void push_back(const gtsam::SymbolicFactorGraph& graph); + void push_back(const gtsam::SymbolicBayesNet& bayesNet); + void push_back(const gtsam::SymbolicBayesTree& bayesTree); //Advanced Interface void push_factor(size_t key); @@ -999,41 +843,114 @@ class SymbolicFactorGraph { void push_factor(size_t key1, size_t key2, size_t key3); void push_factor(size_t key1, size_t key2, size_t key3, size_t key4); - pair eliminateFrontals(size_t nFrontals) const; - pair eliminateOne(size_t j); + gtsam::SymbolicBayesNet* eliminateSequential(); + gtsam::SymbolicBayesNet* eliminateSequential(const gtsam::Ordering& ordering); + gtsam::SymbolicBayesTree* eliminateMultifrontal(); + gtsam::SymbolicBayesTree* eliminateMultifrontal(const gtsam::Ordering& ordering); + pair eliminatePartialSequential( + const gtsam::Ordering& ordering); + pair eliminatePartialSequential( + const gtsam::KeyVector& keys); + pair eliminatePartialMultifrontal( + const gtsam::Ordering& ordering); + pair eliminatePartialMultifrontal( + const gtsam::KeyVector& keys); + gtsam::SymbolicBayesNet* marginalMultifrontalBayesNet(const gtsam::Ordering& variables); + gtsam::SymbolicBayesNet* marginalMultifrontalBayesNet(const gtsam::KeyVector& variables); + gtsam::SymbolicBayesNet* marginalMultifrontalBayesNet(const gtsam::Ordering& variables, + const gtsam::Ordering& marginalizedVariableOrdering); + gtsam::SymbolicBayesNet* marginalMultifrontalBayesNet(const gtsam::KeyVector& variables, + const gtsam::Ordering& marginalizedVariableOrdering); + gtsam::SymbolicFactorGraph* marginal(const gtsam::KeyVector& variables); }; -#include -class SymbolicSequentialSolver { +#include +virtual class SymbolicConditional : gtsam::SymbolicFactor { // Standard Constructors and Named Constructors - SymbolicSequentialSolver(const gtsam::SymbolicFactorGraph& factorGraph); - SymbolicSequentialSolver(const gtsam::SymbolicFactorGraph* factorGraph, const gtsam::VariableIndex* variableIndex); + SymbolicConditional(); + SymbolicConditional(const gtsam::SymbolicConditional& other); + SymbolicConditional(size_t key); + SymbolicConditional(size_t key, size_t parent); + SymbolicConditional(size_t key, size_t parent1, size_t parent2); + SymbolicConditional(size_t key, size_t parent1, size_t parent2, size_t parent3); + static gtsam::SymbolicConditional FromKeys(const gtsam::KeyVector& keys, size_t nrFrontals); // Testable void print(string s) const; - bool equals(const gtsam::SymbolicSequentialSolver& rhs, double tol) const; + bool equals(const gtsam::SymbolicConditional& other, double tol) const; // Standard interface - gtsam::SymbolicBayesNet* eliminate() const; - gtsam::IndexFactor* marginalFactor(size_t j) const; + size_t nrFrontals() const; + size_t nrParents() const; }; -#include -class SymbolicMultifrontalSolver { - // Standard Constructors and Named Constructors - SymbolicMultifrontalSolver(const gtsam::SymbolicFactorGraph& factorGraph); - SymbolicMultifrontalSolver(const gtsam::SymbolicFactorGraph* factorGraph, const gtsam::VariableIndex* variableIndex); - +#include +class SymbolicBayesNet { + SymbolicBayesNet(); + SymbolicBayesNet(const gtsam::SymbolicBayesNet& other); // Testable void print(string s) const; - bool equals(const gtsam::SymbolicMultifrontalSolver& rhs, double tol) const; + bool equals(const gtsam::SymbolicBayesNet& other, double tol) const; // Standard interface - gtsam::SymbolicBayesTree* eliminate() const; - gtsam::IndexFactor* marginalFactor(size_t j) const; + size_t size() const; + void saveGraph(string s) const; + gtsam::SymbolicConditional* front() const; + gtsam::SymbolicConditional* back() const; + void push_back(gtsam::SymbolicConditional* conditional); + void push_back(const gtsam::SymbolicBayesNet& bayesNet); }; -#include +#include +class SymbolicBayesTree { + + //Constructors + SymbolicBayesTree(); + SymbolicBayesTree(const gtsam::SymbolicBayesTree& other); + + // Testable + void print(string s); + bool equals(const gtsam::SymbolicBayesTree& other, double tol) const; + + //Standard Interface + //size_t findParentClique(const gtsam::IndexVector& parents) const; + size_t size(); + void saveGraph(string s) const; + void clear(); + void deleteCachedShortcuts(); + size_t numCachedSeparatorMarginals() const; + + gtsam::SymbolicConditional* marginalFactor(size_t key) const; + gtsam::SymbolicFactorGraph* joint(size_t key1, size_t key2) const; + gtsam::SymbolicBayesNet* jointBayesNet(size_t key1, size_t key2) const; +}; + +// class SymbolicBayesTreeClique { +// BayesTreeClique(); +// BayesTreeClique(CONDITIONAL* conditional); +// // BayesTreeClique(const std::pair& result) : Base(result) {} +// +// bool equals(const This& other, double tol) const; +// void print(string s) const; +// void printTree() const; // Default indent of "" +// void printTree(string indent) const; +// size_t numCachedSeparatorMarginals() const; +// +// CONDITIONAL* conditional() const; +// bool isRoot() const; +// size_t treeSize() const; +// // const std::list& children() const { return children_; } +// // derived_ptr parent() const { return parent_.lock(); } +// +// // FIXME: need wrapped versions graphs, BayesNet +// // BayesNet shortcut(derived_ptr root, Eliminate function) const; +// // FactorGraph marginal(derived_ptr root, Eliminate function) const; +// // FactorGraph joint(derived_ptr C2, derived_ptr root, Eliminate function) const; +// +// void deleteCachedShortcuts(); +// }; + +#include class VariableIndex { // Standard Constructors and Named Constructors VariableIndex(); @@ -1042,11 +959,8 @@ class VariableIndex { //VariableIndex(const T& factorGraph, size_t nVariables); //VariableIndex(const T& factorGraph); VariableIndex(const gtsam::SymbolicFactorGraph& factorGraph); - VariableIndex(const gtsam::SymbolicFactorGraph& factorGraph, size_t nVariables); VariableIndex(const gtsam::GaussianFactorGraph& factorGraph); - VariableIndex(const gtsam::GaussianFactorGraph& factorGraph, size_t nVariables); VariableIndex(const gtsam::NonlinearFactorGraph& factorGraph); - VariableIndex(const gtsam::NonlinearFactorGraph& factorGraph, size_t nVariables); VariableIndex(const gtsam::VariableIndex& other); // Testable @@ -1057,7 +971,6 @@ class VariableIndex { size_t size() const; size_t nFactors() const; size_t nEntries() const; - void permuteInPlace(const gtsam::Permutation& permutation); }; //************************************************************************* @@ -1194,16 +1107,14 @@ class Sampler { Vector sampleNewModel(gtsam::noiseModel::Diagonal* model); }; +#include class VectorValues { //Constructors VectorValues(); VectorValues(const gtsam::VectorValues& other); - VectorValues(size_t nVars, size_t varDim); //Named Constructors static gtsam::VectorValues Zero(const gtsam::VectorValues& model); - static gtsam::VectorValues Zero(size_t nVars, size_t varDim); - static gtsam::VectorValues SameStructure(const gtsam::VectorValues& other); //Standard Interface size_t size() const; @@ -1212,128 +1123,48 @@ class VectorValues { void print(string s) const; bool equals(const gtsam::VectorValues& expected, double tol) const; void insert(size_t j, Vector value); - Vector asVector() const; + Vector vector() const; Vector at(size_t j) const; + void update(const gtsam::VectorValues& values); //Advanced Interface - void resizeLike(const gtsam::VectorValues& other); - void resize(size_t nVars, size_t varDim); void setZero(); gtsam::VectorValues add(const gtsam::VectorValues& c) const; - gtsam::VectorValues scale(double a, const gtsam::VectorValues& c) const; + void addInPlace(const gtsam::VectorValues& c); + gtsam::VectorValues subtract(const gtsam::VectorValues& c) const; + gtsam::VectorValues scale(double a) const; + void scaleInPlace(double a); - //FIXME: Parse errors with vector() - //const Vector& vector() const; - //Vector& vector(); - bool hasSameStructure(const gtsam::VectorValues& other) const; - double dot(const gtsam::VectorValues& V) const; - double norm() const; - - // enabling serialization functionality - void serialize() const; -}; - -class GaussianConditional { - //Constructors - GaussianConditional(size_t key, Vector d, Matrix R, Vector sigmas); - GaussianConditional(size_t key, Vector d, Matrix R, size_t name1, Matrix S, - Vector sigmas); - GaussianConditional(size_t key, Vector d, Matrix R, size_t name1, Matrix S, - size_t name2, Matrix T, Vector sigmas); - - //Standard Interface - void print(string s) const; - bool equals(const gtsam::GaussianConditional &cg, double tol) const; - size_t dim() const; - - //Advanced Interface - Matrix information() const; - Matrix augmentedInformation() const; - gtsam::JacobianFactor* toFactor() const; - void solveInPlace(gtsam::VectorValues& x) const; - void solveTransposeInPlace(gtsam::VectorValues& gy) const; - void scaleFrontalsBySigma(gtsam::VectorValues& gy) const; + bool hasSameStructure(const gtsam::VectorValues& other) const; + double dot(const gtsam::VectorValues& V) const; + double norm() const; + double squaredNorm() const; // enabling serialization functionality void serialize() const; }; -class GaussianDensity { - //Constructors - GaussianDensity(size_t key, Vector d, Matrix R, Vector sigmas); - - //Standard Interface - void print(string s) const; - Vector mean() const; - Matrix information() const; - Matrix covariance() const; -}; - -typedef gtsam::BayesNet GaussianBayesNetBase; -virtual class GaussianBayesNet : gtsam::GaussianBayesNetBase { - //Constructors - GaussianBayesNet(); - GaussianBayesNet(const gtsam::GaussianConditional* conditional); -}; - -//Non-Class methods found in GaussianBayesNet.h -//FIXME: No MATLAB documentation for these functions! -//gtsam::GaussianBayesNet scalarGaussian(size_t key, double mu, double sigma); -//gtsam::GaussianBayesNet simpleGaussian(size_t key, const Vector& mu, double sigma); -//void push_front(gtsam::GaussianBayesNet& bn, size_t key, Vector d, Matrix R, size_t name1, Matrix S, Vector sigmas); -//void push_front(gtsam::GaussianBayesNet& bn, size_t key, Vector d, Matrix R, size_t name1, Matrix S, size_t name2, Matrix T, Vector sigmas); -//gtsam::VectorValues* allocateVectorValues(const gtsam::GaussianBayesNet& bn); -//gtsam::VectorValues optimize(const gtsam::GaussianBayesNet& bn); -//void optimizeInPlace(const gtsam::GaussianBayesNet& bn, gtsam::VectorValues& x); -//gtsam::VectorValues optimizeGradientSearch(const gtsam::GaussianBayesNet& bn); -//void optimizeGradientSearchInPlace(const gtsam::GaussianBayesNet& bn, gtsam::VectorValues& grad); -//gtsam::VectorValues backSubstitute(const gtsam::GaussianBayesNet& bn, const gtsam::VectorValues& gx); -//gtsam::VectorValues backSubstituteTranspose(const gtsam::GaussianBayesNet& bn, const gtsam::VectorValues& gx); -//pair matrix(const gtsam::GaussianBayesNet& bn); -double determinant(const gtsam::GaussianBayesNet& bayesNet); -//gtsam::VectorValues gradient(const gtsam::GaussianBayesNet& bayesNet, const gtsam::VectorValues& x0); -//void gradientAtZero(const gtsam::GaussianBayesNet& bayesNet, const gtsam::VectorValues& g); - -#include -typedef gtsam::BayesTreeClique GaussianBayesTreeClique; -typedef gtsam::BayesTree GaussianBayesTreeBase; -virtual class GaussianBayesTree : gtsam::GaussianBayesTreeBase { - // Standard Constructors and Named Constructors - GaussianBayesTree(); - GaussianBayesTree(const gtsam::GaussianBayesNet& bn); - GaussianBayesTree(const gtsam::GaussianBayesNet& other); - bool equals(const gtsam::GaussianBayesTree& other, double tol) const; - void print(string s); - size_t size() const; - size_t nrNodes() const; - bool empty() const; - gtsam::GaussianBayesTreeClique* root() const; - gtsam::GaussianBayesTreeClique* clique(size_t j) const; - size_t numCachedSeparatorMarginals() const; - void saveGraph(string s) const; -}; - -// namespace functions for GaussianBayesTree -gtsam::VectorValues optimize(const gtsam::GaussianBayesTree& bayesTree); -gtsam::VectorValues optimizeGradientSearch(const gtsam::GaussianBayesTree& bayesTree); -gtsam::VectorValues gradient(const gtsam::GaussianBayesTree& bayesTree, const gtsam::VectorValues& x0); -gtsam::VectorValues* allocateVectorValues(const gtsam::GaussianBayesTree& bt); -double determinant(const gtsam::GaussianBayesTree& bayesTree); - +#include virtual class GaussianFactor { void print(string s) const; bool equals(const gtsam::GaussianFactor& lf, double tol) const; double error(const gtsam::VectorValues& c) const; + gtsam::GaussianFactor* clone() const; gtsam::GaussianFactor* negate() const; Matrix augmentedInformation() const; Matrix information() const; + Matrix augmentedJacobian() const; + pair jacobian() const; size_t size() const; + bool empty() const; }; +#include virtual class JacobianFactor : gtsam::GaussianFactor { //Constructors JacobianFactor(); + JacobianFactor(const gtsam::GaussianFactor& factor); JacobianFactor(Vector b_in); JacobianFactor(size_t i1, Matrix A1, Vector b, const gtsam::noiseModel::Diagonal* model); @@ -1341,7 +1172,6 @@ virtual class JacobianFactor : gtsam::GaussianFactor { const gtsam::noiseModel::Diagonal* model); JacobianFactor(size_t i1, Matrix A1, size_t i2, Matrix A2, size_t i3, Matrix A3, Vector b, const gtsam::noiseModel::Diagonal* model); - JacobianFactor(const gtsam::GaussianFactor& factor); //Testable void print(string s) const; @@ -1353,38 +1183,32 @@ virtual class JacobianFactor : gtsam::GaussianFactor { double error(const gtsam::VectorValues& c) const; //Standard Interface - bool empty() const; Matrix getA() const; Vector getb() const; size_t rows() const; size_t cols() const; - size_t numberOfRows() const; bool isConstrained() const; - pair matrix() const; - Matrix matrix_augmented() const; + pair jacobianUnweighted() const; + Matrix augmentedJacobianUnweighted() const; - gtsam::GaussianConditional* eliminateFirst(); - gtsam::GaussianConditional* eliminate(size_t nrFrontals); - gtsam::GaussianFactor* negate() const; void transposeMultiplyAdd(double alpha, const Vector& e, gtsam::VectorValues& x) const; gtsam::JacobianFactor whiten() const; - gtsam::GaussianConditional* eliminateFirst(); - gtsam::GaussianConditional* eliminate(size_t nFrontals); - gtsam::GaussianConditional* splitConditional(size_t nFrontals); + + pair eliminate(const gtsam::Ordering& keys) const; void setModel(bool anyConstrained, const Vector& sigmas); - void assertInvariants() const; - //gtsam::SharedDiagonal& get_model(); + gtsam::noiseModel::Diagonal* get_model() const; // enabling serialization functionality void serialize() const; }; +#include virtual class HessianFactor : gtsam::GaussianFactor { //Constructors - HessianFactor(const gtsam::HessianFactor& gf); HessianFactor(); + HessianFactor(const gtsam::GaussianFactor& factor); HessianFactor(size_t j, Matrix G, Vector g, double f); HessianFactor(size_t j, Vector mu, Matrix Sigma); HessianFactor(size_t j1, size_t j2, Matrix G11, Matrix G12, Vector g1, Matrix G22, @@ -1392,8 +1216,6 @@ virtual class HessianFactor : gtsam::GaussianFactor { HessianFactor(size_t j1, size_t j2, size_t j3, Matrix G11, Matrix G12, Matrix G13, Vector g1, Matrix G22, Matrix G23, Vector g2, Matrix G33, Vector g3, double f); - HessianFactor(const gtsam::GaussianConditional& cg); - HessianFactor(const gtsam::GaussianFactor& factor); //Testable size_t size() const; @@ -1408,32 +1230,28 @@ virtual class HessianFactor : gtsam::GaussianFactor { double constantTerm() const; Vector linearTerm() const; - //Advanced Interface - void partialCholesky(size_t nrFrontals); - gtsam::GaussianConditional* splitEliminatedFactor(size_t nrFrontals); - void assertInvariants() const; - // enabling serialization functionality void serialize() const; }; +#include class GaussianFactorGraph { GaussianFactorGraph(); - GaussianFactorGraph(const gtsam::GaussianBayesNet& CBN); + GaussianFactorGraph(const gtsam::GaussianBayesNet& bayesNet); + GaussianFactorGraph(const gtsam::GaussianBayesTree& bayesTree); // From FactorGraph void print(string s) const; bool equals(const gtsam::GaussianFactorGraph& lfgraph, double tol) const; size_t size() const; gtsam::GaussianFactor* at(size_t idx) const; - bool exists(size_t idx) const; - - // Inference - pair eliminateFrontals(size_t nFrontals) const; - pair eliminateOne(size_t j) const; + gtsam::KeySet keys() const; // Building the graph void push_back(gtsam::GaussianFactor* factor); + void push_back(const gtsam::GaussianFactorGraph& graph); + void push_back(const gtsam::GaussianBayesNet& bayesNet); + void push_back(const gtsam::GaussianBayesTree& bayesTree); void add(const gtsam::GaussianFactor& factor); void add(Vector b); void add(size_t key1, Matrix A1, Vector b, const gtsam::noiseModel::Diagonal* model); @@ -1442,18 +1260,16 @@ class GaussianFactorGraph { void add(size_t key1, Matrix A1, size_t key2, Matrix A2, size_t key3, Matrix A3, Vector b, const gtsam::noiseModel::Diagonal* model); - //Permutations - void permuteWithInverse(const gtsam::Permutation& inversePermutation); - // error and probability double error(const gtsam::VectorValues& c) const; double probPrime(const gtsam::VectorValues& c) const; - // combining - static gtsam::GaussianFactorGraph combine2( - const gtsam::GaussianFactorGraph& lfg1, - const gtsam::GaussianFactorGraph& lfg2); - void combine(const gtsam::GaussianFactorGraph& lfg); + // Optimizing and linear algebra + gtsam::VectorValues optimize() const; + gtsam::VectorValues optimize(const gtsam::Ordering& ordering) const; + gtsam::VectorValues optimizeGradientSearch() const; + gtsam::VectorValues gradient(const gtsam::VectorValues& x0) const; + gtsam::VectorValues gradientAtZero() const; // Conversion to matrices Matrix sparseJacobian_() const; @@ -1466,13 +1282,93 @@ class GaussianFactorGraph { void serialize() const; }; -//Non-Class functions in GaussianFactorGraph.h -/*void multiplyInPlace(const gtsam::GaussianFactorGraph& fg, const gtsam::VectorValues& x, gtsam::Errors& e); -gtsam::VectorValues gradient(const gtsam::GaussianFactorGraph& fg, const gtsam::VectorValues& x0); -void gradientAtZero(const gtsam::GaussianFactorGraph& fg, gtsam::VectorValues& g); -void residual(const gtsam::GaussianFactorGraph& fg, const gtsam::VectorValues& x, gtsam::VectorValues& r); -void multiply(const gtsam::GaussianFactorGraph& fg, const gtsam::VectorValues& x, gtsam::VectorValues& r); -void transposeMultiply(const gtsam::GaussianFactorGraph& fg, const gtsam::VectorValues& x, gtsam::VectorValues& r);*/ +#include +virtual class GaussianConditional : gtsam::GaussianFactor { + //Constructors + GaussianConditional(size_t key, Vector d, Matrix R, const gtsam::noiseModel::Diagonal* sigmas); + GaussianConditional(size_t key, Vector d, Matrix R, size_t name1, Matrix S, + const gtsam::noiseModel::Diagonal* sigmas); + GaussianConditional(size_t key, Vector d, Matrix R, size_t name1, Matrix S, + size_t name2, Matrix T, const gtsam::noiseModel::Diagonal* sigmas); + + //Standard Interface + void print(string s) const; + bool equals(const gtsam::GaussianConditional &cg, double tol) const; + + //Advanced Interface + gtsam::VectorValues solve(const gtsam::VectorValues& parents) const; + gtsam::VectorValues solveOtherRHS(const gtsam::VectorValues& parents, const gtsam::VectorValues& rhs) const; + void solveTransposeInPlace(gtsam::VectorValues& gy) const; + void scaleFrontalsBySigma(gtsam::VectorValues& gy) const; + + // enabling serialization functionality + void serialize() const; +}; + +#include +virtual class GaussianDensity : gtsam::GaussianConditional { + //Constructors + GaussianDensity(size_t key, Vector d, Matrix R, const gtsam::noiseModel::Diagonal* sigmas); + + //Standard Interface + void print(string s) const; + bool equals(const gtsam::GaussianDensity &cg, double tol) const; + Vector mean() const; + Matrix covariance() const; +}; + +#include +virtual class GaussianBayesNet { + //Constructors + GaussianBayesNet(); + GaussianBayesNet(const gtsam::GaussianConditional* conditional); + + // Testable + void print(string s) const; + bool equals(const gtsam::GaussianBayesNet& other, double tol) const; + + // Standard interface + size_t size() const; + gtsam::GaussianConditional* front() const; + gtsam::GaussianConditional* back() const; + void push_back(gtsam::GaussianConditional* conditional); + void push_back(const gtsam::GaussianBayesNet& bayesNet); + + gtsam::VectorValues optimize() const; + gtsam::VectorValues optimizeGradientSearch() const; + gtsam::VectorValues gradient(const gtsam::VectorValues& x0) const; + gtsam::VectorValues gradientAtZero() const; + double error(const gtsam::VectorValues& x) const; + double determinant() const; + double logDeterminant() const; + gtsam::VectorValues backSubstitute(const gtsam::VectorValues& gx) const; + gtsam::VectorValues backSubstituteTranspose(const gtsam::VectorValues& gx) const; +}; + +#include +virtual class GaussianBayesTree { + // Standard Constructors and Named Constructors + GaussianBayesTree(); + GaussianBayesTree(const gtsam::GaussianBayesTree& other); + bool equals(const gtsam::GaussianBayesTree& other, double tol) const; + void print(string s); + size_t size() const; + bool empty() const; + size_t numCachedSeparatorMarginals() const; + void saveGraph(string s) const; + + gtsam::VectorValues optimize() const; + gtsam::VectorValues optimizeGradientSearch() const; + gtsam::VectorValues gradient(const gtsam::VectorValues& x0) const; + gtsam::VectorValues gradientAtZero() const; + double error(const gtsam::VectorValues& x) const; + double determinant() const; + double logDeterminant() const; + Matrix marginalCovariance(size_t key) const; + gtsam::GaussianConditional* marginalFactor(size_t key) const; + gtsam::GaussianFactorGraph* joint(size_t key1, size_t key2) const; + gtsam::GaussianBayesNet* jointBayesNet(size_t key1, size_t key2) const; +}; class Errors { //Constructors @@ -1484,50 +1380,16 @@ class Errors { bool equals(const gtsam::Errors& expected, double tol) const; }; -//Non-Class functions for Errors -//double dot(const gtsam::Errors& A, const gtsam::Errors& b); - -virtual class GaussianISAM : gtsam::GaussianBayesTree { +class GaussianISAM { //Constructor GaussianISAM(); //Standard Interface + void update(const gtsam::GaussianFactorGraph& newFactors); void saveGraph(string s) const; - gtsam::GaussianFactor* marginalFactor(size_t j) const; - gtsam::GaussianBayesNet* marginalBayesNet(size_t key) const; - Matrix marginalCovariance(size_t key) const; - gtsam::GaussianBayesNet* jointBayesNet(size_t key1, size_t key2) const; void clear(); }; -#include -class GaussianSequentialSolver { - //Constructors - GaussianSequentialSolver(const gtsam::GaussianFactorGraph& graph, - bool useQR); - - //Standard Interface - void replaceFactors(const gtsam::GaussianFactorGraph* factorGraph); - gtsam::GaussianBayesNet* eliminate() const; - gtsam::VectorValues* optimize() const; - gtsam::GaussianFactor* marginalFactor(size_t j) const; - Matrix marginalCovariance(size_t j) const; -}; - -#include -class GaussianMultifrontalSolver { - //Constructors - GaussianMultifrontalSolver(const gtsam::GaussianFactorGraph& graph, - bool useQR); - - //Standard Interface - void replaceFactors(const gtsam::GaussianFactorGraph* factorGraph); - gtsam::GaussianBayesTree* eliminate() const; - gtsam::VectorValues* optimize() const; - gtsam::GaussianFactor* marginalFactor(size_t j) const; - Matrix marginalCovariance(size_t j) const; -}; - #include virtual class IterativeOptimizationParameters { string getKernel() const ; @@ -1566,8 +1428,8 @@ virtual class SubgraphSolverParameters : gtsam::ConjugateGradientParameters { }; class SubgraphSolver { - SubgraphSolver(const gtsam::GaussianFactorGraph &A, const gtsam::SubgraphSolverParameters ¶meters); - SubgraphSolver(const gtsam::GaussianFactorGraph &Ab1, const gtsam::GaussianFactorGraph &Ab2, const gtsam::SubgraphSolverParameters ¶meters); + SubgraphSolver(const gtsam::GaussianFactorGraph &A, const gtsam::SubgraphSolverParameters ¶meters, const gtsam::Ordering& ordering); + SubgraphSolver(const gtsam::GaussianFactorGraph &Ab1, const gtsam::GaussianFactorGraph &Ab2, const gtsam::SubgraphSolverParameters ¶meters, const gtsam::Ordering& ordering); gtsam::VectorValues optimize() const; }; @@ -1625,10 +1487,11 @@ unsigned char mrsymbolChr(size_t key); unsigned char mrsymbolLabel(size_t key); size_t mrsymbolIndex(size_t key); -#include +#include class Ordering { // Standard Constructors and Named Constructors Ordering(); + Ordering(const gtsam::Ordering& other); // Testable void print(string s) const; @@ -1637,12 +1500,7 @@ class Ordering { // Standard interface size_t size() const; size_t at(size_t key) const; - size_t key(size_t index) const; - bool exists(size_t key) const; - void insert(size_t key, size_t order); void push_back(size_t key); - void permuteInPlace(const gtsam::Permutation& permutation); - void permuteInPlace(const gtsam::Permutation& selector, const gtsam::Permutation& permutation); // enabling serialization functionality void serialize() const; @@ -1660,18 +1518,15 @@ class NonlinearFactorGraph { void remove(size_t i); size_t nrFactors() const; gtsam::NonlinearFactor* at(size_t idx) const; - bool exists(size_t idx) const; void push_back(const gtsam::NonlinearFactorGraph& factors); + void push_back(gtsam::NonlinearFactor* factor); // NonlinearFactorGraph double error(const gtsam::Values& values) const; double probPrime(const gtsam::Values& values) const; - void add(const gtsam::NonlinearFactor* factor); - gtsam::Ordering* orderingCOLAMD(const gtsam::Values& values) const; + gtsam::Ordering orderingCOLAMD() const; // Ordering* orderingCOLAMDConstrained(const gtsam::Values& c, const std::map& constraints) const; - gtsam::GaussianFactorGraph* linearize(const gtsam::Values& values, - const gtsam::Ordering& ordering) const; - gtsam::SymbolicFactorGraph* symbolic(const gtsam::Ordering& ordering) const; + gtsam::GaussianFactorGraph* linearize(const gtsam::Values& values) const; gtsam::NonlinearFactorGraph clone() const; // enabling serialization functionality @@ -1689,7 +1544,7 @@ virtual class NonlinearFactor { double error(const gtsam::Values& c) const; size_t dim() const; bool active(const gtsam::Values& c) const; - gtsam::GaussianFactor* linearize(const gtsam::Values& c, const gtsam::Ordering& ordering) const; + gtsam::GaussianFactor* linearize(const gtsam::Values& c) const; gtsam::NonlinearFactor* clone() const; // gtsam::NonlinearFactor* rekey(const gtsam::KeyVector& newKeys) const; //FIXME: Conversion from KeyVector to std::vector does not happen }; @@ -1725,19 +1580,17 @@ class Values { gtsam::Value at(size_t j) const; gtsam::KeyList keys() const; - gtsam::VectorValues zeroVectors(const gtsam::Ordering& ordering) const; - gtsam::Ordering* orderingArbitrary(size_t firstVar) const; + gtsam::VectorValues zeroVectors() const; - gtsam::Values retract(const gtsam::VectorValues& delta, const gtsam::Ordering& ordering) const; - gtsam::VectorValues localCoordinates(const gtsam::Values& cp, const gtsam::Ordering& ordering) const; - void localCoordinates(const gtsam::Values& cp, const gtsam::Ordering& ordering, gtsam::VectorValues& delta) const; + gtsam::Values retract(const gtsam::VectorValues& delta) const; + gtsam::VectorValues localCoordinates(const gtsam::Values& cp) const; // enabling serialization functionality void serialize() const; }; // Actually a FastList -#include +#include class KeyList { KeyList(); KeyList(const gtsam::KeyList& other); @@ -1761,7 +1614,6 @@ class KeyList { }; // Actually a FastSet -#include class KeySet { KeySet(); KeySet(const gtsam::KeySet& other); @@ -1783,8 +1635,7 @@ class KeySet { bool count(size_t key) const; // returns true if value exists }; -// Actually a FastVector -#include +// Actually a vector class KeyVector { KeyVector(); KeyVector(const gtsam::KeyVector& other); @@ -1826,29 +1677,20 @@ class JointMarginal { #include virtual class LinearContainerFactor : gtsam::NonlinearFactor { - LinearContainerFactor(gtsam::GaussianFactor* factor, const gtsam::Ordering& ordering, - const gtsam::Values& linearizationPoint); - LinearContainerFactor(gtsam::GaussianFactor* factor, const gtsam::Values& linearizationPoint); - - LinearContainerFactor(gtsam::GaussianFactor* factor, const gtsam::Ordering& ordering); + LinearContainerFactor(gtsam::GaussianFactor* factor, const gtsam::Values& linearizationPoint); LinearContainerFactor(gtsam::GaussianFactor* factor); gtsam::GaussianFactor* factor() const; // const boost::optional& linearizationPoint() const; - gtsam::GaussianFactor* order(const gtsam::Ordering& ordering) const; - gtsam::GaussianFactor* negate(const gtsam::Ordering& ordering) const; - gtsam::NonlinearFactor* negate() const; - bool isJacobian() const; gtsam::JacobianFactor* toJacobian() const; gtsam::HessianFactor* toHessian() const; static gtsam::NonlinearFactorGraph convertLinearGraph(const gtsam::GaussianFactorGraph& linear_graph, - const gtsam::Ordering& ordering, const gtsam::Values& linearizationPoint); + const gtsam::Values& linearizationPoint); - static gtsam::NonlinearFactorGraph convertLinearGraph(const gtsam::GaussianFactorGraph& linear_graph, - const gtsam::Ordering& ordering); + static gtsam::NonlinearFactorGraph convertLinearGraph(const gtsam::GaussianFactorGraph& linear_graph); // enabling serialization functionality void serializable() const; @@ -1858,7 +1700,7 @@ virtual class LinearContainerFactor : gtsam::NonlinearFactor { #include // Uses partial QR approach by default -pair summarize( +gtsam::GaussianFactorGraph summarize( const gtsam::NonlinearFactorGraph& graph, const gtsam::Values& values, const gtsam::KeySet& saved_keys); @@ -2035,17 +1877,14 @@ class ISAM2Params { void setEnablePartialRelinearizationCheck(bool enablePartialRelinearizationCheck); }; -virtual class ISAM2Clique { +class ISAM2Clique { //Constructors - ISAM2Clique(const gtsam::GaussianConditional* conditional); + ISAM2Clique(); //Standard Interface Vector gradientContribution() const; - gtsam::ISAM2Clique* clone() const; void print(string s); - - void permuteWithInverse(const gtsam::Permutation& inversePermutation); }; class ISAM2Result { @@ -2059,8 +1898,7 @@ class ISAM2Result { size_t getCliques() const; }; -typedef gtsam::BayesTree ISAM2BayesTree; -virtual class ISAM2 : gtsam::ISAM2BayesTree { +class ISAM2 { ISAM2(); ISAM2(const gtsam::ISAM2Params& params); ISAM2(const gtsam::ISAM2& other); @@ -2084,7 +1922,6 @@ virtual class ISAM2 : gtsam::ISAM2BayesTree { Matrix marginalCovariance(size_t key) const; gtsam::VectorValues getDelta() const; gtsam::NonlinearFactorGraph getFactorsUnsafe() const; - gtsam::Ordering getOrdering() const; gtsam::VariableIndex getVariableIndex() const; gtsam::ISAM2Params params() const; }; @@ -2102,13 +1939,10 @@ class NonlinearISAM { int reorderCounter() const; void update(const gtsam::NonlinearFactorGraph& newFactors, const gtsam::Values& initialValues); void reorder_relinearize(); - void addKey(size_t key); - void setOrdering(const gtsam::Ordering& new_ordering); // These might be expensive as instead of a reference the wrapper will make a copy gtsam::GaussianISAM bayesTree() const; gtsam::Values getLinearizationPoint() const; - gtsam::Ordering getOrdering() const; gtsam::NonlinearFactorGraph getFactorsUnsafe() const; }; diff --git a/gtsam/CMakeLists.txt b/gtsam/CMakeLists.txt index 36b6d3509..d7a6ba86e 100644 --- a/gtsam/CMakeLists.txt +++ b/gtsam/CMakeLists.txt @@ -5,7 +5,8 @@ set (gtsam_subdirs base geometry inference - discrete + symbolic + #discrete linear nonlinear slam @@ -77,13 +78,21 @@ set(gtsam_srcs ${base_srcs} ${geometry_srcs} ${inference_srcs} - ${discrete_srcs} + ${symbolic_srcs} + #${discrete_srcs} ${linear_srcs} ${nonlinear_srcs} ${slam_srcs} ${navigation_srcs} ${gtsam_core_headers} ) + +# Generate and install config and dllexport files +configure_file(config.h.in config.h) +set(library_name GTSAM) # For substitution in dllexport.h.in +configure_file("${PROJECT_SOURCE_DIR}/cmake/dllexport.h.in" "dllexport.h") +list(APPEND gtsam_srcs "${CMAKE_BINARY_DIR}/gtsam/config.h" "${CMAKE_BINARY_DIR}/gtsam/dllexport.h") +install(FILES "${CMAKE_BINARY_DIR}/gtsam/config.h" "${CMAKE_BINARY_DIR}/gtsam/dllexport.h" DESTINATION include/gtsam) # Versions set(gtsam_version ${GTSAM_VERSION_MAJOR}.${GTSAM_VERSION_MINOR}.${GTSAM_VERSION_PATCH}) @@ -95,7 +104,7 @@ message(STATUS "Install prefix: ${CMAKE_INSTALL_PREFIX}") if (GTSAM_BUILD_STATIC_LIBRARY) message(STATUS "Building GTSAM - static") add_library(gtsam-static STATIC ${gtsam_srcs}) - target_link_libraries(gtsam-static ${GTSAM_BOOST_LIBRARIES}) + target_link_libraries(gtsam-static ${GTSAM_BOOST_LIBRARIES} ${TBB_LIBS}) set_target_properties(gtsam-static PROPERTIES OUTPUT_NAME gtsam CLEAN_DIRECT_OUTPUT 1 @@ -114,7 +123,7 @@ endif () if (GTSAM_BUILD_SHARED_LIBRARY) message(STATUS "Building GTSAM - shared") add_library(gtsam-shared SHARED ${gtsam_srcs}) - target_link_libraries(gtsam-shared ${GTSAM_BOOST_LIBRARIES}) + target_link_libraries(gtsam-shared ${GTSAM_BOOST_LIBRARIES} ${TBB_LIBS}) set_target_properties(gtsam-shared PROPERTIES OUTPUT_NAME gtsam CLEAN_DIRECT_OUTPUT 1 @@ -142,10 +151,6 @@ if(MSVC) set_property(SOURCE "${CMAKE_CURRENT_SOURCE_DIR}/slam/serialization.cpp" APPEND PROPERTY COMPILE_FLAGS "/bigobj") endif() - -# Generate and install config file -configure_file(config.h.in config.h) -install(FILES ${CMAKE_BINARY_DIR}/gtsam/config.h DESTINATION include/gtsam) # Create the matlab toolbox for the gtsam library if (GTSAM_INSTALL_MATLAB_TOOLBOX) diff --git a/gtsam/base/FastMap.h b/gtsam/base/FastMap.h index 7cea45e88..5015a77d8 100644 --- a/gtsam/base/FastMap.h +++ b/gtsam/base/FastMap.h @@ -58,6 +58,9 @@ public: return std::map(this->begin(), this->end()); } + /** Handy 'exists' function */ + bool exists(const KEY& e) const { return this->find(e) != this->end(); } + private: /** Serialization function */ friend class boost::serialization::access; diff --git a/gtsam/base/FastSet.h b/gtsam/base/FastSet.h index be2864f8f..4c8227c22 100644 --- a/gtsam/base/FastSet.h +++ b/gtsam/base/FastSet.h @@ -91,6 +91,9 @@ public: return std::set(this->begin(), this->end()); } + /** Handy 'exists' function */ + bool exists(const VALUE& e) const { return this->find(e) != this->end(); } + /** Print to implement Testable */ void print(const std::string& str = "") const { FastSetTestableHelper::print(*this, str); } diff --git a/gtsam/base/Matrix.cpp b/gtsam/base/Matrix.cpp index 187280bbf..47b012739 100644 --- a/gtsam/base/Matrix.cpp +++ b/gtsam/base/Matrix.cpp @@ -23,6 +23,7 @@ #include #include +#include #include #include @@ -587,7 +588,7 @@ void vector_scale_inplace(const Vector& v, Matrix& A, bool inf_mask) { if (inf_mask) { for (size_t i=0; i 1; + const DenseIndex effectiveRows = transposeMatrix ? matrix.cols() : matrix.rows(); + + if(matrix.rows() > 0 && matrix.cols() > 0) + { + stringstream matrixPrinted; + if(transposeMatrix) + matrixPrinted << matrix.transpose(); + else + matrixPrinted << matrix; + const std::string matrixStr = matrixPrinted.str(); + boost::tokenizer > tok(matrixStr, boost::char_separator("\n")); + + DenseIndex row = 0; + BOOST_FOREACH(const std::string& line, tok) + { + assert(row < effectiveRows); + if(row > 0) + ss << padding; + ss << "[ " << line << " ]"; + if(row < effectiveRows - 1) + ss << "\n"; + ++ row; + } + } else { + ss << "Empty (" << matrix.rows() << "x" << matrix.cols() << ")"; + } + return ss.str(); +} + + } // namespace gtsam diff --git a/gtsam/base/Matrix.h b/gtsam/base/Matrix.h index 0c0b54c5d..267638bdf 100644 --- a/gtsam/base/Matrix.h +++ b/gtsam/base/Matrix.h @@ -497,6 +497,8 @@ Eigen::Matrix Cayley(const Eigen::Matrix& A) { return (FMat::Identity() - A)*(FMat::Identity() + A).inverse(); } +std::string formatMatrixIndented(const std::string& label, const Matrix& matrix, bool makeVectorHorizontal = false); + } // namespace gtsam #include diff --git a/gtsam/base/SymmetricBlockMatrix.cpp b/gtsam/base/SymmetricBlockMatrix.cpp new file mode 100644 index 000000000..beabc7755 --- /dev/null +++ b/gtsam/base/SymmetricBlockMatrix.cpp @@ -0,0 +1,50 @@ +/* ---------------------------------------------------------------------------- + +* GTSAM Copyright 2010, Georgia Tech Research Corporation, +* Atlanta, Georgia 30332-0415 +* All Rights Reserved +* Authors: Frank Dellaert, et al. (see THANKS for the full author list) + +* See LICENSE for the license information + +* -------------------------------------------------------------------------- */ + +/** +* @file SymmetricBlockMatrix.cpp +* @brief Access to matrices via blocks of pre-defined sizes. Used in GaussianFactor and GaussianConditional. +* @author Richard Roberts +* @date Sep 18, 2010 +*/ + +#include +#include + +namespace gtsam { + + /* ************************************************************************* */ + SymmetricBlockMatrix SymmetricBlockMatrix::LikeActiveViewOf(const SymmetricBlockMatrix& other) + { + SymmetricBlockMatrix result; + result.variableColOffsets_.resize(other.nBlocks() + 1); + for(size_t i = 0; i < result.variableColOffsets_.size(); ++i) + result.variableColOffsets_[i] = + other.variableColOffsets_[other.blockStart_ + i] - other.variableColOffsets_[other.blockStart_]; + result.matrix_.resize(other.cols(), other.cols()); + result.assertInvariants(); + return result; + } + + /* ************************************************************************* */ + SymmetricBlockMatrix SymmetricBlockMatrix::LikeActiveViewOf(const VerticalBlockMatrix& other) + { + SymmetricBlockMatrix result; + result.variableColOffsets_.resize(other.nBlocks() + 1); + for(size_t i = 0; i < result.variableColOffsets_.size(); ++i) + result.variableColOffsets_[i] = + other.variableColOffsets_[other.blockStart_ + i] - other.variableColOffsets_[other.blockStart_]; + result.matrix_.resize(other.cols(), other.cols()); + result.assertInvariants(); + return result; + } + +} diff --git a/gtsam/base/SymmetricBlockMatrix.h b/gtsam/base/SymmetricBlockMatrix.h new file mode 100644 index 000000000..8b4db01eb --- /dev/null +++ b/gtsam/base/SymmetricBlockMatrix.h @@ -0,0 +1,229 @@ +/* ---------------------------------------------------------------------------- + +* GTSAM Copyright 2010, Georgia Tech Research Corporation, +* Atlanta, Georgia 30332-0415 +* All Rights Reserved +* Authors: Frank Dellaert, et al. (see THANKS for the full author list) + +* See LICENSE for the license information + +* -------------------------------------------------------------------------- */ + +/** +* @file SymmetricBlockMatrix.h +* @brief Access to matrices via blocks of pre-defined sizes. Used in GaussianFactor and GaussianConditional. +* @author Richard Roberts +* @date Sep 18, 2010 +*/ +#pragma once + +#include + +namespace gtsam { + + // Forward declarations + class VerticalBlockMatrix; + + /** + * This class stores a dense matrix and allows it to be accessed as a collection of blocks. When + * constructed, the caller must provide the dimensions of the blocks. + * + * The block structure is symmetric, but the underlying matrix does not necessarily need to be. + * + * This class also has a parameter that can be changed after construction to change the apparent + * matrix view. firstBlock() determines the block that appears to have index 0 for all operations + * (except re-setting firstBlock()). + * + * @addtogroup base */ + class SymmetricBlockMatrix + { + public: + typedef SymmetricBlockMatrix This; + typedef Eigen::Block Block; + typedef Eigen::Block constBlock; + + protected: + Matrix matrix_; ///< The full matrix + std::vector variableColOffsets_; ///< the starting columns of each block (0-based) + + DenseIndex blockStart_; ///< Changes apparent matrix view, see main class comment. + + public: + /** Construct from an empty matrix (asserts that the matrix is empty) */ + SymmetricBlockMatrix() : + blockStart_(0) + { + variableColOffsets_.push_back(0); + assertInvariants(); + } + + /** Construct from a container of the sizes of each block. */ + template + SymmetricBlockMatrix(const CONTAINER& dimensions) : + blockStart_(0) + { + fillOffsets(dimensions.begin(), dimensions.end()); + matrix_.resize(variableColOffsets_.back(), variableColOffsets_.back()); + assertInvariants(); + } + + /** + * Construct from iterator over the sizes of each vertical block. */ + template + SymmetricBlockMatrix(ITERATOR firstBlockDim, ITERATOR lastBlockDim) : + blockStart_(0) + { + fillOffsets(firstBlockDim, lastBlockDim); + matrix_.resize(variableColOffsets_.back(), variableColOffsets_.back()); + assertInvariants(); + } + + /** Construct from a container of the sizes of each vertical block and a pre-prepared matrix. */ + template + SymmetricBlockMatrix(const CONTAINER& dimensions, const Matrix& matrix) : + matrix_(matrix), blockStart_(0) + { + fillOffsets(dimensions.begin(), dimensions.end()); + if(matrix_.rows() != matrix_.cols()) + throw std::invalid_argument("Requested to create a SymmetricBlockMatrix from a non-square matrix."); + if(variableColOffsets_.back() != matrix_.cols()) + throw std::invalid_argument("Requested to create a SymmetricBlockMatrix with dimensions that do not sum to the total size of the provided matrix."); + assertInvariants(); + } + + /** Copy the block structure, but do not copy the matrix data. If blockStart() has been + * modified, this copies the structure of the corresponding matrix view. In the destination + * SymmetricBlockMatrix, blockStart() will be 0. */ + static SymmetricBlockMatrix LikeActiveViewOf(const SymmetricBlockMatrix& other); + + /** Copy the block structure, but do not copy the matrix data. If blockStart() has been + * modified, this copies the structure of the corresponding matrix view. In the destination + * SymmetricBlockMatrix, blockStart() will be 0. */ + static SymmetricBlockMatrix LikeActiveViewOf(const VerticalBlockMatrix& other); + + /** Row size */ + DenseIndex rows() const { assertInvariants(); return variableColOffsets_.back() - variableColOffsets_[blockStart_]; } + + /** Column size */ + DenseIndex cols() const { return rows(); } + + + /** Block count */ + DenseIndex nBlocks() const { assertInvariants(); return variableColOffsets_.size() - 1 - blockStart_; } + + Block operator()(DenseIndex i_block, DenseIndex j_block) { + return range(i_block, i_block+1, j_block, j_block+1); + } + + constBlock operator()(DenseIndex i_block, DenseIndex j_block) const { + return range(i_block, i_block+1, j_block, j_block+1); + } + + Block range(DenseIndex i_startBlock, DenseIndex i_endBlock, DenseIndex j_startBlock, DenseIndex j_endBlock) { + assertInvariants(); + DenseIndex i_actualStartBlock = i_startBlock + blockStart_; + DenseIndex i_actualEndBlock = i_endBlock + blockStart_; + DenseIndex j_actualStartBlock = j_startBlock + blockStart_; + DenseIndex j_actualEndBlock = j_endBlock + blockStart_; + checkBlock(i_actualStartBlock); + checkBlock(j_actualStartBlock); + if(i_startBlock != 0 || i_endBlock != 0) { + checkBlock(i_actualStartBlock); + assert(i_actualEndBlock < (DenseIndex)variableColOffsets_.size()); + } + if(j_startBlock != 0 || j_endBlock != 0) { + checkBlock(j_actualStartBlock); + assert(j_actualEndBlock < (DenseIndex)variableColOffsets_.size()); + } + return matrix_.block( + variableColOffsets_[i_actualStartBlock], variableColOffsets_[j_actualStartBlock], + variableColOffsets_[i_actualEndBlock] - variableColOffsets_[i_actualStartBlock], + variableColOffsets_[j_actualEndBlock] - variableColOffsets_[j_actualStartBlock]); + } + + constBlock range(DenseIndex i_startBlock, DenseIndex i_endBlock, DenseIndex j_startBlock, DenseIndex j_endBlock) const { + assertInvariants(); + DenseIndex i_actualStartBlock = i_startBlock + blockStart_; + DenseIndex i_actualEndBlock = i_endBlock + blockStart_; + DenseIndex j_actualStartBlock = j_startBlock + blockStart_; + DenseIndex j_actualEndBlock = j_endBlock + blockStart_; + if(i_startBlock != 0 || i_endBlock != 0) { + checkBlock(i_actualStartBlock); + assert(i_actualEndBlock < (DenseIndex)variableColOffsets_.size()); + } + if(j_startBlock != 0 || j_endBlock != 0) { + checkBlock(j_actualStartBlock); + assert(j_actualEndBlock < (DenseIndex)variableColOffsets_.size()); + } + return matrix_.block( + variableColOffsets_[i_actualStartBlock], variableColOffsets_[j_actualStartBlock], + variableColOffsets_[i_actualEndBlock] - variableColOffsets_[i_actualStartBlock], + variableColOffsets_[j_actualEndBlock] - variableColOffsets_[j_actualStartBlock]); + } + + /** Return the full matrix, *not* including any portions excluded by firstBlock(). */ + Block full() { + return range(0,nBlocks(), 0,nBlocks()); + } + + /** Return the full matrix, *not* including any portions excluded by firstBlock(). */ + constBlock full() const { + return range(0,nBlocks(), 0,nBlocks()); + } + + /** Access to full matrix, including any portions excluded by firstBlock() to other operations. */ + const Matrix& matrix() const { return matrix_; } + + /** Access to full matrix, including any portions excluded by firstBlock() to other operations. */ + Matrix& matrix() { return matrix_; } + + DenseIndex offset(DenseIndex block) const { + assertInvariants(); + DenseIndex actualBlock = block + blockStart_; + checkBlock(actualBlock); + return variableColOffsets_[actualBlock]; + } + + DenseIndex& blockStart() { return blockStart_; } + DenseIndex blockStart() const { return blockStart_; } + + protected: + void assertInvariants() const { + assert(matrix_.rows() == matrix_.cols()); + assert(matrix_.cols() == variableColOffsets_.back()); + assert(blockStart_ < (DenseIndex)variableColOffsets_.size()); + } + + void checkBlock(DenseIndex block) const { + assert(matrix_.rows() == matrix_.cols()); + assert(matrix_.cols() == variableColOffsets_.back()); + assert(block < (DenseIndex)variableColOffsets_.size()-1); + assert(variableColOffsets_[block] < matrix_.cols() && variableColOffsets_[block+1] <= matrix_.cols()); + } + + template + void fillOffsets(ITERATOR firstBlockDim, ITERATOR lastBlockDim) { + variableColOffsets_.resize((lastBlockDim-firstBlockDim)+1); + variableColOffsets_[0] = 0; + DenseIndex j=0; + for(ITERATOR dim=firstBlockDim; dim!=lastBlockDim; ++dim) { + variableColOffsets_[j+1] = variableColOffsets_[j] + *dim; + ++ j; + } + } + + friend class VerticalBlockMatrix; + + private: + /** Serialization function */ + friend class boost::serialization::access; + template + void serialize(ARCHIVE & ar, const unsigned int version) { + ar & BOOST_SERIALIZATION_NVP(matrix_); + ar & BOOST_SERIALIZATION_NVP(variableColOffsets_); + ar & BOOST_SERIALIZATION_NVP(blockStart_); + } + }; + + +} diff --git a/gtsam/base/TestableAssertions.h b/gtsam/base/TestableAssertions.h index 42001b6f4..12138d192 100644 --- a/gtsam/base/TestableAssertions.h +++ b/gtsam/base/TestableAssertions.h @@ -231,8 +231,6 @@ bool assert_container_equal(const std::vector >& expected, template bool assert_container_equal(const V& expected, const V& actual, double tol = 1e-9) { bool match = true; - if (expected.size() != actual.size()) - match = false; typename V::const_iterator itExp = expected.begin(), itAct = actual.begin(); @@ -243,6 +241,8 @@ bool assert_container_equal(const V& expected, const V& actual, double tol = 1e- break; } } + if(itExp != expected.end() || itAct != actual.end()) + match = false; } if(!match) { std::cout << "expected: " << std::endl; diff --git a/gtsam/base/Vector.cpp b/gtsam/base/Vector.cpp index 7a71eb125..2a0192930 100644 --- a/gtsam/base/Vector.cpp +++ b/gtsam/base/Vector.cpp @@ -165,7 +165,7 @@ bool equal_with_abs_tol(const Vector& vec1, const Vector& vec2, double tol) { if (vec1.size()!=vec2.size()) return false; size_t m = vec1.size(); for(size_t i=0; i tol) return false; @@ -178,7 +178,7 @@ bool equal_with_abs_tol(const SubVector& vec1, const SubVector& vec2, double tol if (vec1.size()!=vec2.size()) return false; size_t m = vec1.size(); for(size_t i=0; i tol) return false; @@ -387,7 +387,7 @@ double weightedPseudoinverse(const Vector& a, const Vector& weights, // Slow version with error checking pair weightedPseudoinverse(const Vector& a, const Vector& weights) { - int m = weights.size(); + DenseIndex m = weights.size(); if (a.size() != m) throw invalid_argument("a and weights have different sizes!"); Vector pseudo(m); diff --git a/gtsam/base/VerticalBlockMatrix.cpp b/gtsam/base/VerticalBlockMatrix.cpp new file mode 100644 index 000000000..0361264fe --- /dev/null +++ b/gtsam/base/VerticalBlockMatrix.cpp @@ -0,0 +1,52 @@ +/* ---------------------------------------------------------------------------- + + * GTSAM Copyright 2010, Georgia Tech Research Corporation, + * Atlanta, Georgia 30332-0415 + * All Rights Reserved + * Authors: Frank Dellaert, et al. (see THANKS for the full author list) + + * See LICENSE for the license information + + * -------------------------------------------------------------------------- */ + +/** + * @file VerticalBlockMatrix.cpp + * @brief A matrix with column blocks of pre-defined sizes. Used in JacobianFactor and + * GaussianConditional. + * @author Richard Roberts + * @date Sep 18, 2010 */ + +#include +#include + +namespace gtsam { + + /* ************************************************************************* */ + VerticalBlockMatrix VerticalBlockMatrix::LikeActiveViewOf(const VerticalBlockMatrix& other) + { + VerticalBlockMatrix result; + result.variableColOffsets_.resize(other.nBlocks() + 1); + for(size_t i = 0; i < result.variableColOffsets_.size(); ++i) + result.variableColOffsets_[i] = + other.variableColOffsets_[other.blockStart_ + i] - other.variableColOffsets_[other.blockStart_]; + result.matrix_.resize(other.rows(), result.variableColOffsets_.back()); + result.rowEnd_ = other.rows(); + result.assertInvariants(); + return result; + } + + /* ************************************************************************* */ + VerticalBlockMatrix VerticalBlockMatrix::LikeActiveViewOf(const SymmetricBlockMatrix& other, DenseIndex height) + { + VerticalBlockMatrix result; + result.variableColOffsets_.resize(other.nBlocks() + 1); + for(size_t i = 0; i < result.variableColOffsets_.size(); ++i) + result.variableColOffsets_[i] = + other.variableColOffsets_[other.blockStart_ + i] - other.variableColOffsets_[other.blockStart_]; + result.matrix_.resize(height, result.variableColOffsets_.back()); + result.rowEnd_ = height; + result.assertInvariants(); + return result; + } + +} diff --git a/gtsam/base/VerticalBlockMatrix.h b/gtsam/base/VerticalBlockMatrix.h new file mode 100644 index 000000000..7677e817e --- /dev/null +++ b/gtsam/base/VerticalBlockMatrix.h @@ -0,0 +1,229 @@ +/* ---------------------------------------------------------------------------- + + * GTSAM Copyright 2010, Georgia Tech Research Corporation, + * Atlanta, Georgia 30332-0415 + * All Rights Reserved + * Authors: Frank Dellaert, et al. (see THANKS for the full author list) + + * See LICENSE for the license information + + * -------------------------------------------------------------------------- */ + +/** + * @file VerticalBlockMatrix.h + * @brief A matrix with column blocks of pre-defined sizes. Used in JacobianFactor and + * GaussianConditional. + * @author Richard Roberts + * @date Sep 18, 2010 */ +#pragma once + +#include + +namespace gtsam { + + // Forward declarations + class SymmetricBlockMatrix; + + /** + * This class stores a dense matrix and allows it to be accessed as a collection of vertical + * blocks. The dimensions of the blocks are provided when constructing this class. + * + * This class also has three parameters that can be changed after construction that change the + * apparent view of the matrix without any reallocation or data copying. firstBlock() determines + * the block that has index 0 for all operations (except for re-setting firstBlock()). rowStart() + * determines the apparent first row of the matrix for all operations (except for setting + * rowStart() and rowEnd()). rowEnd() determines the apparent exclusive (one-past-the-last) last + * row for all operations. To include all rows, rowEnd() should be set to the number of rows in + * the matrix (i.e. one after the last true row index). + * + * @addtogroup base */ + class GTSAM_EXPORT VerticalBlockMatrix + { + public: + typedef VerticalBlockMatrix This; + typedef Eigen::Block Block; + typedef Eigen::Block constBlock; + + protected: + Matrix matrix_; ///< The full matrix + std::vector variableColOffsets_; ///< the starting columns of each block (0-based) + + DenseIndex rowStart_; ///< Changes apparent matrix view, see main class comment. + DenseIndex rowEnd_; ///< Changes apparent matrix view, see main class comment. + DenseIndex blockStart_; ///< Changes apparent matrix view, see main class comment. + + public: + + /** Construct an empty VerticalBlockMatrix */ + VerticalBlockMatrix() : + rowStart_(0), rowEnd_(0), blockStart_(0) + { + variableColOffsets_.push_back(0); + assertInvariants(); + } + + /** Construct from a container of the sizes of each vertical block. */ + template + VerticalBlockMatrix(const CONTAINER& dimensions, DenseIndex height) : + rowStart_(0), rowEnd_(height), blockStart_(0) + { + fillOffsets(dimensions.begin(), dimensions.end()); + matrix_.resize(height, variableColOffsets_.back()); + assertInvariants(); + } + + /** Construct from a container of the sizes of each vertical block and a pre-prepared matrix. */ + template + VerticalBlockMatrix(const CONTAINER& dimensions, const Matrix& matrix) : + matrix_(matrix), rowStart_(0), rowEnd_(matrix.rows()), blockStart_(0) + { + fillOffsets(dimensions.begin(), dimensions.end()); + if(variableColOffsets_.back() != matrix_.cols()) + throw std::invalid_argument("Requested to create a VerticalBlockMatrix with dimensions that do not sum to the total columns of the provided matrix."); + assertInvariants(); + } + + /** + * Construct from iterator over the sizes of each vertical block. */ + template + VerticalBlockMatrix(ITERATOR firstBlockDim, ITERATOR lastBlockDim, DenseIndex height) : + rowStart_(0), rowEnd_(height), blockStart_(0) + { + fillOffsets(firstBlockDim, lastBlockDim); + matrix_.resize(height, variableColOffsets_.back()); + assertInvariants(); + } + + /** Copy the block structure and resize the underlying matrix, but do not copy the matrix data. + * If blockStart(), rowStart(), and/or rowEnd() have been modified, this copies the structure of + * the corresponding matrix view. In the destination VerticalBlockView, blockStart() and + * rowStart() will thus be 0, rowEnd() will be cols() of the source VerticalBlockView, and the + * underlying matrix will be the size of the view of the source matrix. */ + static VerticalBlockMatrix LikeActiveViewOf(const VerticalBlockMatrix& rhs); + + /** Copy the block structure, but do not copy the matrix data. If blockStart() has been + * modified, this copies the structure of the corresponding matrix view. In the destination + * VerticalBlockMatrix, blockStart() will be 0. */ + static VerticalBlockMatrix LikeActiveViewOf(const SymmetricBlockMatrix& rhs, DenseIndex height); + + /// Row size + DenseIndex rows() const { assertInvariants(); return rowEnd_ - rowStart_; } + + /// Column size + DenseIndex cols() const { assertInvariants(); return variableColOffsets_.back() - variableColOffsets_[blockStart_]; } + + /// Block count + DenseIndex nBlocks() const { assertInvariants(); return variableColOffsets_.size() - 1 - blockStart_; } + + /** Access a single block in the underlying matrix with read/write access */ + Block operator()(DenseIndex block) { return range(block, block+1); } + + /** Access a const block view */ + const constBlock operator()(DenseIndex block) const { return range(block, block+1); } + + /** access ranges of blocks at a time */ + Block range(DenseIndex startBlock, DenseIndex endBlock) { + assertInvariants(); + DenseIndex actualStartBlock = startBlock + blockStart_; + DenseIndex actualEndBlock = endBlock + blockStart_; + if(startBlock != 0 || endBlock != 0) { + checkBlock(actualStartBlock); + assert(actualEndBlock < (DenseIndex)variableColOffsets_.size()); + } + const DenseIndex startCol = variableColOffsets_[actualStartBlock]; + const DenseIndex rangeCols = variableColOffsets_[actualEndBlock] - startCol; + return matrix_.block(rowStart_, startCol, this->rows(), rangeCols); + } + + const constBlock range(DenseIndex startBlock, DenseIndex endBlock) const { + assertInvariants(); + DenseIndex actualStartBlock = startBlock + blockStart_; + DenseIndex actualEndBlock = endBlock + blockStart_; + if(startBlock != 0 || endBlock != 0) { + checkBlock(actualStartBlock); + assert(actualEndBlock < (DenseIndex)variableColOffsets_.size()); + } + const DenseIndex startCol = variableColOffsets_[actualStartBlock]; + const DenseIndex rangeCols = variableColOffsets_[actualEndBlock] - startCol; + return ((const Matrix&)matrix_).block(rowStart_, startCol, this->rows(), rangeCols); + } + + /** Return the full matrix, *not* including any portions excluded by rowStart(), rowEnd(), and firstBlock() */ + Block full() { return range(0, nBlocks()); } + + /** Return the full matrix, *not* including any portions excluded by rowStart(), rowEnd(), and firstBlock() */ + const constBlock full() const { return range(0, nBlocks()); } + + DenseIndex offset(DenseIndex block) const { + assertInvariants(); + DenseIndex actualBlock = block + blockStart_; + checkBlock(actualBlock); + return variableColOffsets_[actualBlock]; + } + + /** Get or set the apparent first row of the underlying matrix for all operations */ + DenseIndex& rowStart() { return rowStart_; } + + /** Get or set the apparent last row (exclusive, i.e. rows() == rowEnd() - rowStart()) of the underlying matrix for all operations */ + DenseIndex& rowEnd() { return rowEnd_; } + + /** Get or set the apparent first block for all operations */ + DenseIndex& firstBlock() { return blockStart_; } + + /** Get the apparent first row of the underlying matrix for all operations */ + DenseIndex rowStart() const { return rowStart_; } + + /** Get the apparent last row (exclusive, i.e. rows() == rowEnd() - rowStart()) of the underlying matrix for all operations */ + DenseIndex rowEnd() const { return rowEnd_; } + + /** Get the apparent first block for all operations */ + DenseIndex firstBlock() const { return blockStart_; } + + /** Access to full matrix (*including* any portions excluded by rowStart(), rowEnd(), and firstBlock()) */ + const Matrix& matrix() const { return matrix_; } + + /** Non-const access to full matrix (*including* any portions excluded by rowStart(), rowEnd(), and firstBlock()) */ + Matrix& matrix() { return matrix_; } + + protected: + void assertInvariants() const { + assert(matrix_.cols() == variableColOffsets_.back()); + assert(blockStart_ < (DenseIndex)variableColOffsets_.size()); + assert(rowStart_ <= matrix_.rows()); + assert(rowEnd_ <= matrix_.rows()); + assert(rowStart_ <= rowEnd_); + } + + void checkBlock(DenseIndex block) const { + assert(matrix_.cols() == variableColOffsets_.back()); + assert(block < (DenseIndex)variableColOffsets_.size() - 1); + assert(variableColOffsets_[block] < matrix_.cols() && variableColOffsets_[block+1] <= matrix_.cols()); + } + + template + void fillOffsets(ITERATOR firstBlockDim, ITERATOR lastBlockDim) { + variableColOffsets_.resize((lastBlockDim-firstBlockDim)+1); + variableColOffsets_[0] = 0; + DenseIndex j=0; + for(ITERATOR dim=firstBlockDim; dim!=lastBlockDim; ++dim) { + variableColOffsets_[j+1] = variableColOffsets_[j] + *dim; + ++ j; + } + } + + friend class SymmetricBlockMatrix; + + private: + /** Serialization function */ + friend class boost::serialization::access; + template + void serialize(ARCHIVE & ar, const unsigned int version) { + ar & BOOST_SERIALIZATION_NVP(matrix_); + ar & BOOST_SERIALIZATION_NVP(variableColOffsets_); + ar & BOOST_SERIALIZATION_NVP(rowStart_); + ar & BOOST_SERIALIZATION_NVP(rowEnd_); + ar & BOOST_SERIALIZATION_NVP(blockStart_); + } + }; + +} diff --git a/gtsam/base/blockMatrices.h b/gtsam/base/blockMatrices.h deleted file mode 100644 index 973d63247..000000000 --- a/gtsam/base/blockMatrices.h +++ /dev/null @@ -1,627 +0,0 @@ -/* ---------------------------------------------------------------------------- - - * GTSAM Copyright 2010, Georgia Tech Research Corporation, - * Atlanta, Georgia 30332-0415 - * All Rights Reserved - * Authors: Frank Dellaert, et al. (see THANKS for the full author list) - - * See LICENSE for the license information - - * -------------------------------------------------------------------------- */ - -/** - * @file blockMatrices.h - * @brief Access to matrices via blocks of pre-defined sizes. Used in GaussianFactor and GaussianConditional. - * @author Richard Roberts - * @date Sep 18, 2010 - */ -#pragma once - -#include - -namespace gtsam { - -template class SymmetricBlockView; - -/** - * This class stores a *reference* to a matrix and allows it to be accessed as - * a collection of vertical blocks. It also provides for accessing individual - * columns from those blocks. When constructed or resized, the caller must - * provide the dimensions of the blocks, as well as an underlying matrix - * storage object. This class will resize the underlying matrix such that it - * is consistent with the given block dimensions. - * - * This class also has three parameters that can be changed after construction - * that change the apparent view of the matrix. firstBlock() determines the - * block that has index 0 for all operations (except for re-setting - * firstBlock()). rowStart() determines the apparent first row of the matrix - * for all operations (except for setting rowStart() and rowEnd()). rowEnd() - * determines the apparent *exclusive* last row for all operations. To include - * all rows, rowEnd() should be set to the number of rows in the matrix (i.e. - * one after the last true row index). - * - * @addtogroup base - */ -template -class VerticalBlockView { -public: - typedef MATRIX FullMatrix; - typedef Eigen::Block Block; - typedef Eigen::Block constBlock; - - // columns of blocks - typedef Eigen::VectorBlock Column; - typedef Eigen::VectorBlock constColumn; - -protected: - FullMatrix& matrix_; // the reference to the full matrix - std::vector variableColOffsets_; // the starting columns of each block (0-based) - - // Changes apparent matrix view, see main class comment. - size_t rowStart_; // 0 initially - size_t rowEnd_; // the number of row - 1, initially - size_t blockStart_; // 0 initially - -public: - /** Construct from an empty matrix (asserts that the matrix is empty) */ - VerticalBlockView(FullMatrix& matrix) : - matrix_(matrix), rowStart_(0), rowEnd_(matrix_.rows()), blockStart_(0) { - fillOffsets((size_t*)0, (size_t*)0); - assertInvariants(); - } - - /** - * Construct from a non-empty matrix and copy the block structure from - * another block view. - */ - template - VerticalBlockView(FullMatrix& matrix, const RHS& rhs) : - matrix_(matrix) { - if((size_t) matrix_.rows() != rhs.rows() || (size_t) matrix_.cols() != rhs.cols()) - throw std::invalid_argument( - "In VerticalBlockView<>(FullMatrix& matrix, const RHS& rhs), matrix and rhs must\n" - "already be of the same size. If not, construct the VerticalBlockView from an\n" - "empty matrix and then use copyStructureFrom(const RHS& rhs) to resize the matrix\n" - "and set up the block structure."); - copyStructureFrom(rhs); - assertInvariants(); - } - - /** Construct from iterators over the sizes of each vertical block */ - template - VerticalBlockView(FullMatrix& matrix, ITERATOR firstBlockDim, ITERATOR lastBlockDim) : - matrix_(matrix), rowStart_(0), rowEnd_(matrix_.rows()), blockStart_(0) { - fillOffsets(firstBlockDim, lastBlockDim); - assertInvariants(); - } - - /** - * Construct from a vector of the sizes of each vertical block, resize the - * matrix so that its height is matrixNewHeight and its width fits the given - * block dimensions. - */ - template - VerticalBlockView(FullMatrix& matrix, ITERATOR firstBlockDim, ITERATOR lastBlockDim, size_t matrixNewHeight) : - matrix_(matrix), rowStart_(0), rowEnd_(matrixNewHeight), blockStart_(0) { - fillOffsets(firstBlockDim, lastBlockDim); - matrix_.resize(matrixNewHeight, variableColOffsets_.back()); - assertInvariants(); - } - - /** Row size - */ - size_t rows() const { assertInvariants(); return rowEnd_ - rowStart_; } - - /** Column size - */ - size_t cols() const { assertInvariants(); return variableColOffsets_.back() - variableColOffsets_[blockStart_]; } - - - /** Block count - */ - size_t nBlocks() const { assertInvariants(); return variableColOffsets_.size() - 1 - blockStart_; } - - - /** Access a single block in the underlying matrix with read/write access */ - inline Block operator()(size_t block) { - return range(block, block+1); - } - - /** Access a const block view */ - inline const constBlock operator()(size_t block) const { - return range(block, block+1); - } - - /** access ranges of blocks at a time */ - inline Block range(size_t startBlock, size_t endBlock) { - assertInvariants(); - size_t actualStartBlock = startBlock + blockStart_; - size_t actualEndBlock = endBlock + blockStart_; - if(startBlock != 0 && endBlock != 0) - checkBlock(actualStartBlock); - assert(actualEndBlock < variableColOffsets_.size()); - const size_t& startCol = variableColOffsets_[actualStartBlock]; - const size_t& endCol = variableColOffsets_[actualEndBlock]; - return matrix_.block(rowStart_, startCol, rowEnd_-rowStart_, endCol-startCol); - } - - inline const constBlock range(size_t startBlock, size_t endBlock) const { - assertInvariants(); - size_t actualStartBlock = startBlock + blockStart_; - size_t actualEndBlock = endBlock + blockStart_; - if(startBlock != 0 && endBlock != 0) - checkBlock(actualStartBlock); - assert(actualEndBlock < variableColOffsets_.size()); - const size_t& startCol = variableColOffsets_[actualStartBlock]; - const size_t& endCol = variableColOffsets_[actualEndBlock]; - return ((const FullMatrix&)matrix_).block(rowStart_, startCol, rowEnd_-rowStart_, endCol-startCol); - } - - /** Return the full matrix, *not* including any portions excluded by rowStart(), rowEnd(), and firstBlock() */ - inline Block full() { - return range(0,nBlocks()); - } - - /** Return the full matrix, *not* including any portions excluded by rowStart(), rowEnd(), and firstBlock() */ - inline const constBlock full() const { - return range(0,nBlocks()); - } - - /** get a single column out of a block */ - Column column(size_t block, size_t columnOffset) { - assertInvariants(); - size_t actualBlock = block + blockStart_; - checkBlock(actualBlock); - assert(variableColOffsets_[actualBlock] + columnOffset < variableColOffsets_[actualBlock+1]); - return matrix_.col(variableColOffsets_[actualBlock] + columnOffset).segment(rowStart_, rowEnd_-rowStart_); - } - - /** get a single column out of a block */ - const constColumn column(size_t block, size_t columnOffset) const { - assertInvariants(); - size_t actualBlock = block + blockStart_; - checkBlock(actualBlock); - assert(variableColOffsets_[actualBlock] + columnOffset < (size_t) matrix_.cols()); - return ((const FullMatrix&)matrix_).col(variableColOffsets_[actualBlock] + columnOffset).segment(rowStart_, rowEnd_-rowStart_); - } - - size_t offset(size_t block) const { - assertInvariants(); - size_t actualBlock = block + blockStart_; - checkBlock(actualBlock); - return variableColOffsets_[actualBlock]; - } - - /** Get or set the apparent first row of the underlying matrix for all operations */ - size_t& rowStart() { return rowStart_; } - - /** Get or set the apparent last row (exclusive, i.e. rows() == rowEnd() - rowStart()) of the underlying matrix for all operations */ - size_t& rowEnd() { return rowEnd_; } - - /** Get or set the apparent first block for all operations */ - size_t& firstBlock() { return blockStart_; } - - /** Get the apparent first row of the underlying matrix for all operations */ - size_t rowStart() const { return rowStart_; } - - /** Get the apparent last row (exclusive, i.e. rows() == rowEnd() - rowStart()) of the underlying matrix for all operations */ - size_t rowEnd() const { return rowEnd_; } - - /** Get the apparent first block for all operations */ - size_t firstBlock() const { return blockStart_; } - - /** access to full matrix (*including* any portions excluded by rowStart(), rowEnd(), and firstBlock()) */ - const FullMatrix& fullMatrix() const { return matrix_; } - - /** - * Copy the block structure and resize the underlying matrix, but do not - * copy the matrix data. If blockStart(), rowStart(), and/or rowEnd() have - * been modified, this copies the structure of the corresponding matrix view. - * In the destination VerticalBlockView, blockStart() and rowStart() will - * thus be 0, rowEnd() will be cols() of the source VerticalBlockView, and - * the underlying matrix will be the size of the view of the source matrix. - */ - template - void copyStructureFrom(const RHS& rhs) { - if((size_t) matrix_.rows() != (size_t) rhs.rows() || (size_t) matrix_.cols() != (size_t) rhs.cols()) - matrix_.resize(rhs.rows(), rhs.cols()); - if(rhs.blockStart_ == 0) - variableColOffsets_ = rhs.variableColOffsets_; - else { - variableColOffsets_.resize(rhs.nBlocks() + 1); - variableColOffsets_[0] = 0; - size_t j=0; - assert(rhs.variableColOffsets_.begin()+rhs.blockStart_ < rhs.variableColOffsets_.end()-1); - for(std::vector::const_iterator off=rhs.variableColOffsets_.begin()+rhs.blockStart_; off!=rhs.variableColOffsets_.end()-1; ++off) { - variableColOffsets_[j+1] = variableColOffsets_[j] + (*(off+1) - *off); - ++ j; - } - } - rowStart_ = 0; - rowEnd_ = matrix_.rows(); - blockStart_ = 0; - assertInvariants(); - } - - /** Copy the block struture and matrix data, resizing the underlying matrix - * in the process. This can deal with assigning between different types of - * underlying matrices, as long as the matrices themselves are assignable. - * To avoid creating a temporary matrix this assumes no aliasing, i.e. that - * no part of the underlying matrices refer to the same memory! - * - * If blockStart(), rowStart(), and/or rowEnd() have been modified, this - * copies the structure of the corresponding matrix view. In the destination - * VerticalBlockView, blockStart() and rowStart() will thus be 0, rowEnd() - * will be cols() of the source VerticalBlockView, and the underlying matrix - * will be the size of the view of the source matrix. - */ - template - VerticalBlockView& assignNoalias(const RHS& rhs) { - copyStructureFrom(rhs); - matrix_.noalias() = rhs.full(); - return *this; - } - - /** Swap the contents of the underlying matrix and the block information with - * another VerticalBlockView. - */ - void swap(VerticalBlockView& other) { - matrix_.swap(other.matrix_); - variableColOffsets_.swap(other.variableColOffsets_); - std::swap(rowStart_, other.rowStart_); - std::swap(rowEnd_, other.rowEnd_); - std::swap(blockStart_, other.blockStart_); - assertInvariants(); - other.assertInvariants(); - } - -protected: - void assertInvariants() const { - assert((size_t) matrix_.cols() == variableColOffsets_.back()); - assert(blockStart_ < variableColOffsets_.size()); - assert(rowStart_ <= (size_t) matrix_.rows()); - assert(rowEnd_ <= (size_t) matrix_.rows()); - assert(rowStart_ <= rowEnd_); - } - - void checkBlock(size_t block) const { - assert((size_t) matrix_.cols() == variableColOffsets_.back()); - assert(block < variableColOffsets_.size()-1); - assert(variableColOffsets_[block] < (size_t) matrix_.cols() && variableColOffsets_[block+1] <= (size_t) matrix_.cols()); - } - - template - void fillOffsets(ITERATOR firstBlockDim, ITERATOR lastBlockDim) { - variableColOffsets_.resize((lastBlockDim-firstBlockDim)+1); - variableColOffsets_[0] = 0; - size_t j=0; - for(ITERATOR dim=firstBlockDim; dim!=lastBlockDim; ++dim) { - variableColOffsets_[j+1] = variableColOffsets_[j] + *dim; - ++ j; - } - } - - template friend class SymmetricBlockView; - template friend class VerticalBlockView; - -private: - /** Serialization function */ - friend class boost::serialization::access; - template - void serialize(ARCHIVE & ar, const unsigned int version) { - ar & BOOST_SERIALIZATION_NVP(matrix_); - ar & BOOST_SERIALIZATION_NVP(variableColOffsets_); - ar & BOOST_SERIALIZATION_NVP(rowStart_); - ar & BOOST_SERIALIZATION_NVP(rowEnd_); - ar & BOOST_SERIALIZATION_NVP(blockStart_); - } -}; - -/** - * This class stores a *reference* to a matrix and allows it to be accessed as - * a collection of blocks. It also provides for accessing individual - * columns from those blocks. When constructed or resized, the caller must - * provide the dimensions of the blocks, as well as an underlying matrix - * storage object. This class will resize the underlying matrix such that it - * is consistent with the given block dimensions. - * - * This class uses a symmetric block structure. The underlying matrix does not - * necessarily need to be symmetric. - * - * This class also has a parameter that can be changed after construction to - * change the apparent matrix view. firstBlock() determines the block that - * appears to have index 0 for all operations (except re-setting firstBlock()). - * - * @addtogroup base - */ -template -class SymmetricBlockView { -public: - typedef MATRIX FullMatrix; - typedef Eigen::Block Block; - typedef Eigen::Block constBlock; - typedef typename FullMatrix::ColXpr::SegmentReturnType Column; - typedef typename FullMatrix::ConstColXpr::ConstSegmentReturnType constColumn; - -private: - static FullMatrix matrixTemp_; // just for finding types - -protected: - FullMatrix& matrix_; // the reference to the full matrix - std::vector variableColOffsets_; // the starting columns of each block (0-based) - - // Changes apparent matrix view, see main class comment. - size_t blockStart_; // 0 initially - -public: - /** Construct from an empty matrix (asserts that the matrix is empty) */ - SymmetricBlockView(FullMatrix& matrix) : - matrix_(matrix), blockStart_(0) { - fillOffsets((size_t*)0, (size_t*)0); - assertInvariants(); - } - - /** Construct from iterators over the sizes of each block */ - template - SymmetricBlockView(FullMatrix& matrix, ITERATOR firstBlockDim, ITERATOR lastBlockDim) : - matrix_(matrix), blockStart_(0) { - fillOffsets(firstBlockDim, lastBlockDim); - assertInvariants(); - } - - /** - * Modify the size and structure of the underlying matrix and this block - * view. If 'preserve' is true, the underlying matrix data will be copied if - * the matrix size changes, otherwise the new data will be uninitialized. - */ - template - void resize(ITERATOR firstBlockDim, ITERATOR lastBlockDim, bool preserve) { - blockStart_ = 0; - fillOffsets(firstBlockDim, lastBlockDim); - if (preserve) - matrix_.conservativeResize(variableColOffsets_.back(), variableColOffsets_.back()); - else - matrix_.resize(variableColOffsets_.back(), variableColOffsets_.back()); - } - - /** Row size - */ - size_t rows() const { assertInvariants(); return variableColOffsets_.back() - variableColOffsets_[blockStart_]; } - - /** Column size - */ - size_t cols() const { return rows(); } - - - /** Block count - */ - size_t nBlocks() const { assertInvariants(); return variableColOffsets_.size() - 1 - blockStart_; } - - - Block operator()(size_t i_block, size_t j_block) { - return range(i_block, i_block+1, j_block, j_block+1); - } - - constBlock operator()(size_t i_block, size_t j_block) const { - return range(i_block, i_block+1, j_block, j_block+1); - } - - Block range(size_t i_startBlock, size_t i_endBlock, size_t j_startBlock, size_t j_endBlock) { - assertInvariants(); - size_t i_actualStartBlock = i_startBlock + blockStart_; - size_t i_actualEndBlock = i_endBlock + blockStart_; - size_t j_actualStartBlock = j_startBlock + blockStart_; - size_t j_actualEndBlock = j_endBlock + blockStart_; - checkBlock(i_actualStartBlock); - checkBlock(j_actualStartBlock); - assert(i_actualEndBlock < variableColOffsets_.size()); - assert(j_actualEndBlock < variableColOffsets_.size()); - return matrix_.block( - variableColOffsets_[i_actualStartBlock], variableColOffsets_[j_actualStartBlock], - variableColOffsets_[i_actualEndBlock]-variableColOffsets_[i_actualStartBlock], - variableColOffsets_[j_actualEndBlock]-variableColOffsets_[j_actualStartBlock]); - } - - constBlock range(size_t i_startBlock, size_t i_endBlock, size_t j_startBlock, size_t j_endBlock) const { - assertInvariants(); - size_t i_actualStartBlock = i_startBlock + blockStart_; - size_t i_actualEndBlock = i_endBlock + blockStart_; - size_t j_actualStartBlock = j_startBlock + blockStart_; - size_t j_actualEndBlock = j_endBlock + blockStart_; - checkBlock(i_actualStartBlock); - checkBlock(j_actualStartBlock); - assert(i_actualEndBlock < variableColOffsets_.size()); - assert(j_actualEndBlock < variableColOffsets_.size()); - return ((const FullMatrix&)matrix_).block( - variableColOffsets_[i_actualStartBlock], variableColOffsets_[j_actualStartBlock], - variableColOffsets_[i_actualEndBlock]-variableColOffsets_[i_actualStartBlock], - variableColOffsets_[j_actualEndBlock]-variableColOffsets_[j_actualStartBlock]); - } - - Block full() { - return range(0,nBlocks(), 0,nBlocks()); - } - - constBlock full() const { - return range(0,nBlocks(), 0,nBlocks()); - } - - /** access to full matrix */ - const FullMatrix& fullMatrix() const { return matrix_; } - - Column column(size_t i_block, size_t j_block, size_t columnOffset) { - assertInvariants(); - size_t i_actualBlock = i_block + blockStart_; - size_t j_actualBlock = j_block + blockStart_; - checkBlock(i_actualBlock); - checkBlock(j_actualBlock); - assert(i_actualBlock < variableColOffsets_.size()); - assert(j_actualBlock < variableColOffsets_.size()); - assert(variableColOffsets_[j_actualBlock] + columnOffset < variableColOffsets_[j_actualBlock+1]); - - return matrix_.col( - variableColOffsets_[j_actualBlock] + columnOffset).segment( - variableColOffsets_[i_actualBlock], - variableColOffsets_[i_actualBlock+1]-variableColOffsets_[i_actualBlock]); - } - - constColumn column(size_t i_block, size_t j_block, size_t columnOffset) const { - assertInvariants(); - size_t i_actualBlock = i_block + blockStart_; - size_t j_actualBlock = j_block + blockStart_; - checkBlock(i_actualBlock); - checkBlock(j_actualBlock); - assert(i_actualBlock < variableColOffsets_.size()); - assert(j_actualBlock < variableColOffsets_.size()); - assert(variableColOffsets_[j_actualBlock] + columnOffset < variableColOffsets_[j_actualBlock+1]); - - return ((const FullMatrix&)matrix_).col( - variableColOffsets_[j_actualBlock] + columnOffset).segment( - variableColOffsets_[i_actualBlock], - variableColOffsets_[i_actualBlock+1]-variableColOffsets_[i_actualBlock]); -// assertInvariants(); -// size_t j_actualBlock = j_block + blockStart_; -// assert(variableColOffsets_[j_actualBlock] + columnOffset < variableColOffsets_[j_actualBlock+1]); -// constBlock blockMat(operator()(i_block, j_block)); -// return constColumn(blockMat, columnOffset); - } - - Column rangeColumn(size_t i_startBlock, size_t i_endBlock, size_t j_block, size_t columnOffset) { - assertInvariants(); - - size_t i_actualStartBlock = i_startBlock + blockStart_; - size_t i_actualEndBlock = i_endBlock + blockStart_; - size_t j_actualStartBlock = j_block + blockStart_; - checkBlock(i_actualStartBlock); - checkBlock(j_actualStartBlock); - assert(i_actualEndBlock < variableColOffsets_.size()); - assert(variableColOffsets_[j_actualStartBlock] + columnOffset < variableColOffsets_[j_actualStartBlock+1]); - - return matrix_.col( - variableColOffsets_[j_actualStartBlock] + columnOffset).segment( - variableColOffsets_[i_actualStartBlock], - variableColOffsets_[i_actualEndBlock]-variableColOffsets_[i_actualStartBlock]); - } - - constColumn rangeColumn(size_t i_startBlock, size_t i_endBlock, size_t j_block, size_t columnOffset) const { - assertInvariants(); - - size_t i_actualStartBlock = i_startBlock + blockStart_; - size_t i_actualEndBlock = i_endBlock + blockStart_; - size_t j_actualStartBlock = j_block + blockStart_; - checkBlock(i_actualStartBlock); - checkBlock(j_actualStartBlock); - assert(i_actualEndBlock < variableColOffsets_.size()); - assert(variableColOffsets_[j_actualStartBlock] + columnOffset < variableColOffsets_[j_actualStartBlock+1]); - - return ((const FullMatrix&)matrix_).col( - variableColOffsets_[j_actualStartBlock] + columnOffset).segment( - variableColOffsets_[i_actualStartBlock], - variableColOffsets_[i_actualEndBlock]-variableColOffsets_[i_actualStartBlock]); - } - - size_t offset(size_t block) const { - assertInvariants(); - size_t actualBlock = block + blockStart_; - checkBlock(actualBlock); - return variableColOffsets_[actualBlock]; - } - - size_t& blockStart() { return blockStart_; } - size_t blockStart() const { return blockStart_; } - - /** Copy the block structure and resize the underlying matrix, but do not - * copy the matrix data. If blockStart() has been modified, this copies the - * structure of the corresponding matrix view. In the destination - * SymmetricBlockView, startBlock() will thus be 0 and the underlying matrix - * will be the size of the view of the source matrix. - */ - template - void copyStructureFrom(const RHS& rhs) { - matrix_.resize(rhs.cols(), rhs.cols()); - if(rhs.blockStart_ == 0) - variableColOffsets_ = rhs.variableColOffsets_; - else { - variableColOffsets_.resize(rhs.nBlocks() + 1); - variableColOffsets_[0] = 0; - size_t j=0; - assert(rhs.variableColOffsets_.begin()+rhs.blockStart_ < rhs.variableColOffsets_.end()-1); - for(std::vector::const_iterator off=rhs.variableColOffsets_.begin()+rhs.blockStart_; off!=rhs.variableColOffsets_.end()-1; ++off) { - variableColOffsets_[j+1] = variableColOffsets_[j] + (*(off+1) - *off); - ++ j; - } - } - blockStart_ = 0; - assertInvariants(); - } - - /** Copy the block struture and matrix data, resizing the underlying matrix - * in the process. This can deal with assigning between different types of - * underlying matrices, as long as the matrices themselves are assignable. - * To avoid creating a temporary matrix this assumes no aliasing, i.e. that - * no part of the underlying matrices refer to the same memory! - * - * If blockStart() has been modified, this copies the structure of the - * corresponding matrix view. In the destination SymmetricBlockView, - * startBlock() will thus be 0 and the underlying matrix will be the size - * of the view of the source matrix. - */ - template - SymmetricBlockView& assignNoalias(const SymmetricBlockView& rhs) { - copyStructureFrom(rhs); - matrix_.noalias() = rhs.full(); - return *this; - } - - /** Swap the contents of the underlying matrix and the block information with - * another VerticalBlockView. - */ - void swap(SymmetricBlockView& other) { - matrix_.swap(other.matrix_); - variableColOffsets_.swap(other.variableColOffsets_); - std::swap(blockStart_, other.blockStart_); - assertInvariants(); - other.assertInvariants(); - } - -protected: - void assertInvariants() const { - assert(matrix_.rows() == matrix_.cols()); - assert((size_t) matrix_.cols() == variableColOffsets_.back()); - assert(blockStart_ < variableColOffsets_.size()); - } - - void checkBlock(size_t block) const { - assert(matrix_.rows() == matrix_.cols()); - assert((size_t) matrix_.cols() == variableColOffsets_.back()); - assert(block < variableColOffsets_.size()-1); - assert(variableColOffsets_[block] < (size_t) matrix_.cols() && variableColOffsets_[block+1] <= (size_t) matrix_.cols()); - } - - template - void fillOffsets(ITERATOR firstBlockDim, ITERATOR lastBlockDim) { - variableColOffsets_.resize((lastBlockDim-firstBlockDim)+1); - variableColOffsets_[0] = 0; - size_t j=0; - for(ITERATOR dim=firstBlockDim; dim!=lastBlockDim; ++dim) { - variableColOffsets_[j+1] = variableColOffsets_[j] + *dim; - ++ j; - } - } - - template friend class SymmetricBlockView; - template friend class VerticalBlockView; - -private: - /** Serialization function */ - friend class boost::serialization::access; - template - void serialize(ARCHIVE & ar, const unsigned int version) { - ar & BOOST_SERIALIZATION_NVP(matrix_); - ar & BOOST_SERIALIZATION_NVP(variableColOffsets_); - ar & BOOST_SERIALIZATION_NVP(blockStart_); - } -}; - - -} diff --git a/gtsam/base/tests/testBlockMatrices.cpp b/gtsam/base/tests/testBlockMatrices.cpp deleted file mode 100644 index 3950d561a..000000000 --- a/gtsam/base/tests/testBlockMatrices.cpp +++ /dev/null @@ -1,133 +0,0 @@ -/* ---------------------------------------------------------------------------- - - * GTSAM Copyright 2010, Georgia Tech Research Corporation, - * Atlanta, Georgia 30332-0415 - * All Rights Reserved - * Authors: Frank Dellaert, et al. (see THANKS for the full author list) - - * See LICENSE for the license information - - * -------------------------------------------------------------------------- */ - -/** - * @file testBlockMatrices - * @author Alex Cunningham - */ - -#include -#include -#include - -using namespace std; -using namespace gtsam; - -/* ************************************************************************* */ -TEST(testBlockMatrices, jacobian_factor1) { - typedef Matrix AbMatrix; - typedef VerticalBlockView BlockAb; - - AbMatrix matrix; // actual matrix - empty to start with - - // from JacobianFactor::Constructor - one variable - Matrix A1 = Matrix_(2,3, - 1., 2., 3., - 4., 5., 6.); - Vector b = Vector_(2, 7., 8.); - size_t dims[] = { A1.cols(), 1}; - - // build the structure - BlockAb Ab(matrix, dims, dims+2, b.size()); - - // add a matrix and get back out - Ab(0) = A1; - EXPECT(assert_equal(A1, Ab(0))); - - // add vector to the system - Ab.column(1, 0) = b; - EXPECT(assert_equal(A1, Ab(0))); - EXPECT(assert_equal(b, Ab.column(1,0))); - - // examine matrix contents - EXPECT_LONGS_EQUAL(2, Ab.nBlocks()); - Matrix expFull = Matrix_(2, 4, - 1., 2., 3., 7., - 4., 5., 6., 8.); - Matrix actFull = Ab.full(); - EXPECT(assert_equal(expFull, actFull)); -} - -/* ************************************************************************* */ -TEST(testBlockMatrices, jacobian_factor2) { - typedef Matrix AbMatrix; - typedef VerticalBlockView BlockAb; - - AbMatrix matrix; // actual matrix - empty to start with - - // from JacobianFactor::Constructor - two variables - Matrix A1 = Matrix_(2,3, - 1., 2., 3., - 4., 5., 6.); - Matrix A2 = Matrix_(2,1, - 10., - 11.); - Vector b = Vector_(2, 7., 8.); - size_t dims[] = { A1.cols(), A2.cols(), 1}; - - // build the structure - BlockAb Ab(matrix, dims, dims+3, b.size()); - - // add blocks - Ab(0) = A1; - Ab(1) = A2; - EXPECT(assert_equal(A1, Ab(0))); - EXPECT(assert_equal(A2, Ab(1))); - - // add vector to the system - Ab.column(2, 0) = b; - EXPECT(assert_equal(A1, Ab(0))); - EXPECT(assert_equal(A2, Ab(1))); - EXPECT(assert_equal(b, Ab.column(2,0))); - - // examine matrix contents - EXPECT_LONGS_EQUAL(3, Ab.nBlocks()); - Matrix expFull = Matrix_(2, 5, - 1., 2., 3., 10., 7., - 4., 5., 6., 11., 8.); - Matrix actFull = Ab.full(); - EXPECT(assert_equal(expFull, actFull)); -} - -/* ************************************************************************* */ -TEST(testBlockMatrices, hessian_factor1) { - typedef Matrix InfoMatrix; - typedef SymmetricBlockView BlockInfo; - - Matrix expected_full = Matrix_(3, 3, - 3.0, 5.0, -8.0, - 0.0, 6.0, -9.0, - 0.0, 0.0, 10.0); - - Matrix G = Matrix_(2,2, 3.0, 5.0, 0.0, 6.0); - Vector g = Vector_(2, -8.0, -9.0); - double f = 10.0; - - size_t dims[] = { G.rows(), 1 }; - InfoMatrix fullMatrix = zeros(G.rows() + 1, G.rows() + 1); - BlockInfo infoView(fullMatrix, dims, dims+2); - infoView(0,0) = G; - infoView.column(0,1,0) = g; - infoView(1,1)(0,0) = f; - - EXPECT_LONGS_EQUAL(0, infoView.blockStart()); - EXPECT_LONGS_EQUAL(2, infoView.nBlocks()); - EXPECT(assert_equal(InfoMatrix(expected_full), fullMatrix)); - EXPECT(assert_equal(InfoMatrix(G), infoView.range(0, 1, 0, 1))) - EXPECT_DOUBLES_EQUAL(f, infoView(1, 1)(0,0), 1e-10); - - EXPECT(assert_equal(g, Vector(infoView.rangeColumn(0, 1, 1, 0)))); - EXPECT(assert_equal(g, Vector(((const BlockInfo)infoView).rangeColumn(0, 1, 1, 0)))); -} - -/* ************************************************************************* */ -int main() { TestResult tr; return TestRegistry::runAllTests(tr); } -/* ************************************************************************* */ diff --git a/gtsam/base/tests/testNumericalDerivative.cpp b/gtsam/base/tests/testNumericalDerivative.cpp index 9d272d682..5647a0283 100644 --- a/gtsam/base/tests/testNumericalDerivative.cpp +++ b/gtsam/base/tests/testNumericalDerivative.cpp @@ -28,7 +28,7 @@ double f(const LieVector& x) { } /* ************************************************************************* */ -TEST_UNSAFE(testNumericalDerivative, numericalHessian) { +TEST(testNumericalDerivative, numericalHessian) { LieVector center = ones(2); Matrix expected = Matrix_(2,2, @@ -47,7 +47,7 @@ double f2(const LieVector& x) { } /* ************************************************************************* */ -TEST_UNSAFE(testNumericalDerivative, numericalHessian2) { +TEST(testNumericalDerivative, numericalHessian2) { LieVector center(2, 0.5, 1.0); Matrix expected = Matrix_(2,2, @@ -66,7 +66,7 @@ double f3(const LieVector& x1, const LieVector& x2) { } /* ************************************************************************* */ -TEST_UNSAFE(testNumericalDerivative, numericalHessian211) { +TEST(testNumericalDerivative, numericalHessian211) { LieVector center1(1, 1.0), center2(1, 5.0); Matrix expected11 = Matrix_(1,1,-sin(center1(0))*cos(center2(0))); @@ -89,7 +89,7 @@ double f4(const LieVector& x, const LieVector& y, const LieVector& z) { } /* ************************************************************************* */ -TEST_UNSAFE(testNumericalDerivative, numericalHessian311) { +TEST(testNumericalDerivative, numericalHessian311) { LieVector center1(1, 1.0), center2(1, 2.0), center3(1, 3.0); double x = center1(0), y = center2(0), z = center3(0); Matrix expected11 = Matrix_(1,1,-sin(x)*cos(y)*z*z); diff --git a/gtsam/base/tests/testTreeTraversal.cpp b/gtsam/base/tests/testTreeTraversal.cpp new file mode 100644 index 000000000..c9083f781 --- /dev/null +++ b/gtsam/base/tests/testTreeTraversal.cpp @@ -0,0 +1,163 @@ +/* ---------------------------------------------------------------------------- + + * GTSAM Copyright 2010, Georgia Tech Research Corporation, + * Atlanta, Georgia 30332-0415 + * All Rights Reserved + * Authors: Frank Dellaert, et al. (see THANKS for the full author list) + + * See LICENSE for the license information + + * -------------------------------------------------------------------------- */ + +/** + * @file testTreeTraversal + * @brief Unit tests for tree traversal, cloning, and printing + * @author Richard Roberts + */ + +#include +#include +#include + +#include +#include +#include +#include +#include + +using boost::assign::operator+=; +using namespace std; +using namespace gtsam; + +struct TestNode { + typedef boost::shared_ptr shared_ptr; + int data; + vector children; + TestNode() : data(-1) {} + TestNode(int data) : data(data) {} +}; + +struct TestForest { + typedef TestNode Node; + typedef Node::shared_ptr sharedNode; + vector roots_; + const vector& roots() const { return roots_; } +}; + +TestForest makeTestForest() { + // 0 1 + // / \ + // 2 3 + // | + // 4 + TestForest forest; + forest.roots_.push_back(boost::make_shared(0)); + forest.roots_.push_back(boost::make_shared(1)); + forest.roots_[0]->children.push_back(boost::make_shared(2)); + forest.roots_[0]->children.push_back(boost::make_shared(3)); + forest.roots_[0]->children[1]->children.push_back(boost::make_shared(4)); + return forest; +} + +/* ************************************************************************* */ +struct PreOrderVisitor { + // This visitor stores the nodes visited so the visit order can be checked later. It also returns + // the current node index as the data and checks that the parent index properly matches the index + // referenced in the node. + std::list visited; + bool parentsMatched; + PreOrderVisitor() : parentsMatched(true) {} + int operator()(const TestNode::shared_ptr& node, int parentData) { + visited.push_back(node->data); + // Check parent index + const int expectedParentIndex = + node->data == 0 ? -1 : + node->data == 1 ? -1 : + node->data == 2 ? 0 : + node->data == 3 ? 0 : + node->data == 4 ? 3 : + node->data == 10 ? 0 : + (parentsMatched = false, -1); + if(expectedParentIndex != parentData) + parentsMatched = false; + return node->data; + } +}; + +/* ************************************************************************* */ +struct PostOrderVisitor { + // This visitor stores the nodes visited so the visit order can be checked later. + std::list visited; + void operator()(const TestNode::shared_ptr& node, int myData) { + visited.push_back(node->data); + } +}; + +/* ************************************************************************* */ +std::list getPreorder(const TestForest& forest) { + std::list result; + PreOrderVisitor preVisitor; + int rootData = -1; + treeTraversal::DepthFirstForest(forest, rootData, preVisitor); + result = preVisitor.visited; + return result; +} + +/* ************************************************************************* */ +TEST(treeTraversal, DepthFirst) +{ + // Get test forest + TestForest testForest = makeTestForest(); + + // Expected visit order + std::list preOrderExpected; + preOrderExpected += 0, 2, 3, 4, 1; + std::list postOrderExpected; + postOrderExpected += 2, 4, 3, 0, 1; + + // Actual visit order + PreOrderVisitor preVisitor; + PostOrderVisitor postVisitor; + int rootData = -1; + treeTraversal::DepthFirstForest(testForest, rootData, preVisitor, postVisitor); + + EXPECT(preVisitor.parentsMatched); + EXPECT(assert_container_equality(preOrderExpected, preVisitor.visited)); + EXPECT(assert_container_equality(postOrderExpected, postVisitor.visited)); +} + +/* ************************************************************************* */ +TEST(treeTraversal, CloneForest) +{ + // Get test forest + TestForest testForest1 = makeTestForest(); + TestForest testForest2; + testForest2.roots_ = treeTraversal::CloneForest(testForest1); + + // Check that the original and clone both are expected + std::list preOrder1Expected; + preOrder1Expected += 0, 2, 3, 4, 1; + std::list preOrder1Actual = getPreorder(testForest1); + std::list preOrder2Actual = getPreorder(testForest2); + EXPECT(assert_container_equality(preOrder1Expected, preOrder1Actual)); + EXPECT(assert_container_equality(preOrder1Expected, preOrder2Actual)); + + // Modify clone - should not modify original + testForest2.roots_[0]->children[1]->data = 10; + std::list preOrderModifiedExpected; + preOrderModifiedExpected += 0, 2, 10, 4, 1; + + // Check that original is the same and only the clone is modified + std::list preOrder1ModActual = getPreorder(testForest1); + std::list preOrder2ModActual = getPreorder(testForest2); + EXPECT(assert_container_equality(preOrder1Expected, preOrder1ModActual)); + EXPECT(assert_container_equality(preOrderModifiedExpected, preOrder2ModActual)); +} + +/* ************************************************************************* */ +int main() { + TestResult tr; + return TestRegistry::runAllTests(tr); +} +/* ************************************************************************* */ + diff --git a/gtsam/base/tests/testVector.cpp b/gtsam/base/tests/testVector.cpp index 8fd151dbf..a0de5cf54 100644 --- a/gtsam/base/tests/testVector.cpp +++ b/gtsam/base/tests/testVector.cpp @@ -199,7 +199,7 @@ TEST( TestVector, weightedPseudoinverse_constraint ) // verify EXPECT(assert_equal(expected,actual)); - EXPECT(isinf(precision)); + EXPECT(std::isinf(precision)); } /* ************************************************************************* */ diff --git a/gtsam/base/treeTraversal-inst.h b/gtsam/base/treeTraversal-inst.h new file mode 100644 index 000000000..a90121738 --- /dev/null +++ b/gtsam/base/treeTraversal-inst.h @@ -0,0 +1,339 @@ +/* ---------------------------------------------------------------------------- + +* GTSAM Copyright 2010, Georgia Tech Research Corporation, +* Atlanta, Georgia 30332-0415 +* All Rights Reserved +* Authors: Frank Dellaert, et al. (see THANKS for the full author list) + +* See LICENSE for the license information + +* -------------------------------------------------------------------------- */ + +/** +* @file treeTraversal-inst.h +* @author Richard Roberts +* @date April 9, 2013 +*/ +#pragma once + +#include +#include + +#include +#include +#include +#include +#include +#include +#include + +#include +#undef max // TBB seems to include windows.h and we don't want these macros +#undef min + +namespace gtsam { + + /** Internal functions used for traversing trees */ + namespace treeTraversal { + + /* ************************************************************************* */ + namespace { + // Internal node used in DFS preorder stack + template + struct TraversalNode { + bool expanded; + const boost::shared_ptr& treeNode; + DATA& parentData; + typename FastList::iterator dataPointer; + TraversalNode(const boost::shared_ptr& _treeNode, DATA& _parentData) : + expanded(false), treeNode(_treeNode), parentData(_parentData) {} + }; + + // Do nothing - default argument for post-visitor for tree traversal + template + void no_op(const boost::shared_ptr& node, const DATA& data) {} + + // Internal node used in parallel traversal stack + template + struct ParallelTraversalNode { + const boost::shared_ptr& treeNode; + DATA myData; + ParallelTraversalNode(const boost::shared_ptr& treeNode, const DATA& myData) : + treeNode(treeNode), myData(myData) {} + }; + + template + class PreOrderTask : public tbb::task + { + public: + const boost::shared_ptr& treeNode; + DATA myData; + VISITOR_PRE& visitorPre; + VISITOR_POST& visitorPost; + int problemSizeThreshold; + bool makeNewTasks; + PreOrderTask(const boost::shared_ptr& treeNode, const DATA& myData, + VISITOR_PRE& visitorPre, VISITOR_POST& visitorPost, int problemSizeThreshold, + bool makeNewTasks = true) : + treeNode(treeNode), myData(myData), visitorPre(visitorPre), visitorPost(visitorPost), + problemSizeThreshold(problemSizeThreshold), makeNewTasks(makeNewTasks) {} + + typedef ParallelTraversalNode ParallelTraversalNodeType; + + tbb::task* execute() + { + // Process this node and its children + processNode(treeNode, myData); + + // Return NULL + return NULL; + } + + void processNode(const boost::shared_ptr& node, DATA& myData) + { + if(makeNewTasks) + { + bool overThreshold = (node->problemSize() >= problemSizeThreshold); + + tbb::task_list childTasks; + BOOST_FOREACH(const boost::shared_ptr& child, node->children) + { + // Process child in a subtask + childTasks.push_back(*new(allocate_child()) + PreOrderTask(child, visitorPre(child, myData), visitorPre, visitorPost, + problemSizeThreshold, overThreshold)); + } + + // If we have child tasks, start subtasks and wait for them to complete + set_ref_count(1 + (int)node->children.size()); + spawn(childTasks); + wait_for_all(); + } + else + { + BOOST_FOREACH(const boost::shared_ptr& child, node->children) + { + processNode(child, visitorPre(child, myData)); + } + } + + // Run the post-order visitor + (void) visitorPost(node, myData); + } + }; + + template + class RootTask : public tbb::task + { + public: + const ROOTS& roots; + DATA& myData; + VISITOR_PRE& visitorPre; + VISITOR_POST& visitorPost; + int problemSizeThreshold; + RootTask(const ROOTS& roots, DATA& myData, VISITOR_PRE& visitorPre, VISITOR_POST& visitorPost, + int problemSizeThreshold) : + roots(roots), myData(myData), visitorPre(visitorPre), visitorPost(visitorPost), + problemSizeThreshold(problemSizeThreshold) {} + + tbb::task* execute() + { + typedef PreOrderTask PreOrderTask; + // Set TBB ref count + set_ref_count(1 + (int)roots.size()); + // Create data and tasks for our children + std::vector tasks; + tasks.reserve(roots.size()); + BOOST_FOREACH(const boost::shared_ptr& root, roots) + { + tasks.push_back(new(allocate_child()) + PreOrderTask(root, visitorPre(root, myData), visitorPre, visitorPost, problemSizeThreshold)); + } + // Spawn tasks + BOOST_FOREACH(PreOrderTask* task, tasks) + spawn(*task); + // Wait for tasks to finish + wait_for_all(); + // Return NULL + return NULL; + } + }; + + template + RootTask& + CreateRootTask(const ROOTS& roots, DATA& rootData, VISITOR_PRE& visitorPre, VISITOR_POST& visitorPost, + int problemSizeThreshold) + { + typedef RootTask RootTask; + return *new(tbb::task::allocate_root()) RootTask(roots, rootData, visitorPre, visitorPost, problemSizeThreshold); + } + + /* ************************************************************************* */ + //template + //struct ParallelDFSData { + // DATA myData; + // FastList >& + //}; + } + + /** Traverse a forest depth-first with pre-order and post-order visits. + * @param forest The forest of trees to traverse. The method \c forest.roots() should exist + * and return a collection of (shared) pointers to \c FOREST::Node. + * @param visitorPre \c visitorPre(node, parentData) will be called at every node, before + * visiting its children, and will be passed, by reference, the \c DATA object returned + * by the visit to its parent. Likewise, \c visitorPre should return the \c DATA object + * to pass to the children. The returned \c DATA object will be copy-constructed only + * upon returning to store internally, thus may be modified by visiting the children. + * Regarding efficiency, this copy-on-return is usually optimized out by the compiler. + * @param visitorPost \c visitorPost(node, data) will be called at every node, after visiting + * its children, and will be passed, by reference, the \c DATA object returned by the + * call to \c visitorPre (the \c DATA object may be modified by visiting the children). + * @param rootData The data to pass by reference to \c visitorPre when it is called on each + * root node. */ + template + void DepthFirstForest(FOREST& forest, DATA& rootData, VISITOR_PRE& visitorPre, VISITOR_POST& visitorPost) + { + // Typedefs + typedef typename FOREST::Node Node; + typedef boost::shared_ptr sharedNode; + + // Depth first traversal stack + typedef TraversalNode TraversalNode; + typedef FastList Stack; + Stack stack; + FastList dataList; // List to store node data as it is returned from the pre-order visitor + + // Add roots to stack (insert such that they are visited and processed in order + { + typename Stack::iterator insertLocation = stack.begin(); + BOOST_FOREACH(const sharedNode& root, forest.roots()) + stack.insert(insertLocation, TraversalNode(root, rootData)); + } + + // Traverse + while(!stack.empty()) + { + // Get next node + TraversalNode& node = stack.front(); + + if(node.expanded) { + // If already expanded, then the data stored in the node is no longer needed, so visit + // then delete it. + (void) visitorPost(node.treeNode, *node.dataPointer); + dataList.erase(node.dataPointer); + stack.pop_front(); + } else { + // If not already visited, visit the node and add its children (use reverse iterators so + // children are processed in the order they appear) + node.dataPointer = dataList.insert(dataList.end(), visitorPre(node.treeNode, node.parentData)); + typename Stack::iterator insertLocation = stack.begin(); + BOOST_FOREACH(const sharedNode& child, node.treeNode->children) + stack.insert(insertLocation, TraversalNode(child, *node.dataPointer)); + node.expanded = true; + } + } + assert(dataList.empty()); + } + + /** Traverse a forest depth-first, with a pre-order visit but no post-order visit. + * @param forest The forest of trees to traverse. The method \c forest.roots() should exist + * and return a collection of (shared) pointers to \c FOREST::Node. + * @param visitorPre \c visitorPre(node, parentData) will be called at every node, before + * visiting its children, and will be passed, by reference, the \c DATA object returned + * by the visit to its parent. Likewise, \c visitorPre should return the \c DATA object + * to pass to the children. The returned \c DATA object will be copy-constructed only + * upon returning to store internally, thus may be modified by visiting the children. + * Regarding efficiency, this copy-on-return is usually optimized out by the compiler. + * @param rootData The data to pass by reference to \c visitorPre when it is called on each + * root node. */ + template + void DepthFirstForest(FOREST& forest, DATA& rootData, VISITOR_PRE& visitorPre) + { + DepthFirstForest(forest, rootData, visitorPre, no_op); + } + + /** Traverse a forest depth-first with pre-order and post-order visits. + * @param forest The forest of trees to traverse. The method \c forest.roots() should exist + * and return a collection of (shared) pointers to \c FOREST::Node. + * @param visitorPre \c visitorPre(node, parentData) will be called at every node, before + * visiting its children, and will be passed, by reference, the \c DATA object returned + * by the visit to its parent. Likewise, \c visitorPre should return the \c DATA object + * to pass to the children. The returned \c DATA object will be copy-constructed only + * upon returning to store internally, thus may be modified by visiting the children. + * Regarding efficiency, this copy-on-return is usually optimized out by the compiler. + * @param visitorPost \c visitorPost(node, data) will be called at every node, after visiting + * its children, and will be passed, by reference, the \c DATA object returned by the + * call to \c visitorPre (the \c DATA object may be modified by visiting the children). + * @param rootData The data to pass by reference to \c visitorPre when it is called on each + * root node. */ + template + void DepthFirstForestParallel(FOREST& forest, DATA& rootData, VISITOR_PRE& visitorPre, VISITOR_POST& visitorPost, + int problemSizeThreshold = 10) + { + // Typedefs + typedef typename FOREST::Node Node; + typedef boost::shared_ptr sharedNode; + + tbb::task::spawn_root_and_wait(CreateRootTask( + forest.roots(), rootData, visitorPre, visitorPost, problemSizeThreshold)); + } + + + /* ************************************************************************* */ + /** Traversal function for CloneForest */ + namespace { + template + boost::shared_ptr + CloneForestVisitorPre(const boost::shared_ptr& node, const boost::shared_ptr& parentPointer) + { + // Clone the current node and add it to its cloned parent + boost::shared_ptr clone = boost::make_shared(*node); + clone->children.clear(); + parentPointer->children.push_back(clone); + return clone; + } + } + + /** Clone a tree, copy-constructing new nodes (calling boost::make_shared) and setting up child + * pointers for a clone of the original tree. + * @param forest The forest of trees to clone. The method \c forest.roots() should exist and + * return a collection of shared pointers to \c FOREST::Node. + * @return The new collection of roots. */ + template + std::vector > CloneForest(const FOREST& forest) + { + typedef typename FOREST::Node Node; + boost::shared_ptr rootContainer = boost::make_shared(); + DepthFirstForest(forest, rootContainer, CloneForestVisitorPre); + return std::vector >(rootContainer->children.begin(), rootContainer->children.end()); + } + + + /* ************************************************************************* */ + /** Traversal function for PrintForest */ + namespace { + struct PrintForestVisitorPre + { + const KeyFormatter& formatter; + PrintForestVisitorPre(const KeyFormatter& formatter) : formatter(formatter) {} + template std::string operator()(const boost::shared_ptr& node, const std::string& parentString) + { + // Print the current node + node->print(parentString + "-", formatter); + // Increment the indentation + return parentString + "| "; + } + }; + } + + /** Print a tree, prefixing each line with \c str, and formatting keys using \c keyFormatter. + * To print each node, this function calls the \c print function of the tree nodes. */ + template + void PrintForest(const FOREST& forest, std::string str, const KeyFormatter& keyFormatter) { + typedef typename FOREST::Node Node; + PrintForestVisitorPre visitor(keyFormatter); + DepthFirstForest(forest, str, visitor); + } + } + +} diff --git a/gtsam/base/types.cpp b/gtsam/base/types.cpp index ea4db72c8..8b783d512 100644 --- a/gtsam/base/types.cpp +++ b/gtsam/base/types.cpp @@ -20,11 +20,22 @@ #include #include +#include namespace gtsam { + /* ************************************************************************* */ std::string _defaultIndexFormatter(Index j) { return boost::lexical_cast(j); } + /* ************************************************************************* */ + std::string _defaultKeyFormatter(Key key) { + const Symbol asSymbol(key); + if(asSymbol.chr() > 0) + return (std::string)asSymbol; + else + return boost::lexical_cast(key); + } + } \ No newline at end of file diff --git a/gtsam/base/types.h b/gtsam/base/types.h index 94fcb4b27..5a9ee28a3 100644 --- a/gtsam/base/types.h +++ b/gtsam/base/types.h @@ -25,6 +25,7 @@ #include #include +#include namespace gtsam { @@ -40,6 +41,26 @@ namespace gtsam { /** The default IndexFormatter outputs the index */ static const IndexFormatter DefaultIndexFormatter = &_defaultIndexFormatter; + + /// Integer nonlinear key type + typedef size_t Key; + + /// Typedef for a function to format a key, i.e. to convert it to a string + typedef boost::function KeyFormatter; + + // Helper function for DefaultKeyFormatter + GTSAM_EXPORT std::string _defaultKeyFormatter(Key key); + + /// The default KeyFormatter, which is used if no KeyFormatter is passed to + /// a nonlinear 'print' function. Automatically detects plain integer keys + /// and Symbol keys. + static const KeyFormatter DefaultKeyFormatter = &_defaultKeyFormatter; + + + /// The index type for Eigen objects + typedef ptrdiff_t DenseIndex; + + /** * Helper class that uses templates to select between two types based on * whether TEST_TYPE is const or not. @@ -83,6 +104,42 @@ namespace gtsam { operator T() const { return value; } }; + /** A helper class that behaves as a container with one element, and works with + * boost::range */ + template + class ListOfOneContainer { + T element_; + public: + typedef T value_type; + typedef const T* const_iterator; + typedef T* iterator; + ListOfOneContainer(const T& element) : element_(element) {} + const T* begin() const { return &element_; } + const T* end() const { return &element_ + 1; } + T* begin() { return &element_; } + T* end() { return &element_ + 1; } + size_t size() const { return 1; } + }; + + BOOST_CONCEPT_ASSERT((boost::RandomAccessRangeConcept >)); + + /** Factory function for ListOfOneContainer to enable ListOfOne(e) syntax. */ + template + ListOfOneContainer ListOfOne(const T& element) { + return ListOfOneContainer(element); + } + + /** An assertion that throws an exception if NDEBUG is not defined and + * evaluates to an empty statement otherwise. */ +#ifdef NDEBUG +#define assert_throw(CONDITION, EXCEPTION) ((void)0) +#else +#define assert_throw(CONDITION, EXCEPTION) \ + if(!(CONDITION)) { \ + throw (EXCEPTION); \ + } +#endif + } #ifdef _MSC_VER diff --git a/gtsam/discrete/DecisionTreeFactor.cpp b/gtsam/discrete/DecisionTreeFactor.cpp index 1eb428669..c4c2afedd 100644 --- a/gtsam/discrete/DecisionTreeFactor.cpp +++ b/gtsam/discrete/DecisionTreeFactor.cpp @@ -45,14 +45,14 @@ namespace gtsam { /* ************************************************************************* */ bool DecisionTreeFactor::equals(const This& other, double tol) const { - return IndexFactor::equals(other, tol) && Potentials::equals(other, tol); + return IndexFactorOrdered::equals(other, tol) && Potentials::equals(other, tol); } /* ************************************************************************* */ void DecisionTreeFactor::print(const string& s, const IndexFormatter& formatter) const { cout << s; - IndexFactor::print("IndexFactor:",formatter); + IndexFactorOrdered::print("IndexFactor:",formatter); Potentials::print("Potentials:",formatter); } diff --git a/gtsam/discrete/DiscreteBayesNet.cpp b/gtsam/discrete/DiscreteBayesNet.cpp index 98a61f7bd..f0a32ed64 100644 --- a/gtsam/discrete/DiscreteBayesNet.cpp +++ b/gtsam/discrete/DiscreteBayesNet.cpp @@ -18,13 +18,13 @@ #include #include -#include +#include #include namespace gtsam { // Explicitly instantiate so we don't have to include everywhere - template class BayesNet ; + template class BayesNetOrdered ; /* ************************************************************************* */ void add_front(DiscreteBayesNet& bayesNet, const Signature& s) { diff --git a/gtsam/discrete/DiscreteBayesNet.h b/gtsam/discrete/DiscreteBayesNet.h index 9c30d800a..aa7692142 100644 --- a/gtsam/discrete/DiscreteBayesNet.h +++ b/gtsam/discrete/DiscreteBayesNet.h @@ -20,12 +20,12 @@ #include #include #include -#include +#include #include namespace gtsam { - typedef BayesNet DiscreteBayesNet; + typedef BayesNetOrdered DiscreteBayesNet; /** Add a DiscreteCondtional */ GTSAM_EXPORT void add(DiscreteBayesNet&, const Signature& s); diff --git a/gtsam/discrete/DiscreteConditional.cpp b/gtsam/discrete/DiscreteConditional.cpp index ebf8c0c04..e0afd6354 100644 --- a/gtsam/discrete/DiscreteConditional.cpp +++ b/gtsam/discrete/DiscreteConditional.cpp @@ -37,14 +37,14 @@ namespace gtsam { /* ******************************************************************************** */ DiscreteConditional::DiscreteConditional(const size_t nrFrontals, const DecisionTreeFactor& f) : - IndexConditional(f.keys(), nrFrontals), Potentials( + IndexConditionalOrdered(f.keys(), nrFrontals), Potentials( f / (*f.sum(nrFrontals))) { } /* ******************************************************************************** */ DiscreteConditional::DiscreteConditional(const DecisionTreeFactor& joint, const DecisionTreeFactor& marginal) : - IndexConditional(joint.keys(), joint.size() - marginal.size()), Potentials( + IndexConditionalOrdered(joint.keys(), joint.size() - marginal.size()), Potentials( ISDEBUG("DiscreteConditional::COUNT") ? joint : joint / marginal) { if (ISDEBUG("DiscreteConditional::DiscreteConditional")) cout << (firstFrontalKey()) << endl; //TODO Print all keys @@ -52,20 +52,20 @@ namespace gtsam { /* ******************************************************************************** */ DiscreteConditional::DiscreteConditional(const Signature& signature) : - IndexConditional(signature.indices(), 1), Potentials( + IndexConditionalOrdered(signature.indices(), 1), Potentials( signature.discreteKeysParentsFirst(), signature.cpt()) { } /* ******************************************************************************** */ void DiscreteConditional::print(const std::string& s, const IndexFormatter& formatter) const { std::cout << s << std::endl; - IndexConditional::print(s, formatter); + IndexConditionalOrdered::print(s, formatter); Potentials::print(s); } /* ******************************************************************************** */ bool DiscreteConditional::equals(const DiscreteConditional& other, double tol) const { - return IndexConditional::equals(other, tol) + return IndexConditionalOrdered::equals(other, tol) && Potentials::equals(other, tol); } @@ -197,7 +197,7 @@ namespace gtsam { /* ******************************************************************************** */ void DiscreteConditional::permuteWithInverse(const Permutation& inversePermutation){ - IndexConditional::permuteWithInverse(inversePermutation); + IndexConditionalOrdered::permuteWithInverse(inversePermutation); Potentials::permuteWithInverse(inversePermutation); } diff --git a/gtsam/discrete/DiscreteConditional.h b/gtsam/discrete/DiscreteConditional.h index a14fa9ae7..3b46542ef 100644 --- a/gtsam/discrete/DiscreteConditional.h +++ b/gtsam/discrete/DiscreteConditional.h @@ -20,7 +20,7 @@ #include #include -#include +#include #include #include @@ -30,13 +30,13 @@ namespace gtsam { * Discrete Conditional Density * Derives from DecisionTreeFactor */ - class GTSAM_EXPORT DiscreteConditional: public IndexConditional, public Potentials { + class GTSAM_EXPORT DiscreteConditional: public IndexConditionalOrdered, public Potentials { public: // typedefs needed to play nice with gtsam typedef DiscreteFactor FactorType; typedef boost::shared_ptr shared_ptr; - typedef IndexConditional Base; + typedef IndexConditionalOrdered Base; /** A map from keys to values */ typedef Assignment Values; diff --git a/gtsam/discrete/DiscreteFactor.h b/gtsam/discrete/DiscreteFactor.h index 6adb1278b..a9a1ac0cf 100644 --- a/gtsam/discrete/DiscreteFactor.h +++ b/gtsam/discrete/DiscreteFactor.h @@ -19,7 +19,7 @@ #pragma once #include -#include +#include namespace gtsam { @@ -30,7 +30,7 @@ namespace gtsam { * Base class for discrete probabilistic factors * The most general one is the derived DecisionTreeFactor */ - class GTSAM_EXPORT DiscreteFactor: public IndexFactor { + class GTSAM_EXPORT DiscreteFactor: public IndexFactorOrdered { public: @@ -47,23 +47,23 @@ namespace gtsam { /// Construct n-way factor DiscreteFactor(const std::vector& js) : - IndexFactor(js) { + IndexFactorOrdered(js) { } /// Construct unary factor DiscreteFactor(Index j) : - IndexFactor(j) { + IndexFactorOrdered(j) { } /// Construct binary factor DiscreteFactor(Index j1, Index j2) : - IndexFactor(j1, j2) { + IndexFactorOrdered(j1, j2) { } /// construct from container template DiscreteFactor(KeyIterator beginKey, KeyIterator endKey) : - IndexFactor(beginKey, endKey) { + IndexFactorOrdered(beginKey, endKey) { } public: @@ -85,7 +85,7 @@ namespace gtsam { virtual void print(const std::string& s = "DiscreteFactor\n", const IndexFormatter& formatter =DefaultIndexFormatter) const { - IndexFactor::print(s,formatter); + IndexFactorOrdered::print(s,formatter); } /// @} @@ -105,14 +105,14 @@ namespace gtsam { * to already be inverted. */ virtual void permuteWithInverse(const Permutation& inversePermutation){ - IndexFactor::permuteWithInverse(inversePermutation); + IndexFactorOrdered::permuteWithInverse(inversePermutation); } /** * Apply a reduction, which is a remapping of variable indices. */ virtual void reduceWithInverse(const internal::Reduction& inverseReduction) { - IndexFactor::reduceWithInverse(inverseReduction); + IndexFactorOrdered::reduceWithInverse(inverseReduction); } /// @} diff --git a/gtsam/discrete/DiscreteFactorGraph.cpp b/gtsam/discrete/DiscreteFactorGraph.cpp index 5c4d815fb..642a87eca 100644 --- a/gtsam/discrete/DiscreteFactorGraph.cpp +++ b/gtsam/discrete/DiscreteFactorGraph.cpp @@ -19,14 +19,14 @@ //#define ENABLE_TIMING #include #include -#include +#include #include namespace gtsam { // Explicitly instantiate so we don't have to include everywhere - template class FactorGraph ; - template class EliminationTree ; + template class FactorGraphOrdered ; + template class EliminationTreeOrdered ; /* ************************************************************************* */ DiscreteFactorGraph::DiscreteFactorGraph() { @@ -34,8 +34,8 @@ namespace gtsam { /* ************************************************************************* */ DiscreteFactorGraph::DiscreteFactorGraph( - const BayesNet& bayesNet) : - FactorGraph(bayesNet) { + const BayesNetOrdered& bayesNet) : + FactorGraphOrdered(bayesNet) { } /* ************************************************************************* */ @@ -95,7 +95,7 @@ namespace gtsam { /* ************************************************************************* */ std::pair // - EliminateDiscrete(const FactorGraph& factors, size_t num) { + EliminateDiscrete(const FactorGraphOrdered& factors, size_t num) { // PRODUCT: multiply all factors gttic(product); diff --git a/gtsam/discrete/DiscreteFactorGraph.h b/gtsam/discrete/DiscreteFactorGraph.h index 7078662f7..b2da2d519 100644 --- a/gtsam/discrete/DiscreteFactorGraph.h +++ b/gtsam/discrete/DiscreteFactorGraph.h @@ -20,13 +20,13 @@ #include #include -#include +#include #include #include namespace gtsam { -class DiscreteFactorGraph: public FactorGraph { +class DiscreteFactorGraph: public FactorGraphOrdered { public: /** A map from keys to values */ @@ -39,12 +39,12 @@ public: /** Constructor from a factor graph of GaussianFactor or a derived type */ template - DiscreteFactorGraph(const FactorGraph& fg) { + DiscreteFactorGraph(const FactorGraphOrdered& fg) { push_back(fg); } /** construct from a BayesNet */ - GTSAM_EXPORT DiscreteFactorGraph(const BayesNet& bayesNet); + GTSAM_EXPORT DiscreteFactorGraph(const BayesNetOrdered& bayesNet); template void add(const DiscreteKey& j, SOURCE table) { @@ -90,7 +90,7 @@ public: /** Main elimination function for DiscreteFactorGraph */ GTSAM_EXPORT std::pair, DecisionTreeFactor::shared_ptr> -EliminateDiscrete(const FactorGraph& factors, +EliminateDiscrete(const FactorGraphOrdered& factors, size_t nrFrontals = 1); } // namespace gtsam diff --git a/gtsam/discrete/DiscreteMarginals.h b/gtsam/discrete/DiscreteMarginals.h index 411e4a4dd..c55f5956a 100644 --- a/gtsam/discrete/DiscreteMarginals.h +++ b/gtsam/discrete/DiscreteMarginals.h @@ -32,7 +32,7 @@ namespace gtsam { protected: - BayesTree bayesTree_; + BayesTreeOrdered bayesTree_; public: @@ -40,7 +40,7 @@ namespace gtsam { * @param graph The factor graph defining the full joint density on all variables. */ DiscreteMarginals(const DiscreteFactorGraph& graph) { - typedef JunctionTree DiscreteJT; + typedef JunctionTreeOrdered DiscreteJT; GenericMultifrontalSolver solver(graph); bayesTree_ = *solver.eliminate(&EliminateDiscrete); } diff --git a/gtsam/discrete/DiscreteSequentialSolver.h b/gtsam/discrete/DiscreteSequentialSolver.h index 36cf25545..8c007968e 100644 --- a/gtsam/discrete/DiscreteSequentialSolver.h +++ b/gtsam/discrete/DiscreteSequentialSolver.h @@ -47,7 +47,7 @@ namespace gtsam { * Construct the solver for a factor graph. This builds the elimination * tree, which already does some of the work of elimination. */ - DiscreteSequentialSolver(const FactorGraph& factorGraph) : + DiscreteSequentialSolver(const FactorGraphOrdered& factorGraph) : Base(factorGraph) { } @@ -57,12 +57,12 @@ namespace gtsam { * is the fastest. */ DiscreteSequentialSolver( - const FactorGraph::shared_ptr& factorGraph, - const VariableIndex::shared_ptr& variableIndex) : + const FactorGraphOrdered::shared_ptr& factorGraph, + const VariableIndexOrdered::shared_ptr& variableIndex) : Base(factorGraph, variableIndex) { } - const EliminationTree& eliminationTree() const { + const EliminationTreeOrdered& eliminationTree() const { return *eliminationTree_; } @@ -70,7 +70,7 @@ namespace gtsam { * Eliminate the factor graph sequentially. Uses a column elimination tree * to recursively eliminate. */ - BayesNet::shared_ptr eliminate() const { + BayesNetOrdered::shared_ptr eliminate() const { return Base::eliminate(&EliminateDiscrete); } diff --git a/gtsam/discrete/Potentials.h b/gtsam/discrete/Potentials.h index 33e266c91..1f190da82 100644 --- a/gtsam/discrete/Potentials.h +++ b/gtsam/discrete/Potentials.h @@ -19,7 +19,7 @@ #include #include -#include +#include #include #include diff --git a/gtsam/discrete/tests/testDiscreteBayesTree.cpp b/gtsam/discrete/tests/testDiscreteBayesTree.cpp index e3a771940..eb8d3b3a6 100644 --- a/gtsam/discrete/tests/testDiscreteBayesTree.cpp +++ b/gtsam/discrete/tests/testDiscreteBayesTree.cpp @@ -17,7 +17,7 @@ #include #include -#include +#include #include using namespace boost::assign; @@ -32,13 +32,13 @@ static bool debug = false; /** * Custom clique class to debug shortcuts */ -class Clique: public BayesTreeCliqueBase { +class Clique: public BayesTreeCliqueBaseOrdered { protected: public: - typedef BayesTreeCliqueBase Base; + typedef BayesTreeCliqueBaseOrdered Base; typedef boost::shared_ptr shared_ptr; // Constructors @@ -56,7 +56,7 @@ public: /// print index signature only void printSignature(const std::string& s = "Clique: ", const IndexFormatter& indexFormatter = DefaultIndexFormatter) const { - ((IndexConditional::shared_ptr) conditional_)->print(s, indexFormatter); + ((IndexConditionalOrdered::shared_ptr) conditional_)->print(s, indexFormatter); } /// evaluate value of sub-tree @@ -70,7 +70,7 @@ public: }; -typedef BayesTree DiscreteBayesTree; +typedef BayesTreeOrdered DiscreteBayesTree; /* ************************************************************************* */ double evaluate(const DiscreteBayesTree& tree, diff --git a/gtsam/discrete/tests/testDiscreteFactorGraph.cpp b/gtsam/discrete/tests/testDiscreteFactorGraph.cpp index f1b29a757..e30d3bb61 100644 --- a/gtsam/discrete/tests/testDiscreteFactorGraph.cpp +++ b/gtsam/discrete/tests/testDiscreteFactorGraph.cpp @@ -60,7 +60,7 @@ TEST_UNSAFE( DiscreteFactorGraph, debugScheduler) { // DiscreteSequentialSolver solver(graph); // solver.print("solver:"); - EliminationTree eliminationTree = solver.eliminationTree(); + EliminationTreeOrdered eliminationTree = solver.eliminationTree(); // eliminationTree.print("Elimination tree: "); eliminationTree.eliminate(EliminateDiscrete); // solver.optimize(); @@ -155,7 +155,7 @@ TEST( DiscreteFactorGraph, test) // GTSAM_PRINT(expected); // Test elimination tree - const EliminationTree& etree = solver.eliminationTree(); + const EliminationTreeOrdered& etree = solver.eliminationTree(); DiscreteBayesNet::shared_ptr actual = etree.eliminate(&EliminateDiscrete); EXPECT(assert_equal(expected, *actual)); @@ -221,16 +221,16 @@ TEST( DiscreteFactorGraph, testMPE_Darwiche09book_p244) // EXPECT_DOUBLES_EQUAL(0.4, (*root)(*actualMPE), 1e-9); // Let us create the Bayes tree here, just for fun, because we don't use it now - typedef JunctionTree JT; + typedef JunctionTreeOrdered JT; GenericMultifrontalSolver solver(graph); - BayesTree::shared_ptr bayesTree = solver.eliminate(&EliminateDiscrete); + BayesTreeOrdered::shared_ptr bayesTree = solver.eliminate(&EliminateDiscrete); // bayesTree->print("Bayes Tree"); EXPECT_LONGS_EQUAL(2,bayesTree->size()); #ifdef OLD // Create the elimination tree manually -VariableIndex structure(graph); -typedef EliminationTree ETree; +VariableIndexOrdered structure(graph); +typedef EliminationTreeOrdered ETree; ETree::shared_ptr eTree = ETree::Create(graph, structure); //eTree->print(">>>>>>>>>>> Elimination Tree <<<<<<<<<<<<<<<<<"); diff --git a/gtsam/discrete/tests/testDiscreteMarginals.cpp b/gtsam/discrete/tests/testDiscreteMarginals.cpp index 5ca26200f..a66ee0de2 100644 --- a/gtsam/discrete/tests/testDiscreteMarginals.cpp +++ b/gtsam/discrete/tests/testDiscreteMarginals.cpp @@ -119,9 +119,9 @@ TEST_UNSAFE( DiscreteMarginals, truss ) { graph.add(key[0] & key[2] & key[4],"2 2 2 2 1 1 1 1"); graph.add(key[1] & key[3] & key[4],"1 1 1 1 2 2 2 2"); graph.add(key[2] & key[3] & key[4],"1 1 1 1 1 1 1 1"); - typedef JunctionTree JT; + typedef JunctionTreeOrdered JT; GenericMultifrontalSolver solver(graph); - BayesTree::shared_ptr bayesTree = solver.eliminate(&EliminateDiscrete); + BayesTreeOrdered::shared_ptr bayesTree = solver.eliminate(&EliminateDiscrete); // bayesTree->print("Bayes Tree"); typedef BayesTreeClique Clique; diff --git a/gtsam/dllexport.h b/gtsam/dllexport.h deleted file mode 100644 index 0e327fc14..000000000 --- a/gtsam/dllexport.h +++ /dev/null @@ -1,36 +0,0 @@ -/* ---------------------------------------------------------------------------- - - * GTSAM Copyright 2010, Georgia Tech Research Corporation, - * Atlanta, Georgia 30332-0415 - * All Rights Reserved - * Authors: Frank Dellaert, et al. (see THANKS for the full author list) - - * See LICENSE for the license information - - * -------------------------------------------------------------------------- */ - -/** - * @file dllexport.h - * @brief Symbols for exporting classes and methods from DLLs - * @author Richard Roberts - * @date Mar 9, 2013 - */ - -#ifdef _WIN32 -# ifdef GTSAM_EXPORTS -# define GTSAM_EXPORT __declspec(dllexport) -# define GTSAM_EXTERN_EXPORT __declspec(dllexport) extern -# else -# ifndef GTSAM_IMPORT_STATIC -# define GTSAM_EXPORT __declspec(dllimport) -# define GTSAM_EXTERN_EXPORT __declspec(dllimport) -# else /* GTSAM_IMPORT_STATIC */ -# define GTSAM_EXPORT -# define GTSAM_EXTERN_EXPORT extern -# endif /* GTSAM_IMPORT_STATIC */ -# endif /* GTSAM_EXPORTS */ -#else /* _WIN32 */ -# define GTSAM_EXPORT -# define GTSAM_EXTERN_EXPORT extern -#endif - diff --git a/gtsam/inference/.cvsignore b/gtsam/inference/.cvsignore deleted file mode 100644 index 67155973c..000000000 --- a/gtsam/inference/.cvsignore +++ /dev/null @@ -1,43 +0,0 @@ -*.lo -testPoint2 -testSimulated2D -testCal3_S2 -testChordalBayesNet -testNonlinearFactorGraph -testLinearFactor -testConditionalGaussian -testPose3 -testFactorgraph -testSimulated3D -testFGConfig -testNonlinearFactor -testPoint3 -testMatrix -testVector -testRot3 -testLinearFactorGraph -testVSLAMFactor -config.h -html -config.h.in -stamp-h1 -Makefile -Makefile.in -libgtsam.la -simple_svd_test -simple_ublas_resize_test -simple_ublas_resize_test.cpp -test_svdcmp.cpp -.deps -.libs -resize_test -testConstrainedChordalBayesNet -testConstrainedLinearFactorGraph -testConstrainedNonlinearFactorGraph -testDeltaFunction -testUnconstrainedLinearFactorGraph -testUnconstrainedNonlinearFactorGraph -testEqualityFactor -.DS_Store -testSimpleCamera -testCalibratedCamera diff --git a/gtsam/inference/BayesNet-inl.h b/gtsam/inference/BayesNet-inl.h deleted file mode 100644 index f0e26c98b..000000000 --- a/gtsam/inference/BayesNet-inl.h +++ /dev/null @@ -1,192 +0,0 @@ -/* ---------------------------------------------------------------------------- - - * GTSAM Copyright 2010, Georgia Tech Research Corporation, - * Atlanta, Georgia 30332-0415 - * All Rights Reserved - * Authors: Frank Dellaert, et al. (see THANKS for the full author list) - - * See LICENSE for the license information - - * -------------------------------------------------------------------------- */ - -/** - * @file BayesNet-inl.h - * @brief Bayes net template definitions - * @author Frank Dellaert - */ - -#pragma once - -#include - -#include -#include -#include - -#include // for += -using boost::assign::operator+=; - -#include -#include -#include - -namespace gtsam { - - /* ************************************************************************* */ - template - void BayesNet::print(const std::string& s, - const IndexFormatter& formatter) const { - std::cout << s; - BOOST_REVERSE_FOREACH(sharedConditional conditional, conditionals_) - conditional->print("Conditional", formatter); - } - - /* ************************************************************************* */ - template - bool BayesNet::equals(const BayesNet& cbn, double tol) const { - if (size() != cbn.size()) - return false; - return std::equal(conditionals_.begin(), conditionals_.end(), - cbn.conditionals_.begin(), equals_star(tol)); - } - - /* ************************************************************************* */ - template - void BayesNet::printStats(const std::string& s) const { - - const size_t n = conditionals_.size(); - size_t max_size = 0; - size_t total = 0; - BOOST_REVERSE_FOREACH(sharedConditional conditional, conditionals_) { - max_size = std::max(max_size, conditional->size()); - total += conditional->size(); - } - std::cout << s - << "maximum conditional size = " << max_size << std::endl - << "average conditional size = " << total / n << std::endl - << "density = " << 100.0 * total / (double) (n*(n+1)/2) << " %" << std::endl; - } - - /* ************************************************************************* */ - template - void BayesNet::saveGraph(const std::string &s, - const IndexFormatter& indexFormatter) const { - std::ofstream of(s.c_str()); - of << "digraph G{\n"; - - BOOST_REVERSE_FOREACH(typename CONDITIONAL::shared_ptr conditional, conditionals_) { - typename CONDITIONAL::Frontals frontals = conditional->frontals(); - Index me = frontals.front(); - typename CONDITIONAL::Parents parents = conditional->parents(); - BOOST_FOREACH(Index p, parents) - of << p << "->" << me << std::endl; - } - - of << "}"; - of.close(); - } - - /* ************************************************************************* */ - template - typename BayesNet::const_iterator BayesNet::find( - Index key) const { - for (const_iterator it = begin(); it != end(); ++it) - if (std::find((*it)->beginFrontals(), (*it)->endFrontals(), key) - != (*it)->endFrontals()) - return it; - return end(); - } - - /* ************************************************************************* */ - template - typename BayesNet::iterator BayesNet::find( - Index key) { - for (iterator it = begin(); it != end(); ++it) - if (std::find((*it)->beginFrontals(), (*it)->endFrontals(), key) - != (*it)->endFrontals()) - return it; - return end(); - } - - /* ************************************************************************* */ - template - void BayesNet::permuteWithInverse( - const Permutation& inversePermutation) { - BOOST_FOREACH(sharedConditional conditional, conditionals_) { - conditional->permuteWithInverse(inversePermutation); - } - } - - /* ************************************************************************* */ - template - void BayesNet::push_back(const BayesNet& bn) { - BOOST_FOREACH(sharedConditional conditional,bn.conditionals_) - push_back(conditional); - } - - /* ************************************************************************* */ - template - void BayesNet::push_front(const BayesNet& bn) { - BOOST_FOREACH(sharedConditional conditional,bn.conditionals_) - push_front(conditional); - } - - /* ************************************************************************* */ - template - void BayesNet::popLeaf(iterator conditional) { -#ifdef GTSAM_EXTRA_CONSISTENCY_CHECKS - BOOST_FOREACH(typename CONDITIONAL::shared_ptr checkConditional, conditionals_) { - BOOST_FOREACH(Index key, (*conditional)->frontals()) { - if(std::find(checkConditional->beginParents(), checkConditional->endParents(), key) != checkConditional->endParents()) - throw std::invalid_argument( - "Debug mode exception: in BayesNet::popLeaf, the requested conditional is not a leaf."); - } - } -#endif - conditionals_.erase(conditional); - } - - /* ************************************************************************* */ - template - FastList BayesNet::ordering() const { - FastList ord; - BOOST_FOREACH(sharedConditional conditional,conditionals_) - ord.insert(ord.begin(), conditional->beginFrontals(), - conditional->endFrontals()); - return ord; - } - -// /* ************************************************************************* */ -// template -// void BayesNet::saveGraph(const std::string &s) const { -// ofstream of(s.c_str()); -// of<< "digraph G{\n"; -// BOOST_FOREACH(const_sharedConditional conditional,conditionals_) { -// Index child = conditional->key(); -// BOOST_FOREACH(Index parent, conditional->parents()) { -// of << parent << "->" << child << endl; -// } -// } -// of<<"}"; -// of.close(); -// } -// - /* ************************************************************************* */ - - template - typename BayesNet::sharedConditional BayesNet::operator[]( - Index key) const { - BOOST_FOREACH(typename CONDITIONAL::shared_ptr conditional, conditionals_) { - typename CONDITIONAL::const_iterator it = std::find( - conditional->beginFrontals(), conditional->endFrontals(), key); - if (it != conditional->endFrontals()) { - return conditional; - } - } - throw(std::invalid_argument( - (boost::format("BayesNet::operator['%1%']: not found") % key).str())); - } - -/* ************************************************************************* */ - -} // namespace gtsam diff --git a/gtsam/inference/BayesNet-inst.h b/gtsam/inference/BayesNet-inst.h new file mode 100644 index 000000000..db9c29519 --- /dev/null +++ b/gtsam/inference/BayesNet-inst.h @@ -0,0 +1,55 @@ +/* ---------------------------------------------------------------------------- + +* GTSAM Copyright 2010, Georgia Tech Research Corporation, +* Atlanta, Georgia 30332-0415 +* All Rights Reserved +* Authors: Frank Dellaert, et al. (see THANKS for the full author list) + +* See LICENSE for the license information + +* -------------------------------------------------------------------------- */ + +/** +* @file BayesNet.h +* @brief Bayes network +* @author Frank Dellaert +* @author Richard Roberts +*/ + +#pragma once + +#include +#include + +#include +#include + +namespace gtsam { + + /* ************************************************************************* */ + template + void BayesNet::print(const std::string& s, const KeyFormatter& formatter) const + { + Base::print(s, formatter); + } + + /* ************************************************************************* */ + template + void BayesNet::saveGraph(const std::string &s, const KeyFormatter& keyFormatter) const + { + std::ofstream of(s.c_str()); + of << "digraph G{\n"; + + BOOST_REVERSE_FOREACH(const sharedConditional& conditional, *this) { + typename CONDITIONAL::Frontals frontals = conditional->frontals(); + Key me = frontals.front(); + typename CONDITIONAL::Parents parents = conditional->parents(); + BOOST_FOREACH(Key p, parents) + of << p << "->" << me << std::endl; + } + + of << "}"; + of.close(); + } + +} diff --git a/gtsam/inference/BayesNet.h b/gtsam/inference/BayesNet.h index ad39da301..62072ee18 100644 --- a/gtsam/inference/BayesNet.h +++ b/gtsam/inference/BayesNet.h @@ -1,244 +1,72 @@ -/* ---------------------------------------------------------------------------- - - * GTSAM Copyright 2010, Georgia Tech Research Corporation, - * Atlanta, Georgia 30332-0415 - * All Rights Reserved - * Authors: Frank Dellaert, et al. (see THANKS for the full author list) - - * See LICENSE for the license information - - * -------------------------------------------------------------------------- */ - -/** - * @file BayesNet.h - * @brief Bayes network - * @author Frank Dellaert - */ - -// \callgraph - -#pragma once - -#include -#include -#include -#include -#include - -#include -#include -#include - -namespace gtsam { - -/** - * A BayesNet is a list of conditionals, stored in elimination order, i.e. - * leaves first, parents last. GaussianBayesNet and SymbolicBayesNet are - * defined as typedefs of this class, using GaussianConditional and - * IndexConditional as the CONDITIONAL template argument. - * - * todo: Symbolic using Index is a misnomer. - * todo: how to handle Bayes nets with an optimize function? Currently using global functions. - * \nosubgrouping - */ -template -class BayesNet { - -public: - - typedef typename boost::shared_ptr > shared_ptr; - - /** We store shared pointers to Conditional densities */ - typedef typename CONDITIONAL::KeyType KeyType; - typedef typename boost::shared_ptr sharedConditional; - typedef typename boost::shared_ptr const_sharedConditional; - typedef typename std::list Conditionals; - - typedef typename Conditionals::iterator iterator; - typedef typename Conditionals::reverse_iterator reverse_iterator; - typedef typename Conditionals::const_iterator const_iterator; - typedef typename Conditionals::const_reverse_iterator const_reverse_iterator; - -protected: - - /** - * Conditional densities are stored in reverse topological sort order (i.e., leaves first, - * parents last), which corresponds to the elimination ordering if so obtained, - * and is consistent with the column (block) ordering of an upper triangular matrix. - */ - Conditionals conditionals_; - -public: - - /// @name Standard Constructors - /// @{ - - /** Default constructor as an empty BayesNet */ - BayesNet() {}; - - /** convert from a derived type */ - template - BayesNet(const BayesNet& bn) { - conditionals_.assign(bn.begin(), bn.end()); - } - - /** BayesNet with 1 conditional */ - explicit BayesNet(const sharedConditional& conditional) { push_back(conditional); } - - /// @} - /// @name Testable - /// @{ - - /** print */ - void print(const std::string& s = "", - const IndexFormatter& formatter = DefaultIndexFormatter) const; - - /** check equality */ - bool equals(const BayesNet& other, double tol = 1e-9) const; - - /// @} - /// @name Standard Interface - /// @{ - - /** size is the number of nodes */ - size_t size() const { - return conditionals_.size(); - } - - /** @return true if there are no conditionals/nodes */ - bool empty() const { - return conditionals_.empty(); - } - - /** print statistics */ - void printStats(const std::string& s = "") const; - - /** save dot graph */ - void saveGraph(const std::string &s, const IndexFormatter& indexFormatter = - DefaultIndexFormatter) const; - - /** return keys in reverse topological sort order, i.e., elimination order */ - FastList ordering() const; - - /** SLOW O(n) random access to Conditional by key */ - sharedConditional operator[](Index key) const; - - /** return last node in ordering */ - boost::shared_ptr front() const { return conditionals_.front(); } - - /** return last node in ordering */ - boost::shared_ptr back() const { return conditionals_.back(); } - - /** return iterators. FD: breaks encapsulation? */ - const_iterator begin() const {return conditionals_.begin();} ///& bn); - - /// push_front an entire Bayes net - void push_front(const BayesNet& bn); - - /** += syntax for push_back, e.g. bayesNet += c1, c2, c3 - * @param conditional The conditional to add to the back of the BayesNet - */ - boost::assign::list_inserter >, sharedConditional> - operator+=(const sharedConditional& conditional) { - return boost::assign::make_list_inserter(boost::assign_detail::call_push_back >(*this))(conditional); } - - /** - * pop_front: remove node at the bottom, used in marginalization - * For example P(ABC)=P(A|BC)P(B|C)P(C) becomes P(BC)=P(B|C)P(C) - */ - void pop_front() {conditionals_.pop_front();} - - /** Permute the variables in the BayesNet */ - void permuteWithInverse(const Permutation& inversePermutation); - - iterator begin() {return conditionals_.begin();} /// - void serialize(ARCHIVE & ar, const unsigned int version) { - ar & BOOST_SERIALIZATION_NVP(conditionals_); - } - - /// @} -}; // BayesNet - -} // namespace gtsam - -#include +/* ---------------------------------------------------------------------------- + +* GTSAM Copyright 2010, Georgia Tech Research Corporation, +* Atlanta, Georgia 30332-0415 +* All Rights Reserved +* Authors: Frank Dellaert, et al. (see THANKS for the full author list) + +* See LICENSE for the license information + +* -------------------------------------------------------------------------- */ + +/** +* @file BayesNet.h +* @brief Bayes network +* @author Frank Dellaert +* @author Richard Roberts +*/ + +#pragma once + +#include + +#include + +namespace gtsam { + + /** + * A BayesNet is a tree of conditionals, stored in elimination order. + * + * todo: how to handle Bayes nets with an optimize function? Currently using global functions. + * \nosubgrouping + */ + template + class BayesNet : public FactorGraph { + + private: + + typedef FactorGraph Base; + + public: + typedef typename boost::shared_ptr sharedConditional; ///< A shared pointer to a conditional + + protected: + /// @name Standard Constructors + /// @{ + + /** Default constructor as an empty BayesNet */ + BayesNet() {}; + + /** Construct from iterator over conditionals */ + template + BayesNet(ITERATOR firstConditional, ITERATOR lastConditional) : Base(firstConditional, lastConditional) {} + + /// @} + + public: + /// @name Testable + /// @{ + + /** print out graph */ + void print(const std::string& s = "BayesNet", + const KeyFormatter& formatter = DefaultKeyFormatter) const; + + /// @} + + /// @name Standard Interface + /// @{ + + void saveGraph(const std::string &s, const KeyFormatter& keyFormatter = DefaultKeyFormatter) const; + }; + +} \ No newline at end of file diff --git a/gtsam/inference/BayesTree-inl.h b/gtsam/inference/BayesTree-inl.h deleted file mode 100644 index d5592ed2a..000000000 --- a/gtsam/inference/BayesTree-inl.h +++ /dev/null @@ -1,804 +0,0 @@ -/* ---------------------------------------------------------------------------- - - * GTSAM Copyright 2010, Georgia Tech Research Corporation, - * Atlanta, Georgia 30332-0415 - * All Rights Reserved - * Authors: Frank Dellaert, et al. (see THANKS for the full author list) - - * See LICENSE for the license information - - * -------------------------------------------------------------------------- */ - -/** - * @file BayesTree-inl.h - * @brief Bayes Tree is a tree of cliques of a Bayes Chain - * @author Frank Dellaert - * @author Michael Kaess - * @author Viorela Ila - * @author Richard Roberts - */ - -#pragma once - -#include -#include -#include -#include -#include -#include - -#include -#include -#include - -#include -#include // for operator += -using boost::assign::operator+=; -#include - -namespace gtsam { - - /* ************************************************************************* */ - template - typename BayesTree::CliqueData - BayesTree::getCliqueData() const { - CliqueData data; - getCliqueData(data, root_); - return data; - } - - /* ************************************************************************* */ - template - void BayesTree::getCliqueData(CliqueData& data, sharedClique clique) const { - data.conditionalSizes.push_back((*clique)->nrFrontals()); - data.separatorSizes.push_back((*clique)->nrParents()); - BOOST_FOREACH(sharedClique c, clique->children_) { - getCliqueData(data, c); - } - } - - /* ************************************************************************* */ - template - size_t BayesTree::numCachedSeparatorMarginals() const { - return (root_) ? root_->numCachedSeparatorMarginals() : 0; - } - - /* ************************************************************************* */ - template - void BayesTree::saveGraph(const std::string &s, const IndexFormatter& indexFormatter) const { - if (!root_.get()) throw std::invalid_argument("the root of Bayes tree has not been initialized!"); - std::ofstream of(s.c_str()); - of<< "digraph G{\n"; - saveGraph(of, root_, indexFormatter); - of<<"}"; - of.close(); - } - - /* ************************************************************************* */ - template - void BayesTree::saveGraph(std::ostream &s, sharedClique clique, const IndexFormatter& indexFormatter, int parentnum) const { - static int num = 0; - bool first = true; - std::stringstream out; - out << num; - std::string parent = out.str(); - parent += "[label=\""; - - BOOST_FOREACH(Index index, clique->conditional_->frontals()) { - if(!first) parent += ","; first = false; - parent += indexFormatter(index); - } - - if( clique != root_){ - parent += " : "; - s << parentnum << "->" << num << "\n"; - } - - first = true; - BOOST_FOREACH(Index sep, clique->conditional_->parents()) { - if(!first) parent += ","; first = false; - parent += indexFormatter(sep); - } - parent += "\"];\n"; - s << parent; - parentnum = num; - - BOOST_FOREACH(sharedClique c, clique->children_) { - num++; - saveGraph(s, c, indexFormatter, parentnum); - } - } - - - /* ************************************************************************* */ - template - void BayesTree::CliqueStats::print(const std::string& s) const { - std::cout << s - <<"avg Conditional Size: " << avgConditionalSize << std::endl - << "max Conditional Size: " << maxConditionalSize << std::endl - << "avg Separator Size: " << avgSeparatorSize << std::endl - << "max Separator Size: " << maxSeparatorSize << std::endl; - } - - /* ************************************************************************* */ - template - typename BayesTree::CliqueStats - BayesTree::CliqueData::getStats() const { - CliqueStats stats; - - double sum = 0.0; - size_t max = 0; - BOOST_FOREACH(size_t s, conditionalSizes) { - sum += (double)s; - if(s > max) max = s; - } - stats.avgConditionalSize = sum / (double)conditionalSizes.size(); - stats.maxConditionalSize = max; - - sum = 0.0; - max = 1; - BOOST_FOREACH(size_t s, separatorSizes) { - sum += (double)s; - if(s > max) max = s; - } - stats.avgSeparatorSize = sum / (double)separatorSizes.size(); - stats.maxSeparatorSize = max; - - return stats; - } - - /* ************************************************************************* */ - template - void BayesTree::Cliques::print(const std::string& s, const IndexFormatter& indexFormatter) const { - std::cout << s << ":\n"; - BOOST_FOREACH(sharedClique clique, *this) - clique->printTree("", indexFormatter); - } - - /* ************************************************************************* */ - template - bool BayesTree::Cliques::equals(const Cliques& other, double tol) const { - return other == *this; - } - - /* ************************************************************************* */ - template - typename BayesTree::sharedClique - BayesTree::addClique(const sharedConditional& conditional, const sharedClique& parent_clique) { - sharedClique new_clique(new Clique(conditional)); - addClique(new_clique, parent_clique); - return new_clique; - } - - /* ************************************************************************* */ - template - void BayesTree::addClique(const sharedClique& clique, const sharedClique& parent_clique) { - nodes_.resize(std::max((*clique)->lastFrontalKey()+1, nodes_.size())); - BOOST_FOREACH(Index j, (*clique)->frontals()) - nodes_[j] = clique; - if (parent_clique != NULL) { - clique->parent_ = parent_clique; - parent_clique->children_.push_back(clique); - } else { - assert(!root_); - root_ = clique; - } - clique->assertInvariants(); - } - - /* ************************************************************************* */ - template - typename BayesTree::sharedClique BayesTree::addClique( - const sharedConditional& conditional, std::list& child_cliques) { - sharedClique new_clique(new Clique(conditional)); - nodes_.resize(std::max(conditional->lastFrontalKey()+1, nodes_.size())); - BOOST_FOREACH(Index j, conditional->frontals()) - nodes_[j] = new_clique; - new_clique->children_ = child_cliques; - BOOST_FOREACH(sharedClique& child, child_cliques) - child->parent_ = new_clique; - new_clique->assertInvariants(); - return new_clique; - } - - /* ************************************************************************* */ - template - void BayesTree::permuteWithInverse(const Permutation& inversePermutation) { - // recursively permute the cliques and internal conditionals - if (root_) - root_->permuteWithInverse(inversePermutation); - - // need to know what the largest key is to get the right number of cliques - Index maxIndex = *std::max_element(inversePermutation.begin(), inversePermutation.end()); - - // Update the nodes structure - typename BayesTree::Nodes newNodes(maxIndex+1); -// inversePermutation.applyToCollection(newNodes, nodes_); // Uses the forward, rather than inverse permutation - for(size_t i = 0; i < nodes_.size(); ++i) - newNodes[inversePermutation[i]] = nodes_[i]; - - nodes_ = newNodes; - } - - /* ************************************************************************* */ - template - inline void BayesTree::addToCliqueFront(BayesTree& bayesTree, const sharedConditional& conditional, const sharedClique& clique) { - static const bool debug = false; -#ifdef GTSAM_EXTRA_CONSISTENCY_CHECKS - // Debug check to make sure the conditional variable is ordered lower than - // its parents and that all of its parents are present either in this - // clique or its separator. - BOOST_FOREACH(Index parent, conditional->parents()) { - assert(parent > conditional->lastFrontalKey()); - const Clique& cliquer(*clique); - assert(find(cliquer->begin(), cliquer->end(), parent) != cliquer->end()); - } -#endif - if(debug) conditional->print("Adding conditional "); - if(debug) clique->print("To clique "); - Index j = conditional->lastFrontalKey(); - bayesTree.nodes_.resize(std::max(j+1, bayesTree.nodes_.size())); - bayesTree.nodes_[j] = clique; - FastVector newIndices((*clique)->size() + 1); - newIndices[0] = j; - std::copy((*clique)->begin(), (*clique)->end(), newIndices.begin()+1); - clique->conditional_ = CONDITIONAL::FromKeys(newIndices, (*clique)->nrFrontals() + 1); - if(debug) clique->print("Expanded clique is "); - clique->assertInvariants(); - } - - /* ************************************************************************* */ - template - void BayesTree::removeClique(sharedClique clique) { - - if (clique->isRoot()) - root_.reset(); - else { // detach clique from parent - sharedClique parent = clique->parent_.lock(); - typename FastList::iterator child = std::find(parent->children().begin(), parent->children().end(), clique); - assert(child != parent->children().end()); - parent->children().erase(child); - } - - // orphan my children - BOOST_FOREACH(sharedClique child, clique->children_) - child->parent_ = typename Clique::weak_ptr(); - - BOOST_FOREACH(Index j, clique->conditional()->frontals()) { - nodes_[j].reset(); - } - } - - /* ************************************************************************* */ - template - void BayesTree::recursiveTreeBuild(const boost::shared_ptr >& symbolic, - const std::vector >& conditionals, - const typename BayesTree::sharedClique& parent) { - - // Helper function to build a non-symbolic tree (e.g. Gaussian) using a - // symbolic tree, used in the BT(BN) constructor. - - // Build the current clique - FastList cliqueConditionals; - BOOST_FOREACH(Index j, symbolic->conditional()->frontals()) { - cliqueConditionals.push_back(conditionals[j]); } - typename BayesTree::sharedClique thisClique(new CLIQUE(CONDITIONAL::Combine(cliqueConditionals.begin(), cliqueConditionals.end()))); - - // Add the new clique with the current parent - this->addClique(thisClique, parent); - - // Build the children, whose parent is the new clique - BOOST_FOREACH(const BayesTree::sharedClique& child, symbolic->children()) { - this->recursiveTreeBuild(child, conditionals, thisClique); } - } - - /* ************************************************************************* */ - template - BayesTree::BayesTree(const BayesNet& bayesNet) { - // First generate symbolic BT to determine clique structure - BayesTree sbt(bayesNet); - - // Build index of variables to conditionals - std::vector > conditionals(sbt.root()->conditional()->frontals().back() + 1); - BOOST_FOREACH(const boost::shared_ptr& c, bayesNet) { - if(c->nrFrontals() != 1) - throw std::invalid_argument("BayesTree constructor from BayesNet only supports single frontal variable conditionals"); - if(c->firstFrontalKey() >= conditionals.size()) - throw std::invalid_argument("An inconsistent BayesNet was passed into the BayesTree constructor!"); - if(conditionals[c->firstFrontalKey()]) - throw std::invalid_argument("An inconsistent BayesNet with duplicate frontal variables was passed into the BayesTree constructor!"); - - conditionals[c->firstFrontalKey()] = c; - } - - // Build the new tree - this->recursiveTreeBuild(sbt.root(), conditionals, sharedClique()); - } - - /* ************************************************************************* */ - template<> - inline BayesTree::BayesTree(const BayesNet& bayesNet) { - BayesNet::const_reverse_iterator rit; - for ( rit=bayesNet.rbegin(); rit != bayesNet.rend(); ++rit ) - insert(*this, *rit); - } - - /* ************************************************************************* */ - template - BayesTree::BayesTree(const BayesNet& bayesNet, std::list > subtrees) { - if (bayesNet.size() == 0) - throw std::invalid_argument("BayesTree::insert: empty bayes net!"); - - // get the roots of child subtrees and merge their nodes_ - std::list childRoots; - typedef BayesTree Tree; - BOOST_FOREACH(const Tree& subtree, subtrees) { - nodes_.assign(subtree.nodes_.begin(), subtree.nodes_.end()); - childRoots.push_back(subtree.root()); - } - - // create a new clique and add all the conditionals to the clique - sharedClique new_clique; - typename BayesNet::sharedConditional conditional; - BOOST_REVERSE_FOREACH(conditional, bayesNet) { - if (!new_clique.get()) - new_clique = addClique(conditional,childRoots); - else - addToCliqueFront(*this, conditional, new_clique); - } - - root_ = new_clique; - } - - /* ************************************************************************* */ - template - BayesTree::BayesTree(const This& other) { - *this = other; - } - - /* ************************************************************************* */ - template - BayesTree& BayesTree::operator=(const This& other) { - this->clear(); - other.cloneTo(*this); - return *this; - } - - /* ************************************************************************* */ - template - void BayesTree::print(const std::string& s, const IndexFormatter& indexFormatter) const { - if (root_.use_count() == 0) { - std::cout << "WARNING: BayesTree.print encountered a forest..." << std::endl; - return; - } - std::cout << s << ": clique size == " << size() << ", node size == " << nodes_.size() << std::endl; - if (nodes_.empty()) return; - root_->printTree("", indexFormatter); - } - - /* ************************************************************************* */ - // binary predicate to test equality of a pair for use in equals - template - bool check_sharedCliques( - const typename BayesTree::sharedClique& v1, - const typename BayesTree::sharedClique& v2 - ) { - return (!v1 && !v2) || (v1 && v2 && v1->equals(*v2)); - } - - /* ************************************************************************* */ - template - bool BayesTree::equals(const BayesTree& other, - double tol) const { - return size()==other.size() && - std::equal(nodes_.begin(), nodes_.end(), other.nodes_.begin(), &check_sharedCliques); - } - - /* ************************************************************************* */ - template - template - inline Index BayesTree::findParentClique(const CONTAINER& parents) const { - typename CONTAINER::const_iterator lowestOrderedParent = min_element(parents.begin(), parents.end()); - assert(lowestOrderedParent != parents.end()); - return *lowestOrderedParent; - } - - /* ************************************************************************* */ - template - void BayesTree::insert(BayesTree& bayesTree, const sharedConditional& conditional) - { - static const bool debug = false; - - // get indices and parents - const typename CONDITIONAL::Parents& parents = conditional->parents(); - - if(debug) conditional->print("Adding conditional "); - - // if no parents, start a new root clique - if (parents.empty()) { - if(debug) std::cout << "No parents so making root" << std::endl; - bayesTree.root_ = bayesTree.addClique(conditional); - return; - } - - // otherwise, find the parent clique by using the index data structure - // to find the lowest-ordered parent - Index parentRepresentative = bayesTree.findParentClique(parents); - if(debug) std::cout << "First-eliminated parent is " << parentRepresentative << ", have " << bayesTree.nodes_.size() << " nodes." << std::endl; - sharedClique parent_clique = bayesTree[parentRepresentative]; - if(debug) parent_clique->print("Parent clique is "); - - // if the parents and parent clique have the same size, add to parent clique - if ((*parent_clique)->size() == size_t(parents.size())) { - if(debug) std::cout << "Adding to parent clique" << std::endl; - addToCliqueFront(bayesTree, conditional, parent_clique); - } else { - if(debug) std::cout << "Starting new clique" << std::endl; - // otherwise, start a new clique and add it to the tree - bayesTree.addClique(conditional,parent_clique); - } - } - - /* ************************************************************************* */ - //TODO: remove this function after removing TSAM.cpp - template - typename BayesTree::sharedClique BayesTree::insert( - const sharedConditional& clique, std::list& children, bool isRootClique) { - - if (clique->nrFrontals() == 0) - throw std::invalid_argument("BayesTree::insert: empty clique!"); - - // create a new clique and add all the conditionals to the clique - sharedClique new_clique = addClique(clique, children); - if (isRootClique) root_ = new_clique; - - return new_clique; - } - - /* ************************************************************************* */ - template - void BayesTree::fillNodesIndex(const sharedClique& subtree) { - // Add each frontal variable of this root node - BOOST_FOREACH(const Index& j, subtree->conditional()->frontals()) { nodes_[j] = subtree; } - // Fill index for each child - typedef typename BayesTree::sharedClique sharedClique; - BOOST_FOREACH(const sharedClique& child, subtree->children_) { - fillNodesIndex(child); } - } - - /* ************************************************************************* */ - template - void BayesTree::insert(const sharedClique& subtree) { - if(subtree) { - // Find the parent clique of the new subtree. By the running intersection - // property, those separator variables in the subtree that are ordered - // lower than the highest frontal variable of the subtree root will all - // appear in the separator of the subtree root. - if(subtree->conditional()->parents().empty()) { - assert(!root_); - root_ = subtree; - } else { - Index parentRepresentative = findParentClique(subtree->conditional()->parents()); - sharedClique parent = (*this)[parentRepresentative]; - parent->children_ += subtree; - subtree->parent_ = parent; // set new parent! - } - - // Now fill in the nodes index - if(nodes_.size() == 0 || - *std::max_element(subtree->conditional()->beginFrontals(), subtree->conditional()->endFrontals()) > (nodes_.size() - 1)) { - nodes_.resize(subtree->conditional()->lastFrontalKey() + 1); - } - fillNodesIndex(subtree); - } - } - - /* ************************************************************************* */ - // First finds clique marginal then marginalizes that - /* ************************************************************************* */ - template - typename CONDITIONAL::FactorType::shared_ptr BayesTree::marginalFactor( - Index j, Eliminate function) const - { - gttic(BayesTree_marginalFactor); - - // get clique containing Index j - sharedClique clique = (*this)[j]; - - // calculate or retrieve its marginal P(C) = P(F,S) -#ifdef OLD_SHORTCUT_MARGINALS - FactorGraph cliqueMarginal = clique->marginal(root_,function); -#else - FactorGraph cliqueMarginal = clique->marginal2(root_,function); -#endif - - // Reduce the variable indices to start at zero - gttic(Reduce); - const Permutation reduction = internal::createReducingPermutation(cliqueMarginal.keys()); - internal::Reduction inverseReduction = internal::Reduction::CreateAsInverse(reduction); - BOOST_FOREACH(const boost::shared_ptr& factor, cliqueMarginal) { - if(factor) factor->reduceWithInverse(inverseReduction); } - gttoc(Reduce); - - // now, marginalize out everything that is not variable j - GenericSequentialSolver solver(cliqueMarginal); - boost::shared_ptr result = solver.marginalFactor(inverseReduction[j], function); - - // Undo the reduction - gttic(Undo_Reduce); - result->permuteWithInverse(reduction); - BOOST_FOREACH(const boost::shared_ptr& factor, cliqueMarginal) { - if(factor) factor->permuteWithInverse(reduction); } - gttoc(Undo_Reduce); - return result; - } - - /* ************************************************************************* */ - template - typename BayesNet::shared_ptr BayesTree::marginalBayesNet( - Index j, Eliminate function) const - { - gttic(BayesTree_marginalBayesNet); - - // calculate marginal as a factor graph - FactorGraph fg; - fg.push_back(this->marginalFactor(j,function)); - - // Reduce the variable indices to start at zero - gttic(Reduce); - const Permutation reduction = internal::createReducingPermutation(fg.keys()); - internal::Reduction inverseReduction = internal::Reduction::CreateAsInverse(reduction); - BOOST_FOREACH(const boost::shared_ptr& factor, fg) { - if(factor) factor->reduceWithInverse(inverseReduction); } - gttoc(Reduce); - - // eliminate factor graph marginal to a Bayes net - boost::shared_ptr > bn = GenericSequentialSolver(fg).eliminate(function); - - // Undo the reduction - gttic(Undo_Reduce); - bn->permuteWithInverse(reduction); - BOOST_FOREACH(const boost::shared_ptr& factor, fg) { - if(factor) factor->permuteWithInverse(reduction); } - gttoc(Undo_Reduce); - return bn; - } - - /* ************************************************************************* */ - // Find two cliques, their joint, then marginalizes - /* ************************************************************************* */ - template - typename FactorGraph::shared_ptr - BayesTree::joint(Index j1, Index j2, Eliminate function) const { - gttic(BayesTree_joint); - - // get clique C1 and C2 - sharedClique C1 = (*this)[j1], C2 = (*this)[j2]; - - gttic(Lowest_common_ancestor); - // Find lowest common ancestor clique - sharedClique B; { - // Build two paths to the root - FastList path1, path2; { - sharedClique p = C1; - while(p) { - path1.push_front(p); - p = p->parent(); - } - } { - sharedClique p = C2; - while(p) { - path2.push_front(p); - p = p->parent(); - } - } - // Find the path intersection - B = this->root(); - typename FastList::const_iterator p1 = path1.begin(), p2 = path2.begin(); - while(p1 != path1.end() && p2 != path2.end() && *p1 == *p2) { - B = *p1; - ++p1; - ++p2; - } - } - gttoc(Lowest_common_ancestor); - - // Compute marginal on lowest common ancestor clique - gttic(LCA_marginal); - FactorGraph p_B = B->marginal2(this->root(), function); - gttoc(LCA_marginal); - - // Compute shortcuts of the requested cliques given the lowest common ancestor - gttic(Clique_shortcuts); - BayesNet p_C1_Bred = C1->shortcut(B, function); - BayesNet p_C2_Bred = C2->shortcut(B, function); - gttoc(Clique_shortcuts); - - // Factor the shortcuts to be conditioned on the full root - // Get the set of variables to eliminate, which is C1\B. - gttic(Full_root_factoring); - sharedConditional p_C1_B; { - std::vector C1_minus_B; { - FastSet C1_minus_B_set(C1->conditional()->beginParents(), C1->conditional()->endParents()); - BOOST_FOREACH(const Index j, *B->conditional()) { - C1_minus_B_set.erase(j); } - C1_minus_B.assign(C1_minus_B_set.begin(), C1_minus_B_set.end()); - } - // Factor into C1\B | B. - FactorGraph temp_remaining; - boost::tie(p_C1_B, temp_remaining) = FactorGraph(p_C1_Bred).eliminate(C1_minus_B, function); - } - sharedConditional p_C2_B; { - std::vector C2_minus_B; { - FastSet C2_minus_B_set(C2->conditional()->beginParents(), C2->conditional()->endParents()); - BOOST_FOREACH(const Index j, *B->conditional()) { - C2_minus_B_set.erase(j); } - C2_minus_B.assign(C2_minus_B_set.begin(), C2_minus_B_set.end()); - } - // Factor into C2\B | B. - FactorGraph temp_remaining; - boost::tie(p_C2_B, temp_remaining) = FactorGraph(p_C2_Bred).eliminate(C2_minus_B, function); - } - gttoc(Full_root_factoring); - - gttic(Variable_joint); - // Build joint on all involved variables - FactorGraph p_BC1C2; - p_BC1C2.push_back(p_B); - p_BC1C2.push_back(p_C1_B->toFactor()); - p_BC1C2.push_back(p_C2_B->toFactor()); - if(C1 != B) - p_BC1C2.push_back(C1->conditional()->toFactor()); - if(C2 != B) - p_BC1C2.push_back(C2->conditional()->toFactor()); - - // Reduce the variable indices to start at zero - gttic(Reduce); - const Permutation reduction = internal::createReducingPermutation(p_BC1C2.keys()); - internal::Reduction inverseReduction = internal::Reduction::CreateAsInverse(reduction); - BOOST_FOREACH(const boost::shared_ptr& factor, p_BC1C2) { - if(factor) factor->reduceWithInverse(inverseReduction); } - std::vector js; js.push_back(inverseReduction[j1]); js.push_back(inverseReduction[j2]); - gttoc(Reduce); - - // now, marginalize out everything that is not variable j - GenericSequentialSolver solver(p_BC1C2); - boost::shared_ptr > result = solver.jointFactorGraph(js, function); - - // Undo the reduction - gttic(Undo_Reduce); - BOOST_FOREACH(const boost::shared_ptr& factor, *result) { - if(factor) factor->permuteWithInverse(reduction); } - BOOST_FOREACH(const boost::shared_ptr& factor, p_BC1C2) { - if(factor) factor->permuteWithInverse(reduction); } - gttoc(Undo_Reduce); - return result; - - } - - /* ************************************************************************* */ - template - typename BayesNet::shared_ptr BayesTree::jointBayesNet( - Index j1, Index j2, Eliminate function) const { - - // eliminate factor graph marginal to a Bayes net - return GenericSequentialSolver ( - *this->joint(j1, j2, function)).eliminate(function); - } - - /* ************************************************************************* */ - template - void BayesTree::clear() { - // Remove all nodes and clear the root pointer - nodes_.clear(); - root_.reset(); - } - - /* ************************************************************************* */ - template - void BayesTree::removePath(sharedClique clique, - BayesNet& bn, typename BayesTree::Cliques& orphans) { - - // base case is NULL, if so we do nothing and return empties above - if (clique!=NULL) { - - // remove the clique from orphans in case it has been added earlier - orphans.remove(clique); - - // remove me - this->removeClique(clique); - - // remove path above me - this->removePath(typename Clique::shared_ptr(clique->parent_.lock()), bn, orphans); - - // add children to list of orphans (splice also removed them from clique->children_) - orphans.splice(orphans.begin(), clique->children_); - - bn.push_back(clique->conditional()); - - } - } - - /* ************************************************************************* */ - template - template - void BayesTree::removeTop(const CONTAINER& keys, - BayesNet& bn, typename BayesTree::Cliques& orphans) { - - // process each key of the new factor - BOOST_FOREACH(const Index& j, keys) { - - // get the clique - if(j < nodes_.size()) { - const sharedClique& clique(nodes_[j]); - if(clique) { - // remove path from clique to root - this->removePath(clique, bn, orphans); - } - } - } - - // Delete cachedShortcuts for each orphan subtree - //TODO: Consider Improving - BOOST_FOREACH(sharedClique& orphan, orphans) - orphan->deleteCachedShortcuts(); - } - - /* ************************************************************************* */ - template - typename BayesTree::Cliques BayesTree::removeSubtree( - const sharedClique& subtree) - { - // Result clique list - Cliques cliques; - cliques.push_back(subtree); - - // Remove the first clique from its parents - if(!subtree->isRoot()) - subtree->parent()->children().remove(subtree); - else - root_.reset(); - - // Add all subtree cliques and erase the children and parent of each - for(typename Cliques::iterator clique = cliques.begin(); clique != cliques.end(); ++clique) - { - // Add children - BOOST_FOREACH(const sharedClique& child, (*clique)->children()) { - cliques.push_back(child); } - - // Delete cached shortcuts - (*clique)->deleteCachedShortcutsNonRecursive(); - - // Remove this node from the nodes index - BOOST_FOREACH(Index j, (*clique)->conditional()->frontals()) { - nodes_[j].reset(); } - - // Erase the parent and children pointers - (*clique)->parent_.reset(); - (*clique)->children_.clear(); - } - - return cliques; - } - - /* ************************************************************************* */ - template - void BayesTree::cloneTo(This& newTree) const { - if(root()) - cloneTo(newTree, root(), sharedClique()); - } - - /* ************************************************************************* */ - template - void BayesTree::cloneTo( - This& newTree, const sharedClique& subtree, const sharedClique& parent) const { - sharedClique newClique(subtree->clone()); - newTree.addClique(newClique, parent); - BOOST_FOREACH(const sharedClique& childClique, subtree->children()) { - cloneTo(newTree, childClique, newClique); - } - } - - /* ************************************************************************* */ - -} // \namespace gtsam diff --git a/gtsam/inference/BayesTree-inst.h b/gtsam/inference/BayesTree-inst.h new file mode 100644 index 000000000..4bbed8bf0 --- /dev/null +++ b/gtsam/inference/BayesTree-inst.h @@ -0,0 +1,509 @@ +/* ---------------------------------------------------------------------------- + + * GTSAM Copyright 2010, Georgia Tech Research Corporation, + * Atlanta, Georgia 30332-0415 + * All Rights Reserved + * Authors: Frank Dellaert, et al. (see THANKS for the full author list) + + * See LICENSE for the license information + + * -------------------------------------------------------------------------- */ + +/** + * @file BayesTree-inl.h + * @brief Bayes Tree is a tree of cliques of a Bayes Chain + * @author Frank Dellaert + * @author Michael Kaess + * @author Viorela Ila + * @author Richard Roberts + */ + +#pragma once + +#include +#include +#include +#include + +#include +#include +#include +#include + +using boost::assign::cref_list_of; + +namespace gtsam { + + /* ************************************************************************* */ + template + BayesTreeCliqueData BayesTree::getCliqueData() const { + BayesTreeCliqueData data; + BOOST_FOREACH(const sharedClique& root, roots_) + getCliqueData(data, root); + return data; + } + + /* ************************************************************************* */ + template + void BayesTree::getCliqueData(BayesTreeCliqueData& data, sharedClique clique) const { + data.conditionalSizes.push_back(clique->conditional()->nrFrontals()); + data.separatorSizes.push_back(clique->conditional()->nrParents()); + BOOST_FOREACH(sharedClique c, clique->children) { + getCliqueData(data, c); + } + } + + /* ************************************************************************* */ + template + size_t BayesTree::numCachedSeparatorMarginals() const { + size_t count = 0; + BOOST_FOREACH(const sharedClique& root, roots_) + count += root->numCachedSeparatorMarginals(); + return count; + } + + /* ************************************************************************* */ + template + void BayesTree::saveGraph(const std::string &s, const KeyFormatter& keyFormatter) const { + if (roots_.empty()) throw std::invalid_argument("the root of Bayes tree has not been initialized!"); + std::ofstream of(s.c_str()); + of<< "digraph G{\n"; + BOOST_FOREACH(const sharedClique& root, roots_) + saveGraph(of, root, keyFormatter); + of<<"}"; + of.close(); + } + + /* ************************************************************************* */ + template + void BayesTree::saveGraph(std::ostream &s, sharedClique clique, const KeyFormatter& indexFormatter, int parentnum) const { + static int num = 0; + bool first = true; + std::stringstream out; + out << num; + std::string parent = out.str(); + parent += "[label=\""; + + BOOST_FOREACH(Key index, clique->conditional_->frontals()) { + if(!first) parent += ","; first = false; + parent += indexFormatter(index); + } + + if(clique->parent()){ + parent += " : "; + s << parentnum << "->" << num << "\n"; + } + + first = true; + BOOST_FOREACH(Key sep, clique->conditional_->parents()) { + if(!first) parent += ","; first = false; + parent += indexFormatter(sep); + } + parent += "\"];\n"; + s << parent; + parentnum = num; + + BOOST_FOREACH(sharedClique c, clique->children) { + num++; + saveGraph(s, c, indexFormatter, parentnum); + } + } + + /* ************************************************************************* */ + template + size_t BayesTree::size() const { + size_t size = 0; + BOOST_FOREACH(const sharedClique& clique, roots_) + size += clique->treeSize(); + return size; + } + + /* ************************************************************************* */ + template + void BayesTree::addClique(const sharedClique& clique, const sharedClique& parent_clique) { + BOOST_FOREACH(Index j, clique->conditional()->frontals()) + nodes_[j] = clique; + if (parent_clique != NULL) { + clique->parent_ = parent_clique; + parent_clique->children.push_back(clique); + } else { + roots_.push_back(clique); + } + } + + /* ************************************************************************* */ + // TODO: Clean up + namespace { + template + int _pushClique(FactorGraph& fg, const boost::shared_ptr& clique) { + fg.push_back(clique->conditional_); + return 0; + } + + template + struct _pushCliqueFunctor { + _pushCliqueFunctor(FactorGraph& graph_) : graph(graph_) {} + FactorGraph& graph; + int operator()(const boost::shared_ptr& clique, int dummy) { + graph.push_back(clique->conditional_); + return 0; + } + }; + } + + /* ************************************************************************* */ + template + void BayesTree::addFactorsToGraph(FactorGraph& graph) const + { + // Traverse the BayesTree and add all conditionals to this graph + int data = 0; // Unused + _pushCliqueFunctor functor(graph); + treeTraversal::DepthFirstForest(*this, data, functor); // FIXME: sort of works? +// treeTraversal::DepthFirstForest(*this, data, boost::bind(&_pushClique, boost::ref(graph), _1)); + } + + /* ************************************************************************* */ + template + BayesTree::BayesTree(const This& other) { + *this = other; + } + + /* ************************************************************************* */ + namespace { + template + boost::shared_ptr + BayesTreeCloneForestVisitorPre(const boost::shared_ptr& node, const boost::shared_ptr& parentPointer) + { + // Clone the current node and add it to its cloned parent + boost::shared_ptr clone = boost::make_shared(*node); + clone->children.clear(); + clone->parent_ = parentPointer; + parentPointer->children.push_back(clone); + return clone; + } + } + + /* ************************************************************************* */ + template + BayesTree& BayesTree::operator=(const This& other) { + this->clear(); + boost::shared_ptr rootContainer = boost::make_shared(); + treeTraversal::DepthFirstForest(other, rootContainer, BayesTreeCloneForestVisitorPre); + BOOST_FOREACH(const sharedClique& root, rootContainer->children) { + root->parent_ = typename Clique::weak_ptr(); // Reset the parent since it's set to the dummy clique + insertRoot(root); + } + return *this; + } + + /* ************************************************************************* */ + template + void BayesTree::print(const std::string& s, const KeyFormatter& keyFormatter) const { + std::cout << s << ": cliques: " << size() << ", variables: " << nodes_.size() << std::endl; + treeTraversal::PrintForest(*this, s, keyFormatter); + } + + /* ************************************************************************* */ + // binary predicate to test equality of a pair for use in equals + template + bool check_sharedCliques( + const std::pair::sharedClique>& v1, + const std::pair::sharedClique>& v2 + ) { + return v1.first == v2.first && + ((!v1.second && !v2.second) || (v1.second && v2.second && v1.second->equals(*v2.second))); + } + + /* ************************************************************************* */ + template + bool BayesTree::equals(const BayesTree& other, double tol) const { + return size()==other.size() && + std::equal(nodes_.begin(), nodes_.end(), other.nodes_.begin(), &check_sharedCliques); + } + + /* ************************************************************************* */ + template + template + Key BayesTree::findParentClique(const CONTAINER& parents) const { + typename CONTAINER::const_iterator lowestOrderedParent = min_element(parents.begin(), parents.end()); + assert(lowestOrderedParent != parents.end()); + return *lowestOrderedParent; + } + + /* ************************************************************************* */ + template + void BayesTree::fillNodesIndex(const sharedClique& subtree) { + // Add each frontal variable of this root node + BOOST_FOREACH(const Key& j, subtree->conditional()->frontals()) { + bool inserted = nodes_.insert(std::make_pair(j, subtree)).second; + assert(inserted); (void)inserted; + } + // Fill index for each child + typedef typename BayesTree::sharedClique sharedClique; + BOOST_FOREACH(const sharedClique& child, subtree->children) { + fillNodesIndex(child); } + } + + /* ************************************************************************* */ + template + void BayesTree::insertRoot(const sharedClique& subtree) { + roots_.push_back(subtree); // Add to roots + fillNodesIndex(subtree); // Populate nodes index + } + + /* ************************************************************************* */ + // First finds clique marginal then marginalizes that + /* ************************************************************************* */ + template + typename BayesTree::sharedConditional + BayesTree::marginalFactor(Key j, const Eliminate& function) const + { + gttic(BayesTree_marginalFactor); + + // get clique containing Index j + sharedClique clique = this->clique(j); + + // calculate or retrieve its marginal P(C) = P(F,S) + FactorGraphType cliqueMarginal = clique->marginal2(function); + + // Now, marginalize out everything that is not variable j + BayesNetType marginalBN = *cliqueMarginal.marginalMultifrontalBayesNet( + Ordering(cref_list_of<1,Key>(j)), boost::none, function); + + // The Bayes net should contain only one conditional for variable j, so return it + return marginalBN.front(); + } + + /* ************************************************************************* */ + // Find two cliques, their joint, then marginalizes + /* ************************************************************************* */ + template + typename BayesTree::sharedFactorGraph + BayesTree::joint(Key j1, Key j2, const Eliminate& function) const + { + gttic(BayesTree_joint); + return boost::make_shared(*jointBayesNet(j1, j2, function)); + } + + /* ************************************************************************* */ + template + typename BayesTree::sharedBayesNet + BayesTree::jointBayesNet(Key j1, Key j2, const Eliminate& function) const + { + gttic(BayesTree_jointBayesNet); + // get clique C1 and C2 + sharedClique C1 = (*this)[j1], C2 = (*this)[j2]; + + gttic(Lowest_common_ancestor); + // Find lowest common ancestor clique + sharedClique B; { + // Build two paths to the root + FastList path1, path2; { + sharedClique p = C1; + while(p) { + path1.push_front(p); + p = p->parent(); + } + } { + sharedClique p = C2; + while(p) { + path2.push_front(p); + p = p->parent(); + } + } + // Find the path intersection + typename FastList::const_iterator p1 = path1.begin(), p2 = path2.begin(); + if(*p1 == *p2) + B = *p1; + while(p1 != path1.end() && p2 != path2.end() && *p1 == *p2) { + B = *p1; + ++p1; + ++p2; + } + } + if(!B) + throw std::invalid_argument("BayesTree::jointBayesNet does not yet work for joints across a forest"); + gttoc(Lowest_common_ancestor); + + // Compute marginal on lowest common ancestor clique + gttic(LCA_marginal); + FactorGraphType p_B = B->marginal2(function); + gttoc(LCA_marginal); + + // Compute shortcuts of the requested cliques given the lowest common ancestor + gttic(Clique_shortcuts); + BayesNetType p_C1_Bred = C1->shortcut(B, function); + BayesNetType p_C2_Bred = C2->shortcut(B, function); + gttoc(Clique_shortcuts); + + // Factor the shortcuts to be conditioned on the full root + // Get the set of variables to eliminate, which is C1\B. + gttic(Full_root_factoring); + boost::shared_ptr p_C1_B; { + std::vector C1_minus_B; { + FastSet C1_minus_B_set(C1->conditional()->beginParents(), C1->conditional()->endParents()); + BOOST_FOREACH(const Index j, *B->conditional()) { + C1_minus_B_set.erase(j); } + C1_minus_B.assign(C1_minus_B_set.begin(), C1_minus_B_set.end()); + } + // Factor into C1\B | B. + sharedFactorGraph temp_remaining; + boost::tie(p_C1_B, temp_remaining) = + FactorGraphType(p_C1_Bred).eliminatePartialMultifrontal(Ordering(C1_minus_B), function); + } + boost::shared_ptr p_C2_B; { + std::vector C2_minus_B; { + FastSet C2_minus_B_set(C2->conditional()->beginParents(), C2->conditional()->endParents()); + BOOST_FOREACH(const Index j, *B->conditional()) { + C2_minus_B_set.erase(j); } + C2_minus_B.assign(C2_minus_B_set.begin(), C2_minus_B_set.end()); + } + // Factor into C2\B | B. + sharedFactorGraph temp_remaining; + boost::tie(p_C2_B, temp_remaining) = + FactorGraphType(p_C2_Bred).eliminatePartialMultifrontal(Ordering(C2_minus_B), function); + } + gttoc(Full_root_factoring); + + gttic(Variable_joint); + // Build joint on all involved variables + FactorGraphType p_BC1C2; + p_BC1C2 += p_B; + p_BC1C2 += *p_C1_B; + p_BC1C2 += *p_C2_B; + if(C1 != B) + p_BC1C2 += C1->conditional(); + if(C2 != B) + p_BC1C2 += C2->conditional(); + + // now, marginalize out everything that is not variable j1 or j2 + return p_BC1C2.marginalMultifrontalBayesNet(Ordering(cref_list_of<2,Key>(j1)(j2)), boost::none, function); + } + + /* ************************************************************************* */ + template + void BayesTree::clear() { + // Remove all nodes and clear the root pointer + nodes_.clear(); + roots_.clear(); + } + + /* ************************************************************************* */ + template + void BayesTree::deleteCachedShortcuts() { + BOOST_FOREACH(const sharedClique& root, roots_) { + root->deleteCachedShortcuts(); + } + } + + /* ************************************************************************* */ + template + void BayesTree::removeClique(sharedClique clique) + { + if (clique->isRoot()) { + typename std::vector::iterator root = std::find(roots_.begin(), roots_.end(), clique); + if(root != roots_.end()) + roots_.erase(root); + } else { // detach clique from parent + sharedClique parent = clique->parent_.lock(); + typename std::vector::iterator child = std::find(parent->children.begin(), parent->children.end(), clique); + assert(child != parent->children.end()); + parent->children.erase(child); + } + + // orphan my children + BOOST_FOREACH(sharedClique child, clique->children) + child->parent_ = typename Clique::weak_ptr(); + + BOOST_FOREACH(Key j, clique->conditional()->frontals()) { + nodes_.erase(j); + } + } + + /* ************************************************************************* */ + template + void BayesTree::removePath(sharedClique clique, BayesNetType& bn, Cliques& orphans) + { + // base case is NULL, if so we do nothing and return empties above + if (clique) { + + // remove the clique from orphans in case it has been added earlier + orphans.remove(clique); + + // remove me + this->removeClique(clique); + + // remove path above me + this->removePath(typename Clique::shared_ptr(clique->parent_.lock()), bn, orphans); + + // add children to list of orphans (splice also removed them from clique->children_) + orphans.insert(orphans.begin(), clique->children.begin(), clique->children.end()); + clique->children.clear(); + + bn.push_back(clique->conditional_); + + } + } + + /* ************************************************************************* */ + template + void BayesTree::removeTop(const std::vector& keys, BayesNetType& bn, Cliques& orphans) + { + // process each key of the new factor + BOOST_FOREACH(const Key& j, keys) + { + // get the clique + // TODO: Nodes will be searched again in removeClique + typename Nodes::const_iterator node = nodes_.find(j); + if(node != nodes_.end()) { + // remove path from clique to root + this->removePath(node->second, bn, orphans); + } + } + + // Delete cachedShortcuts for each orphan subtree + //TODO: Consider Improving + BOOST_FOREACH(sharedClique& orphan, orphans) + orphan->deleteCachedShortcuts(); + } + + /* ************************************************************************* */ + template + typename BayesTree::Cliques BayesTree::removeSubtree( + const sharedClique& subtree) + { + // Result clique list + Cliques cliques; + cliques.push_back(subtree); + + // Remove the first clique from its parents + if(!subtree->isRoot()) + subtree->parent()->children.erase(std::find( + subtree->parent()->children.begin(), subtree->parent()->children.end(), subtree)); + else + roots_.erase(std::find(roots_.begin(), roots_.end(), subtree)); + + // Add all subtree cliques and erase the children and parent of each + for(typename Cliques::iterator clique = cliques.begin(); clique != cliques.end(); ++clique) + { + // Add children + BOOST_FOREACH(const sharedClique& child, (*clique)->children) { + cliques.push_back(child); } + + // Delete cached shortcuts + (*clique)->deleteCachedShortcutsNonRecursive(); + + // Remove this node from the nodes index + BOOST_FOREACH(Key j, (*clique)->conditional()->frontals()) { + nodes_.erase(j); } + + // Erase the parent and children pointers + (*clique)->parent_.reset(); + (*clique)->children.clear(); + } + + return cliques; + } + +} +/// namespace gtsam diff --git a/gtsam/inference/BayesTree.cpp b/gtsam/inference/BayesTree.cpp new file mode 100644 index 000000000..53a91ebc2 --- /dev/null +++ b/gtsam/inference/BayesTree.cpp @@ -0,0 +1,62 @@ +/* ---------------------------------------------------------------------------- + + * GTSAM Copyright 2010, Georgia Tech Research Corporation, + * Atlanta, Georgia 30332-0415 + * All Rights Reserved + * Authors: Frank Dellaert, et al. (see THANKS for the full author list) + + * See LICENSE for the license information + + * -------------------------------------------------------------------------- */ + +/** + * @file BayesTree.cpp + * @brief Bayes Tree is a tree of cliques of a Bayes Chain + * @author Frank Dellaert + * @author Michael Kaess + * @author Viorela Ila + * @author Richard Roberts + */ + +#include + +#include + +namespace gtsam { + +/* ************************************************************************* */ +void BayesTreeCliqueStats::print(const std::string& s) const { + std::cout << s + << "avg Conditional Size: " << avgConditionalSize << std::endl + << "max Conditional Size: " << maxConditionalSize << std::endl + << "avg Separator Size: " << avgSeparatorSize << std::endl + << "max Separator Size: " << maxSeparatorSize << std::endl; +} + +/* ************************************************************************* */ +BayesTreeCliqueStats BayesTreeCliqueData::getStats() const +{ + BayesTreeCliqueStats stats; + + double sum = 0.0; + size_t max = 0; + BOOST_FOREACH(size_t s, conditionalSizes) { + sum += (double)s; + if(s > max) max = s; + } + stats.avgConditionalSize = sum / (double)conditionalSizes.size(); + stats.maxConditionalSize = max; + + sum = 0.0; + max = 1; + BOOST_FOREACH(size_t s, separatorSizes) { + sum += (double)s; + if(s > max) max = s; + } + stats.avgSeparatorSize = sum / (double)separatorSizes.size(); + stats.maxSeparatorSize = max; + + return stats; +} + +} diff --git a/gtsam/inference/BayesTree.h b/gtsam/inference/BayesTree.h index 9e487d0a2..15c572128 100644 --- a/gtsam/inference/BayesTree.h +++ b/gtsam/inference/BayesTree.h @@ -1,418 +1,281 @@ -/* ---------------------------------------------------------------------------- - - * GTSAM Copyright 2010, Georgia Tech Research Corporation, - * Atlanta, Georgia 30332-0415 - * All Rights Reserved - * Authors: Frank Dellaert, et al. (see THANKS for the full author list) - - * See LICENSE for the license information - - * -------------------------------------------------------------------------- */ - -/** - * @file BayesTree.h - * @brief Bayes Tree is a tree of cliques of a Bayes Chain - * @author Frank Dellaert - */ - -// \callgraph - -#pragma once - -#include -#include -#include -#include -#include -#include -#include - -#include -#include -#include -#include -#include -#include -#include - -namespace gtsam { - - // Forward declaration of BayesTreeClique which is defined below BayesTree in this file - template struct BayesTreeClique; - - /** - * Bayes tree - * @tparam CONDITIONAL The type of the conditional densities, i.e. the type of node in the underlying Bayes chain, - * which could be a ConditionalProbabilityTable, a GaussianConditional, or a SymbolicConditional. - * @tparam CLIQUE The type of the clique data structure, defaults to BayesTreeClique, normally do not change this - * as it is only used when developing special versions of BayesTree, e.g. for ISAM2. - * - * \addtogroup Multifrontal - * \nosubgrouping - */ - template > - class BayesTree { - - public: - - typedef BayesTree This; - typedef boost::shared_ptr > shared_ptr; - typedef boost::shared_ptr sharedConditional; - typedef boost::shared_ptr > sharedBayesNet; - typedef CONDITIONAL ConditionalType; - typedef typename CONDITIONAL::FactorType FactorType; - typedef typename FactorGraph::Eliminate Eliminate; - - typedef CLIQUE Clique; ///< The clique type, normally BayesTreeClique - - // typedef for shared pointers to cliques - typedef boost::shared_ptr sharedClique; - - // A convenience class for a list of shared cliques - struct Cliques : public FastList { - void print(const std::string& s = "Cliques", - const IndexFormatter& indexFormatter = DefaultIndexFormatter) const; - bool equals(const Cliques& other, double tol = 1e-9) const; - }; - - /** clique statistics */ - struct CliqueStats { - double avgConditionalSize; - std::size_t maxConditionalSize; - double avgSeparatorSize; - std::size_t maxSeparatorSize; - void print(const std::string& s = "") const ; - }; - - /** store all the sizes */ - struct CliqueData { - std::vector conditionalSizes; - std::vector separatorSizes; - CliqueStats getStats() const; - }; - - /** Map from indices to Clique */ - typedef std::vector Nodes; - - protected: - - /** Map from indices to Clique */ - Nodes nodes_; - - /** Root clique */ - sharedClique root_; - - public: - - /// @name Standard Constructors - /// @{ - - /** Create an empty Bayes Tree */ - BayesTree() {} - - /** Create a Bayes Tree from a Bayes Net (requires CONDITIONAL is IndexConditional *or* CONDITIONAL::Combine) */ - explicit BayesTree(const BayesNet& bayesNet); - - /** Copy constructor */ - BayesTree(const This& other); - - /** Assignment operator */ - This& operator=(const This& other); - - /// @} - /// @name Advanced Constructors - /// @{ - - /** - * Create a Bayes Tree from a Bayes Net and some subtrees. The Bayes net corresponds to the - * new root clique and the subtrees are connected to the root clique. - */ - BayesTree(const BayesNet& bayesNet, std::list > subtrees); - - /** Destructor */ - virtual ~BayesTree() {} - - /// @} - /// @name Testable - /// @{ - - /** check equality */ - bool equals(const BayesTree& other, double tol = 1e-9) const; - - /** print */ - void print(const std::string& s = "", - const IndexFormatter& indexFormatter = DefaultIndexFormatter ) const; - - /// @} - /// @name Standard Interface - /// @{ - - /** - * Find parent clique of a conditional. It will look at all parents and - * return the one with the lowest index in the ordering. - */ - template - Index findParentClique(const CONTAINER& parents) const; - - /** number of cliques */ - inline size_t size() const { - if(root_) - return root_->treeSize(); - else - return 0; - } - - /** number of nodes */ - inline size_t nrNodes() const { return nodes_.size(); } - - /** Check if there are any cliques in the tree */ - inline bool empty() const { - return nodes_.empty(); - } - - /** return nodes */ - const Nodes& nodes() const { return nodes_; } - - /** return root clique */ - const sharedClique& root() const { return root_; } - - /** find the clique that contains the variable with Index j */ - inline sharedClique operator[](Index j) const { - return nodes_.at(j); - } - - /** alternate syntax for matlab: find the clique that contains the variable with Index j */ - inline sharedClique clique(Index j) const { - return nodes_.at(j); - } - - /** Gather data on all cliques */ - CliqueData getCliqueData() const; - - /** Collect number of cliques with cached separator marginals */ - size_t numCachedSeparatorMarginals() const; - - /** return marginal on any variable */ - typename FactorType::shared_ptr marginalFactor(Index j, Eliminate function) const; - - /** - * return marginal on any variable, as a Bayes Net - * NOTE: this function calls marginal, and then eliminates it into a Bayes Net - * This is more expensive than the above function - */ - typename BayesNet::shared_ptr marginalBayesNet(Index j, Eliminate function) const; - - /** - * return joint on two variables - * Limitation: can only calculate joint if cliques are disjoint or one of them is root - */ - typename FactorGraph::shared_ptr joint(Index j1, Index j2, Eliminate function) const; - - /** - * return joint on two variables as a BayesNet - * Limitation: can only calculate joint if cliques are disjoint or one of them is root - */ - typename BayesNet::shared_ptr jointBayesNet(Index j1, Index j2, Eliminate function) const; - - /** - * Read only with side effects - */ - - /** saves the Tree to a text file in GraphViz format */ - void saveGraph(const std::string& s, const IndexFormatter& indexFormatter = DefaultIndexFormatter ) const; - - /// @} - /// @name Advanced Interface - /// @{ - - /** Access the root clique (non-const version) */ - sharedClique& root() { return root_; } - - /** Access the nodes (non-cost version) */ - Nodes& nodes() { return nodes_; } - - /** Remove all nodes */ - void clear(); - - /** Clear all shortcut caches - use before timing on marginal calculation to avoid residual cache data */ - void deleteCachedShortcuts() { - root_->deleteCachedShortcuts(); - } - - /** Apply a permutation to the full tree - also updates the nodes structure */ - void permuteWithInverse(const Permutation& inversePermutation); - - /** - * Remove path from clique to root and return that path as factors - * plus a list of orphaned subtree roots. Used in removeTop below. - */ - void removePath(sharedClique clique, BayesNet& bn, Cliques& orphans); - - /** - * Given a list of indices, turn "contaminated" part of the tree back into a factor graph. - * Factors and orphans are added to the in/out arguments. - */ - template - void removeTop(const CONTAINER& indices, BayesNet& bn, Cliques& orphans); - - /** - * Remove the requested subtree. */ - Cliques removeSubtree(const sharedClique& subtree); - - /** - * Hang a new subtree off of the existing tree. This finds the appropriate - * parent clique for the subtree (which may be the root), and updates the - * nodes index with the new cliques in the subtree. None of the frontal - * variables in the subtree may appear in the separators of the existing - * BayesTree. - */ - void insert(const sharedClique& subtree); - - /** Insert a new conditional - * This function only applies for Symbolic case with IndexCondtional, - * We make it static so that it won't be compiled in GaussianConditional case. - * */ - static void insert(BayesTree& bayesTree, const sharedConditional& conditional); - - /** - * Insert a new clique corresponding to the given Bayes net. - * It is the caller's responsibility to decide whether the given Bayes net is a valid clique, - * i.e. all the variables (frontal and separator) are connected - */ - sharedClique insert(const sharedConditional& clique, - std::list& children, bool isRootClique = false); - - /** - * Create a clone of this object as a shared pointer - * Necessary for inheritance in matlab interface - */ - virtual shared_ptr clone() const { - return shared_ptr(new This(*this)); - } - - protected: - - /** private helper method for saving the Tree to a text file in GraphViz format */ - void saveGraph(std::ostream &s, sharedClique clique, const IndexFormatter& indexFormatter, - int parentnum = 0) const; - - /** Gather data on a single clique */ - void getCliqueData(CliqueData& stats, sharedClique clique) const; - - /** remove a clique: warning, can result in a forest */ - void removeClique(sharedClique clique); - - /** add a clique (top down) */ - sharedClique addClique(const sharedConditional& conditional, const sharedClique& parent_clique = sharedClique()); - - /** add a clique (top down) */ - void addClique(const sharedClique& clique, const sharedClique& parent_clique = sharedClique()); - - /** add a clique (bottom up) */ - sharedClique addClique(const sharedConditional& conditional, std::list& child_cliques); - - /** - * Add a conditional to the front of a clique, i.e. a conditional whose - * parents are already in the clique or its separators. This function does - * not check for this condition, it just updates the data structures. - */ - static void addToCliqueFront(BayesTree& bayesTree, - const sharedConditional& conditional, const sharedClique& clique); - - /** Fill the nodes index for a subtree */ - void fillNodesIndex(const sharedClique& subtree); - - /** Helper function to build a non-symbolic tree (e.g. Gaussian) using a - * symbolic tree, used in the BT(BN) constructor. - */ - void recursiveTreeBuild(const boost::shared_ptr >& symbolic, - const std::vector >& conditionals, - const typename BayesTree::sharedClique& parent); - - private: - - /** deep copy to another tree */ - void cloneTo(This& newTree) const; - - /** deep copy to another tree */ - void cloneTo(This& newTree, const sharedClique& subtree, const sharedClique& parent) const; - - /** Serialization function */ - friend class boost::serialization::access; - template - void serialize(ARCHIVE & ar, const unsigned int version) { - ar & BOOST_SERIALIZATION_NVP(nodes_); - ar & BOOST_SERIALIZATION_NVP(root_); - } - - /// @} - - }; // BayesTree - - - /* ************************************************************************* */ - template - void _BayesTree_dim_adder( - std::vector& dims, - const typename BayesTree::sharedClique& clique) { - - if(clique) { - // Add dims from this clique - for(typename CONDITIONAL::const_iterator it = (*clique)->beginFrontals(); it != (*clique)->endFrontals(); ++it) - dims[*it] = (*clique)->dim(it); - - // Traverse children - typedef typename BayesTree::sharedClique sharedClique; - BOOST_FOREACH(const sharedClique& child, clique->children()) { - _BayesTree_dim_adder(dims, child); - } - } - } - - /* ************************************************************************* */ - template - boost::shared_ptr allocateVectorValues(const BayesTree& bt) { - std::vector dimensions(bt.nodes().size(), 0); - _BayesTree_dim_adder(dimensions, bt.root()); - return boost::shared_ptr(new VectorValues(dimensions)); - } - - - /* ************************************************************************* */ - /** - * A Clique in the tree is an incomplete Bayes net: the variables - * in the Bayes net are the frontal nodes, and the variables conditioned - * on are the separator. We also have pointers up and down the tree. - * - * Since our Conditional class already handles multiple frontal variables, - * this Clique contains exactly 1 conditional. - * - * This is the default clique type in a BayesTree, but some algorithms, like - * iSAM2 (see ISAM2Clique), use a different clique type in order to store - * extra data along with the clique. - */ - template - struct BayesTreeClique : public BayesTreeCliqueBase, CONDITIONAL> { - public: - typedef CONDITIONAL ConditionalType; - typedef BayesTreeClique This; - typedef BayesTreeCliqueBase Base; - typedef boost::shared_ptr shared_ptr; - typedef boost::weak_ptr weak_ptr; - BayesTreeClique() {} - BayesTreeClique(const typename ConditionalType::shared_ptr& conditional) : Base(conditional) {} - BayesTreeClique(const std::pair& result) : Base(result) {} - - private: - /** Serialization function */ - friend class boost::serialization::access; - template - void serialize(ARCHIVE & ar, const unsigned int version) { - ar & BOOST_SERIALIZATION_BASE_OBJECT_NVP(Base); - } - }; - -} /// namespace gtsam - -#include -#include +/* ---------------------------------------------------------------------------- + + * GTSAM Copyright 2010, Georgia Tech Research Corporation, + * Atlanta, Georgia 30332-0415 + * All Rights Reserved + * Authors: Frank Dellaert, et al. (see THANKS for the full author list) + + * See LICENSE for the license information + + * -------------------------------------------------------------------------- */ + +/** + * @file BayesTree.h + * @brief Bayes Tree is a tree of cliques of a Bayes Chain + * @author Frank Dellaert + */ + +// \callgraph + +#pragma once + +#include +#include + +#include +#include +#include + +namespace gtsam { + + // Forward declarations + template class FactorGraph; + template class JunctionTree; + + /* ************************************************************************* */ + /** clique statistics */ + struct GTSAM_EXPORT BayesTreeCliqueStats { + double avgConditionalSize; + std::size_t maxConditionalSize; + double avgSeparatorSize; + std::size_t maxSeparatorSize; + void print(const std::string& s = "") const ; + }; + + /** store all the sizes */ + struct GTSAM_EXPORT BayesTreeCliqueData { + std::vector conditionalSizes; + std::vector separatorSizes; + BayesTreeCliqueStats getStats() const; + }; + + /* ************************************************************************* */ + /** + * Bayes tree + * @tparam CONDITIONAL The type of the conditional densities, i.e. the type of node in the underlying Bayes chain, + * which could be a ConditionalProbabilityTable, a GaussianConditional, or a SymbolicConditional. + * @tparam CLIQUE The type of the clique data structure, defaults to BayesTreeClique, normally do not change this + * as it is only used when developing special versions of BayesTree, e.g. for ISAM2. + * + * \addtogroup Multifrontal + * \nosubgrouping + */ + template + class BayesTree + { + protected: + typedef BayesTree This; + typedef boost::shared_ptr shared_ptr; + + public: + typedef CLIQUE Clique; ///< The clique type, normally BayesTreeClique + typedef boost::shared_ptr sharedClique; ///< Shared pointer to a clique + typedef Clique Node; ///< Synonym for Clique (TODO: remove) + typedef sharedClique sharedNode; ///< Synonym for sharedClique (TODO: remove) + typedef typename CLIQUE::ConditionalType ConditionalType; + typedef boost::shared_ptr sharedConditional; + typedef typename CLIQUE::BayesNetType BayesNetType; + typedef boost::shared_ptr sharedBayesNet; + typedef typename CLIQUE::FactorType FactorType; + typedef boost::shared_ptr sharedFactor; + typedef typename CLIQUE::FactorGraphType FactorGraphType; + typedef boost::shared_ptr sharedFactorGraph; + typedef typename FactorGraphType::Eliminate Eliminate; + typedef typename CLIQUE::EliminationTraitsType EliminationTraitsType; + + /** A convenience class for a list of shared cliques */ + typedef FastList Cliques; + + /** Map from keys to Clique */ + typedef FastMap Nodes; + + protected: + + /** Map from indices to Clique */ + Nodes nodes_; + + /** Root cliques */ + std::vector roots_; + + /// @name Standard Constructors + /// @{ + + /** Create an empty Bayes Tree */ + BayesTree() {} + + /** Copy constructor */ + BayesTree(const This& other); + + /// @} + + /** Assignment operator */ + This& operator=(const This& other); + + /// @name Testable + /// @{ + + /** check equality */ + bool equals(const This& other, double tol = 1e-9) const; + + public: + /** print */ + void print(const std::string& s = "", + const KeyFormatter& keyFormatter = DefaultKeyFormatter) const; + /// @} + + /// @name Standard Interface + /// @{ + + /** number of cliques */ + size_t size() const; + + /** Check if there are any cliques in the tree */ + inline bool empty() const { + return nodes_.empty(); + } + + /** return nodes */ + const Nodes& nodes() const { return nodes_; } + + /** Access node by variable */ + const sharedNode operator[](Key j) const { return nodes_.at(j); } + + /** return root cliques */ + const std::vector& roots() const { return roots_; } + + /** alternate syntax for matlab: find the clique that contains the variable with Index j */ + const sharedClique& clique(Key j) const { + typename Nodes::const_iterator c = nodes_.find(j); + if(c == nodes_.end()) + throw std::out_of_range("Requested the BayesTree clique for a key that is not in the BayesTree"); + else + return c->second; + } + + /** Gather data on all cliques */ + BayesTreeCliqueData getCliqueData() const; + + /** Collect number of cliques with cached separator marginals */ + size_t numCachedSeparatorMarginals() const; + + /** Return marginal on any variable. Note that this actually returns a conditional, for which a + * solution may be directly obtained by calling .solve() on the returned object. + * Alternatively, it may be directly used as its factor base class. For example, for Gaussian + * systems, this returns a GaussianConditional, which inherits from JacobianFactor and + * GaussianFactor. */ + sharedConditional marginalFactor(Key j, const Eliminate& function = EliminationTraitsType::DefaultEliminate) const; + + /** + * return joint on two variables + * Limitation: can only calculate joint if cliques are disjoint or one of them is root + */ + sharedFactorGraph joint(Index j1, Index j2, const Eliminate& function = EliminationTraitsType::DefaultEliminate) const; + + /** + * return joint on two variables as a BayesNet + * Limitation: can only calculate joint if cliques are disjoint or one of them is root + */ + sharedBayesNet jointBayesNet(Index j1, Index j2, const Eliminate& function = EliminationTraitsType::DefaultEliminate) const; + + /** + * Read only with side effects + */ + + /** saves the Tree to a text file in GraphViz format */ + void saveGraph(const std::string& s, const KeyFormatter& keyFormatter = DefaultKeyFormatter) const; + + /// @} + /// @name Advanced Interface + /// @{ + + /** + * Find parent clique of a conditional. It will look at all parents and + * return the one with the lowest index in the ordering. + */ + template + Index findParentClique(const CONTAINER& parents) const; + + /** Remove all nodes */ + void clear(); + + /** Clear all shortcut caches - use before timing on marginal calculation to avoid residual cache data */ + void deleteCachedShortcuts(); + + /** + * Remove path from clique to root and return that path as factors + * plus a list of orphaned subtree roots. Used in removeTop below. + */ + void removePath(sharedClique clique, BayesNetType& bn, Cliques& orphans); + + /** + * Given a list of indices, turn "contaminated" part of the tree back into a factor graph. + * Factors and orphans are added to the in/out arguments. + */ + void removeTop(const std::vector& keys, BayesNetType& bn, Cliques& orphans); + + /** + * Remove the requested subtree. */ + Cliques removeSubtree(const sharedClique& subtree); + + /** Insert a new subtree with known parent clique. This function does not check that the + * specified parent is the correct parent. This function updates all of the internal data + * structures associated with adding a subtree, such as populating the nodes index. */ + void insertRoot(const sharedClique& subtree); + + /** add a clique (top down) */ + void addClique(const sharedClique& clique, const sharedClique& parent_clique = sharedClique()); + + /** Add all cliques in this BayesTree to the specified factor graph */ + void addFactorsToGraph(FactorGraph& graph) const; + + protected: + + /** private helper method for saving the Tree to a text file in GraphViz format */ + void saveGraph(std::ostream &s, sharedClique clique, const KeyFormatter& keyFormatter, + int parentnum = 0) const; + + /** Gather data on a single clique */ + void getCliqueData(BayesTreeCliqueData& stats, sharedClique clique) const; + + /** remove a clique: warning, can result in a forest */ + void removeClique(sharedClique clique); + + /** Fill the nodes index for a subtree */ + void fillNodesIndex(const sharedClique& subtree); + + // Friend JunctionTree because it directly fills roots and nodes index. + template friend class JunctionTree; + + private: + /** Serialization function */ + friend class boost::serialization::access; + template + void serialize(ARCHIVE & ar, const unsigned int version) { + ar & BOOST_SERIALIZATION_NVP(nodes_); + ar & BOOST_SERIALIZATION_NVP(roots_); + } + + /// @} + + }; // BayesTree + + /* ************************************************************************* */ + template + class BayesTreeOrphanWrapper : public CLIQUE::ConditionalType + { + public: + typedef CLIQUE CliqueType; + typedef typename CLIQUE::ConditionalType Base; + + boost::shared_ptr clique; + + BayesTreeOrphanWrapper(const boost::shared_ptr& clique) : + clique(clique) + { + // Store parent keys in our base type factor so that eliminating those parent keys will pull + // this subtree into the elimination. + this->keys_.assign(clique->conditional()->beginParents(), clique->conditional()->endParents()); + } + }; + +} /// namespace gtsam diff --git a/gtsam/inference/BayesTreeCliqueBase-inl.h b/gtsam/inference/BayesTreeCliqueBase-inl.h deleted file mode 100644 index e2ac492b7..000000000 --- a/gtsam/inference/BayesTreeCliqueBase-inl.h +++ /dev/null @@ -1,301 +0,0 @@ -/* ---------------------------------------------------------------------------- - - * GTSAM Copyright 2010, Georgia Tech Research Corporation, - * Atlanta, Georgia 30332-0415 - * All Rights Reserved - * Authors: Frank Dellaert, et al. (see THANKS for the full author list) - - * See LICENSE for the license information - * -------------------------------------------------------------------------- */ - -/** - * @file BayesTreeCliqueBase-inl.h - * @brief Base class for cliques of a BayesTree - * @author Richard Roberts and Frank Dellaert - */ - -#pragma once - -#include -#include - -namespace gtsam { - - /* ************************************************************************* */ - template - void BayesTreeCliqueBase::assertInvariants() const { - } - - /* ************************************************************************* */ - template - std::vector BayesTreeCliqueBase::separator_setminus_B( - derived_ptr B) const { - sharedConditional p_F_S = this->conditional(); - std::vector &indicesB = B->conditional()->keys(); - std::vector S_setminus_B; - std::set_difference(p_F_S->beginParents(), p_F_S->endParents(), // - indicesB.begin(), indicesB.end(), back_inserter(S_setminus_B)); - return S_setminus_B; - } - - /* ************************************************************************* */ - template - std::vector BayesTreeCliqueBase::shortcut_indices( - derived_ptr B, const FactorGraph& p_Cp_B) const - { - gttic(shortcut_indices); - std::set allKeys = p_Cp_B.keys(); - std::vector &indicesB = B->conditional()->keys(); - std::vector S_setminus_B = separator_setminus_B(B); // TODO, get as argument? - std::vector keep; - // keep = S\B intersect allKeys - std::set_intersection(S_setminus_B.begin(), S_setminus_B.end(), // - allKeys.begin(), allKeys.end(), back_inserter(keep)); - // keep += B intersect allKeys - std::set_intersection(indicesB.begin(), indicesB.end(), // - allKeys.begin(), allKeys.end(), back_inserter(keep)); - // BOOST_FOREACH(Index j, keep) std::cout << j << " "; std::cout << std::endl; - return keep; - } - - /* ************************************************************************* */ - template - BayesTreeCliqueBase::BayesTreeCliqueBase( - const sharedConditional& conditional) : - conditional_(conditional) { - assertInvariants(); - } - - /* ************************************************************************* */ - template - BayesTreeCliqueBase::BayesTreeCliqueBase( - const std::pair >& result) : - conditional_(result.first) { - assertInvariants(); - } - - /* ************************************************************************* */ - template - void BayesTreeCliqueBase::print(const std::string& s, - const IndexFormatter& indexFormatter) const { - conditional_->print(s, indexFormatter); - } - - /* ************************************************************************* */ - template - size_t BayesTreeCliqueBase::treeSize() const { - size_t size = 1; - BOOST_FOREACH(const derived_ptr& child, children_) - size += child->treeSize(); - return size; - } - - /* ************************************************************************* */ - template - size_t BayesTreeCliqueBase::numCachedSeparatorMarginals() const { - if (!cachedSeparatorMarginal_) - return 0; - - size_t subtree_count = 1; - BOOST_FOREACH(const derived_ptr& child, children_) - subtree_count += child->numCachedSeparatorMarginals(); - - return subtree_count; - } - - /* ************************************************************************* */ - template - void BayesTreeCliqueBase::printTree( - const std::string& indent, const IndexFormatter& indexFormatter) const { - asDerived(this)->print(indent, indexFormatter); - BOOST_FOREACH(const derived_ptr& child, children_) - child->printTree(indent + " ", indexFormatter); - } - - /* ************************************************************************* */ - template - void BayesTreeCliqueBase::permuteWithInverse( - const Permutation& inversePermutation) { - conditional_->permuteWithInverse(inversePermutation); - BOOST_FOREACH(const derived_ptr& child, children_) { - child->permuteWithInverse(inversePermutation); - } - assertInvariants(); - } - - /* ************************************************************************* */ - template - bool BayesTreeCliqueBase::reduceSeparatorWithInverse( - const internal::Reduction& inverseReduction) - { - bool changed = conditional_->reduceSeparatorWithInverse(inverseReduction); -#ifdef GTSAM_EXTRA_CONSISTENCY_CHECKS - if(!changed) { - BOOST_FOREACH(const derived_ptr& child, children_) { - assert(child->reduceSeparatorWithInverse(inverseReduction) == false); } - } -#endif - if(changed) { - BOOST_FOREACH(const derived_ptr& child, children_) { - (void) child->reduceSeparatorWithInverse(inverseReduction); } - } - assertInvariants(); - return changed; - } - - /* ************************************************************************* */ - // The shortcut density is a conditional P(S|R) of the separator of this - // clique on the root. We can compute it recursively from the parent shortcut - // P(Sp|R) as \int P(Fp|Sp) P(Sp|R), where Fp are the frontal nodes in p - /* ************************************************************************* */ - template - BayesNet BayesTreeCliqueBase::shortcut( - derived_ptr B, Eliminate function) const - { - gttic(BayesTreeCliqueBase_shortcut); - - // We only calculate the shortcut when this clique is not B - // and when the S\B is not empty - std::vector S_setminus_B = separator_setminus_B(B); - if (B.get() != this && !S_setminus_B.empty()) { - - // Obtain P(Cp||B) = P(Fp|Sp) * P(Sp||B) as a factor graph - derived_ptr parent(parent_.lock()); - gttoc(BayesTreeCliqueBase_shortcut); - FactorGraph p_Cp_B(parent->shortcut(B, function)); // P(Sp||B) - gttic(BayesTreeCliqueBase_shortcut); - p_Cp_B.push_back(parent->conditional()->toFactor()); // P(Fp|Sp) - - // Determine the variables we want to keepSet, S union B - std::vector keep = shortcut_indices(B, p_Cp_B); - - // Reduce the variable indices to start at zero - gttic(Reduce); - const Permutation reduction = internal::createReducingPermutation(p_Cp_B.keys()); - internal::Reduction inverseReduction = internal::Reduction::CreateAsInverse(reduction); - BOOST_FOREACH(const boost::shared_ptr& factor, p_Cp_B) { - if(factor) factor->reduceWithInverse(inverseReduction); } - inverseReduction.applyInverse(keep); - gttoc(Reduce); - - // Create solver that will marginalize for us - GenericSequentialSolver solver(p_Cp_B); - - // Finally, we only want to have S\B variables in the Bayes net, so - size_t nrFrontals = S_setminus_B.size(); - BayesNet result = *solver.conditionalBayesNet(keep, nrFrontals, function); - - // Undo the reduction - gttic(Undo_Reduce); - BOOST_FOREACH(const typename boost::shared_ptr& factor, p_Cp_B) { - if (factor) factor->permuteWithInverse(reduction); } - result.permuteWithInverse(reduction); - gttoc(Undo_Reduce); - - assertInvariants(); - - return result; - } else { - return BayesNet(); - } - } - - /* ************************************************************************* */ - // separator marginal, uses separator marginal of parent recursively - // P(C) = P(F|S) P(S) - /* ************************************************************************* */ - template - FactorGraph::FactorType> BayesTreeCliqueBase< - DERIVED, CONDITIONAL>::separatorMarginal(derived_ptr R, Eliminate function) const - { - gttic(BayesTreeCliqueBase_separatorMarginal); - // Check if the Separator marginal was already calculated - if (!cachedSeparatorMarginal_) { - gttic(BayesTreeCliqueBase_separatorMarginal_cachemiss); - // If this is the root, there is no separator - if (R.get() == this) { - // we are root, return empty - FactorGraph empty; - cachedSeparatorMarginal_ = empty; - } else { - // Obtain P(S) = \int P(Cp) = \int P(Fp|Sp) P(Sp) - // initialize P(Cp) with the parent separator marginal - derived_ptr parent(parent_.lock()); - gttoc(BayesTreeCliqueBase_separatorMarginal_cachemiss); // Flatten recursion in timing outline - gttoc(BayesTreeCliqueBase_separatorMarginal); - FactorGraph p_Cp(parent->separatorMarginal(R, function)); // P(Sp) - gttic(BayesTreeCliqueBase_separatorMarginal); - gttic(BayesTreeCliqueBase_separatorMarginal_cachemiss); - // now add the parent conditional - p_Cp.push_back(parent->conditional()->toFactor()); // P(Fp|Sp) - - // Reduce the variable indices to start at zero - gttic(Reduce); - const Permutation reduction = internal::createReducingPermutation(p_Cp.keys()); - internal::Reduction inverseReduction = internal::Reduction::CreateAsInverse(reduction); - BOOST_FOREACH(const boost::shared_ptr& factor, p_Cp) { - if(factor) factor->reduceWithInverse(inverseReduction); } - - // The variables we want to keepSet are exactly the ones in S - sharedConditional p_F_S = this->conditional(); - std::vector indicesS(p_F_S->beginParents(), p_F_S->endParents()); - inverseReduction.applyInverse(indicesS); - gttoc(Reduce); - - // Create solver that will marginalize for us - GenericSequentialSolver solver(p_Cp); - - cachedSeparatorMarginal_ = *(solver.jointBayesNet(indicesS, function)); - - // Undo the reduction - gttic(Undo_Reduce); - BOOST_FOREACH(const typename boost::shared_ptr& factor, p_Cp) { - if (factor) factor->permuteWithInverse(reduction); } - BOOST_FOREACH(const typename boost::shared_ptr& factor, *cachedSeparatorMarginal_) { - if (factor) factor->permuteWithInverse(reduction); } - gttoc(Undo_Reduce); - } - } else { - gttic(BayesTreeCliqueBase_separatorMarginal_cachehit); - } - - // return the shortcut P(S||B) - return *cachedSeparatorMarginal_; // return the cached version - } - - /* ************************************************************************* */ - // marginal2, uses separator marginal of parent recursively - // P(C) = P(F|S) P(S) - /* ************************************************************************* */ - template - FactorGraph::FactorType> BayesTreeCliqueBase< - DERIVED, CONDITIONAL>::marginal2(derived_ptr R, Eliminate function) const - { - gttic(BayesTreeCliqueBase_marginal2); - // initialize with separator marginal P(S) - FactorGraph p_C(this->separatorMarginal(R, function)); - // add the conditional P(F|S) - p_C.push_back(this->conditional()->toFactor()); - return p_C; - } - - /* ************************************************************************* */ - template - void BayesTreeCliqueBase::deleteCachedShortcuts() { - - // When a shortcut is requested, all of the shortcuts between it and the - // root are also generated. So, if this clique's cached shortcut is set, - // recursively call over all child cliques. Otherwise, it is unnecessary. - if (cachedSeparatorMarginal_) { - BOOST_FOREACH(derived_ptr& child, children_) { - child->deleteCachedShortcuts(); - } - - //Delete CachedShortcut for this clique - cachedSeparatorMarginal_ = boost::none; - } - - } - -} diff --git a/gtsam/inference/BayesTreeCliqueBase-inst.h b/gtsam/inference/BayesTreeCliqueBase-inst.h new file mode 100644 index 000000000..eb736f663 --- /dev/null +++ b/gtsam/inference/BayesTreeCliqueBase-inst.h @@ -0,0 +1,217 @@ +/* ---------------------------------------------------------------------------- + + * GTSAM Copyright 2010, Georgia Tech Research Corporation, + * Atlanta, Georgia 30332-0415 + * All Rights Reserved + * Authors: Frank Dellaert, et al. (see THANKS for the full author list) + + * See LICENSE for the license information + * -------------------------------------------------------------------------- */ + +/** + * @file BayesTreeCliqueBase-inst.h + * @brief Base class for cliques of a BayesTree + * @author Richard Roberts and Frank Dellaert + */ + +#pragma once + +#include +#include +#include + +namespace gtsam { + + /* ************************************************************************* */ + template + void BayesTreeCliqueBase::setEliminationResult( + const typename FactorGraphType::EliminationResult& eliminationResult) + { + conditional_ = eliminationResult.first; + } + + /* ************************************************************************* */ + template + bool BayesTreeCliqueBase::equals( + const DERIVED& other, double tol) const + { + return (!conditional_ && !other.conditional()) + || conditional_->equals(*other.conditional(), tol); + } + + /* ************************************************************************* */ + template + std::vector + BayesTreeCliqueBase::separator_setminus_B(const derived_ptr& B) const + { + FastSet p_F_S_parents(this->conditional()->beginParents(), this->conditional()->endParents()); + FastSet indicesB(B->conditional()->begin(), B->conditional()->end()); + std::vector S_setminus_B; + std::set_difference(p_F_S_parents.begin(), p_F_S_parents.end(), + indicesB.begin(), indicesB.end(), back_inserter(S_setminus_B)); + return S_setminus_B; + } + + /* ************************************************************************* */ + template + std::vector BayesTreeCliqueBase::shortcut_indices( + const derived_ptr& B, const FactorGraphType& p_Cp_B) const + { + gttic(shortcut_indices); + FastSet allKeys = p_Cp_B.keys(); + FastSet indicesB(B->conditional()->begin(), B->conditional()->end()); + std::vector S_setminus_B = separator_setminus_B(B); + std::vector keep; + // keep = S\B intersect allKeys (S_setminus_B is already sorted) + std::set_intersection(S_setminus_B.begin(), S_setminus_B.end(), // + allKeys.begin(), allKeys.end(), back_inserter(keep)); + // keep += B intersect allKeys + std::set_intersection(indicesB.begin(), indicesB.end(), // + allKeys.begin(), allKeys.end(), back_inserter(keep)); + return keep; + } + + /* ************************************************************************* */ + template + void BayesTreeCliqueBase::print( + const std::string& s, const KeyFormatter& keyFormatter) const + { + conditional_->print(s, keyFormatter); + } + + /* ************************************************************************* */ + template + size_t BayesTreeCliqueBase::treeSize() const { + size_t size = 1; + BOOST_FOREACH(const derived_ptr& child, children) + size += child->treeSize(); + return size; + } + + /* ************************************************************************* */ + template + size_t BayesTreeCliqueBase::numCachedSeparatorMarginals() const + { + if (!cachedSeparatorMarginal_) + return 0; + + size_t subtree_count = 1; + BOOST_FOREACH(const derived_ptr& child, children) + subtree_count += child->numCachedSeparatorMarginals(); + + return subtree_count; + } + + /* ************************************************************************* */ + // The shortcut density is a conditional P(S|R) of the separator of this + // clique on the root. We can compute it recursively from the parent shortcut + // P(Sp|R) as \int P(Fp|Sp) P(Sp|R), where Fp are the frontal nodes in p + /* ************************************************************************* */ + template + typename BayesTreeCliqueBase::BayesNetType + BayesTreeCliqueBase::shortcut(const derived_ptr& B, Eliminate function) const + { + gttic(BayesTreeCliqueBase_shortcut); + // We only calculate the shortcut when this clique is not B + // and when the S\B is not empty + std::vector S_setminus_B = separator_setminus_B(B); + if (!parent_.expired() /*(if we're not the root)*/ && !S_setminus_B.empty()) + { + // Obtain P(Cp||B) = P(Fp|Sp) * P(Sp||B) as a factor graph + derived_ptr parent(parent_.lock()); + gttoc(BayesTreeCliqueBase_shortcut); + FactorGraphType p_Cp_B(parent->shortcut(B, function)); // P(Sp||B) + gttic(BayesTreeCliqueBase_shortcut); + p_Cp_B += parent->conditional_; // P(Fp|Sp) + + // Determine the variables we want to keepSet, S union B + std::vector keep = shortcut_indices(B, p_Cp_B); + + // Marginalize out everything except S union B + boost::shared_ptr p_S_B = p_Cp_B.marginal(keep, function); + return *p_S_B->eliminatePartialSequential(S_setminus_B, function).first; + } + else + { + return BayesNetType(); + } + } + + /* ************************************************************************* */ + // separator marginal, uses separator marginal of parent recursively + // P(C) = P(F|S) P(S) + /* ************************************************************************* */ + template + typename BayesTreeCliqueBase::FactorGraphType + BayesTreeCliqueBase::separatorMarginal(Eliminate function) const + { + gttic(BayesTreeCliqueBase_separatorMarginal); + // Check if the Separator marginal was already calculated + if (!cachedSeparatorMarginal_) + { + gttic(BayesTreeCliqueBase_separatorMarginal_cachemiss); + // If this is the root, there is no separator + if (parent_.expired() /*(if we're the root)*/) + { + // we are root, return empty + FactorGraphType empty; + cachedSeparatorMarginal_ = empty; + } + else + { + // Obtain P(S) = \int P(Cp) = \int P(Fp|Sp) P(Sp) + // initialize P(Cp) with the parent separator marginal + derived_ptr parent(parent_.lock()); + gttoc(BayesTreeCliqueBase_separatorMarginal_cachemiss); // Flatten recursion in timing outline + gttoc(BayesTreeCliqueBase_separatorMarginal); + FactorGraphType p_Cp(parent->separatorMarginal(function)); // P(Sp) + gttic(BayesTreeCliqueBase_separatorMarginal); + gttic(BayesTreeCliqueBase_separatorMarginal_cachemiss); + // now add the parent conditional + p_Cp += parent->conditional_; // P(Fp|Sp) + + // The variables we want to keepSet are exactly the ones in S + std::vector indicesS(this->conditional()->beginParents(), this->conditional()->endParents()); + cachedSeparatorMarginal_ = *p_Cp.marginalMultifrontalBayesNet(Ordering(indicesS), boost::none, function); + } + } + + // return the shortcut P(S||B) + return *cachedSeparatorMarginal_; // return the cached version + } + + /* ************************************************************************* */ + // marginal2, uses separator marginal of parent recursively + // P(C) = P(F|S) P(S) + /* ************************************************************************* */ + template + typename BayesTreeCliqueBase::FactorGraphType + BayesTreeCliqueBase::marginal2(Eliminate function) const + { + gttic(BayesTreeCliqueBase_marginal2); + // initialize with separator marginal P(S) + FactorGraphType p_C = this->separatorMarginal(function); + // add the conditional P(F|S) + p_C += boost::shared_ptr(this->conditional_); + return p_C; + } + + /* ************************************************************************* */ + template + void BayesTreeCliqueBase::deleteCachedShortcuts() { + + // When a shortcut is requested, all of the shortcuts between it and the + // root are also generated. So, if this clique's cached shortcut is set, + // recursively call over all child cliques. Otherwise, it is unnecessary. + if (cachedSeparatorMarginal_) { + BOOST_FOREACH(derived_ptr& child, children) { + child->deleteCachedShortcuts(); + } + + //Delete CachedShortcut for this clique + cachedSeparatorMarginal_ = boost::none; + } + + } + +} diff --git a/gtsam/inference/BayesTreeCliqueBase.h b/gtsam/inference/BayesTreeCliqueBase.h index 5e06aea1c..56f4744fe 100644 --- a/gtsam/inference/BayesTreeCliqueBase.h +++ b/gtsam/inference/BayesTreeCliqueBase.h @@ -1,273 +1,168 @@ -/* ---------------------------------------------------------------------------- - - * GTSAM Copyright 2010, Georgia Tech Research Corporation, - * Atlanta, Georgia 30332-0415 - * All Rights Reserved - * Authors: Frank Dellaert, et al. (see THANKS for the full author list) - - * See LICENSE for the license information - - * -------------------------------------------------------------------------- */ - -/** - * @file BayesTreeCliqueBase.h - * @brief Base class for cliques of a BayesTree - * @author Richard Roberts and Frank Dellaert - */ - -#pragma once - -#include -#include -#include - -#include -#include -#include - -namespace gtsam { - template class BayesTree; -} - -namespace gtsam { - - /** - * This is the base class for BayesTree cliques. The default and standard - * derived type is BayesTreeClique, but some algorithms, like iSAM2, use a - * different clique type in order to store extra data along with the clique. - * - * This class is templated on the derived class (i.e. the curiously recursive - * template pattern). The advantage of this over using virtual classes is - * that it avoids the need for casting to get the derived type. This is - * possible because all cliques in a BayesTree are the same type - if they - * were not then we'd need a virtual class. - * - * @tparam DERIVED The derived clique type. - * @tparam CONDITIONAL The conditional type. - * \nosubgrouping - */ - template - struct BayesTreeCliqueBase { - - public: - typedef BayesTreeCliqueBase This; - typedef DERIVED DerivedType; - typedef CONDITIONAL ConditionalType; - typedef boost::shared_ptr sharedConditional; - typedef boost::shared_ptr shared_ptr; - typedef boost::weak_ptr weak_ptr; - typedef boost::shared_ptr derived_ptr; - typedef boost::weak_ptr derived_weak_ptr; - typedef typename ConditionalType::FactorType FactorType; - typedef typename FactorGraph::Eliminate Eliminate; - - protected: - - /// @name Standard Constructors - /// @{ - - /** Default constructor */ - BayesTreeCliqueBase() {} - - /** Construct from a conditional, leaving parent and child pointers uninitialized */ - BayesTreeCliqueBase(const sharedConditional& conditional); - - /** Construct from an elimination result, which is a pair */ - BayesTreeCliqueBase( - const std::pair >& result); - - /// @} - - /// This stores the Cached separator margnal P(S) - mutable boost::optional > cachedSeparatorMarginal_; - - public: - sharedConditional conditional_; - derived_weak_ptr parent_; - FastList children_; - - /// @name Testable - /// @{ - - /** check equality */ - bool equals(const This& other, double tol = 1e-9) const { - return (!conditional_ && !other.conditional()) - || conditional_->equals(*other.conditional(), tol); - } - - /** print this node */ - void print(const std::string& s = "", const IndexFormatter& indexFormatter = - DefaultIndexFormatter) const; - - /** print this node and entire subtree below it */ - void printTree(const std::string& indent = "", - const IndexFormatter& indexFormatter = DefaultIndexFormatter) const; - - /// @} - /// @name Standard Interface - /// @{ - - /** Access the conditional */ - const sharedConditional& conditional() const { - return conditional_; - } - - /** is this the root of a Bayes tree ? */ - inline bool isRoot() const { - return parent_.expired(); - } - - /** The size of subtree rooted at this clique, i.e., nr of Cliques */ - size_t treeSize() const; - - /** Collect number of cliques with cached separator marginals */ - size_t numCachedSeparatorMarginals() const; - - /** The arrow operator accesses the conditional */ - const ConditionalType* operator->() const { - return conditional_.get(); - } - - /** return the const reference of children */ - const std::list& children() const { - return children_; - } - - /** return a shared_ptr to the parent clique */ - derived_ptr parent() const { - return parent_.lock(); - } - - /// @} - /// @name Advanced Interface - /// @{ - - /** The arrow operator accesses the conditional */ - ConditionalType* operator->() { - return conditional_.get(); - } - - /** return the reference of children non-const version*/ - FastList& children() { - return children_; - } - - /** Construct shared_ptr from a conditional, leaving parent and child pointers uninitialized */ - static derived_ptr Create(const sharedConditional& conditional) { - return boost::make_shared(conditional); - } - - /** Construct shared_ptr from a FactorGraph::EliminationResult. In this class - * the conditional part is kept and the factor part is ignored, but in derived clique - * types, such as ISAM2Clique, the factor part is kept as a cached factor. - * @param result An elimination result, which is a pair - */ - static derived_ptr Create(const std::pair >& result) { - return boost::make_shared(result); - } - - /** Returns a new clique containing a copy of the conditional but without - * the parent and child clique pointers. - */ - derived_ptr clone() const { - return Create(sharedConditional(new ConditionalType(*conditional_))); - } - - /** Permute the variables in the whole subtree rooted at this clique */ - void permuteWithInverse(const Permutation& inversePermutation); - - /** Permute variables when they only appear in the separators. In this - * case the running intersection property will be used to prevent always - * traversing the whole tree. Returns whether any separator variables in - * this subtree were reordered. - */ - bool reduceSeparatorWithInverse(const internal::Reduction& inverseReduction); - - /** return the conditional P(S|Root) on the separator given the root */ - BayesNet shortcut(derived_ptr root, Eliminate function) const; - - /** return the marginal P(S) on the separator */ - FactorGraph separatorMarginal(derived_ptr root, Eliminate function) const; - - /** return the marginal P(C) of the clique, using marginal caching */ - FactorGraph marginal2(derived_ptr root, Eliminate function) const; - - /** - * This deletes the cached shortcuts of all cliques (subtree) below this clique. - * This is performed when the bayes tree is modified. - */ - void deleteCachedShortcuts(); - - const boost::optional >& cachedSeparatorMarginal() const { - return cachedSeparatorMarginal_; } - - friend class BayesTree ; - - protected: - - /// assert invariants that have to hold in a clique - void assertInvariants() const; - - /// Calculate set \f$ S \setminus B \f$ for shortcut calculations - std::vector separator_setminus_B(derived_ptr B) const; - - /// Calculate set \f$ S_p \cap B \f$ for shortcut calculations - std::vector parent_separator_intersection_B(derived_ptr B) const; - - /** - * Determine variable indices to keep in recursive separator shortcut calculation - * The factor graph p_Cp_B has keys from the parent clique Cp and from B. - * But we only keep the variables not in S union B. - */ - std::vector shortcut_indices(derived_ptr B, - const FactorGraph& p_Cp_B) const; - - /** Non-recursive delete cached shortcuts and marginals - internal only. */ - void deleteCachedShortcutsNonRecursive() { cachedSeparatorMarginal_ = boost::none; } - - private: - - /** - * Cliques cannot be copied except by the clone() method, which does not - * copy the parent and child pointers. - */ - BayesTreeCliqueBase(const This& other) { - assert(false); - } - - /** Cliques cannot be copied except by the clone() method, which does not - * copy the parent and child pointers. - */ - This& operator=(const This& other) { - assert(false); - return *this; - } - - /** Serialization function */ - friend class boost::serialization::access; - template - void serialize(ARCHIVE & ar, const unsigned int version) { - ar & BOOST_SERIALIZATION_NVP(conditional_); - ar & BOOST_SERIALIZATION_NVP(parent_); - ar & BOOST_SERIALIZATION_NVP(children_); - } - - /// @} - - }; - // \struct Clique - - template - const DERIVED* asDerived( - const BayesTreeCliqueBase* base) { - return static_cast(base); - } - - template - DERIVED* asDerived(BayesTreeCliqueBase* base) { - return static_cast(base); - } - -} +/* ---------------------------------------------------------------------------- + + * GTSAM Copyright 2010, Georgia Tech Research Corporation, + * Atlanta, Georgia 30332-0415 + * All Rights Reserved + * Authors: Frank Dellaert, et al. (see THANKS for the full author list) + + * See LICENSE for the license information + + * -------------------------------------------------------------------------- */ + +/** + * @file BayesTreeCliqueBase.h + * @brief Base class for cliques of a BayesTree + * @author Richard Roberts and Frank Dellaert + */ + +#pragma once + +#include +#include + +namespace gtsam { + + // Forward declarations + template class BayesTree; + template struct EliminationTraits; + + /** + * This is the base class for BayesTree cliques. The default and standard derived type is + * BayesTreeClique, but some algorithms, like iSAM2, use a different clique type in order to store + * extra data along with the clique. + * + * This class is templated on the derived class (i.e. the curiously recursive template pattern). + * The advantage of this over using virtual classes is that it avoids the need for casting to get + * the derived type. This is possible because all cliques in a BayesTree are the same type - if + * they were not then we'd need a virtual class. + * + * @tparam DERIVED The derived clique type. + * @tparam CONDITIONAL The conditional type. + * \nosubgrouping */ + template + class BayesTreeCliqueBase + { + private: + typedef BayesTreeCliqueBase This; + typedef DERIVED DerivedType; + typedef EliminationTraits EliminationTraitsType; + typedef boost::shared_ptr shared_ptr; + typedef boost::weak_ptr weak_ptr; + typedef boost::shared_ptr derived_ptr; + typedef boost::weak_ptr derived_weak_ptr; + + public: + typedef FACTORGRAPH FactorGraphType; + typedef typename EliminationTraitsType::BayesNetType BayesNetType; + typedef typename BayesNetType::ConditionalType ConditionalType; + typedef boost::shared_ptr sharedConditional; + typedef typename FactorGraphType::FactorType FactorType; + typedef typename FactorGraphType::Eliminate Eliminate; + + protected: + + /// @name Standard Constructors + /// @{ + + /** Default constructor */ + BayesTreeCliqueBase() {} + + /** Construct from a conditional, leaving parent and child pointers uninitialized */ + BayesTreeCliqueBase(const sharedConditional& conditional) : conditional_(conditional) {} + + /// @} + + /// This stores the Cached separator margnal P(S) + mutable boost::optional cachedSeparatorMarginal_; + + public: + sharedConditional conditional_; + derived_weak_ptr parent_; + std::vector children; + + /// Fill the elimination result produced during elimination. Here this just stores the + /// conditional and ignores the remaining factor, but this is overridden in ISAM2Clique + /// to also cache the remaining factor. + void setEliminationResult(const typename FactorGraphType::EliminationResult& eliminationResult); + + /// @name Testable + /// @{ + + /** check equality */ + bool equals(const DERIVED& other, double tol = 1e-9) const; + + /** print this node */ + void print(const std::string& s = "", const KeyFormatter& keyFormatter = DefaultKeyFormatter) const; + + /// @} + /// @name Standard Interface + /// @{ + + /** Access the conditional */ + const sharedConditional& conditional() const { return conditional_; } + + /** is this the root of a Bayes tree ? */ + inline bool isRoot() const { return parent_.expired(); } + + /** The size of subtree rooted at this clique, i.e., nr of Cliques */ + size_t treeSize() const; + + /** Collect number of cliques with cached separator marginals */ + size_t numCachedSeparatorMarginals() const; + + /** return a shared_ptr to the parent clique */ + derived_ptr parent() const { return parent_.lock(); } + + /// @} + /// @name Advanced Interface + /// @{ + + /** return the conditional P(S|Root) on the separator given the root */ + BayesNetType shortcut(const derived_ptr& root, Eliminate function = EliminationTraitsType::DefaultEliminate) const; + + /** return the marginal P(S) on the separator */ + FactorGraphType separatorMarginal(Eliminate function = EliminationTraitsType::DefaultEliminate) const; + + /** return the marginal P(C) of the clique, using marginal caching */ + FactorGraphType marginal2(Eliminate function = EliminationTraitsType::DefaultEliminate) const; + + /** + * This deletes the cached shortcuts of all cliques (subtree) below this clique. + * This is performed when the bayes tree is modified. + */ + void deleteCachedShortcuts(); + + const boost::optional& cachedSeparatorMarginal() const { + return cachedSeparatorMarginal_; } + + friend class BayesTree; + + protected: + + /// Calculate set \f$ S \setminus B \f$ for shortcut calculations + std::vector separator_setminus_B(const derived_ptr& B) const; + + /** Determine variable indices to keep in recursive separator shortcut calculation The factor + * graph p_Cp_B has keys from the parent clique Cp and from B. But we only keep the variables + * not in S union B. */ + std::vector shortcut_indices(const derived_ptr& B, const FactorGraphType& p_Cp_B) const; + + /** Non-recursive delete cached shortcuts and marginals - internal only. */ + void deleteCachedShortcutsNonRecursive() { cachedSeparatorMarginal_ = boost::none; } + + private: + + /** Serialization function */ + friend class boost::serialization::access; + template + void serialize(ARCHIVE & ar, const unsigned int version) { + ar & BOOST_SERIALIZATION_NVP(conditional_); + ar & BOOST_SERIALIZATION_NVP(parent_); + ar & BOOST_SERIALIZATION_NVP(children); + } + + /// @} + + }; + +} diff --git a/gtsam/inference/ClusterTree-inl.h b/gtsam/inference/ClusterTree-inl.h deleted file mode 100644 index 5be1383c4..000000000 --- a/gtsam/inference/ClusterTree-inl.h +++ /dev/null @@ -1,112 +0,0 @@ -/* ---------------------------------------------------------------------------- - - * GTSAM Copyright 2010, Georgia Tech Research Corporation, - * Atlanta, Georgia 30332-0415 - * All Rights Reserved - * Authors: Frank Dellaert, et al. (see THANKS for the full author list) - - * See LICENSE for the license information - - * -------------------------------------------------------------------------- */ - -/** - * @file ClusterTree-inl.h - * @date July 13, 2010 - * @author Kai Ni - * @author Frank Dellaert - * @brief: Collects factorgraph fragments defined on variable clusters, arranged in a tree - */ - -#pragma once - -#include -#include - -#include - -namespace gtsam { - - /* ************************************************************************* * - * Cluster - * ************************************************************************* */ - template - template - ClusterTree::Cluster::Cluster(const FG& fg, Index key, Iterator firstSeparator, Iterator lastSeparator) : - FG(fg), frontal(1, key), separator(firstSeparator, lastSeparator) {} - - /* ************************************************************************* */ - template - template - ClusterTree::Cluster::Cluster( - const FG& fg, FRONTALIT firstFrontal, FRONTALIT lastFrontal, SEPARATORIT firstSeparator, SEPARATORIT lastSeparator) : - FG(fg), frontal(firstFrontal, lastFrontal), separator(firstSeparator, lastSeparator) {} - - /* ************************************************************************* */ - template - template - ClusterTree::Cluster::Cluster( - FRONTALIT firstFrontal, FRONTALIT lastFrontal, SEPARATORIT firstSeparator, SEPARATORIT lastSeparator) : - frontal(firstFrontal, lastFrontal), separator(firstSeparator, lastSeparator) {} - - /* ************************************************************************* */ - template - void ClusterTree::Cluster::addChild(typename ClusterTree::Cluster::shared_ptr child) { - children_.push_back(child); - } - - /* ************************************************************************* */ - template - bool ClusterTree::Cluster::equals(const Cluster& other) const { - if (frontal != other.frontal) return false; - if (separator != other.separator) return false; - if (children_.size() != other.children_.size()) return false; - - typename std::list::const_iterator it1 = children_.begin(); - typename std::list::const_iterator it2 = other.children_.begin(); - for (; it1 != children_.end(); it1++, it2++) - if (!(*it1)->equals(**it2)) return false; - - return true; - } - - /* ************************************************************************* */ - template - void ClusterTree::Cluster::print(const std::string& indent, - const IndexFormatter& formatter) const { - std::cout << indent; - BOOST_FOREACH(const Index key, frontal) - std::cout << formatter(key) << " "; - std::cout << ": "; - BOOST_FOREACH(const Index key, separator) - std::cout << key << " "; - std::cout << std::endl; - } - - /* ************************************************************************* */ - template - void ClusterTree::Cluster::printTree(const std::string& indent, - const IndexFormatter& formatter) const { - print(indent, formatter); - BOOST_FOREACH(const shared_ptr& child, children_) - child->printTree(indent + " ", formatter); - } - - /* ************************************************************************* * - * ClusterTree - * ************************************************************************* */ - template - void ClusterTree::print(const std::string& str, - const IndexFormatter& formatter) const { - std::cout << str << std::endl; - if (root_) root_->printTree("", formatter); - } - - /* ************************************************************************* */ - template - bool ClusterTree::equals(const ClusterTree& other, double tol) const { - if (!root_ && !other.root_) return true; - if (!root_ || !other.root_) return false; - return root_->equals(*other.root_); - } - -} //namespace gtsam diff --git a/gtsam/inference/ClusterTree.h b/gtsam/inference/ClusterTree.h deleted file mode 100644 index b88929878..000000000 --- a/gtsam/inference/ClusterTree.h +++ /dev/null @@ -1,141 +0,0 @@ -/* ---------------------------------------------------------------------------- - - * GTSAM Copyright 2010, Georgia Tech Research Corporation, - * Atlanta, Georgia 30332-0415 - * All Rights Reserved - * Authors: Frank Dellaert, et al. (see THANKS for the full author list) - - * See LICENSE for the license information - - * -------------------------------------------------------------------------- */ - -/** - * @file ClusterTree.h - * @date July 13, 2010 - * @author Kai Ni - * @author Frank Dellaert - * @brief: Collects factorgraph fragments defined on variable clusters, arranged in a tree - */ - -#pragma once - -#include -#include -#include - -#include -#include -#include - -#include - -namespace gtsam { - - /** - * A cluster-tree is associated with a factor graph and is defined as in Koller-Friedman: - * each node k represents a subset \f$ C_k \sub X \f$, and the tree is family preserving, in that - * each factor \f$ f_i \f$ is associated with a single cluster and \f$ scope(f_i) \sub C_k \f$. - * \nosubgrouping - */ - template - class ClusterTree { - public: - // Access to factor types - typedef typename FG::KeyType KeyType; - - protected: - - // the class for subgraphs that also include the pointers to the parents and two children - class Cluster : public FG { - public: - typedef typename boost::shared_ptr shared_ptr; - typedef typename boost::weak_ptr weak_ptr; - - const std::vector frontal; // the frontal variables - const std::vector separator; // the separator variables - - protected: - - weak_ptr parent_; // the parent cluster - std::list children_; // the child clusters - const typename FG::sharedFactor eliminated_; // the eliminated factor to pass on to the parent - - public: - - /// Construct empty clique - Cluster() {} - - /** Create a node with a single frontal variable */ - template - Cluster(const FG& fg, Index key, Iterator firstSeparator, Iterator lastSeparator); - - /** Create a node with several frontal variables */ - template - Cluster(const FG& fg, FRONTALIT firstFrontal, FRONTALIT lastFrontal, SEPARATORIT firstSeparator, SEPARATORIT lastSeparator); - - /** Create a node with several frontal variables */ - template - Cluster(FRONTALIT firstFrontal, FRONTALIT lastFrontal, SEPARATORIT firstSeparator, SEPARATORIT lastSeparator); - - /// print - void print(const std::string& indent, const IndexFormatter& formatter = DefaultIndexFormatter) const; - - /// print the enire tree - void printTree(const std::string& indent, const IndexFormatter& formatter = DefaultIndexFormatter) const; - - /// check equality - bool equals(const Cluster& other) const; - - /// get a reference to the children - const std::list& children() const { return children_; } - - /// add a child - void addChild(shared_ptr child); - - /// get or set the parent - weak_ptr& parent() { return parent_; } - - }; - - /// @name Advanced Interface - /// @{ - - /// typedef for shared pointers to clusters - typedef typename Cluster::shared_ptr sharedCluster; - - /// Root cluster - sharedCluster root_; - - public: - - /// @} - /// @name Standard Constructors - /// @{ - - /// constructor of empty tree - ClusterTree() {} - - /// @} - /// @name Standard Interface - /// @{ - - /// return the root cluster - sharedCluster root() const { return root_; } - - /// @} - /// @name Testable - /// @{ - - /// print the object - void print(const std::string& str="", const IndexFormatter& formatter = DefaultIndexFormatter) const; - - /** check equality */ - bool equals(const ClusterTree& other, double tol = 1e-9) const; - - /// @} - - }; // ClusterTree - -} // namespace gtsam - -#include diff --git a/gtsam/inference/Conditional-inst.h b/gtsam/inference/Conditional-inst.h new file mode 100644 index 000000000..a22f290d9 --- /dev/null +++ b/gtsam/inference/Conditional-inst.h @@ -0,0 +1,48 @@ +/* ---------------------------------------------------------------------------- + + * GTSAM Copyright 2010, Georgia Tech Research Corporation, + * Atlanta, Georgia 30332-0415 + * All Rights Reserved + * Authors: Frank Dellaert, et al. (see THANKS for the full author list) + + * See LICENSE for the license information + + * -------------------------------------------------------------------------- */ + +/** + * @file Conditional.h + * @brief Base class for conditional densities + * @author Frank Dellaert + */ + +// \callgraph +#pragma once + +#include +#include + +#include + +namespace gtsam { + + /* ************************************************************************* */ + template + void Conditional::print(const std::string& s, const KeyFormatter& formatter) const { + std::cout << s << " P("; + BOOST_FOREACH(Key key, frontals()) + std::cout << " " << formatter(key); + if (nrParents() > 0) + std::cout << " |"; + BOOST_FOREACH(Key parent, parents()) + std::cout << " " << formatter(parent); + std::cout << ")" << std::endl; + } + + /* ************************************************************************* */ + template + bool Conditional::equals(const This& c, double tol) const + { + return nrFrontals_ == c.nrFrontals_; + } + +} diff --git a/gtsam/inference/Conditional.h b/gtsam/inference/Conditional.h index 0e3e98361..067cdd802 100644 --- a/gtsam/inference/Conditional.h +++ b/gtsam/inference/Conditional.h @@ -1,266 +1,147 @@ -/* ---------------------------------------------------------------------------- - - * GTSAM Copyright 2010, Georgia Tech Research Corporation, - * Atlanta, Georgia 30332-0415 - * All Rights Reserved - * Authors: Frank Dellaert, et al. (see THANKS for the full author list) - - * See LICENSE for the license information - - * -------------------------------------------------------------------------- */ - -/** - * @file Conditional.h - * @brief Base class for conditional densities - * @author Frank Dellaert - */ - -// \callgraph -#pragma once - -#include - -#include - -#include -#include - -namespace gtsam { - - /** - * Base class for conditional densities, templated on KEY type. This class - * provides storage for the keys involved in a conditional, and iterators and - * access to the frontal and separator keys. - * - * Derived classes *must* redefine the Factor and shared_ptr typedefs to refer - * to the associated factor type and shared_ptr type of the derived class. See - * IndexConditional and GaussianConditional for examples. - * \nosubgrouping - */ - template - class Conditional: public gtsam::Factor { - - private: - - /** Create keys by adding key in front */ - template - static std::vector MakeKeys(KEY key, ITERATOR firstParent, - ITERATOR lastParent) { - std::vector keys((lastParent - firstParent) + 1); - std::copy(firstParent, lastParent, keys.begin() + 1); - keys[0] = key; - return keys; - } - - protected: - - /** The first nFrontal variables are frontal and the rest are parents. */ - size_t nrFrontals_; - - // Calls the base class assertInvariants, which checks for unique keys - void assertInvariants() const { - Factor::assertInvariants(); - } - - public: - - typedef KEY KeyType; - typedef Conditional This; - typedef Factor Base; - - /** - * Typedef to the factor type that produces this conditional and that this - * conditional can be converted to using a factor constructor. Derived - * classes must redefine this. - */ - typedef gtsam::Factor FactorType; - - /** A shared_ptr to this class. Derived classes must redefine this. */ - typedef boost::shared_ptr shared_ptr; - - /** Iterator over keys */ - typedef typename FactorType::iterator iterator; - - /** Const iterator over keys */ - typedef typename FactorType::const_iterator const_iterator; - - /** View of the frontal keys (call frontals()) */ - typedef boost::iterator_range Frontals; - - /** View of the separator keys (call parents()) */ - typedef boost::iterator_range Parents; - - /// @name Standard Constructors - /// @{ - - /** Empty Constructor to make serialization possible */ - Conditional() : - nrFrontals_(0) { - assertInvariants(); - } - - /** No parents */ - Conditional(KeyType key) : - FactorType(key), nrFrontals_(1) { - assertInvariants(); - } - - /** Single parent */ - Conditional(KeyType key, KeyType parent) : - FactorType(key, parent), nrFrontals_(1) { - assertInvariants(); - } - - /** Two parents */ - Conditional(KeyType key, KeyType parent1, KeyType parent2) : - FactorType(key, parent1, parent2), nrFrontals_(1) { - assertInvariants(); - } - - /** Three parents */ - Conditional(KeyType key, KeyType parent1, KeyType parent2, KeyType parent3) : - FactorType(key, parent1, parent2, parent3), nrFrontals_(1) { - assertInvariants(); - } - - /// @} - /// @name Advanced Constructors - /// @{ - - /** Constructor from a frontal variable and a vector of parents */ - Conditional(KeyType key, const std::vector& parents) : - FactorType(MakeKeys(key, parents.begin(), parents.end())), nrFrontals_( - 1) { - assertInvariants(); - } - - /** Constructor from keys and nr of frontal variables */ - Conditional(const std::vector& keys, size_t nrFrontals) : - FactorType(keys), nrFrontals_(nrFrontals) { - assertInvariants(); - } - - /** Constructor from keys and nr of frontal variables */ - Conditional(const std::list& keys, size_t nrFrontals) : - FactorType(keys.begin(),keys.end()), nrFrontals_(nrFrontals) { - assertInvariants(); - } - - /// @} - /// @name Testable - /// @{ - - /** print with optional formatter */ - void print(const std::string& s = "Conditional", - const IndexFormatter& formatter = DefaultIndexFormatter) const; - - /** check equality */ - template - bool equals(const DERIVED& c, double tol = 1e-9) const { - return nrFrontals_ == c.nrFrontals_ && FactorType::equals(c, tol); - } - - /// @} - /// @name Standard Interface - /// @{ - - /** return the number of frontals */ - size_t nrFrontals() const { - return nrFrontals_; - } - - /** return the number of parents */ - size_t nrParents() const { - return FactorType::size() - nrFrontals_; - } - - /** Special accessor when there is only one frontal variable. */ - KeyType firstFrontalKey() const { - assert(nrFrontals_>0); - return FactorType::front(); - } - KeyType lastFrontalKey() const { - assert(nrFrontals_>0); - return *(endFrontals() - 1); - } - - /** return a view of the frontal keys */ - Frontals frontals() const { - return boost::make_iterator_range(beginFrontals(), endFrontals()); - } - - /** return a view of the parent keys */ - Parents parents() const { - return boost::make_iterator_range(beginParents(), endParents()); - } - - /** Iterators over frontal and parent variables. */ - const_iterator beginFrontals() const { - return FactorType::begin(); - } /// frontals() { - return boost::make_iterator_range(beginFrontals(), endFrontals()); - } - - ///TODO: comment - boost::iterator_range parents() { - return boost::make_iterator_range(beginParents(), endParents()); - } - - private: - /** Serialization function */ - friend class boost::serialization::access; - template - void serialize(ARCHIVE & ar, const unsigned int version) { - ar & BOOST_SERIALIZATION_BASE_OBJECT_NVP(Base); - ar & BOOST_SERIALIZATION_NVP(nrFrontals_); - } - - /// @} - - }; - - /* ************************************************************************* */ - template - void Conditional::print(const std::string& s, - const IndexFormatter& formatter) const { - std::cout << s << " P("; - BOOST_FOREACH(KeyType key, frontals()) - std::cout << " " << formatter(key); - if (nrParents() > 0) - std::cout << " |"; - BOOST_FOREACH(KeyType parent, parents()) - std::cout << " " << formatter(parent); - std::cout << ")" << std::endl; - } - -} // gtsam +/* ---------------------------------------------------------------------------- + + * GTSAM Copyright 2010, Georgia Tech Research Corporation, + * Atlanta, Georgia 30332-0415 + * All Rights Reserved + * Authors: Frank Dellaert, et al. (see THANKS for the full author list) + + * See LICENSE for the license information + + * -------------------------------------------------------------------------- */ + +/** + * @file Conditional.h + * @brief Base class for conditional densities + * @author Frank Dellaert + */ + +// \callgraph +#pragma once + +#include + +#include + +namespace gtsam { + + /** + * Base class for conditional densities, templated on KEY type. This class + * provides storage for the keys involved in a conditional, and iterators and + * access to the frontal and separator keys. + * + * Derived classes *must* redefine the Factor and shared_ptr typedefs to refer + * to the associated factor type and shared_ptr type of the derived class. See + * IndexConditional and GaussianConditional for examples. + * \nosubgrouping + */ + template + class Conditional + { + protected: + /** The first nrFrontal variables are frontal and the rest are parents. */ + size_t nrFrontals_; + + private: + /// Typedef to this class + typedef Conditional This; + + public: + /** View of the frontal keys (call frontals()) */ + typedef boost::iterator_range Frontals; + + /** View of the separator keys (call parents()) */ + typedef boost::iterator_range Parents; + + protected: + /// @name Standard Constructors + /// @{ + + /** Empty Constructor to make serialization possible */ + Conditional() : nrFrontals_(0) {} + + /** Constructor */ + Conditional(size_t nrFrontals) : nrFrontals_(nrFrontals) {} + + /// @} + /// @name Testable + /// @{ + + /** print with optional formatter */ + void print(const std::string& s = "Conditional", const KeyFormatter& formatter = DefaultKeyFormatter) const; + + /** check equality */ + bool equals(const This& c, double tol = 1e-9) const; + + /// @} + + public: + /// @name Standard Interface + /// @{ + + /** return the number of frontals */ + size_t nrFrontals() const { return nrFrontals_; } + + /** return the number of parents */ + size_t nrParents() const { return asFactor().size() - nrFrontals_; } + + /** Convenience function to get the first frontal key */ + Key firstFrontalKey() const { + if(nrFrontals_ > 0) + return asFactor().front(); + else + throw std::invalid_argument("Requested Conditional::firstFrontalKey from a conditional with zero frontal keys"); + } + + /** return a view of the frontal keys */ + Frontals frontals() const { return boost::make_iterator_range(beginFrontals(), endFrontals()); } + + /** return a view of the parent keys */ + Parents parents() const { return boost::make_iterator_range(beginParents(), endParents()); } + + /** Iterator pointing to first frontal key. */ + typename FACTOR::const_iterator beginFrontals() const { return asFactor().begin(); } + + /** Iterator pointing past the last frontal key. */ + typename FACTOR::const_iterator endFrontals() const { return asFactor().begin() + nrFrontals_; } + + /** Iterator pointing to the first parent key. */ + typename FACTOR::const_iterator beginParents() const { return endFrontals(); } + + /** Iterator pointing past the last parent key. */ + typename FACTOR::const_iterator endParents() const { return asFactor().end(); } + + /// @} + /// @name Advanced Interface + /// @{ + + /** Mutable iterator pointing to first frontal key. */ + typename FACTOR::iterator beginFrontals() { return asFactor().begin(); } + + /** Mutable iterator pointing past the last frontal key. */ + typename FACTOR::iterator endFrontals() { return asFactor().begin() + nrFrontals_; } + + /** Mutable iterator pointing to the first parent key. */ + typename FACTOR::iterator beginParents() { return asFactor().begin() + nrFrontals_; } + + /** Mutable iterator pointing past the last parent key. */ + typename FACTOR::iterator endParents() { return asFactor().end(); } + + private: + // Cast to factor type (non-const) (casts down to derived conditional type, then up to factor type) + FACTOR& asFactor() { return static_cast(static_cast(*this)); } + + // Cast to derived type (const) (casts down to derived conditional type, then up to factor type) + const FACTOR& asFactor() const { return static_cast(static_cast(*this)); } + + /** Serialization function */ + friend class boost::serialization::access; + template + void serialize(ARCHIVE & ar, const unsigned int version) { + ar & BOOST_SERIALIZATION_NVP(nrFrontals_); + } + + /// @} + + }; + +} // gtsam diff --git a/gtsam/inference/EliminateableFactorGraph-inst.h b/gtsam/inference/EliminateableFactorGraph-inst.h new file mode 100644 index 000000000..8ec7bea45 --- /dev/null +++ b/gtsam/inference/EliminateableFactorGraph-inst.h @@ -0,0 +1,298 @@ +/* ---------------------------------------------------------------------------- + + * GTSAM Copyright 2010, Georgia Tech Research Corporation, + * Atlanta, Georgia 30332-0415 + * All Rights Reserved + * Authors: Frank Dellaert, et al. (see THANKS for the full author list) + + * See LICENSE for the license information + + * -------------------------------------------------------------------------- */ + +/** + * @file EliminateableFactorGraph.h + * @brief Variable elimination algorithms for factor graphs + * @author Richard Roberts + * @date Apr 21, 2013 + */ + +#pragma once + +#include +#include + +namespace gtsam { + + /* ************************************************************************* */ + template + boost::shared_ptr::BayesNetType> + EliminateableFactorGraph::eliminateSequential( + OptionalOrdering ordering, const Eliminate& function, OptionalVariableIndex variableIndex) const + { + if(ordering && variableIndex) { + gttic(eliminateSequential); + // Do elimination + std::pair, boost::shared_ptr > result + = EliminationTreeType(asDerived(), *variableIndex, *ordering).eliminate(function); + // If any factors are remaining, the ordering was incomplete + if(!result.second->empty()) + throw InconsistentEliminationRequested(); + // Return the Bayes net + return result.first; + } + else if(!variableIndex) { + // If no VariableIndex provided, compute one and call this function again IMPORTANT: we check + // for no variable index first so that it's always computed if we need to call COLAMD because + // no Ordering is provided. + return eliminateSequential(ordering, function, VariableIndex(asDerived())); + } + else /*if(!ordering)*/ { + // If no Ordering provided, compute one and call this function again. We are guaranteed to + // have a VariableIndex already here because we computed one if needed in the previous 'else' + // block. + return eliminateSequential(Ordering::COLAMD(*variableIndex), function); + } + } + + /* ************************************************************************* */ + template + boost::shared_ptr::BayesTreeType> + EliminateableFactorGraph::eliminateMultifrontal( + OptionalOrdering ordering, const Eliminate& function, OptionalVariableIndex variableIndex) const + { + if(ordering && variableIndex) { + gttic(eliminateMultifrontal); + // Do elimination with given ordering + std::pair, boost::shared_ptr > result + = JunctionTreeType(EliminationTreeType(asDerived(), *variableIndex, *ordering)).eliminate(function); + // If any factors are remaining, the ordering was incomplete + if(!result.second->empty()) + throw InconsistentEliminationRequested(); + // Return the Bayes tree + return result.first; + } + else if(!variableIndex) { + // If no VariableIndex provided, compute one and call this function again IMPORTANT: we check + // for no variable index first so that it's always computed if we need to call COLAMD because + // no Ordering is provided. + return eliminateMultifrontal(ordering, function, VariableIndex(asDerived())); + } + else /*if(!ordering)*/ { + // If no Ordering provided, compute one and call this function again. We are guaranteed to + // have a VariableIndex already here because we computed one if needed in the previous 'else' + // block. + return eliminateMultifrontal(Ordering::COLAMD(*variableIndex), function); + } + } + + /* ************************************************************************* */ + template + std::pair::BayesNetType>, boost::shared_ptr > + EliminateableFactorGraph::eliminatePartialSequential( + const Ordering& ordering, const Eliminate& function, OptionalVariableIndex variableIndex) const + { + if(variableIndex) { + gttic(eliminatePartialSequential); + // Do elimination + return EliminationTreeType(asDerived(), *variableIndex, ordering).eliminate(function); + } else { + // If no variable index is provided, compute one and call this function again + return eliminatePartialSequential(ordering, function, VariableIndex(asDerived())); + } + } + + /* ************************************************************************* */ + template + std::pair::BayesNetType>, boost::shared_ptr > + EliminateableFactorGraph::eliminatePartialSequential( + const std::vector& variables, const Eliminate& function, OptionalVariableIndex variableIndex) const + { + if(variableIndex) { + gttic(eliminatePartialSequential); + // Compute full ordering + Ordering fullOrdering = Ordering::COLAMDConstrainedFirst(*variableIndex, variables); + + // Split off the part of the ordering for the variables being eliminated + Ordering ordering(fullOrdering.begin(), fullOrdering.begin() + variables.size()); + return eliminatePartialSequential(ordering, function, variableIndex); + } else { + // If no variable index is provided, compute one and call this function again + return eliminatePartialSequential(variables, function, VariableIndex(asDerived())); + } + } + + /* ************************************************************************* */ + template + std::pair::BayesTreeType>, boost::shared_ptr > + EliminateableFactorGraph::eliminatePartialMultifrontal( + const Ordering& ordering, const Eliminate& function, OptionalVariableIndex variableIndex) const + { + if(variableIndex) { + gttic(eliminatePartialMultifrontal); + // Do elimination + return JunctionTreeType(EliminationTreeType(asDerived(), *variableIndex, ordering)).eliminate(function); + } else { + // If no variable index is provided, compute one and call this function again + return eliminatePartialMultifrontal(ordering, function, VariableIndex(asDerived())); + } + } + + /* ************************************************************************* */ + template + std::pair::BayesTreeType>, boost::shared_ptr > + EliminateableFactorGraph::eliminatePartialMultifrontal( + const std::vector& variables, const Eliminate& function, OptionalVariableIndex variableIndex) const + { + if(variableIndex) { + gttic(eliminatePartialMultifrontal); + // Compute full ordering + Ordering fullOrdering = Ordering::COLAMDConstrainedFirst(*variableIndex, variables); + + // Split off the part of the ordering for the variables being eliminated + Ordering ordering(fullOrdering.begin(), fullOrdering.begin() + variables.size()); + return eliminatePartialMultifrontal(ordering, function, variableIndex); + } else { + // If no variable index is provided, compute one and call this function again + return eliminatePartialMultifrontal(variables, function, VariableIndex(asDerived())); + } + } + + /* ************************************************************************* */ + template + boost::shared_ptr::BayesNetType> + EliminateableFactorGraph::marginalMultifrontalBayesNet( + boost::variant&> variables, + OptionalOrdering marginalizedVariableOrdering, + const Eliminate& function, OptionalVariableIndex variableIndex) const + { + if(variableIndex) + { + if(marginalizedVariableOrdering) + { + gttic(marginalMultifrontalBayesNet); + // An ordering was provided for the marginalized variables, so we can first eliminate them + // in the order requested. + std::pair, boost::shared_ptr > eliminated = + eliminatePartialMultifrontal(*marginalizedVariableOrdering, function, *variableIndex); + + if(const Ordering* varsAsOrdering = boost::get(&variables)) + { + // An ordering was also provided for the unmarginalized variables, so we can also + // eliminate them in the order requested. + return eliminated.second->eliminateSequential(*varsAsOrdering, function); + } + else + { + // No ordering was provided for the unmarginalized variables, so order them with COLAMD. + return eliminated.second->eliminateSequential(boost::none, function); + } + } + else + { + // No ordering was provided for the marginalized variables, so order them using constrained + // COLAMD. + bool unmarginalizedAreOrdered = (boost::get(&variables) != 0); + const std::vector* variablesOrOrdering = + unmarginalizedAreOrdered ? + boost::get(&variables) : boost::get&>(&variables); + + Ordering totalOrdering = + Ordering::COLAMDConstrainedLast(*variableIndex, *variablesOrOrdering, unmarginalizedAreOrdered); + + // Split up ordering + const size_t nVars = variablesOrOrdering->size(); + Ordering marginalizationOrdering(totalOrdering.begin(), totalOrdering.end() - nVars); + Ordering marginalVarsOrdering(totalOrdering.end() - nVars, totalOrdering.end()); + + // Call this function again with the computed orderings + return marginalMultifrontalBayesNet(marginalVarsOrdering, marginalizationOrdering, function, *variableIndex); + } + } else { + // If no variable index is provided, compute one and call this function again + return marginalMultifrontalBayesNet(variables, marginalizedVariableOrdering, function, VariableIndex(asDerived())); + } + } + + /* ************************************************************************* */ + template + boost::shared_ptr::BayesTreeType> + EliminateableFactorGraph::marginalMultifrontalBayesTree( + boost::variant&> variables, + OptionalOrdering marginalizedVariableOrdering, + const Eliminate& function, OptionalVariableIndex variableIndex) const + { + if(variableIndex) + { + if(marginalizedVariableOrdering) + { + gttic(marginalMultifrontalBayesTree); + // An ordering was provided for the marginalized variables, so we can first eliminate them + // in the order requested. + std::pair, boost::shared_ptr > eliminated = + eliminatePartialMultifrontal(*marginalizedVariableOrdering, function, *variableIndex); + + if(const Ordering* varsAsOrdering = boost::get(&variables)) + { + // An ordering was also provided for the unmarginalized variables, so we can also + // eliminate them in the order requested. + return eliminated.second->eliminateMultifrontal(*varsAsOrdering, function); + } + else + { + // No ordering was provided for the unmarginalized variables, so order them with COLAMD. + return eliminated.second->eliminateMultifrontal(boost::none, function); + } + } + else + { + // No ordering was provided for the marginalized variables, so order them using constrained + // COLAMD. + bool unmarginalizedAreOrdered = (boost::get(&variables) != 0); + const std::vector* variablesOrOrdering = + unmarginalizedAreOrdered ? + boost::get(&variables) : boost::get&>(&variables); + + Ordering totalOrdering = + Ordering::COLAMDConstrainedLast(*variableIndex, *variablesOrOrdering, unmarginalizedAreOrdered); + + // Split up ordering + const size_t nVars = variablesOrOrdering->size(); + Ordering marginalizationOrdering(totalOrdering.begin(), totalOrdering.end() - nVars); + Ordering marginalVarsOrdering(totalOrdering.end() - nVars, totalOrdering.end()); + + // Call this function again with the computed orderings + return marginalMultifrontalBayesTree(marginalVarsOrdering, marginalizationOrdering, function, *variableIndex); + } + } else { + // If no variable index is provided, compute one and call this function again + return marginalMultifrontalBayesTree(variables, marginalizedVariableOrdering, function, VariableIndex(asDerived())); + } + } + + /* ************************************************************************* */ + template + boost::shared_ptr + EliminateableFactorGraph::marginal( + const std::vector& variables, + const Eliminate& function, OptionalVariableIndex variableIndex) const + { + if(variableIndex) + { + // Compute a total ordering for all variables + Ordering totalOrdering = Ordering::COLAMDConstrainedLast(*variableIndex, variables); + + // Split out the part for the marginalized variables + Ordering marginalizationOrdering(totalOrdering.begin(), totalOrdering.end() - variables.size()); + + // Eliminate and return the remaining factor graph + return eliminatePartialMultifrontal(marginalizationOrdering, function, *variableIndex).second; + } + else + { + // If no variable index is provided, compute one and call this function again + return marginal(variables, function, VariableIndex(asDerived())); + } + } + + +} diff --git a/gtsam/inference/EliminateableFactorGraph.h b/gtsam/inference/EliminateableFactorGraph.h new file mode 100644 index 000000000..717f49167 --- /dev/null +++ b/gtsam/inference/EliminateableFactorGraph.h @@ -0,0 +1,236 @@ +/* ---------------------------------------------------------------------------- + + * GTSAM Copyright 2010, Georgia Tech Research Corporation, + * Atlanta, Georgia 30332-0415 + * All Rights Reserved + * Authors: Frank Dellaert, et al. (see THANKS for the full author list) + + * See LICENSE for the license information + + * -------------------------------------------------------------------------- */ + +/** + * @file EliminateableFactorGraph.h + * @brief Variable elimination algorithms for factor graphs + * @author Richard Roberts + * @date Apr 21, 2013 + */ + +#pragma once + +#include +#include +#include +#include + +#include +#include + +namespace gtsam { + + /// Traits class for eliminateable factor graphs, specifies the types that result from + /// elimination, etc. This must be defined for each factor graph that inherits from + /// EliminateableFactorGraph. + template + struct EliminationTraits + { + // Template for deriving: + // typedef MyFactor FactorType; ///< Type of factors in factor graph (e.g. GaussianFactor) + // typedef MyFactorGraphType FactorGraphType; ///< Type of the factor graph (e.g. GaussianFactorGraph) + // typedef MyConditional ConditionalType; ///< Type of conditionals from elimination (e.g. GaussianConditional) + // typedef MyBayesNet BayesNetType; ///< Type of Bayes net from sequential elimination (e.g. GaussianBayesNet) + // typedef MyEliminationTree EliminationTreeType; ///< Type of elimination tree (e.g. GaussianEliminationTree) + // typedef MyBayesTree BayesTreeType; ///< Type of Bayes tree (e.g. GaussianBayesTree) + // typedef MyJunctionTree JunctionTreeType; ///< Type of Junction tree (e.g. GaussianJunctionTree) + // static pair, shared_ptr + // DefaultEliminate( + // const MyFactorGraph& factors, const Ordering& keys); ///< The default dense elimination function + }; + + + /** EliminateableFactorGraph is a base class for factor graphs that contains elimination + * algorithms. Any factor graph holding eliminateable factors can derive from this class to + * expose functions for computing marginals, conditional marginals, doing multifrontal and + * sequential elimination, etc. */ + template + class EliminateableFactorGraph + { + private: + typedef EliminateableFactorGraph This; ///< Typedef to this class. + typedef FACTORGRAPH FactorGraphType; ///< Typedef to factor graph type + // Base factor type stored in this graph (private because derived classes will get this from + // their FactorGraph base class) + typedef typename EliminationTraits::FactorType _FactorType; + + public: + /// Typedef to the specific EliminationTraits for this graph + typedef EliminationTraits EliminationTraitsType; + + /// Conditional type stored in the Bayes net produced by elimination + typedef typename EliminationTraitsType::ConditionalType ConditionalType; + + /// Bayes net type produced by sequential elimination + typedef typename EliminationTraitsType::BayesNetType BayesNetType; + + /// Elimination tree type that can do sequential elimination of this graph + typedef typename EliminationTraitsType::EliminationTreeType EliminationTreeType; + + /// Bayes tree type produced by multifrontal elimination + typedef typename EliminationTraitsType::BayesTreeType BayesTreeType; + + /// Junction tree type that can do multifrontal elimination of this graph + typedef typename EliminationTraitsType::JunctionTreeType JunctionTreeType; + + /// The pair of conditional and remaining factor produced by a single dense elimination step on + /// a subgraph. + typedef std::pair, boost::shared_ptr<_FactorType> > EliminationResult; + + /// The function type that does a single dense elimination step on a subgraph. + typedef boost::function Eliminate; + + /// Typedef for an optional ordering as an argument to elimination functions + typedef boost::optional OptionalOrdering; + + /// Typedef for an optional variable index as an argument to elimination functions + typedef boost::optional OptionalVariableIndex; + + /** Do sequential elimination of all variables to produce a Bayes net. If an ordering is not + * provided, the ordering provided by COLAMD will be used. + * + * Example - Full Cholesky elimination in COLAMD order: + * \code + * boost::shared_ptr result = graph.eliminateSequential(EliminateCholesky); + * \endcode + * + * Example - Full QR elimination in specified order: + * \code + * boost::shared_ptr result = graph.eliminateSequential(EliminateQR, myOrdering); + * \endcode + * + * Example - Reusing an existing VariableIndex to improve performance, and using COLAMD ordering: + * \code + * VariableIndex varIndex(graph); // Build variable index + * Data data = otherFunctionUsingVariableIndex(graph, varIndex); // Other code that uses variable index + * boost::shared_ptr result = graph.eliminateSequential(EliminateQR, boost::none, varIndex); + * \endcode + * */ + boost::shared_ptr eliminateSequential( + OptionalOrdering ordering = boost::none, + const Eliminate& function = EliminationTraitsType::DefaultEliminate, + OptionalVariableIndex variableIndex = boost::none) const; + + /** Do multifrontal elimination of all variables to produce a Bayes tree. If an ordering is not + * provided, the ordering provided by COLAMD will be used. + * + * Example - Full Cholesky elimination in COLAMD order: + * \code + * boost::shared_ptr result = graph.eliminateMultifrontal(EliminateCholesky); + * \endcode + * + * Example - Full QR elimination in specified order: + * \code + * boost::shared_ptr result = graph.eliminateMultifrontal(EliminateQR, myOrdering); + * \endcode + * + * Example - Reusing an existing VariableIndex to improve performance, and using COLAMD ordering: + * \code + * VariableIndex varIndex(graph); // Build variable index + * Data data = otherFunctionUsingVariableIndex(graph, varIndex); // Other code that uses variable index + * boost::shared_ptr result = graph.eliminateMultifrontal(EliminateQR, boost::none, varIndex); + * \endcode + * */ + boost::shared_ptr eliminateMultifrontal( + OptionalOrdering ordering = boost::none, + const Eliminate& function = EliminationTraitsType::DefaultEliminate, + OptionalVariableIndex variableIndex = boost::none) const; + + /** Do sequential elimination of some variables, in \c ordering provided, to produce a Bayes net + * and a remaining factor graph. This computes the factorization \f$ p(X) = p(A|B) p(B) \f$, + * where \f$ A = \f$ \c variables, \f$ X \f$ is all the variables in the factor graph, and \f$ + * B = X\backslash A \f$. */ + std::pair, boost::shared_ptr > + eliminatePartialSequential( + const Ordering& ordering, + const Eliminate& function = EliminationTraitsType::DefaultEliminate, + OptionalVariableIndex variableIndex = boost::none) const; + + /** Do sequential elimination of the given \c variables in an ordering computed by COLAMD to + * produce a Bayes net and a remaining factor graph. This computes the factorization \f$ p(X) + * = p(A|B) p(B) \f$, where \f$ A = \f$ \c variables, \f$ X \f$ is all the variables in the + * factor graph, and \f$ B = X\backslash A \f$. */ + std::pair, boost::shared_ptr > + eliminatePartialSequential( + const std::vector& variables, + const Eliminate& function = EliminationTraitsType::DefaultEliminate, + OptionalVariableIndex variableIndex = boost::none) const; + + /** Do multifrontal elimination of some variables, in \c ordering provided, to produce a Bayes + * tree and a remaining factor graph. This computes the factorization \f$ p(X) = p(A|B) p(B) + * \f$, where \f$ A = \f$ \c variables, \f$ X \f$ is all the variables in the factor graph, and + * \f$ B = X\backslash A \f$. */ + std::pair, boost::shared_ptr > + eliminatePartialMultifrontal( + const Ordering& ordering, + const Eliminate& function = EliminationTraitsType::DefaultEliminate, + OptionalVariableIndex variableIndex = boost::none) const; + + /** Do multifrontal elimination of the given \c variables in an ordering computed by COLAMD to + * produce a Bayes net and a remaining factor graph. This computes the factorization \f$ p(X) + * = p(A|B) p(B) \f$, where \f$ A = \f$ \c variables, \f$ X \f$ is all the variables in the + * factor graph, and \f$ B = X\backslash A \f$. */ + std::pair, boost::shared_ptr > + eliminatePartialMultifrontal( + const std::vector& variables, + const Eliminate& function = EliminationTraitsType::DefaultEliminate, + OptionalVariableIndex variableIndex = boost::none) const; + + /** Compute the marginal of the requested variables and return the result as a Bayes net. + * @param variables Determines the variables whose marginal to compute, if provided as an + * Ordering they will be ordered in the returned BayesNet as specified, and if provided + * as a vector they will be ordered using constrained COLAMD. + * @param marginalizedVariableOrdering Optional ordering for the variables being marginalized + * out, i.e. all variables not in \c variables. If this is boost::none, the ordering + * will be computed with COLAMD. + * @param function Optional dense elimination function, if not provided the default will be + * used. + * @param variableIndex Optional pre-computed VariableIndex for the factor graph, if not + * provided one will be computed. */ + boost::shared_ptr marginalMultifrontalBayesNet( + boost::variant&> variables, + OptionalOrdering marginalizedVariableOrdering = boost::none, + const Eliminate& function = EliminationTraitsType::DefaultEliminate, + OptionalVariableIndex variableIndex = boost::none) const; + + /** Compute the marginal of the requested variables and return the result as a Bayes tree. + * @param variables Determines the variables whose marginal to compute, if provided as an + * Ordering they will be ordered in the returned BayesNet as specified, and if provided + * as a vector they will be ordered using constrained COLAMD. + * @param marginalizedVariableOrdering Optional ordering for the variables being marginalized + * out, i.e. all variables not in \c variables. If this is boost::none, the ordering + * will be computed with COLAMD. + * @param function Optional dense elimination function, if not provided the default will be + * used. + * @param variableIndex Optional pre-computed VariableIndex for the factor graph, if not + * provided one will be computed. */ + boost::shared_ptr marginalMultifrontalBayesTree( + boost::variant&> variables, + OptionalOrdering marginalizedVariableOrdering = boost::none, + const Eliminate& function = EliminationTraitsType::DefaultEliminate, + OptionalVariableIndex variableIndex = boost::none) const; + + /** Compute the marginal factor graph of the requested variables. */ + boost::shared_ptr marginal( + const std::vector& variables, + const Eliminate& function = EliminationTraitsType::DefaultEliminate, + OptionalVariableIndex variableIndex = boost::none) const; + + private: + + // Access the derived factor graph class + const FactorGraphType& asDerived() const { return static_cast(*this); } + + // Access the derived factor graph class + FactorGraphType& asDerived() { return static_cast(*this); } + }; + +} diff --git a/gtsam/inference/EliminationTree-inl.h b/gtsam/inference/EliminationTree-inl.h deleted file mode 100644 index d85e2e651..000000000 --- a/gtsam/inference/EliminationTree-inl.h +++ /dev/null @@ -1,234 +0,0 @@ -/* ---------------------------------------------------------------------------- - - * GTSAM Copyright 2010, Georgia Tech Research Corporation, - * Atlanta, Georgia 30332-0415 - * All Rights Reserved - * Authors: Frank Dellaert, et al. (see THANKS for the full author list) - - * See LICENSE for the license information - - * -------------------------------------------------------------------------- */ - -/** - * @file EliminationTree-inl.h - * @author Frank Dellaert - * @date Oct 13, 2010 - */ -#pragma once - -#include -#include -#include -#include -#include -#include - -#include -#include - -#include -#include -#include - -namespace gtsam { - -/* ************************************************************************* */ -template -typename EliminationTree::sharedFactor EliminationTree::eliminate_( - Eliminate function, Conditionals& conditionals) const { - - static const bool debug = false; - - if(debug) std::cout << "ETree: eliminating " << this->key_ << std::endl; - - if(this->key_ < conditionals.size()) { // If it is requested to eliminate the current variable - // Create the list of factors to be eliminated, initially empty, and reserve space - FactorGraph factors; - factors.reserve(this->factors_.size() + this->subTrees_.size()); - - // Add all factors associated with the current node - factors.push_back(this->factors_.begin(), this->factors_.end()); - - // for all subtrees, eliminate into Bayes net and a separator factor, added to [factors] - BOOST_FOREACH(const shared_ptr& child, subTrees_) - factors.push_back(child->eliminate_(function, conditionals)); // TODO: spawn thread - // TODO: wait for completion of all threads - - // Combine all factors (from this node and from subtrees) into a joint factor - typename FactorGraph::EliminationResult - eliminated(function(factors, 1)); - conditionals[this->key_] = eliminated.first; - - if(debug) std::cout << "Eliminated " << this->key_ << " to get:\n"; - if(debug) eliminated.first->print("Conditional: "); - if(debug) eliminated.second->print("Factor: "); - - return eliminated.second; - } else { - // Eliminate each child but discard the result. - BOOST_FOREACH(const shared_ptr& child, subTrees_) { - (void)child->eliminate_(function, conditionals); - } - return sharedFactor(); // Return a NULL factor - } -} - -/* ************************************************************************* */ -template -std::vector EliminationTree::ComputeParents(const VariableIndex& structure) { - - // Number of factors and variables - const size_t m = structure.nFactors(); - const size_t n = structure.size(); - - static const Index none = std::numeric_limits::max(); - - // Allocate result parent vector and vector of last factor columns - std::vector parents(n, none); - std::vector prevCol(m, none); - - // for column j \in 1 to n do - for (Index j = 0; j < n; j++) { - // for row i \in Struct[A*j] do - BOOST_FOREACH(const size_t i, structure[j]) { - if (prevCol[i] != none) { - Index k = prevCol[i]; - // find root r of the current tree that contains k - Index r = k; - while (parents[r] != none) - r = parents[r]; - if (r != j) parents[r] = j; - } - prevCol[i] = j; - } - } - - return parents; -} - -/* ************************************************************************* */ -template -template -typename EliminationTree::shared_ptr EliminationTree::Create( - const FactorGraph& factorGraph, - const VariableIndex& structure) { - - static const bool debug = false; - gttic(ET_Create1); - - gttic(ET_ComputeParents); - // Compute the tree structure - std::vector parents(ComputeParents(structure)); - gttoc(ET_ComputeParents); - - // Number of variables - const size_t n = structure.size(); - - static const Index none = std::numeric_limits::max(); - - // Create tree structure - gttic(assemble_tree); - std::vector trees(n); - for (Index k = 1; k <= n; k++) { - Index j = n - k; // Start at the last variable and loop down to 0 - trees[j].reset(new EliminationTree(j)); // Create a new node on this variable - if (parents[j] != none) // If this node has a parent, add it to the parent's children - trees[parents[j]]->add(trees[j]); - else if(!structure[j].empty() && j != n - 1) // If a node other than the last has no parents, this is a forest - throw DisconnectedGraphException(); - } - gttoc(assemble_tree); - - // Hang factors in right places - gttic(hang_factors); - BOOST_FOREACH(const typename boost::shared_ptr& factor, factorGraph) { - if(factor && factor->size() > 0) { - Index j = *std::min_element(factor->begin(), factor->end()); - if(j < structure.size()) - trees[j]->add(factor); - } - } - gttoc(hang_factors); - - if(debug) - trees.back()->print("ETree: "); - - // Check that this is not null - assert(trees.back().get()); - return trees.back(); -} - -/* ************************************************************************* */ -template -template -typename EliminationTree::shared_ptr -EliminationTree::Create(const FactorGraph& factorGraph) { - - gttic(ET_Create2); - // Build variable index - const VariableIndex variableIndex(factorGraph); - - // Build elimination tree - return Create(factorGraph, variableIndex); -} - -/* ************************************************************************* */ -template -void EliminationTree::print(const std::string& name, - const IndexFormatter& formatter) const { - std::cout << name << " (" << formatter(key_) << ")" << std::endl; - BOOST_FOREACH(const sharedFactor& factor, factors_) { - factor->print(name + " ", formatter); } - BOOST_FOREACH(const shared_ptr& child, subTrees_) { - child->print(name + " ", formatter); } -} - -/* ************************************************************************* */ -template -bool EliminationTree::equals(const EliminationTree& expected, double tol) const { - if(this->key_ == expected.key_ && this->factors_ == expected.factors_ - && this->subTrees_.size() == expected.subTrees_.size()) { - typename SubTrees::const_iterator this_subtree = this->subTrees_.begin(); - typename SubTrees::const_iterator expected_subtree = expected.subTrees_.begin(); - while(this_subtree != this->subTrees_.end()) - if( ! (*(this_subtree++))->equals(**(expected_subtree++), tol)) - return false; - return true; - } else - return false; -} - -/* ************************************************************************* */ -template -typename EliminationTree::BayesNet::shared_ptr - EliminationTree::eliminatePartial(typename EliminationTree::Eliminate function, size_t nrToEliminate) const { - - // call recursive routine - gttic(ET_recursive_eliminate); - if(nrToEliminate > this->key_ + 1) - throw std::invalid_argument("Requested that EliminationTree::eliminatePartial eliminate more variables than exist"); - Conditionals conditionals(nrToEliminate); // reserve a vector of conditional shared pointers - (void)eliminate_(function, conditionals); // modify in place - gttoc(ET_recursive_eliminate); - - // Add conditionals to BayesNet - gttic(assemble_BayesNet); - typename BayesNet::shared_ptr bayesNet(new BayesNet); - BOOST_FOREACH(const typename BayesNet::sharedConditional& conditional, conditionals) { - if(conditional) - bayesNet->push_back(conditional); - } - gttoc(assemble_BayesNet); - - return bayesNet; -} - -/* ************************************************************************* */ -template -typename EliminationTree::BayesNet::shared_ptr -EliminationTree::eliminate(Eliminate function) const { - size_t nrConditionals = this->key_ + 1; // root key has highest index - return eliminatePartial(function, nrConditionals); -} - -} diff --git a/gtsam/inference/EliminationTree-inst.h b/gtsam/inference/EliminationTree-inst.h new file mode 100644 index 000000000..617fb6e0e --- /dev/null +++ b/gtsam/inference/EliminationTree-inst.h @@ -0,0 +1,290 @@ +/* ---------------------------------------------------------------------------- + +* GTSAM Copyright 2010, Georgia Tech Research Corporation, +* Atlanta, Georgia 30332-0415 +* All Rights Reserved +* Authors: Frank Dellaert, et al. (see THANKS for the full author list) + +* See LICENSE for the license information + +* -------------------------------------------------------------------------- */ + +/** +* @file EliminationTree-inl.h +* @author Frank Dellaert +* @author Richard Roberts +* @date Oct 13, 2010 +*/ +#pragma once + +#include +#include +#include +#include + +#include +#include +#include +#include +#include +#include + +namespace gtsam { + + /* ************************************************************************* */ + template + typename EliminationTree::sharedFactor + EliminationTree::Node::eliminate( + const boost::shared_ptr& output, + const Eliminate& function, const std::vector& childrenResults) const + { + // This function eliminates one node (Node::eliminate) - see below eliminate for the whole tree. + + assert(childrenResults.size() == children.size()); + + // Gather factors + FactorGraphType gatheredFactors; + gatheredFactors.reserve(factors.size() + children.size()); + gatheredFactors.push_back(factors.begin(), factors.end()); + gatheredFactors.push_back(childrenResults.begin(), childrenResults.end()); + + // Do dense elimination step + std::vector keyAsVector(1); keyAsVector[0] = key; + std::pair, boost::shared_ptr > eliminationResult = + function(gatheredFactors, Ordering(keyAsVector)); + + // Add conditional to BayesNet + output->push_back(eliminationResult.first); + + // Return result + return eliminationResult.second; + } + + /* ************************************************************************* */ + template + void EliminationTree::Node::print( + const std::string& str, const KeyFormatter& keyFormatter) const + { + std::cout << str << "(" << keyFormatter(key) << ")\n"; + BOOST_FOREACH(const sharedFactor& factor, factors) { + if(factor) + factor->print(str); + else + std::cout << str << "null factor\n"; + } + } + + + /* ************************************************************************* */ + template + EliminationTree::EliminationTree(const FactorGraphType& graph, + const VariableIndex& structure, const Ordering& order) + { + gttic(EliminationTree_Contructor); + + // Number of factors and variables - NOTE in the case of partial elimination, n here may + // be fewer variables than are actually present in the graph. + const size_t m = graph.size(); + const size_t n = order.size(); + + static const size_t none = std::numeric_limits::max(); + + // Allocate result parent vector and vector of last factor columns + std::vector nodes(n); + std::vector parents(n, none); + std::vector prevCol(m, none); + std::vector factorUsed(m, false); + + try { + // for column j \in 1 to n do + for (size_t j = 0; j < n; j++) + { + // Retrieve the factors involving this variable and create the current node + const VariableIndex::Factors& factors = structure[order[j]]; + nodes[j] = boost::make_shared(); + nodes[j]->key = order[j]; + + // for row i \in Struct[A*j] do + BOOST_FOREACH(const size_t i, factors) { + // If we already hit a variable in this factor, make the subtree containing the previous + // variable in this factor a child of the current node. This means that the variables + // eliminated earlier in the factor depend on the later variables in the factor. If we + // haven't yet hit a variable in this factor, we add the factor to the current node. + // TODO: Store root shortcuts instead of parents. + if (prevCol[i] != none) { + size_t k = prevCol[i]; + // Find root r of the current tree that contains k. Use raw pointers in computing the + // parents to avoid changing the reference counts while traversing up the tree. + size_t r = k; + while (parents[r] != none) + r = parents[r]; + // If the root of the subtree involving this node is actually the current node, + // TODO: what does this mean? forest? + if (r != j) { + // Now that we found the root, hook up parent and child pointers in the nodes. + parents[r] = j; + nodes[j]->children.push_back(nodes[r]); + } + } else { + // Add the current factor to the current node since we are at the first variable in this + // factor. + nodes[j]->factors.push_back(graph[i]); + factorUsed[i] = true; + } + prevCol[i] = j; + } + } + } catch(std::invalid_argument& e) { + // If this is thrown from structure[order[j]] above, it means that it was requested to + // eliminate a variable not present in the graph, so throw a more informative error message. + (void)e; // Prevent unused variable warning + throw std::invalid_argument("EliminationTree: given ordering contains variables that are not involved in the factor graph"); + } catch(...) { + throw; + } + + // Find roots + assert(parents.empty() || parents.back() == none); // We expect the last-eliminated node to be a root no matter what + for(size_t j = 0; j < n; ++j) + if(parents[j] == none) + roots_.push_back(nodes[j]); + + // Gather remaining factors (exclude null factors) + for(size_t i = 0; i < m; ++i) + if(!factorUsed[i] && graph[i]) + remainingFactors_.push_back(graph[i]); + } + + /* ************************************************************************* */ + template + EliminationTree::EliminationTree( + const FactorGraphType& factorGraph, const Ordering& order) + { + gttic(ET_Create2); + // Build variable index first + const VariableIndex variableIndex(factorGraph); + This temp(factorGraph, variableIndex, order); + this->swap(temp); // Swap in the tree, and temp will be deleted + } + + /* ************************************************************************* */ + template + EliminationTree& + EliminationTree::operator=(const EliminationTree& other) + { + // Start by duplicating the tree. + roots_ = treeTraversal::CloneForest(other); + + // Assign the remaining factors - these are pointers to factors in the original factor graph and + // we do not clone them. + remainingFactors_ = other.remainingFactors_; + + return *this; + } + + /* ************************************************************************* */ + template + std::pair, boost::shared_ptr > + EliminationTree::eliminate(Eliminate function) const + { + gttic(EliminationTree_eliminate); + // Allocate result + boost::shared_ptr result = boost::make_shared(); + + // Run tree elimination algorithm + std::vector remainingFactors = inference::EliminateTree(result, *this, function); + + // Add remaining factors that were not involved with eliminated variables + boost::shared_ptr allRemainingFactors = boost::make_shared(); + allRemainingFactors->push_back(remainingFactors_.begin(), remainingFactors_.end()); + allRemainingFactors->push_back(remainingFactors.begin(), remainingFactors.end()); + + // Return result + return std::make_pair(result, allRemainingFactors); + } + + /* ************************************************************************* */ + template + void EliminationTree::print(const std::string& name, const KeyFormatter& formatter) const + { + treeTraversal::PrintForest(*this, name, formatter); + } + + /* ************************************************************************* */ + template + bool EliminationTree::equals(const This& expected, double tol) const + { + // Depth-first-traversal stacks + std::stack > stack1, stack2; + + // Add roots in sorted order + { + FastMap keys; + BOOST_FOREACH(const sharedNode& root, this->roots_) { keys.insert(std::make_pair(root->key, root)); } + typedef typename FastMap::value_type Key_Node; + BOOST_FOREACH(const Key_Node& key_node, keys) { stack1.push(key_node.second); } + } + { + FastMap keys; + BOOST_FOREACH(const sharedNode& root, expected.roots_) { keys.insert(std::make_pair(root->key, root)); } + typedef typename FastMap::value_type Key_Node; + BOOST_FOREACH(const Key_Node& key_node, keys) { stack2.push(key_node.second); } + } + + // Traverse, adding children in sorted order + while(!stack1.empty() && !stack2.empty()) { + // Pop nodes + sharedNode node1 = stack1.top(); + stack1.pop(); + sharedNode node2 = stack2.top(); + stack2.pop(); + + // Compare nodes + if(node1->key != node2->key) + return false; + if(node1->factors.size() != node2->factors.size()) { + return false; + } else { + for(typename Node::Factors::const_iterator it1 = node1->factors.begin(), it2 = node2->factors.begin(); + it1 != node1->factors.end(); ++it1, ++it2) // Only check it1 == end because we already returned false for different counts + { + if(*it1 && *it2) { + if(!(*it1)->equals(**it2, tol)) + return false; + } else if((*it1 && !*it2) || (*it2 && !*it1)) { + return false; + } + } + } + + // Add children in sorted order + { + FastMap keys; + BOOST_FOREACH(const sharedNode& node, node1->children) { keys.insert(std::make_pair(node->key, node)); } + typedef typename FastMap::value_type Key_Node; + BOOST_FOREACH(const Key_Node& key_node, keys) { stack1.push(key_node.second); } + } + { + FastMap keys; + BOOST_FOREACH(const sharedNode& node, node2->children) { keys.insert(std::make_pair(node->key, node)); } + typedef typename FastMap::value_type Key_Node; + BOOST_FOREACH(const Key_Node& key_node, keys) { stack2.push(key_node.second); } + } + } + + // If either stack is not empty, the number of nodes differed + if(!stack1.empty() || !stack2.empty()) + return false; + + return true; + } + + /* ************************************************************************* */ + template + void EliminationTree::swap(This& other) { + roots_.swap(other.roots_); + remainingFactors_.swap(other.remainingFactors_); + } + + +} diff --git a/gtsam/inference/EliminationTree.h b/gtsam/inference/EliminationTree.h index 9be5f5b6a..fd19e0a65 100644 --- a/gtsam/inference/EliminationTree.h +++ b/gtsam/inference/EliminationTree.h @@ -1,187 +1,167 @@ -/* ---------------------------------------------------------------------------- - - * GTSAM Copyright 2010, Georgia Tech Research Corporation, - * Atlanta, Georgia 30332-0415 - * All Rights Reserved - * Authors: Frank Dellaert, et al. (see THANKS for the full author list) - - * See LICENSE for the license information - - * -------------------------------------------------------------------------- */ - -/** - * @file EliminationTree.h - * @author Frank Dellaert - * @date Oct 13, 2010 - */ -#pragma once - -#include - -#include -#include -#include -#include - -class EliminationTreeTester; // for unit tests, see testEliminationTree -namespace gtsam { template class BayesNet; } - -namespace gtsam { - -/** - * An elimination tree is a data structure used intermediately during - * elimination. In future versions it will be used to save work between - * multiple eliminations. - * - * When a variable is eliminated, a new factor is created by combining that - * variable's neighboring factors. The new combined factor involves the combined - * factors' involved variables. When the lowest-ordered one of those variables - * is eliminated, it consumes that combined factor. In the elimination tree, - * that lowest-ordered variable is the parent of the variable that was eliminated to - * produce the combined factor. This yields a tree in general, and not a chain - * because of the implicit sparse structure of the resulting Bayes net. - * - * This structure is examined even more closely in a JunctionTree, which - * additionally identifies cliques in the chordal Bayes net. - * \nosubgrouping - */ -template -class EliminationTree { - -public: - - typedef EliminationTree This; ///< This class - typedef boost::shared_ptr shared_ptr; ///< Shared pointer to this class - typedef typename boost::shared_ptr sharedFactor; ///< Shared pointer to a factor - typedef gtsam::BayesNet BayesNet; ///< The BayesNet corresponding to FACTOR - typedef FACTOR Factor; - typedef typename FACTOR::KeyType KeyType; - - /** Typedef for an eliminate subroutine */ - typedef typename FactorGraph::Eliminate Eliminate; - -private: - - /** concept check */ - GTSAM_CONCEPT_TESTABLE_TYPE(FACTOR) - - typedef FastList Factors; - typedef FastList SubTrees; - typedef std::vector Conditionals; - - Index key_; ///< index associated with root // FIXME: doesn't this require that "Index" is the type of keys in the generic factor? - Factors factors_; ///< factors associated with root - SubTrees subTrees_; ///< sub-trees - -public: - - /// @name Standard Constructors - /// @{ - - /** - * Named constructor to build the elimination tree of a factor graph using - * pre-computed column structure. - * @param factorGraph The factor graph for which to build the elimination tree - * @param structure The set of factors involving each variable. If this is not - * precomputed, you can call the Create(const FactorGraph&) - * named constructor instead. - * @return The elimination tree - */ - template - static shared_ptr Create(const FactorGraph& factorGraph, const VariableIndex& structure); - - /** Named constructor to build the elimination tree of a factor graph. Note - * that this has to compute the column structure as a VariableIndex, so if you - * already have this precomputed, use the Create(const FactorGraph&, const VariableIndex&) - * named constructor instead. - * @param factorGraph The factor graph for which to build the elimination tree - */ - template - static shared_ptr Create(const FactorGraph& factorGraph); - - /// @} - /// @name Standard Interface - /// @{ - - /** Eliminate the factors to a Bayes Net - * @param function The function to use to eliminate, see the namespace functions - * in GaussianFactorGraph.h - * @return The BayesNet resulting from elimination - */ - typename BayesNet::shared_ptr eliminate(Eliminate function) const; - - /** Eliminate the factors to a Bayes Net and return the remaining factor - * @param function The function to use to eliminate, see the namespace functions - * in GaussianFactorGraph.h - * @return The BayesNet resulting from elimination and the remaining factor - */ - typename BayesNet::shared_ptr eliminatePartial(Eliminate function, size_t nrToEliminate) const; - - /// @} - /// @name Testable - /// @{ - - /** Print the tree to cout */ - void print(const std::string& name = "EliminationTree: ", - const IndexFormatter& formatter = DefaultIndexFormatter) const; - - /** Test whether the tree is equal to another */ - bool equals(const EliminationTree& other, double tol = 1e-9) const; - - /// @} - -private: - - /** default constructor, private, as you should use Create below */ - EliminationTree(Index key = 0) : key_(key) {} - - /** - * Static internal function to build a vector of parent pointers using the - * algorithm of Gilbert et al., 2001, BIT. - */ - static std::vector ComputeParents(const VariableIndex& structure); - - /** add a factor, for Create use only */ - void add(const sharedFactor& factor) { factors_.push_back(factor); } - - /** add a subtree, for Create use only */ - void add(const shared_ptr& child) { subTrees_.push_back(child); } - - /** - * Recursive routine that eliminates the factors arranged in an elimination tree - * @param Conditionals is a vector of shared pointers that will be modified in place - */ - sharedFactor eliminate_(Eliminate function, Conditionals& conditionals) const; - - /// Allow access to constructor and add methods for testing purposes - friend class ::EliminationTreeTester; - -}; - - -/** - * An exception thrown when attempting to eliminate a disconnected factor - * graph, which is not currently possible in gtsam. If you need to work with - * disconnected graphs, a workaround is to create zero-information factors to - * bridge the disconnects. To do this, create any factor type (e.g. - * BetweenFactor or RangeFactor) with the noise model - * sharedPrecision(dim, 0.0), where \c dim is the appropriate - * dimensionality for that factor. - */ -struct DisconnectedGraphException : public std::exception { - DisconnectedGraphException() {} - virtual ~DisconnectedGraphException() throw() {} - - /// Returns the string "Attempting to eliminate a disconnected graph - this is not currently possible in gtsam." - virtual const char* what() const throw() { - return - "Attempting to eliminate a disconnected graph - this is not currently possible in\n" - "GTSAM. You may add \"empty\" BetweenFactor's to join disconnected graphs, these\n" - "will affect the symbolic structure and solving complexity of the graph but not\n" - "the solution. To do this, create BetweenFactor's with zero-precision noise\n" - "models, i.e. noiseModel::Isotropic::Precision(n, 0.0);\n"; } -}; - -} - -#include +/* ---------------------------------------------------------------------------- + +* GTSAM Copyright 2010, Georgia Tech Research Corporation, +* Atlanta, Georgia 30332-0415 +* All Rights Reserved +* Authors: Frank Dellaert, et al. (see THANKS for the full author list) + +* See LICENSE for the license information + +* -------------------------------------------------------------------------- */ + +/** +* @file EliminationTree.h +* @author Frank Dellaert +* @author Richard Roberts +* @date Oct 13, 2010 +*/ +#pragma once + +#include +#include + +#include + +class EliminationTreeTester; // for unit tests, see testEliminationTree + +namespace gtsam { + + class VariableIndex; + class Ordering; + + /** + * An elimination tree is a data structure used intermediately during + * elimination. In future versions it will be used to save work between + * multiple eliminations. + * + * When a variable is eliminated, a new factor is created by combining that + * variable's neighboring factors. The new combined factor involves the combined + * factors' involved variables. When the lowest-ordered one of those variables + * is eliminated, it consumes that combined factor. In the elimination tree, + * that lowest-ordered variable is the parent of the variable that was eliminated to + * produce the combined factor. This yields a tree in general, and not a chain + * because of the implicit sparse structure of the resulting Bayes net. + * + * This structure is examined even more closely in a JunctionTree, which + * additionally identifies cliques in the chordal Bayes net. + * \nosubgrouping + */ + template + class EliminationTree + { + protected: + typedef EliminationTree This; ///< This class + typedef boost::shared_ptr shared_ptr; ///< Shared pointer to this class + + public: + typedef GRAPH FactorGraphType; ///< The factor graph type + typedef typename GRAPH::FactorType FactorType; ///< The type of factors + typedef typename boost::shared_ptr sharedFactor; ///< Shared pointer to a factor + typedef BAYESNET BayesNetType; ///< The BayesNet corresponding to FACTOR + typedef typename BayesNetType::ConditionalType ConditionalType; ///< The type of conditionals + typedef typename boost::shared_ptr sharedConditional; ///< Shared pointer to a conditional + typedef typename GRAPH::Eliminate Eliminate; + + struct Node { + typedef std::vector Factors; + typedef std::vector > Children; + + Key key; ///< key associated with root + Factors factors; ///< factors associated with root + Children children; ///< sub-trees + + sharedFactor eliminate(const boost::shared_ptr& output, + const Eliminate& function, const std::vector& childrenFactors) const; + + void print(const std::string& str, const KeyFormatter& keyFormatter) const; + }; + + typedef boost::shared_ptr sharedNode; ///< Shared pointer to Node + + protected: + /** concept check */ + GTSAM_CONCEPT_TESTABLE_TYPE(FactorType); + + std::vector roots_; + std::vector remainingFactors_; + + /// @name Standard Constructors + /// @{ + + /** + * Build the elimination tree of a factor graph using pre-computed column structure. + * @param factorGraph The factor graph for which to build the elimination tree + * @param structure The set of factors involving each variable. If this is not + * precomputed, you can call the Create(const FactorGraph&) + * named constructor instead. + * @return The elimination tree + */ + EliminationTree(const FactorGraphType& factorGraph, + const VariableIndex& structure, const Ordering& order); + + /** Build the elimination tree of a factor graph. Note that this has to compute the column + * structure as a VariableIndex, so if you already have this precomputed, use the other + * constructor instead. + * @param factorGraph The factor graph for which to build the elimination tree + */ + EliminationTree(const FactorGraphType& factorGraph, const Ordering& order); + + /** Copy constructor - makes a deep copy of the tree structure, but only pointers to factors are + * copied, factors are not cloned. */ + EliminationTree(const This& other) { *this = other; } + + /** Assignment operator - makes a deep copy of the tree structure, but only pointers to factors + * are copied, factors are not cloned. */ + This& operator=(const This& other); + + /// @} + + public: + /// @name Standard Interface + /// @{ + + /** Eliminate the factors to a Bayes net and remaining factor graph + * @param function The function to use to eliminate, see the namespace functions + * in GaussianFactorGraph.h + * @return The Bayes net and factor graph resulting from elimination + */ + std::pair, boost::shared_ptr > + eliminate(Eliminate function) const; + + /// @} + /// @name Testable + /// @{ + + /** Print the tree to cout */ + void print(const std::string& name = "EliminationTree: ", + const KeyFormatter& formatter = DefaultKeyFormatter) const; + + protected: + /** Test whether the tree is equal to another */ + bool equals(const This& other, double tol = 1e-9) const; + + /// @} + + public: + /// @name Advanced Interface + /// @{ + + /** Return the set of roots (one for a tree, multiple for a forest) */ + const std::vector& roots() const { return roots_; } + + /** Return the remaining factors that are not pulled into elimination */ + const std::vector& remainingFactors() const { return remainingFactors_; } + + /** Swap the data of this tree with another one, this operation is very fast. */ + void swap(This& other); + + protected: + /// Protected default constructor + EliminationTree() {} + + private: + /// Allow access to constructor and add methods for testing purposes + friend class ::EliminationTreeTester; + }; + +} diff --git a/gtsam/inference/Factor-inl.h b/gtsam/inference/Factor-inl.h deleted file mode 100644 index 7b22e083e..000000000 --- a/gtsam/inference/Factor-inl.h +++ /dev/null @@ -1,108 +0,0 @@ -/* ---------------------------------------------------------------------------- - - * GTSAM Copyright 2010, Georgia Tech Research Corporation, - * Atlanta, Georgia 30332-0415 - * All Rights Reserved - * Authors: Frank Dellaert, et al. (see THANKS for the full author list) - - * See LICENSE for the license information - - * -------------------------------------------------------------------------- */ - -/** - * @file Factor-inl.h - * @author Richard Roberts - * @date Sep 1, 2010 - */ - -#pragma once - -#include -#include -#include -#include -#include - -namespace gtsam { - - /* ************************************************************************* */ - template - Factor::Factor(const Factor& f) : - keys_(f.keys_) { - assertInvariants(); - } - - /* ************************************************************************* */ - template - Factor::Factor(const ConditionalType& c) : - keys_(c.keys()) { -// assert(c.nrFrontals() == 1); - assertInvariants(); - } - - /* ************************************************************************* */ - template - void Factor::assertInvariants() const { -#ifdef GTSAM_EXTRA_CONSISTENCY_CHECKS - // Check that keys are all unique - std::multiset nonunique(keys_.begin(), keys_.end()); - std::set unique(keys_.begin(), keys_.end()); - bool correct = (nonunique.size() == unique.size()) - && std::equal(nonunique.begin(), nonunique.end(), unique.begin()); - if (!correct) throw std::logic_error( - "Factor::assertInvariants: Factor contains duplicate keys"); -#endif - } - - /* ************************************************************************* */ - template - void Factor::print(const std::string& s, - const IndexFormatter& formatter) const { - printKeys(s,formatter); - } - - /* ************************************************************************* */ - template - void Factor::printKeys(const std::string& s, const IndexFormatter& formatter) const { - std::cout << s << " "; - BOOST_FOREACH(KEY key, keys_) std::cout << " " << formatter(key); - std::cout << std::endl; - } - - /* ************************************************************************* */ - template - bool Factor::equals(const This& other, double tol) const { - return keys_ == other.keys_; - } - - /* ************************************************************************* */ -#ifdef TRACK_ELIMINATE - template - template - typename COND::shared_ptr Factor::eliminateFirst() { - assert(!keys_.empty()); - assertInvariants(); - KEY eliminated = keys_.front(); - keys_.erase(keys_.begin()); - assertInvariants(); - return typename COND::shared_ptr(new COND(eliminated, keys_)); - } - - /* ************************************************************************* */ - template - template - typename BayesNet::shared_ptr Factor::eliminate(size_t nrFrontals) { - assert(keys_.size() >= nrFrontals); - assertInvariants(); - typename BayesNet::shared_ptr fragment(new BayesNet ()); - const_iterator it = this->begin(); - for (KEY n = 0; n < nrFrontals; ++n, ++it) - fragment->push_back(COND::FromRange(it, const_iterator(this->end()), 1)); - if (nrFrontals > 0) keys_.assign(fragment->back()->beginParents(), - fragment->back()->endParents()); - assertInvariants(); - return fragment; - } -#endif - -} diff --git a/gtsam/inference/Factor.cpp b/gtsam/inference/Factor.cpp new file mode 100644 index 000000000..707193a52 --- /dev/null +++ b/gtsam/inference/Factor.cpp @@ -0,0 +1,47 @@ +/* ---------------------------------------------------------------------------- + + * GTSAM Copyright 2010, Georgia Tech Research Corporation, + * Atlanta, Georgia 30332-0415 + * All Rights Reserved + * Authors: Frank Dellaert, et al. (see THANKS for the full author list) + + * See LICENSE for the license information + + * -------------------------------------------------------------------------- */ + +/** + * @file Factor.cpp + * @brief The base class for all factors + * @author Kai Ni + * @author Frank Dellaert + * @author Richard Roberts + */ + +// \callgraph + +#include +#include + +#include + +namespace gtsam { + + /* ************************************************************************* */ + void Factor::print(const std::string& s, const KeyFormatter& formatter) const + { + return this->printKeys(s, formatter); + } + + /* ************************************************************************* */ + void Factor::printKeys(const std::string& s, const KeyFormatter& formatter) const { + std::cout << s << " "; + BOOST_FOREACH(Key key, keys_) std::cout << " " << formatter(key); + std::cout << std::endl; + } + + /* ************************************************************************* */ + bool Factor::equals(const This& other, double tol) const { + return keys_ == other.keys_; + } + +} \ No newline at end of file diff --git a/gtsam/inference/Factor.h b/gtsam/inference/Factor.h index b8ff34788..c407c6507 100644 --- a/gtsam/inference/Factor.h +++ b/gtsam/inference/Factor.h @@ -21,212 +21,151 @@ #pragma once -#include #include #include -#include -#include -#include -#include -#include + +#include +#include namespace gtsam { -template class Conditional; - -/** - * This is the base class for all factor types. It is templated on a KEY type, - * which will be the type used to label variables. Key types currently in use - * in gtsam are Index with symbolic (IndexFactor, SymbolicFactorGraph) and - * Gaussian factors (GaussianFactor, JacobianFactor, HessianFactor, GaussianFactorGraph), - * and Key with nonlinear factors (NonlinearFactor, NonlinearFactorGraph). - * though currently only IndexFactor and IndexConditional derive from this - * class, using Index keys. This class does not store any data other than its - * keys. Derived classes store data such as matrices and probability tables. - * - * Note that derived classes *must* redefine the ConditionalType and shared_ptr - * typedefs to refer to the associated conditional and shared_ptr types of the - * derived class. See IndexFactor, JacobianFactor, etc. for examples. - * - * This class is \b not virtual for performance reasons - derived symbolic classes, - * IndexFactor and IndexConditional, need to be created and destroyed quickly - * during symbolic elimination. GaussianFactor and NonlinearFactor are virtual. - * \nosubgrouping - */ -template -class Factor { - -public: - - typedef KEY KeyType; ///< The KEY template parameter - typedef Factor This; ///< This class - /** - * Typedef to the conditional type obtained by eliminating this factor, - * derived classes must redefine this. + * This is the base class for all factor types. It is templated on a KEY type, + * which will be the type used to label variables. Key types currently in use + * in gtsam are Index with symbolic (IndexFactor, SymbolicFactorGraph) and + * Gaussian factors (GaussianFactor, JacobianFactor, HessianFactor, GaussianFactorGraph), + * and Key with nonlinear factors (NonlinearFactor, NonlinearFactorGraph). + * though currently only IndexFactor and IndexConditional derive from this + * class, using Index keys. This class does not store any data other than its + * keys. Derived classes store data such as matrices and probability tables. + * + * Note that derived classes *must* redefine the ConditionalType and shared_ptr + * typedefs to refer to the associated conditional and shared_ptr types of the + * derived class. See IndexFactor, JacobianFactor, etc. for examples. + * + * This class is \b not virtual for performance reasons - derived symbolic classes, + * IndexFactor and IndexConditional, need to be created and destroyed quickly + * during symbolic elimination. GaussianFactor and NonlinearFactor are virtual. + * \nosubgrouping */ - typedef Conditional ConditionalType; + class GTSAM_EXPORT Factor + { - /// A shared_ptr to this class, derived classes must redefine this. - typedef boost::shared_ptr shared_ptr; + private: + // These typedefs are private because they must be overridden in derived classes. + typedef Factor This; ///< This class + typedef boost::shared_ptr shared_ptr; ///< A shared_ptr to this class. - /// Iterator over keys - typedef typename std::vector::iterator iterator; + public: + /// Iterator over keys + typedef std::vector::iterator iterator; - /// Const iterator over keys - typedef typename std::vector::const_iterator const_iterator; + /// Const iterator over keys + typedef std::vector::const_iterator const_iterator; -protected: + protected: - /// The keys involved in this factor - std::vector keys_; + /// The keys involved in this factor + std::vector keys_; -public: + /// @name Standard Constructors + /// @{ - /// @name Standard Constructors - /// @{ + /** Default constructor for I/O */ + Factor() {} - /** Copy constructor */ - Factor(const This& f); + /** Construct factor from container of keys. This constructor is used internally from derived factor + * constructors, either from a container of keys or from a boost::assign::list_of. */ + template + Factor(const CONTAINER& keys) : keys_(keys.begin(), keys.end()) {} - /** Construct from conditional, calls ConditionalType::toFactor() */ - Factor(const ConditionalType& c); + /** Construct factor from iterator keys. This constructor may be used internally from derived + * factor constructors, although our code currently does not use this. */ + template + Factor(ITERATOR first, ITERATOR last) : keys_(first, last) {} - /** Default constructor for I/O */ - Factor() {} + /** Construct factor from container of keys. This is called internally from derived factor static + * factor methods, as a workaround for not being able to call the protected constructors above. */ + template + static Factor FromKeys(const CONTAINER& keys) { + return Factor(keys.begin(), keys.end()); } - /** Construct unary factor */ - Factor(KeyType key) : keys_(1) { - keys_[0] = key; assertInvariants(); } + /** Construct factor from iterator keys. This is called internally from derived factor static + * factor methods, as a workaround for not being able to call the protected constructors above. */ + template + static Factor FromIterators(ITERATOR first, ITERATOR last) { + return Factor(first, last); } - /** Construct binary factor */ - Factor(KeyType key1, KeyType key2) : keys_(2) { - keys_[0] = key1; keys_[1] = key2; assertInvariants(); } + /// @} - /** Construct ternary factor */ - Factor(KeyType key1, KeyType key2, KeyType key3) : keys_(3) { - keys_[0] = key1; keys_[1] = key2; keys_[2] = key3; assertInvariants(); } + public: + /// @name Standard Interface + /// @{ - /** Construct 4-way factor */ - Factor(KeyType key1, KeyType key2, KeyType key3, KeyType key4) : keys_(4) { - keys_[0] = key1; keys_[1] = key2; keys_[2] = key3; keys_[3] = key4; assertInvariants(); } + /// First key + Key front() const { return keys_.front(); } - /** Construct 5-way factor */ - Factor(KeyType key1, KeyType key2, KeyType key3, KeyType key4, KeyType key5) : keys_(5) { - keys_[0] = key1; keys_[1] = key2; keys_[2] = key3; keys_[3] = key4; keys_[4] = key5; assertInvariants(); } + /// Last key + Key back() const { return keys_.back(); } - /** Construct 6-way factor */ - Factor(KeyType key1, KeyType key2, KeyType key3, KeyType key4, KeyType key5, KeyType key6) : keys_(6) { - keys_[0] = key1; keys_[1] = key2; keys_[2] = key3; keys_[3] = key4; keys_[4] = key5; keys_[5] = key6; assertInvariants(); } + /// find + const_iterator find(Key key) const { return std::find(begin(), end(), key); } - /// @} - /// @name Advanced Constructors - /// @{ + /// Access the factor's involved variable keys + const std::vector& keys() const { return keys_; } - /** Construct n-way factor */ - Factor(const std::set& keys) { - BOOST_FOREACH(const KeyType& key, keys) keys_.push_back(key); - assertInvariants(); - } + /** Iterator at beginning of involved variable keys */ + const_iterator begin() const { return keys_.begin(); } - /** Construct n-way factor */ - Factor(const std::vector& keys) : keys_(keys) { - assertInvariants(); - } + /** Iterator at end of involved variable keys */ + const_iterator end() const { return keys_.end(); } - /** Constructor from a collection of keys */ - template Factor(KEYITERATOR beginKey, KEYITERATOR endKey) : - keys_(beginKey, endKey) { assertInvariants(); } + /** + * @return the number of variables involved in this factor + */ + size_t size() const { return keys_.size(); } - /// @} + /// @} -#ifdef TRACK_ELIMINATE - /** - * eliminate the first variable involved in this factor - * @return a conditional on the eliminated variable - */ - template - typename CONDITIONAL::shared_ptr eliminateFirst(); - /** - * eliminate the first nrFrontals frontal variables. - */ - template - typename BayesNet::shared_ptr eliminate(size_t nrFrontals = 1); -#endif + /// @name Testable + /// @{ - /// @name Standard Interface - /// @{ + /// print + void print(const std::string& s = "Factor", const KeyFormatter& formatter = DefaultKeyFormatter) const; - /// First key - KeyType front() const { return keys_.front(); } + /// print only keys + void printKeys(const std::string& s = "Factor", const KeyFormatter& formatter = DefaultKeyFormatter) const; - /// Last key - KeyType back() const { return keys_.back(); } + protected: + /// check equality + bool equals(const This& other, double tol = 1e-9) const; - /// find - const_iterator find(KeyType key) const { return std::find(begin(), end(), key); } + /// @} - /// Access the factor's involved variable keys - const std::vector& keys() const { return keys_; } + public: + /// @name Advanced Interface + /// @{ - /** iterators */ - const_iterator begin() const { return keys_.begin(); } ///TODO: comment - const_iterator end() const { return keys_.end(); } ///TODO: comment + /** @return keys involved in this factor */ + std::vector& keys() { return keys_; } - /** - * @return the number of variables involved in this factor - */ - size_t size() const { return keys_.size(); } + /** Iterator at beginning of involved variable keys */ + iterator begin() { return keys_.begin(); } - /// @} - /// @name Testable - /// @{ + /** Iterator at end of involved variable keys */ + iterator end() { return keys_.end(); } - /// print - void print(const std::string& s = "Factor", - const IndexFormatter& formatter = DefaultIndexFormatter) const; + private: + /** Serialization function */ + friend class boost::serialization::access; + template + void serialize(Archive & ar, const unsigned int version) { + ar & BOOST_SERIALIZATION_NVP(keys_); + } - /// print only keys - void printKeys(const std::string& s = "Factor", - const IndexFormatter& formatter = DefaultIndexFormatter) const; + /// @} - /// check equality - bool equals(const This& other, double tol = 1e-9) const; - - /// @} - /// @name Advanced Interface - /// @{ - - /** - * @return keys involved in this factor - */ - std::vector& keys() { return keys_; } - - /** mutable iterators */ - iterator begin() { return keys_.begin(); } ///TODO: comment - iterator end() { return keys_.end(); } ///TODO: comment - -protected: - friend class JacobianFactor; - friend class HessianFactor; - - /// Internal consistency check that is run frequently when in debug mode. - /// If NDEBUG is defined, this is empty and optimized out. - void assertInvariants() const; - -private: - /** Serialization function */ - friend class boost::serialization::access; - template - void serialize(Archive & ar, const unsigned int version) { - ar & BOOST_SERIALIZATION_NVP(keys_); - } - - /// @} - -}; + }; } - -#include diff --git a/gtsam/inference/FactorGraph-inl.h b/gtsam/inference/FactorGraph-inl.h deleted file mode 100644 index 60fc2e767..000000000 --- a/gtsam/inference/FactorGraph-inl.h +++ /dev/null @@ -1,288 +0,0 @@ -/* ---------------------------------------------------------------------------- - - * GTSAM Copyright 2010, Georgia Tech Research Corporation, - * Atlanta, Georgia 30332-0415 - * All Rights Reserved - * Authors: Frank Dellaert, et al. (see THANKS for the full author list) - - * See LICENSE for the license information - - * -------------------------------------------------------------------------- */ - -/** - * @file FactorGraph-inl.h - * This is a template definition file, include it where needed (only!) - * so that the appropriate code is generated and link errors avoided. - * @brief Factor Graph Base Class - * @author Carlos Nieto - * @author Frank Dellaert - * @author Alireza Fathi - * @author Michael Kaess - */ - -#pragma once - -#include -#include -#include - -#include -#include -#include -#include - -#include -#include -#include -#include - -namespace gtsam { - - /* ************************************************************************* */ - template - template - FactorGraph::FactorGraph(const BayesNet& bayesNet) { - factors_.reserve(bayesNet.size()); - BOOST_FOREACH(const typename CONDITIONAL::shared_ptr& cond, bayesNet) { - this->push_back(cond->toFactor()); - } - } - - /* ************************************************************************* */ - template - void FactorGraph::print(const std::string& s, - const IndexFormatter& formatter) const { - std::cout << s << std::endl; - std::cout << "size: " << size() << std::endl; - for (size_t i = 0; i < factors_.size(); i++) { - std::stringstream ss; - ss << "factor " << i << ": "; - if (factors_[i] != NULL) factors_[i]->print(ss.str(), formatter); - } - } - - /* ************************************************************************* */ - template - bool FactorGraph::equals(const This& fg, double tol) const { - /** check whether the two factor graphs have the same number of factors_ */ - if (factors_.size() != fg.size()) return false; - - /** check whether the factors_ are the same */ - for (size_t i = 0; i < factors_.size(); i++) { - // TODO: Doesn't this force order of factor insertion? - sharedFactor f1 = factors_[i], f2 = fg.factors_[i]; - if (f1 == NULL && f2 == NULL) continue; - if (f1 == NULL || f2 == NULL) return false; - if (!f1->equals(*f2, tol)) return false; - } - return true; - } - - /* ************************************************************************* */ - template - size_t FactorGraph::nrFactors() const { - size_t size_ = 0; - BOOST_FOREACH(const sharedFactor& factor, factors_) - if (factor) size_++; - return size_; - } - - /* ************************************************************************* */ - template - std::set FactorGraph::keys() const { - std::set allKeys; - BOOST_FOREACH(const sharedFactor& factor, factors_) - if (factor) { - BOOST_FOREACH(Index j, factor->keys()) - allKeys.insert(j); - } - return allKeys; - } - - /* ************************************************************************* */ - template - std::pair::sharedConditional, FactorGraph > - FactorGraph::eliminateFrontals(size_t nFrontals, const Eliminate& eliminate) const - { - // Build variable index - VariableIndex variableIndex(*this); - - // Find first variable - Index firstIndex = 0; - while(firstIndex < variableIndex.size() && variableIndex[firstIndex].empty()) - ++ firstIndex; - - // Check that number of variables is in bounds - if(firstIndex + nFrontals > variableIndex.size()) - throw std::invalid_argument("Requested to eliminate more frontal variables than exist in the factor graph."); - - // Get set of involved factors - FastSet involvedFactorIs; - for(Index j = firstIndex; j < firstIndex + nFrontals; ++j) { - BOOST_FOREACH(size_t i, variableIndex[j]) { - involvedFactorIs.insert(i); - } - } - - // Separate factors into involved and remaining - FactorGraph involvedFactors; - FactorGraph remainingFactors; - FastSet::const_iterator involvedFactorIsIt = involvedFactorIs.begin(); - for(size_t i = 0; i < this->size(); ++i) { - if(involvedFactorIsIt != involvedFactorIs.end() && *involvedFactorIsIt == i) { - // If the current factor is involved, add it to involved and increment involved iterator - involvedFactors.push_back((*this)[i]); - ++ involvedFactorIsIt; - } else { - // If not involved, add to remaining - remainingFactors.push_back((*this)[i]); - } - } - - // Do dense elimination on the involved factors - typename FactorGraph::EliminationResult eliminationResult = - eliminate(involvedFactors, nFrontals); - - // Add the remaining factor back into the factor graph - remainingFactors.push_back(eliminationResult.second); - - // Return the eliminated factor and remaining factor graph - return std::make_pair(eliminationResult.first, remainingFactors); - } - - /* ************************************************************************* */ - template - std::pair::sharedConditional, FactorGraph > - FactorGraph::eliminate(const std::vector& variables, const Eliminate& eliminateFcn, - boost::optional variableIndex_) const - { - const VariableIndex& variableIndex = - variableIndex_ ? *variableIndex_ : VariableIndex(*this); - - // First find the involved factors - FactorGraph involvedFactors; - Index highestInvolvedVariable = 0; // Largest index of the variables in the involved factors - - // First get the indices of the involved factors, but uniquely in a set - FastSet involvedFactorIndices; - BOOST_FOREACH(Index variable, variables) { - involvedFactorIndices.insert(variableIndex[variable].begin(), variableIndex[variable].end()); } - - // Add the factors themselves to involvedFactors and update largest index - involvedFactors.reserve(involvedFactorIndices.size()); - BOOST_FOREACH(size_t factorIndex, involvedFactorIndices) { - const sharedFactor factor = this->at(factorIndex); - involvedFactors.push_back(factor); // Add involved factor - highestInvolvedVariable = std::max( // Updated largest index - highestInvolvedVariable, - *std::max_element(factor->begin(), factor->end())); - } - - sharedConditional conditional; - sharedFactor remainingFactor; - if(involvedFactors.size() > 0) { - // Now permute the variables to be eliminated to the front of the ordering - Permutation toFront = Permutation::PullToFront(variables, highestInvolvedVariable+1); - Permutation toFrontInverse = *toFront.inverse(); - BOOST_FOREACH(const sharedFactor& factor, involvedFactors) { - factor->permuteWithInverse(toFrontInverse); } - - // Eliminate into conditional and remaining factor - EliminationResult eliminated = eliminateFcn(involvedFactors, variables.size()); - conditional = eliminated.first; - remainingFactor = eliminated.second; - - // Undo the permutation - BOOST_FOREACH(const sharedFactor& factor, involvedFactors) { - factor->permuteWithInverse(toFront); } - conditional->permuteWithInverse(toFront); - remainingFactor->permuteWithInverse(toFront); - } else { - // Eliminate 0 variables - EliminationResult eliminated = eliminateFcn(involvedFactors, 0); - conditional = eliminated.first; - remainingFactor = eliminated.second; - } - - // Build the remaining graph, without the removed factors - FactorGraph remainingGraph; - remainingGraph.reserve(this->size() - involvedFactors.size() + 1); - FastSet::const_iterator involvedFactorIndexIt = involvedFactorIndices.begin(); - for(size_t i = 0; i < this->size(); ++i) { - if(involvedFactorIndexIt != involvedFactorIndices.end() && *involvedFactorIndexIt == i) - ++ involvedFactorIndexIt; - else - remainingGraph.push_back(this->at(i)); - } - - // Add the remaining factor if it is not empty. - if(remainingFactor->size() != 0) - remainingGraph.push_back(remainingFactor); - - return std::make_pair(conditional, remainingGraph); - - } - /* ************************************************************************* */ - template - void FactorGraph::replace(size_t index, sharedFactor factor) { - if (index >= factors_.size()) throw std::invalid_argument(boost::str( - boost::format("Factor graph does not contain a factor with index %d.") - % index)); - // Replace the factor - factors_[index] = factor; - } - - /* ************************************************************************* */ - template - FACTORGRAPH combine(const FACTORGRAPH& fg1, const FACTORGRAPH& fg2) { - // create new linear factor graph equal to the first one - FACTORGRAPH fg = fg1; - - // add the second factors_ in the graph - fg.push_back(fg2); - - return fg; - } - - /* ************************************************************************* */ - template - typename DERIVEDFACTOR::shared_ptr Combine(const FactorGraph& factors, - const FastMap >& variableSlots) { - - typedef const std::pair > KeySlotPair; - // Local functional for getting keys out of key-value pairs - struct Local { static KEY FirstOf(const KeySlotPair& pr) { return pr.first; } }; - - return typename DERIVEDFACTOR::shared_ptr(new DERIVEDFACTOR( - boost::make_transform_iterator(variableSlots.begin(), &Local::FirstOf), - boost::make_transform_iterator(variableSlots.end(), &Local::FirstOf))); - } - - /* ************************************************************************* */ - // Recursive function to add factors in cliques to vector of factors_io - template - void _FactorGraph_BayesTree_adder( - std::vector >& factors_io, - const typename BayesTree::sharedClique& clique) { - - if(clique) { - // Add factor from this clique - factors_io.push_back((*clique)->toFactor()); - - // Traverse children - typedef typename BayesTree::sharedClique sharedClique; - BOOST_FOREACH(const sharedClique& child, clique->children()) - _FactorGraph_BayesTree_adder(factors_io, child); - } - } - - /* ************************************************************************* */ - template - template - FactorGraph::FactorGraph(const BayesTree& bayesTree) { - factors_.reserve(bayesTree.size()); - _FactorGraph_BayesTree_adder(factors_, bayesTree.root()); - } - - /* ************************************************************************* */ -} // namespace gtsam diff --git a/gtsam/inference/FactorGraph-inst.h b/gtsam/inference/FactorGraph-inst.h new file mode 100644 index 000000000..8c19d4ff4 --- /dev/null +++ b/gtsam/inference/FactorGraph-inst.h @@ -0,0 +1,84 @@ +/* ---------------------------------------------------------------------------- + + * GTSAM Copyright 2010, Georgia Tech Research Corporation, + * Atlanta, Georgia 30332-0415 + * All Rights Reserved + * Authors: Frank Dellaert, et al. (see THANKS for the full author list) + + * See LICENSE for the license information + + * -------------------------------------------------------------------------- */ + +/** + * @file FactorGraph-inl.h + * @brief Factor Graph Base Class + * @author Carlos Nieto + * @author Frank Dellaert + * @author Alireza Fathi + * @author Michael Kaess + * @author Richard Roberts + */ + +#pragma once + +#include + +#include +#include + +#include +#include + +namespace gtsam { + + /* ************************************************************************* */ + template + void FactorGraph::print(const std::string& s, const KeyFormatter& formatter) const { + std::cout << s << std::endl; + std::cout << "size: " << size() << std::endl; + for (size_t i = 0; i < factors_.size(); i++) { + std::stringstream ss; + ss << "factor " << i << ": "; + if (factors_[i]) + factors_[i]->print(ss.str(), formatter); + } + } + + /* ************************************************************************* */ + template + bool FactorGraph::equals(const This& fg, double tol) const { + /** check whether the two factor graphs have the same number of factors_ */ + if (factors_.size() != fg.size()) return false; + + /** check whether the factors_ are the same */ + for (size_t i = 0; i < factors_.size(); i++) { + // TODO: Doesn't this force order of factor insertion? + sharedFactor f1 = factors_[i], f2 = fg.factors_[i]; + if (f1 == NULL && f2 == NULL) continue; + if (f1 == NULL || f2 == NULL) return false; + if (!f1->equals(*f2, tol)) return false; + } + return true; + } + + /* ************************************************************************* */ + template + size_t FactorGraph::nrFactors() const { + size_t size_ = 0; + BOOST_FOREACH(const sharedFactor& factor, factors_) + if (factor) size_++; + return size_; + } + + /* ************************************************************************* */ + template + FastSet FactorGraph::keys() const { + FastSet allKeys; + BOOST_FOREACH(const sharedFactor& factor, factors_) + if (factor) + allKeys.insert(factor->begin(), factor->end()); + return allKeys; + } + + /* ************************************************************************* */ +} // namespace gtsam diff --git a/gtsam/inference/FactorGraph.h b/gtsam/inference/FactorGraph.h index 0d3b10e31..672fbdaef 100644 --- a/gtsam/inference/FactorGraph.h +++ b/gtsam/inference/FactorGraph.h @@ -15,25 +15,59 @@ * @author Carlos Nieto * @author Christian Potthast * @author Michael Kaess + * @author Richard Roberts */ // \callgraph #pragma once -#include -#include -#include - #include -#include -#include +#include +#include +#include +#include + +#include +#include namespace gtsam { -// Forward declarations -template class BayesTree; -class VariableIndex; + // Forward declarations + template class BayesTree; + + /** Helper */ + template + class CRefCallPushBack + { + C& obj; + public: + CRefCallPushBack(C& obj) : obj(obj) {} + template + void operator()(const A& a) { obj.push_back(a); } + }; + + /** Helper */ + template + class RefCallPushBack + { + C& obj; + public: + RefCallPushBack(C& obj) : obj(obj) {} + template + void operator()(A& a) { obj.push_back(a); } + }; + + /** Helper */ + template + class CRefCallAddCopy + { + C& obj; + public: + CRefCallAddCopy(C& obj) : obj(obj) {} + template + void operator()(const A& a) { obj.addCopy(a); } + }; /** * A factor graph is a bipartite graph with factor nodes connected to variable nodes. @@ -44,62 +78,64 @@ class VariableIndex; class FactorGraph { public: - typedef FACTOR FactorType; ///< factor type - typedef typename FACTOR::KeyType KeyType; ///< type of Keys we use to index variables with typedef boost::shared_ptr sharedFactor; ///< Shared pointer to a factor - typedef boost::shared_ptr sharedConditional; ///< Shared pointer to a conditional - - typedef FactorGraph This; ///< Typedef for this class - typedef boost::shared_ptr shared_ptr; ///< Shared pointer for this class + typedef sharedFactor value_type; typedef typename std::vector::iterator iterator; typedef typename std::vector::const_iterator const_iterator; - /** typedef for elimination result */ - typedef std::pair EliminationResult; - - /** typedef for an eliminate subroutine */ - typedef boost::function Eliminate; + private: + typedef FactorGraph This; ///< Typedef for this class + typedef boost::shared_ptr shared_ptr; ///< Shared pointer for this class protected: - /** concept check, makes sure FACTOR defines print and equals */ GTSAM_CONCEPT_TESTABLE_TYPE(FACTOR) /** Collection of factors */ std::vector factors_; - public: - - /// @name Standard Constructor + /// @name Standard Constructors /// @{ /** Default constructor */ FactorGraph() {} + /** Constructor from iterator over factors (shared_ptr or plain objects) */ + template + FactorGraph(ITERATOR firstFactor, ITERATOR lastFactor) { push_back(firstFactor, lastFactor); } + + /** Construct from container of factors (shared_ptr or plain objects) */ + template + explicit FactorGraph(const CONTAINER& factors) { push_back(factors); } + /// @} /// @name Advanced Constructors /// @{ + + // TODO: are these needed? - /** - * @brief Constructor from a Bayes net - * @param bayesNet the Bayes net to convert, type CONDITIONAL must yield compatible factor - * @return a factor graph with all the conditionals, as factors - */ - template - FactorGraph(const BayesNet& bayesNet); + ///** + // * @brief Constructor from a Bayes net + // * @param bayesNet the Bayes net to convert, type CONDITIONAL must yield compatible factor + // * @return a factor graph with all the conditionals, as factors + // */ + //template + //FactorGraph(const BayesNet& bayesNet); - /** convert from Bayes tree */ - template - FactorGraph(const BayesTree& bayesTree); + ///** convert from Bayes tree */ + //template + //FactorGraph(const BayesTree& bayesTree); - /** convert from a derived type */ - template - FactorGraph(const FactorGraph& factors) { - factors_.assign(factors.begin(), factors.end()); - } + ///** convert from a derived type */ + //template + //FactorGraph(const FactorGraph& factors) { + // factors_.assign(factors.begin(), factors.end()); + //} /// @} + + public: /// @name Adding Factors /// @{ @@ -109,69 +145,165 @@ class VariableIndex; */ void reserve(size_t size) { factors_.reserve(size); } - /** Add a factor */ + // TODO: are these needed? + + /** Add a factor directly using a shared_ptr */ template - void push_back(const boost::shared_ptr& factor) { - factors_.push_back(boost::shared_ptr(factor)); - } + typename std::enable_if::value>::type + push_back(boost::shared_ptr& factor) { + factors_.push_back(boost::shared_ptr(factor)); } - /** push back many factors */ - void push_back(const This& factors) { - factors_.insert(end(), factors.begin(), factors.end()); - } + /** Add a factor directly using a shared_ptr */ + void push_back(const sharedFactor& factor) { + factors_.push_back(factor); } - /** push back many factors with an iterator */ + /** push back many factors with an iterator over shared_ptr (factors are not copied) */ template - void push_back(ITERATOR firstFactor, ITERATOR lastFactor) { - factors_.insert(end(), firstFactor, lastFactor); + typename std::enable_if::value>::type + push_back(ITERATOR firstFactor, ITERATOR lastFactor) { + factors_.insert(end(), firstFactor, lastFactor); } + + /** push back many factors as shared_ptr's in a container (factors are not copied) */ + template + typename std::enable_if::value>::type + push_back(const CONTAINER& container) { + push_back(container.begin(), container.end()); } - /** - * @brief Add a vector of derived factors - * @param factors to add - */ - template - void push_back(const std::vector >& factors) { - factors_.insert(end(), factors.begin(), factors.end()); + /** push back a BayesTree as a collection of factors. NOTE: This should be hidden in derived + * classes in favor of a type-specialized version that calls this templated function. */ + template + typename std::enable_if::value>::type + push_back(const BayesTree& bayesTree) { + bayesTree.addFactorsToGraph(*this); } + /** += syntax for push_back, e.g. graph += f1, f2, f3 */ + //template + //boost::assign::list_inserter > + // operator+=(const T& factorOrContainer) + //{ + // return boost::assign::make_list_inserter( + // boost::bind(&This::push_back, this, _1)); + //} + + /** Add a factor directly using a shared_ptr */ + template + typename std::enable_if::value, + boost::assign::list_inserter > >::type + operator+=(boost::shared_ptr& factor) { + return boost::assign::make_list_inserter(RefCallPushBack(*this))(factor); + } + + /** Add a factor directly using a shared_ptr */ + boost::assign::list_inserter > + operator+=(const sharedFactor& factor) { + return boost::assign::make_list_inserter(CRefCallPushBack(*this))(factor); + } + + template + boost::assign::list_inserter > + operator+=(const FACTOR_OR_CONTAINER& factorOrContainer) { + return boost::assign::make_list_inserter(CRefCallPushBack(*this))(factorOrContainer); + } + + ///** Add a factor directly using a shared_ptr */ + //boost::assign::list_inserter > + // operator+=(const sharedFactor& factor) { + // return boost::assign::make_list_inserter(CRefCallPushBack(*this)); + //} + + ///** push back many factors as shared_ptr's in a container (factors are not copied) */ + //template + //typename std::enable_if::value, + // boost::assign::list_inserter > >::type + // operator+=(const CONTAINER& container) { + // return boost::assign::make_list_inserter(CRefCallPushBack(*this)); + //} + + ///** push back a BayesTree as a collection of factors. NOTE: This should be hidden in derived + // * classes in favor of a type-specialized version that calls this templated function. */ + //template + //boost::assign::list_inserter > + // operator+=(const BayesTree& bayesTree) { + // return boost::assign::make_list_inserter(CRefCallPushBack(*this)); + //} + + /** Add a factor by value, will be copy-constructed (use push_back with a shared_ptr to avoid + * the copy). */ + template + typename std::enable_if::value>::type + push_back(const DERIVEDFACTOR& factor) { + factors_.push_back(boost::make_shared(factor)); } + + /** push back many factors with an iterator over plain factors (factors are copied) */ + template + typename std::enable_if::value>::type + push_back(ITERATOR firstFactor, ITERATOR lastFactor) { + for(ITERATOR f = firstFactor; f != lastFactor; ++f) + push_back(*f); + } + + /** push back many factors as non-pointer objects in a container (factors are copied) */ + template + typename std::enable_if::value>::type + push_back(const CONTAINER& container) { + push_back(container.begin(), container.end()); + } + + //template + //boost::assign::list_inserter > + // operator*=(const FACTOR_OR_CONTAINER& factorOrContainer) { + // return boost::assign::make_list_inserter(CRefCallAddCopy(*this)); + //} + /// @} /// @name Testable /// @{ /** print out graph */ void print(const std::string& s = "FactorGraph", - const IndexFormatter& formatter = DefaultIndexFormatter) const; + const KeyFormatter& formatter = DefaultKeyFormatter) const; + protected: /** Check equality */ bool equals(const This& fg, double tol = 1e-9) const; - /// @} + + public: /// @name Standard Interface /// @{ - /** return the number of factors and NULLS */ - size_t size() const { return factors_.size();} + /** return the number of factors (including any null factors set by remove() ). */ + size_t size() const { return factors_.size(); } - /** Simple check for an empty graph - faster than comparing size() to zero */ + /** Check if the graph is empty (null factors set by remove() will cause this to return false). */ bool empty() const { return factors_.empty(); } - /** const cast to the underlying vector of factors */ - operator const std::vector&() const { return factors_; } + /** Get a specific factor by index (this checks array bounds and may throw an exception, as + * opposed to operator[] which does not). + */ + const sharedFactor at(size_t i) const { return factors_.at(i); } - /** Get a specific factor by index */ - const sharedFactor at(size_t i) const { assert(i > eliminateFrontals(size_t nFrontals, const Eliminate& eliminate) const; - - /** Factor the factor graph into a conditional and a remaining factor graph. - * Given the factor graph \f$ f(X) \f$, and \c variables to factorize out - * \f$ V \f$, this function factorizes into \f$ f(X) = f(V;Y)f(Y) \f$, where - * \f$ Y := X\V \f$ are the remaining variables. If \f$ f(X) = p(X) \f$ is - * a probability density or likelihood, the factorization produces a - * conditional probability density and a marginal \f$ p(X) = p(V|Y)p(Y) \f$. - * - * For efficiency, this function treats the variables to eliminate - * \c variables as fully-connected, so produces a dense (fully-connected) - * conditional on all of the variables in \c variables, instead of a sparse - * BayesNet. If the variables are not fully-connected, it is more efficient - * to sequentially factorize multiple times. - */ - std::pair > eliminate( - const std::vector& variables, const Eliminate& eliminateFcn, - boost::optional variableIndex = boost::none) const; - - /** Eliminate a single variable, by calling FactorGraph::eliminate. */ - std::pair > eliminateOne( - KeyType variable, const Eliminate& eliminateFcn, - boost::optional variableIndex = boost::none) const { - std::vector variables(1, variable); - return eliminate(variables, eliminateFcn, variableIndex); - } - /// @} /// @name Modifying Factor Graphs (imperative, discouraged) /// @{ @@ -222,24 +322,33 @@ class VariableIndex; /** non-const STL-style end() */ iterator end() { return factors_.end(); } - /** resize the factor graph. TODO: effects? */ + /** Directly resize the number of factors in the graph. If the new size is less than the + * original, factors at the end will be removed. If the new size is larger than the original, + * null factors will be appended. + */ void resize(size_t size) { factors_.resize(size); } /** delete factor without re-arranging indexes by inserting a NULL pointer */ - inline void remove(size_t i) { factors_[i].reset();} + void remove(size_t i) { factors_[i].reset();} /** replace a factor by index */ - void replace(size_t index, sharedFactor factor); + void replace(size_t index, sharedFactor factor) { at(index) = factor; } + + /** Erase factor and rearrange other factors to take up the empty space */ + void erase(iterator item) { factors_.erase(item); } + + /** Erase factors and rearrange other factors to take up the empty space */ + void erase(iterator first, iterator last) { factors_.erase(first, last); } /// @} /// @name Advanced Interface /// @{ - /** return the number valid factors */ + /** return the number of non-null factors */ size_t nrFactors() const; /** Potentially very slow function to return all keys involved */ - std::set keys() const; + FastSet keys() const; private: @@ -254,20 +363,4 @@ class VariableIndex; }; // FactorGraph - /** Create a combined joint factor (new style for EliminationTree). */ - template - typename DERIVEDFACTOR::shared_ptr Combine(const FactorGraph& factors, - const FastMap >& variableSlots); - - /** - * static function that combines two factor graphs - * @param fg1 Linear factor graph - * @param fg2 Linear factor graph - * @return a new combined factor graph - */ - template - FACTORGRAPH combine(const FACTORGRAPH& fg1, const FACTORGRAPH& fg2); - } // namespace gtsam - -#include diff --git a/gtsam/inference/GenericMultifrontalSolver-inl.h b/gtsam/inference/GenericMultifrontalSolver-inl.h deleted file mode 100644 index ff41d0627..000000000 --- a/gtsam/inference/GenericMultifrontalSolver-inl.h +++ /dev/null @@ -1,107 +0,0 @@ -/* ---------------------------------------------------------------------------- - - * GTSAM Copyright 2010, Georgia Tech Research Corporation, - * Atlanta, Georgia 30332-0415 - * All Rights Reserved - * Authors: Frank Dellaert, et al. (see THANKS for the full author list) - - * See LICENSE for the license information - - * -------------------------------------------------------------------------- */ - -/** - * @file GenericMultifrontalSolver-inl.h - * @author Richard Roberts - * @date Oct 21, 2010 - */ - -#pragma once - -#include -#include -#include - -namespace gtsam { - - /* ************************************************************************* */ - template - GenericMultifrontalSolver::GenericMultifrontalSolver( - const FactorGraph& graph) : - structure_(new VariableIndex(graph)), junctionTree_( - new JT(graph, *structure_)) { - } - - /* ************************************************************************* */ - template - GenericMultifrontalSolver::GenericMultifrontalSolver( - const sharedGraph& graph, - const VariableIndex::shared_ptr& variableIndex) : - structure_(variableIndex), junctionTree_(new JT(*graph, *structure_)) { - } - - /* ************************************************************************* */ - template - void GenericMultifrontalSolver::print(const std::string& s) const { - this->structure_->print(s + " structure:\n"); - this->junctionTree_->print(s + " jtree:"); - } - - /* ************************************************************************* */ - template - bool GenericMultifrontalSolver::equals( - const GenericMultifrontalSolver& expected, double tol) const { - if (!this->structure_->equals(*expected.structure_, tol)) return false; - if (!this->junctionTree_->equals(*expected.junctionTree_, tol)) return false; - return true; - } - - /* ************************************************************************* */ - template - void GenericMultifrontalSolver::replaceFactors(const sharedGraph& graph) { - junctionTree_.reset(new JT(*graph, *structure_)); - } - - /* ************************************************************************* */ - template - typename BayesTree::shared_ptr - GenericMultifrontalSolver::eliminate(Eliminate function) const { - - // eliminate junction tree, returns pointer to root - typename BayesTree::sharedClique - root = junctionTree_->eliminate(function); - - // create an empty Bayes tree and insert root clique - typename BayesTree::shared_ptr - bayesTree(new BayesTree); - bayesTree->insert(root); - - // return the Bayes tree - return bayesTree; - } - - /* ************************************************************************* */ - template - typename FactorGraph::shared_ptr GenericMultifrontalSolver::jointFactorGraph( - const std::vector& js, Eliminate function) const { - - // FIXME: joint for arbitrary sets of variables not present - // TODO: develop and implement theory for shortcuts of more than two variables - - if (js.size() != 2) throw std::domain_error( - "*MultifrontalSolver::joint(js) currently can only compute joint marginals\n" - "for exactly two variables. You can call marginal to compute the\n" - "marginal for one variable. *SequentialSolver::joint(js) can compute the\n" - "joint marginal over any number of variables, so use that if necessary.\n"); - - return eliminate(function)->joint(js[0], js[1], function); - } - - /* ************************************************************************* */ - template - typename boost::shared_ptr GenericMultifrontalSolver::marginalFactor( - Index j, Eliminate function) const { - return eliminate(function)->marginalFactor(j, function); - } - -} - diff --git a/gtsam/inference/GenericMultifrontalSolver.h b/gtsam/inference/GenericMultifrontalSolver.h deleted file mode 100644 index c06214122..000000000 --- a/gtsam/inference/GenericMultifrontalSolver.h +++ /dev/null @@ -1,124 +0,0 @@ -/* ---------------------------------------------------------------------------- - - * GTSAM Copyright 2010, Georgia Tech Research Corporation, - * Atlanta, Georgia 30332-0415 - * All Rights Reserved - * Authors: Frank Dellaert, et al. (see THANKS for the full author list) - - * See LICENSE for the license information - - * -------------------------------------------------------------------------- */ - -/** - * @file GenericMultifrontalSolver.h - * @author Richard Roberts - * @date Oct 21, 2010 - */ - -#pragma once - -#include - -#include -#include - -namespace gtsam { - - /** - * A Generic Multifrontal Solver class - * - * A solver is given a factor graph at construction, and implements - * a strategy to solve it (in this case, eliminate into a Bayes tree). - * This generic one will create a Bayes tree when eliminate() is called. - * - * Takes two template arguments: - * FACTOR the factor type, e.g., GaussianFactor, DiscreteFactor - * JUNCTIONTREE annoyingly, you also have to supply a compatible JT type - * i.e., one templated on a factor graph with the same factors - * TODO: figure why this is so and possibly fix it - * \nosubgrouping - */ - template - class GenericMultifrontalSolver { - - protected: - - /// Column structure of the factor graph - VariableIndex::shared_ptr structure_; - - /// Junction tree that performs elimination. - typename JUNCTIONTREE::shared_ptr junctionTree_; - - public: - - typedef typename FactorGraph::shared_ptr sharedGraph; - typedef typename FactorGraph::Eliminate Eliminate; - - /// @name Standard Constructors - /// @{ - - /** - * Construct the solver for a factor graph. This builds the junction - * tree, which does the symbolic elimination, identifies the cliques, - * and distributes all the factors to the right cliques. - */ - GenericMultifrontalSolver(const FactorGraph& factorGraph); - - /** - * Construct the solver with a shared pointer to a factor graph and to a - * VariableIndex. The solver will store these pointers, so this constructor - * is the fastest. - */ - GenericMultifrontalSolver(const sharedGraph& factorGraph, - const VariableIndex::shared_ptr& variableIndex); - - /// @} - /// @name Testable - /// @{ - - /** Print to cout */ - void print(const std::string& name = "GenericMultifrontalSolver: ") const; - - /** Test whether is equal to another */ - bool equals(const GenericMultifrontalSolver& other, double tol = 1e-9) const; - - /// @} - /// @name Standard Interface - /// @{ - - /** - * Replace the factor graph with a new one having the same structure. The - * This function can be used if the numerical part of the factors changes, - * such as during relinearization or adjusting of noise models. - */ - void replaceFactors(const sharedGraph& factorGraph); - - /** - * Eliminate the factor graph sequentially. Uses a column elimination tree - * to recursively eliminate. - */ - typename BayesTree::shared_ptr - eliminate(Eliminate function) const; - - /** - * Compute the marginal joint over a set of variables, by integrating out - * all of the other variables. This function returns the result as a factor - * graph. - */ - typename FactorGraph::shared_ptr jointFactorGraph( - const std::vector& js, Eliminate function) const; - - /** - * Compute the marginal density over a variable, by integrating out - * all of the other variables. This function returns the result as a factor. - */ - typename boost::shared_ptr marginalFactor(Index j, - Eliminate function) const; - - /// @} - - }; - -} // gtsam - -#include diff --git a/gtsam/inference/GenericSequentialSolver-inl.h b/gtsam/inference/GenericSequentialSolver-inl.h deleted file mode 100644 index 9dfe64fdc..000000000 --- a/gtsam/inference/GenericSequentialSolver-inl.h +++ /dev/null @@ -1,203 +0,0 @@ -/* ---------------------------------------------------------------------------- - - * GTSAM Copyright 2010, Georgia Tech Research Corporation, - * Atlanta, Georgia 30332-0415 - * All Rights Reserved - * Authors: Frank Dellaert, et al. (see THANKS for the full author list) - - * See LICENSE for the license information - - * -------------------------------------------------------------------------- */ - -/** - * @file GenericSequentialSolver-inl.h - * @brief Implementation for generic sequential solver - * @author Richard Roberts - * @date Oct 21, 2010 - */ - -#pragma once - -#include -#include -#include -#include -#include - -#include - -namespace gtsam { - - /* ************************************************************************* */ - template - GenericSequentialSolver::GenericSequentialSolver(const FactorGraph& factorGraph) { - gttic(GenericSequentialSolver_constructor1); - assert(factorGraph.size()); - factors_.reset(new FactorGraph(factorGraph)); - structure_.reset(new VariableIndex(factorGraph)); - eliminationTree_ = EliminationTree::Create(*factors_, *structure_); - } - - /* ************************************************************************* */ - template - GenericSequentialSolver::GenericSequentialSolver( - const sharedFactorGraph& factorGraph, - const boost::shared_ptr& variableIndex) - { - gttic(GenericSequentialSolver_constructor2); - factors_ = factorGraph; - structure_ = variableIndex; - eliminationTree_ = EliminationTree::Create(*factors_, *structure_); - } - - /* ************************************************************************* */ - template - void GenericSequentialSolver::print(const std::string& s) const { - this->factors_->print(s + " factors:"); - this->structure_->print(s + " structure:\n"); - this->eliminationTree_->print(s + " etree:"); - } - - /* ************************************************************************* */ - template - bool GenericSequentialSolver::equals( - const GenericSequentialSolver& expected, double tol) const { - if (!this->factors_->equals(*expected.factors_, tol)) - return false; - if (!this->structure_->equals(*expected.structure_, tol)) - return false; - if (!this->eliminationTree_->equals(*expected.eliminationTree_, tol)) - return false; - return true; - } - - /* ************************************************************************* */ - template - void GenericSequentialSolver::replaceFactors( - const sharedFactorGraph& factorGraph) { - // Reset this shared pointer first to deallocate if possible - for big - // problems there may not be enough memory to store two copies. - eliminationTree_.reset(); - factors_ = factorGraph; - eliminationTree_ = EliminationTree::Create(*factors_, *structure_); - } - - /* ************************************************************************* */ - template - typename GenericSequentialSolver::sharedBayesNet // - GenericSequentialSolver::eliminate(Eliminate function) const { - return eliminationTree_->eliminate(function); - } - - /* ************************************************************************* */ - template - typename GenericSequentialSolver::sharedBayesNet // - GenericSequentialSolver::eliminate(const Permutation& permutation, - Eliminate function, boost::optional nrToEliminate) const - { - gttic(GenericSequentialSolver_eliminate); - // Create inverse permutation - Permutation::shared_ptr permutationInverse(permutation.inverse()); - - // Permute the factors - NOTE that this permutes the original factors, not - // copies. Other parts of the code may hold shared_ptr's to these factors so - // we must undo the permutation before returning. - BOOST_FOREACH(const typename boost::shared_ptr& factor, *factors_) - if (factor) - factor->permuteWithInverse(*permutationInverse); - - // Eliminate using elimination tree provided - typename EliminationTree::shared_ptr etree = EliminationTree::Create(*factors_); - sharedBayesNet bayesNet; - if(nrToEliminate) - bayesNet = etree->eliminatePartial(function, *nrToEliminate); - else - bayesNet = etree->eliminate(function); - - // Undo the permutation on the original factors and on the structure. - BOOST_FOREACH(const typename boost::shared_ptr& factor, *factors_) - if (factor) - factor->permuteWithInverse(permutation); - - // Undo the permutation on the conditionals - BOOST_FOREACH(const boost::shared_ptr& c, *bayesNet) - c->permuteWithInverse(permutation); - - return bayesNet; - } - - /* ************************************************************************* */ - template - typename GenericSequentialSolver::sharedBayesNet // - GenericSequentialSolver::conditionalBayesNet( - const std::vector& js, size_t nrFrontals, - Eliminate function) const - { - gttic(GenericSequentialSolver_conditionalBayesNet); - // Compute a COLAMD permutation with the marginal variables constrained to the end. - // TODO in case of nrFrontals, the order of js has to be respected here ! - Permutation::shared_ptr permutation( - inference::PermutationCOLAMD(*structure_, js, true)); - - // Eliminate only variables J \cup F from P(J,F,S) to get P(F|S) - size_t nrVariables = structure_->size(); - size_t nrMarginalized = nrVariables - js.size(); - size_t nrToEliminate = nrMarginalized + nrFrontals; - sharedBayesNet bayesNet = eliminate(*permutation, function, nrToEliminate); - // Get rid of conditionals on variables that we want to marginalize out - for (size_t i = 0; i < nrMarginalized; i++) - bayesNet->pop_front(); - - return bayesNet; - } - - /* ************************************************************************* */ - template - typename GenericSequentialSolver::sharedBayesNet // - GenericSequentialSolver::jointBayesNet(const std::vector& js, - Eliminate function) const - { - gttic(GenericSequentialSolver_jointBayesNet); - // Compute a COLAMD permutation with the marginal variables constrained to the end. - Permutation::shared_ptr permutation( - inference::PermutationCOLAMD(*structure_, js)); - - // Eliminate all variables - sharedBayesNet bayesNet = eliminate(*permutation, function); - - // Get rid of conditionals on variables that we want to marginalize out - size_t nrMarginalized = bayesNet->size() - js.size(); - for (size_t i = 0; i < nrMarginalized; i++) - bayesNet->pop_front(); - - return bayesNet; - } - - /* ************************************************************************* */ - template - typename FactorGraph::shared_ptr // - GenericSequentialSolver::jointFactorGraph( - const std::vector& js, Eliminate function) const - { - gttic(GenericSequentialSolver_jointFactorGraph); - // Eliminate all variables - typename BayesNet::shared_ptr bayesNet = jointBayesNet(js, function); - - return boost::make_shared >(*bayesNet); - } - - /* ************************************************************************* */ - template - typename boost::shared_ptr // - GenericSequentialSolver::marginalFactor(Index j, Eliminate function) const { - gttic(GenericSequentialSolver_marginalFactor); - // Create a container for the one variable index - std::vector js(1); - js[0] = j; - - // Call joint and return the only factor in the factor graph it returns - // TODO: just call jointBayesNet and grab last conditional, then toFactor.... - return (*this->jointFactorGraph(js, function))[0]; - } - -} // namespace gtsam diff --git a/gtsam/inference/GenericSequentialSolver.h b/gtsam/inference/GenericSequentialSolver.h deleted file mode 100644 index 250ff0d73..000000000 --- a/gtsam/inference/GenericSequentialSolver.h +++ /dev/null @@ -1,179 +0,0 @@ -/* ---------------------------------------------------------------------------- - - * GTSAM Copyright 2010, Georgia Tech Research Corporation, - * Atlanta, Georgia 30332-0415 - * All Rights Reserved - * Authors: Frank Dellaert, et al. (see THANKS for the full author list) - - * See LICENSE for the license information - - * -------------------------------------------------------------------------- */ - -/** - * @file GenericSequentialSolver.h - * @brief generic sequential elimination - * @author Richard Roberts - * @date Oct 21, 2010 - */ - -#pragma once - -#include -#include - -#include -#include - -#include -#include - -namespace gtsam { - class VariableIndex; - class Permutation; -} -namespace gtsam { - template class EliminationTree; -} -namespace gtsam { - template class FactorGraph; -} -namespace gtsam { - template class BayesNet; -} - -namespace gtsam { - - /** - * This solver implements sequential variable elimination for factor graphs. - * Underlying this is a column elimination tree, see Gilbert 2001 BIT. - * - * The elimination ordering is "baked in" to the variable indices at this - * stage, i.e. elimination proceeds in order from '0'. - * - * This is not the most efficient algorithm we provide, most efficient is the - * MultifrontalSolver, which examines and uses the clique structure. - * However, sequential variable elimination is easier to understand so this is a good - * starting point to learn about these algorithms and our implementation. - * Additionally, the first step of MFQR is symbolic sequential elimination. - * \nosubgrouping - */ - template - class GenericSequentialSolver { - - protected: - - typedef boost::shared_ptr > sharedFactorGraph; - typedef typename FACTOR::ConditionalType Conditional; - typedef typename boost::shared_ptr sharedConditional; - typedef typename boost::shared_ptr > sharedBayesNet; - typedef std::pair, boost::shared_ptr > EliminationResult; - typedef boost::function< - EliminationResult(const FactorGraph&, size_t)> Eliminate; - - /** Store the original factors for computing marginals - * TODO Frank says: really? Marginals should be computed from result. - */ - sharedFactorGraph factors_; - - /** Store column structure of the factor graph. Why? */ - boost::shared_ptr structure_; - - /** Elimination tree that performs elimination */ - boost::shared_ptr > eliminationTree_; - - /** concept checks */ - GTSAM_CONCEPT_TESTABLE_TYPE(FACTOR) - // GTSAM_CONCEPT_TESTABLE_TYPE(EliminationTree) - - /** - * Eliminate in a different order, given a permutation - */ - sharedBayesNet eliminate(const Permutation& permutation, Eliminate function, - boost::optional nrToEliminate = boost::none ///< If given a number of variables to eliminate, will only eliminate that many - ) const; - - public: - - /// @name Standard Constructors - /// @{ - - /** - * Construct the solver for a factor graph. This builds the elimination - * tree, which already does some of the work of elimination. - */ - GenericSequentialSolver(const FactorGraph& factorGraph); - - /** - * Construct the solver with a shared pointer to a factor graph and to a - * VariableIndex. The solver will store these pointers, so this constructor - * is the fastest. - */ - GenericSequentialSolver(const sharedFactorGraph& factorGraph, - const boost::shared_ptr& variableIndex); - - /// @} - /// @name Testable - /// @{ - - /** Print to cout */ - void print(const std::string& name = "GenericSequentialSolver: ") const; - - /** Test whether is equal to another */ - bool equals(const GenericSequentialSolver& other, double tol = 1e-9) const; - - /// @} - /// @name Standard Interface - /// @{ - - /** - * Replace the factor graph with a new one having the same structure. The - * This function can be used if the numerical part of the factors changes, - * such as during relinearization or adjusting of noise models. - */ - void replaceFactors(const sharedFactorGraph& factorGraph); - - /** - * Eliminate the factor graph sequentially. Uses a column elimination tree - * to recursively eliminate. - */ - sharedBayesNet eliminate(Eliminate function) const; - - /** - * Compute a conditional density P(F|S) while marginalizing out variables J - * P(F|S) is obtained by P(J,F,S)=P(J|F,S)P(F|S)P(S) and dropping P(S) - * Returns the result as a Bayes net. - */ - sharedBayesNet - conditionalBayesNet(const std::vector& js, size_t nrFrontals, - Eliminate function) const; - - /** - * Compute the marginal joint over a set of variables, by integrating out - * all of the other variables. Returns the result as a Bayes net - */ - sharedBayesNet - jointBayesNet(const std::vector& js, Eliminate function) const; - - /** - * Compute the marginal joint over a set of variables, by integrating out - * all of the other variables. Returns the result as a factor graph. - */ - typename FactorGraph::shared_ptr - jointFactorGraph(const std::vector& js, Eliminate function) const; - - /** - * Compute the marginal Gaussian density over a variable, by integrating out - * all of the other variables. This function returns the result as a factor. - */ - typename boost::shared_ptr - marginalFactor(Index j, Eliminate function) const; - - /// @} - - } - ; -// GenericSequentialSolver - -}// namespace gtsam - -#include diff --git a/gtsam/inference/ISAM-inl.h b/gtsam/inference/ISAM-inl.h deleted file mode 100644 index 4b5333f33..000000000 --- a/gtsam/inference/ISAM-inl.h +++ /dev/null @@ -1,74 +0,0 @@ -/* ---------------------------------------------------------------------------- - - * GTSAM Copyright 2010, Georgia Tech Research Corporation, - * Atlanta, Georgia 30332-0415 - * All Rights Reserved - * Authors: Frank Dellaert, et al. (see THANKS for the full author list) - - * See LICENSE for the license information - - * -------------------------------------------------------------------------- */ - -/** - * @file ISAM-inl.h - * @brief Incremental update functionality (iSAM) for BayesTree. - * @author Michael Kaess - */ - -#pragma once - -#include -#include // for operator += - -#include -#include -#include - -namespace gtsam { - - /* ************************************************************************* */ - template - ISAM::ISAM() : BayesTree() {} - - /* ************************************************************************* */ - template - template void ISAM::update_internal( - const FG& newFactors, Cliques& orphans, typename FG::Eliminate function) { - - // Remove the contaminated part of the Bayes tree - // Throw exception if disconnected - BayesNet bn; - if (!this->empty()) { - this->removeTop(newFactors.keys(), bn, orphans); - if (bn.empty()) - throw std::runtime_error( - "ISAM::update_internal(): no variables in common between existing Bayes tree and incoming factors!"); - } - FG factors(bn); - - // add the factors themselves - factors.push_back(newFactors); - - // eliminate into a Bayes net - GenericMultifrontalSolver > solver(factors); - boost::shared_ptr > bayesTree; - bayesTree = solver.eliminate(function); - this->root_ = bayesTree->root(); - this->nodes_ = bayesTree->nodes(); - - // add orphans to the bottom of the new tree - BOOST_FOREACH(sharedClique orphan, orphans) - this->insert(orphan); - } - - /* ************************************************************************* */ - template - template - void ISAM::update(const FG& newFactors, - typename FG::Eliminate function) { - Cliques orphans; - this->update_internal(newFactors, orphans, function); - } - -} -/// namespace gtsam diff --git a/gtsam/inference/ISAM-inst.h b/gtsam/inference/ISAM-inst.h new file mode 100644 index 000000000..d73673755 --- /dev/null +++ b/gtsam/inference/ISAM-inst.h @@ -0,0 +1,65 @@ +/* ---------------------------------------------------------------------------- + + * GTSAM Copyright 2010, Georgia Tech Research Corporation, + * Atlanta, Georgia 30332-0415 + * All Rights Reserved + * Authors: Frank Dellaert, et al. (see THANKS for the full author list) + + * See LICENSE for the license information + + * -------------------------------------------------------------------------- */ + +/** + * @file ISAM-inl.h + * @brief Incremental update functionality (iSAM) for BayesTree. + * @author Michael Kaess + */ + +#pragma once + +#include +#include + +namespace gtsam { + + /* ************************************************************************* */ + template + void ISAM::update_internal(const FactorGraphType& newFactors, Cliques& orphans, const Eliminate& function) + { + // Remove the contaminated part of the Bayes tree + // Throw exception if disconnected + BayesNetType bn; + if (!this->empty()) { + const FastSet newFactorKeys = newFactors.keys(); + this->removeTop(std::vector(newFactorKeys.begin(), newFactorKeys.end()), bn, orphans); + } + + // Add the removed top and the new factors + FactorGraphType factors; + factors += bn; + factors += newFactors; + + // Add the orphaned subtrees + BOOST_FOREACH(const sharedClique& orphan, orphans) + factors += boost::make_shared >(orphan); + + // eliminate into a Bayes net + const VariableIndex varIndex(factors); + const FastSet newFactorKeys = newFactors.keys(); + const Ordering constrainedOrdering = + Ordering::COLAMDConstrainedLast(varIndex, std::vector(newFactorKeys.begin(), newFactorKeys.end())); + Base bayesTree = *factors.eliminateMultifrontal(constrainedOrdering, function, varIndex); + this->roots_.insert(this->roots_.end(), bayesTree.roots().begin(), bayesTree.roots().end()); + this->nodes_.insert(bayesTree.nodes().begin(), bayesTree.nodes().end()); + } + + /* ************************************************************************* */ + template + void ISAM::update(const FactorGraphType& newFactors, const Eliminate& function) + { + Cliques orphans; + this->update_internal(newFactors, orphans, function); + } + +} +/// namespace gtsam diff --git a/gtsam/inference/ISAM.h b/gtsam/inference/ISAM.h index e227ea57c..d920be209 100644 --- a/gtsam/inference/ISAM.h +++ b/gtsam/inference/ISAM.h @@ -18,8 +18,6 @@ // \callgraph #pragma once -#include - namespace gtsam { /** @@ -27,12 +25,19 @@ namespace gtsam { * Given a set of new factors, it re-eliminates the invalidated part of the tree. * \nosubgrouping */ - template - class ISAM: public BayesTree { - + template + class ISAM: public BAYESTREE + { private: - typedef BayesTree Base; + typedef BAYESTREE Base; + typedef typename Base::BayesNetType BayesNetType; + typedef typename Base::FactorGraphType FactorGraphType; + typedef typename Base::Clique Clique; + typedef typename Base::sharedClique sharedClique; + typedef typename Base::Cliques Cliques; + typedef typename Base::Eliminate Eliminate; + typedef typename Base::EliminationTraitsType EliminationTraitsType; public: @@ -40,12 +45,10 @@ namespace gtsam { /// @{ /** Create an empty Bayes Tree */ - ISAM(); + ISAM() {} /** Copy constructor */ - ISAM(const Base& bayesTree) : - Base(bayesTree) { - } + ISAM(const Base& bayesTree) : Base(bayesTree) {} /// @} /// @name Advanced Interface Interface @@ -56,21 +59,14 @@ namespace gtsam { * @param newFactors is a factor graph that contains the new factors * @param function an elimination routine */ - template - void update(const FG& newFactors, typename FG::Eliminate function); - - typedef typename BayesTree::sharedClique sharedClique; ///::Cliques Cliques; /// - void update_internal(const FG& newFactors, Cliques& orphans, - typename FG::Eliminate function); + void update_internal(const FactorGraphType& newFactors, Cliques& orphans, + const Eliminate& function = EliminationTraitsType::DefaultEliminate); /// @} }; }/// namespace gtsam - -#include diff --git a/gtsam/inference/IndexConditional.cpp b/gtsam/inference/IndexConditional.cpp deleted file mode 100644 index 20e978afe..000000000 --- a/gtsam/inference/IndexConditional.cpp +++ /dev/null @@ -1,65 +0,0 @@ -/* ---------------------------------------------------------------------------- - - * GTSAM Copyright 2010, Georgia Tech Research Corporation, - * Atlanta, Georgia 30332-0415 - * All Rights Reserved - * Authors: Frank Dellaert, et al. (see THANKS for the full author list) - - * See LICENSE for the license information - - * -------------------------------------------------------------------------- */ - -/** - * @file IndexConditional.cpp - * @author Richard Roberts - * @date Oct 17, 2010 - */ - -#include -#include -#ifdef __GNUC__ -#pragma GCC diagnostic push -#pragma GCC diagnostic ignored "-Wunused-variable" -#endif -#include -#ifdef __GNUC__ -#pragma GCC diagnostic pop -#endif - -namespace gtsam { - - using namespace std; - using namespace boost::lambda; - - /* ************************************************************************* */ - void IndexConditional::assertInvariants() const { - // Checks for uniqueness of keys - Base::assertInvariants(); - } - - /* ************************************************************************* */ - bool IndexConditional::reduceSeparatorWithInverse(const internal::Reduction& inverseReduction) { -#ifdef GTSAM_EXTRA_CONSISTENCY_CHECKS - BOOST_FOREACH(KeyType key, frontals()) { assert(inverseReduction.find(key) == inverseReduction.end()); } -#endif - bool parentChanged = false; - BOOST_FOREACH(KeyType& parent, parents()) { - internal::Reduction::const_iterator it = inverseReduction.find(parent); - if(it != inverseReduction.end()) { - parentChanged = true; - parent = it->second; - } - } - assertInvariants(); - return parentChanged; - } - - /* ************************************************************************* */ - void IndexConditional::permuteWithInverse(const Permutation& inversePermutation) { - BOOST_FOREACH(Index& key, keys()) - key = inversePermutation[key]; - assertInvariants(); - } - /* ************************************************************************* */ - -} diff --git a/gtsam/inference/IndexConditional.h b/gtsam/inference/IndexConditional.h deleted file mode 100644 index a4068f6c4..000000000 --- a/gtsam/inference/IndexConditional.h +++ /dev/null @@ -1,135 +0,0 @@ -/* ---------------------------------------------------------------------------- - - * GTSAM Copyright 2010, Georgia Tech Research Corporation, - * Atlanta, Georgia 30332-0415 - * All Rights Reserved - * Authors: Frank Dellaert, et al. (see THANKS for the full author list) - - * See LICENSE for the license information - - * -------------------------------------------------------------------------- */ - -/** - * @file IndexConditional.h - * @author Richard Roberts - * @date Oct 17, 2010 - */ - -#pragma once - -#include -#include -#include -#include - -namespace gtsam { - - /** - * IndexConditional serves two purposes. It is the base class for - * GaussianConditional, and also functions as a symbolic conditional with - * Index keys, produced by symbolic elimination of IndexFactor. - * - * It derives from Conditional with a key type of Index, which is an - * unsigned integer. - * \nosubgrouping - */ - class IndexConditional : public Conditional { - - protected: - - // Checks that frontal indices are sorted and lower than parent indices - GTSAM_EXPORT void assertInvariants() const; - - public: - - typedef IndexConditional This; - typedef Conditional Base; - typedef IndexFactor FactorType; - typedef boost::shared_ptr shared_ptr; - - /// @name Standard Constructors - /// @{ - - /** Empty Constructor to make serialization possible */ - IndexConditional() { assertInvariants(); } - - /** No parents */ - IndexConditional(Index j) : Base(j) { assertInvariants(); } - - /** Single parent */ - IndexConditional(Index j, Index parent) : Base(j, parent) { assertInvariants(); } - - /** Two parents */ - IndexConditional(Index j, Index parent1, Index parent2) : Base(j, parent1, parent2) { assertInvariants(); } - - /** Three parents */ - IndexConditional(Index j, Index parent1, Index parent2, Index parent3) : Base(j, parent1, parent2, parent3) { assertInvariants(); } - - /// @} - /// @name Advanced Constructors - /// @{ - - /** Constructor from a frontal variable and a vector of parents */ - IndexConditional(Index j, const std::vector& parents) : Base(j, parents) { - assertInvariants(); - } - - /** Constructor from keys and nr of frontal variables */ - IndexConditional(const std::vector& keys, size_t nrFrontals) : - Base(keys, nrFrontals) { - assertInvariants(); - } - - /** Constructor from keys and nr of frontal variables */ - IndexConditional(const std::list& keys, size_t nrFrontals) : - Base(keys, nrFrontals) { - assertInvariants(); - } - - /// @} - /// @name Standard Interface - /// @{ - - /** Named constructor directly returning a shared pointer */ - template - static shared_ptr FromKeys(const KEYS& keys, size_t nrFrontals) { - shared_ptr conditional(new IndexConditional()); - conditional->keys_.assign(keys.begin(), keys.end()); - conditional->nrFrontals_ = nrFrontals; - return conditional; - } - - /** Convert to a factor */ - IndexFactor::shared_ptr toFactor() const { - return IndexFactor::shared_ptr(new IndexFactor(*this)); - } - - /// @} - /// @name Advanced Interface - /// @{ - - /** Permute the variables when only separator variables need to be permuted. - * Returns true if any reordered variables appeared in the separator and - * false if not. - */ - GTSAM_EXPORT bool reduceSeparatorWithInverse(const internal::Reduction& inverseReduction); - - /** - * Permutes the Conditional, but for efficiency requires the permutation - * to already be inverted. - */ - GTSAM_EXPORT void permuteWithInverse(const Permutation& inversePermutation); - - private: - /** Serialization function */ - friend class boost::serialization::access; - template - void serialize(Archive & ar, const unsigned int version) { - ar & BOOST_SERIALIZATION_BASE_OBJECT_NVP(Base); - } - - /// @} - - }; - -} diff --git a/gtsam/inference/IndexFactor.cpp b/gtsam/inference/IndexFactor.cpp deleted file mode 100644 index 77b84074b..000000000 --- a/gtsam/inference/IndexFactor.cpp +++ /dev/null @@ -1,71 +0,0 @@ -/* ---------------------------------------------------------------------------- - - * GTSAM Copyright 2010, Georgia Tech Research Corporation, - * Atlanta, Georgia 30332-0415 - * All Rights Reserved - * Authors: Frank Dellaert, et al. (see THANKS for the full author list) - - * See LICENSE for the license information - - * -------------------------------------------------------------------------- */ - -/** - * @file IndexFactor.cpp - * @author Richard Roberts - * @date Oct 17, 2010 - */ - -#include -#include -#include - -using namespace std; - -namespace gtsam { - - /* ************************************************************************* */ - void IndexFactor::assertInvariants() const { - Base::assertInvariants(); - } - - /* ************************************************************************* */ - IndexFactor::IndexFactor(const IndexConditional& c) : - Base(c) { - assertInvariants(); - } - - /* ************************************************************************* */ -#ifdef TRACK_ELIMINATE - boost::shared_ptr IndexFactor::eliminateFirst() { - boost::shared_ptr result(Base::eliminateFirst< - IndexConditional>()); - assertInvariants(); - return result; - } - - /* ************************************************************************* */ - boost::shared_ptr > IndexFactor::eliminate( - size_t nrFrontals) { - boost::shared_ptr > result(Base::eliminate< - IndexConditional>(nrFrontals)); - assertInvariants(); - return result; - } -#endif - - /* ************************************************************************* */ - void IndexFactor::permuteWithInverse(const Permutation& inversePermutation) { - BOOST_FOREACH(Index& key, keys()) - key = inversePermutation[key]; - assertInvariants(); - } - - /* ************************************************************************* */ - void IndexFactor::reduceWithInverse(const internal::Reduction& inverseReduction) { - BOOST_FOREACH(Index& key, keys()) - key = inverseReduction[key]; - assertInvariants(); - } - - /* ************************************************************************* */ -} // gtsam diff --git a/gtsam/inference/IndexFactor.h b/gtsam/inference/IndexFactor.h deleted file mode 100644 index d18c70086..000000000 --- a/gtsam/inference/IndexFactor.h +++ /dev/null @@ -1,179 +0,0 @@ -/* ---------------------------------------------------------------------------- - - * GTSAM Copyright 2010, Georgia Tech Research Corporation, - * Atlanta, Georgia 30332-0415 - * All Rights Reserved - * Authors: Frank Dellaert, et al. (see THANKS for the full author list) - - * See LICENSE for the license information - - * -------------------------------------------------------------------------- */ - -/** - * @file IndexFactor.h - * @author Richard Roberts - * @date Oct 17, 2010 - */ - -#pragma once - -#include -#include - -namespace gtsam { - - // Forward declaration of IndexConditional - class IndexConditional; - - /** - * IndexFactor serves two purposes. It is the base class for all linear - * factors (GaussianFactor, JacobianFactor, HessianFactor), and also - * functions as a symbolic factor with Index keys, used to do symbolic - * elimination by JunctionTree. - * - * It derives from Factor with a key type of Index, an unsigned integer. - * \nosubgrouping - */ - class IndexFactor: public Factor { - - protected: - - /// @name Advanced Interface - /// @{ - - /// Internal function for checking class invariants (unique keys for this factor) - GTSAM_EXPORT void assertInvariants() const; - - /// @} - - public: - - typedef IndexFactor This; - typedef Factor Base; - - /** Elimination produces an IndexConditional */ - typedef IndexConditional ConditionalType; - - /** Overriding the shared_ptr typedef */ - typedef boost::shared_ptr shared_ptr; - - /// @name Standard Interface - /// @{ - - /** Copy constructor */ - IndexFactor(const This& f) : - Base(f) { - assertInvariants(); - } - - /** Construct from derived type */ - GTSAM_EXPORT IndexFactor(const IndexConditional& c); - - /** Default constructor for I/O */ - IndexFactor() { - assertInvariants(); - } - - /** Construct unary factor */ - IndexFactor(Index j) : - Base(j) { - assertInvariants(); - } - - /** Construct binary factor */ - IndexFactor(Index j1, Index j2) : - Base(j1, j2) { - assertInvariants(); - } - - /** Construct ternary factor */ - IndexFactor(Index j1, Index j2, Index j3) : - Base(j1, j2, j3) { - assertInvariants(); - } - - /** Construct 4-way factor */ - IndexFactor(Index j1, Index j2, Index j3, Index j4) : - Base(j1, j2, j3, j4) { - assertInvariants(); - } - - /** Construct 5-way factor */ - IndexFactor(Index j1, Index j2, Index j3, Index j4, Index j5) : - Base(j1, j2, j3, j4, j5) { - assertInvariants(); - } - - /** Construct 6-way factor */ - IndexFactor(Index j1, Index j2, Index j3, Index j4, Index j5, Index j6) : - Base(j1, j2, j3, j4, j5, j6) { - assertInvariants(); - } - - /// @} - /// @name Advanced Constructors - /// @{ - - /** Construct n-way factor */ - IndexFactor(const std::set& js) : - Base(js) { - assertInvariants(); - } - - /** Construct n-way factor */ - IndexFactor(const std::vector& js) : - Base(js) { - assertInvariants(); - } - - /** Constructor from a collection of keys */ - template IndexFactor(KeyIterator beginKey, - KeyIterator endKey) : - Base(beginKey, endKey) { - assertInvariants(); - } - - /// @} - -#ifdef TRACK_ELIMINATE - /** - * eliminate the first variable involved in this factor - * @return a conditional on the eliminated variable - */ - GTSAM_EXPORT boost::shared_ptr eliminateFirst(); - - /** eliminate the first nrFrontals frontal variables.*/ - GTSAM_EXPORT boost::shared_ptr > eliminate(size_t nrFrontals = - 1); -#endif - - /// @name Advanced Interface - /// @{ - - /** - * Permutes the factor, but for efficiency requires the permutation - * to already be inverted. - */ - GTSAM_EXPORT void permuteWithInverse(const Permutation& inversePermutation); - - /** - * Apply a reduction, which is a remapping of variable indices. - */ - GTSAM_EXPORT void reduceWithInverse(const internal::Reduction& inverseReduction); - - virtual ~IndexFactor() { - } - - private: - /** Serialization function */ - friend class boost::serialization::access; - template - void serialize(ARCHIVE & ar, const unsigned int version) { - ar & BOOST_SERIALIZATION_BASE_OBJECT_NVP(Base); - } - - /// @} - - }; // IndexFactor - -} diff --git a/gtsam/inference/JunctionTree-inl.h b/gtsam/inference/JunctionTree-inl.h deleted file mode 100644 index 1978bde38..000000000 --- a/gtsam/inference/JunctionTree-inl.h +++ /dev/null @@ -1,205 +0,0 @@ -/* ---------------------------------------------------------------------------- - - * GTSAM Copyright 2010, Georgia Tech Research Corporation, - * Atlanta, Georgia 30332-0415 - * All Rights Reserved - * Authors: Frank Dellaert, et al. (see THANKS for the full author list) - - * See LICENSE for the license information - - * -------------------------------------------------------------------------- */ - -/** - * @file JunctionTree-inl.h - * @date Feb 4, 2010 - * @author Kai Ni - * @author Frank Dellaert - * @brief The junction tree, template bodies - */ - -#pragma once - -#include -#include -#include -#include - -#include - -namespace gtsam { - - /* ************************************************************************* */ - template - void JunctionTree::construct(const FG& fg, const VariableIndex& variableIndex) { - gttic(JT_symbolic_ET); - const typename EliminationTree::shared_ptr symETree = - EliminationTree::Create(fg, variableIndex); - assert(symETree.get()); - gttoc(JT_symbolic_ET); - gttic(JT_symbolic_eliminate); - SymbolicBayesNet::shared_ptr sbn = symETree->eliminate(&EliminateSymbolic); - assert(sbn.get()); - gttoc(JT_symbolic_eliminate); - gttic(symbolic_BayesTree); - SymbolicBayesTree sbt(*sbn); - gttoc(symbolic_BayesTree); - - // distribute factors - gttic(distributeFactors); - this->root_ = distributeFactors(fg, sbt.root()); - gttoc(distributeFactors); - } - - /* ************************************************************************* */ - template - JunctionTree::JunctionTree(const FG& fg) { - gttic(VariableIndex); - VariableIndex varIndex(fg); - gttoc(VariableIndex); - construct(fg, varIndex); - } - - /* ************************************************************************* */ - template - JunctionTree::JunctionTree(const FG& fg, const VariableIndex& variableIndex) { - construct(fg, variableIndex); - } - - /* ************************************************************************* */ - template - typename JunctionTree::sharedClique JunctionTree::distributeFactors( - const FG& fg, const SymbolicBayesTree::sharedClique& bayesClique) { - - // Build "target" index. This is an index for each variable of the factors - // that involve this variable as their *lowest-ordered* variable. For each - // factor, it is the lowest-ordered variable of that factor that pulls the - // factor into elimination, after which all of the information in the - // factor is contained in the eliminated factors that are passed up the - // tree as elimination continues. - - // Two stages - first build an array of the lowest-ordered variable in each - // factor and find the last variable to be eliminated. - std::vector lowestOrdered(fg.size(), std::numeric_limits::max()); - Index maxVar = 0; - for(size_t i=0; ibegin(), fg[i]->end()); - if(min != fg[i]->end()) { - lowestOrdered[i] = *min; - maxVar = std::max(maxVar, *min); - } - } - - // Now add each factor to the list corresponding to its lowest-ordered - // variable. - std::vector > targets(maxVar+1); - for(size_t i=0; i::max()) - targets[lowestOrdered[i]].push_back(i); - - // Now call the recursive distributeFactors - return distributeFactors(fg, targets, bayesClique); - } - - /* ************************************************************************* */ - template - typename JunctionTree::sharedClique JunctionTree::distributeFactors(const FG& fg, - const std::vector >& targets, - const SymbolicBayesTree::sharedClique& bayesClique) { - - if(bayesClique) { - // create a new clique in the junction tree - sharedClique clique(new Clique((*bayesClique)->beginFrontals(), (*bayesClique)->endFrontals(), - (*bayesClique)->beginParents(), (*bayesClique)->endParents())); - - // count the factors for this cluster to pre-allocate space - { - size_t nFactors = 0; - BOOST_FOREACH(const Index frontal, clique->frontal) { - // There may be less variables in "targets" than there really are if - // some of the highest-numbered variables do not pull in any factors. - if(frontal < targets.size()) - nFactors += targets[frontal].size(); } - clique->reserve(nFactors); - } - // add the factors to this cluster - BOOST_FOREACH(const Index frontal, clique->frontal) { - if(frontal < targets.size()) { - BOOST_FOREACH(const size_t factorI, targets[frontal]) { - clique->push_back(fg[factorI]); } } } - - // recursively call the children - BOOST_FOREACH(const typename SymbolicBayesTree::sharedClique bayesChild, bayesClique->children()) { - sharedClique child = distributeFactors(fg, targets, bayesChild); - clique->addChild(child); - child->parent() = clique; - } - return clique; - } else - return sharedClique(); - } - - /* ************************************************************************* */ - template - std::pair::BTClique::shared_ptr, - typename FG::sharedFactor> JunctionTree::eliminateOneClique( - typename FG::Eliminate function, - const boost::shared_ptr& current) const { - - FG fg; // factor graph will be assembled from local factors and marginalized children - fg.reserve(current->size() + current->children().size()); - fg.push_back(*current); // add the local factors - - // receive the factors from the child and its clique point - std::list children; - BOOST_FOREACH(const boost::shared_ptr& child, current->children()) { - std::pair tree_factor( - eliminateOneClique(function, child)); - children.push_back(tree_factor.first); - fg.push_back(tree_factor.second); - } - - // eliminate the combined factors - // warning: fg is being eliminated in-place and will contain marginal afterwards - - // Now that we know which factors and variables, and where variables - // come from and go to, create and eliminate the new joint factor. - gttic(CombineAndEliminate); - typename FG::EliminationResult eliminated(function(fg, - current->frontal.size())); - gttoc(CombineAndEliminate); - -#ifdef GTSAM_EXTRA_CONSISTENCY_CHECKS - assert(std::equal(eliminated.second->begin(), eliminated.second->end(), current->separator.begin())); -#endif - - gttic(Update_tree); - // create a new clique corresponding the combined factors - typename BTClique::shared_ptr new_clique(BTClique::Create(eliminated)); - new_clique->children_ = children; - - BOOST_FOREACH(typename BTClique::shared_ptr& childRoot, children) { - childRoot->parent_ = new_clique; - } - gttoc(Update_tree); - - return std::make_pair(new_clique, eliminated.second); - } - - /* ************************************************************************* */ - template - typename BTCLIQUE::shared_ptr JunctionTree::eliminate( - typename FG::Eliminate function) const { - if (this->root()) { - gttic(JT_eliminate); - std::pair ret = - this->eliminateOneClique(function, this->root()); - if (ret.second->size() != 0) throw std::runtime_error( - "JuntionTree::eliminate: elimination failed because of factors left over!"); - gttoc(JT_eliminate); - return ret.first; - } else - return typename BTClique::shared_ptr(); - } - -} //namespace gtsam diff --git a/gtsam/inference/JunctionTree-inst.h b/gtsam/inference/JunctionTree-inst.h new file mode 100644 index 000000000..92cd40665 --- /dev/null +++ b/gtsam/inference/JunctionTree-inst.h @@ -0,0 +1,313 @@ +/* ---------------------------------------------------------------------------- + + * GTSAM Copyright 2010, Georgia Tech Research Corporation, + * Atlanta, Georgia 30332-0415 + * All Rights Reserved + * Authors: Frank Dellaert, et al. (see THANKS for the full author list) + + * See LICENSE for the license information + + * -------------------------------------------------------------------------- */ + +/** + * @file JunctionTree-inl.h + * @date Feb 4, 2010 + * @author Kai Ni + * @author Frank Dellaert + * @author Richard Roberts + * @brief The junction tree, template bodies + */ + +#pragma once + +#include +#include +#include +#include +#include +#include + +#include +#include + +namespace gtsam { + + namespace { + /* ************************************************************************* */ + template + struct ConstructorTraversalData { + ConstructorTraversalData* const parentData; + typename JunctionTree::sharedNode myJTNode; + std::vector childSymbolicConditionals; + std::vector childSymbolicFactors; + ConstructorTraversalData(ConstructorTraversalData* _parentData) : parentData(_parentData) {} + }; + + /* ************************************************************************* */ + // Pre-order visitor function + template + ConstructorTraversalData ConstructorTraversalVisitorPre( + const boost::shared_ptr& node, + ConstructorTraversalData& parentData) + { + // On the pre-order pass, before children have been visited, we just set up a traversal data + // structure with its own JT node, and create a child pointer in its parent. + ConstructorTraversalData myData = ConstructorTraversalData(&parentData); + myData.myJTNode = boost::make_shared::Node>(); + myData.myJTNode->keys.push_back(node->key); + myData.myJTNode->factors.insert(myData.myJTNode->factors.begin(), node->factors.begin(), node->factors.end()); + parentData.myJTNode->children.push_back(myData.myJTNode); + return myData; + } + + /* ************************************************************************* */ + // Post-order visitor function + template + void ConstructorTraversalVisitorPost( + const boost::shared_ptr& ETreeNode, + const ConstructorTraversalData& myData) + { + // In this post-order visitor, we combine the symbolic elimination results from the + // elimination tree children and symbolically eliminate the current elimination tree node. We + // then check whether each of our elimination tree child nodes should be merged with us. The + // check for this is that our number of symbolic elimination parents is exactly 1 less than + // our child's symbolic elimination parents - this condition indicates that eliminating the + // current node did not introduce any parents beyond those already in the child. + + // Do symbolic elimination for this node + SymbolicFactorGraph symbolicFactors; + symbolicFactors.reserve(ETreeNode->factors.size() + myData.childSymbolicFactors.size()); + // Add symbolic versions of the ETree node factors + BOOST_FOREACH(const typename GRAPH::sharedFactor& factor, ETreeNode->factors) { + symbolicFactors.push_back(boost::make_shared( + SymbolicFactor::FromKeys(*factor))); } + // Add symbolic factors passed up from children + symbolicFactors.push_back(myData.childSymbolicFactors.begin(), myData.childSymbolicFactors.end()); + Ordering keyAsOrdering; keyAsOrdering.push_back(ETreeNode->key); + std::pair symbolicElimResult = + EliminateSymbolic(symbolicFactors, keyAsOrdering); + + // Store symbolic elimination results in the parent + myData.parentData->childSymbolicConditionals.push_back(symbolicElimResult.first); + myData.parentData->childSymbolicFactors.push_back(symbolicElimResult.second); + + // Merge our children if they are in our clique - if our conditional has exactly one fewer + // parent than our child's conditional. + const size_t myNrParents = symbolicElimResult.first->nrParents(); + size_t nrMergedChildren = 0; + assert(myData.myJTNode->children.size() == myData.childSymbolicConditionals.size()); + // Loop over children + int combinedProblemSize = (int)symbolicElimResult.first->size(); + for(size_t child = 0; child < myData.childSymbolicConditionals.size(); ++child) { + // Check if we should merge the child + if(myNrParents + 1 == myData.childSymbolicConditionals[child]->nrParents()) { + // Get a reference to the child, adjusting the index to account for children previously + // merged and removed from the child list. + const typename JunctionTree::Node& childToMerge = + *myData.myJTNode->children[child - nrMergedChildren]; + // Merge keys, factors, and children. + myData.myJTNode->keys.insert(myData.myJTNode->keys.begin(), childToMerge.keys.begin(), childToMerge.keys.end()); + myData.myJTNode->factors.insert(myData.myJTNode->factors.end(), childToMerge.factors.begin(), childToMerge.factors.end()); + myData.myJTNode->children.insert(myData.myJTNode->children.end(), childToMerge.children.begin(), childToMerge.children.end()); + // Remove child from list. + myData.myJTNode->children.erase(myData.myJTNode->children.begin() + (child - nrMergedChildren)); + // Increment number of merged children + ++ nrMergedChildren; + // Increment problem size + combinedProblemSize = std::max(combinedProblemSize, childToMerge.problemSize_); + } + } + myData.myJTNode->problemSize_ = combinedProblemSize; + } + + /* ************************************************************************* */ + // Elimination traversal data - stores a pointer to the parent data and collects the factors + // resulting from elimination of the children. Also sets up BayesTree cliques with parent and + // child pointers. + template + struct EliminationData { + EliminationData* const parentData; + size_t myIndexInParent; + std::vector childFactors; + boost::shared_ptr bayesTreeNode; + EliminationData(EliminationData* _parentData, size_t nChildren) : + parentData(_parentData), + bayesTreeNode(boost::make_shared()) + { + if(parentData) { + myIndexInParent = parentData->childFactors.size(); + parentData->childFactors.push_back(typename JUNCTIONTREE::sharedFactor()); + } else { + myIndexInParent = 0; + } + // Set up BayesTree parent and child pointers + if(parentData) { + if(parentData->parentData) // If our parent is not the dummy node + bayesTreeNode->parent_ = parentData->bayesTreeNode; + parentData->bayesTreeNode->children.push_back(bayesTreeNode); + } + } + }; + + /* ************************************************************************* */ + // Elimination pre-order visitor - just creates the EliminationData structure for the visited + // node. + template + EliminationData eliminationPreOrderVisitor( + const typename JUNCTIONTREE::sharedNode& node, EliminationData& parentData) + { + return EliminationData(&parentData, node->children.size()); + } + + /* ************************************************************************* */ + // Elimination post-order visitor - combine the child factors with our own factors, add the + // resulting conditional to the BayesTree, and add the remaining factor to the parent. + template + struct EliminationPostOrderVisitor + { + const typename JUNCTIONTREE::Eliminate& eliminationFunction; + typename JUNCTIONTREE::BayesTreeType::Nodes& nodesIndex; + EliminationPostOrderVisitor(const typename JUNCTIONTREE::Eliminate& eliminationFunction, + typename JUNCTIONTREE::BayesTreeType::Nodes& nodesIndex) : + eliminationFunction(eliminationFunction), nodesIndex(nodesIndex) {} + void operator()(const typename JUNCTIONTREE::sharedNode& node, EliminationData& myData) + { + // Typedefs + typedef typename JUNCTIONTREE::sharedFactor sharedFactor; + typedef typename JUNCTIONTREE::FactorType FactorType; + typedef typename JUNCTIONTREE::FactorGraphType FactorGraphType; + typedef typename JUNCTIONTREE::ConditionalType ConditionalType; + typedef typename JUNCTIONTREE::BayesTreeType::Node BTNode; + + // Gather factors + FactorGraphType gatheredFactors; + gatheredFactors.reserve(node->factors.size() + node->children.size()); + gatheredFactors += node->factors; + gatheredFactors += myData.childFactors; + + // Check for Bayes tree orphan subtrees, and add them to our children + BOOST_FOREACH(const sharedFactor& f, node->factors) + { + if(const BayesTreeOrphanWrapper* asSubtree = dynamic_cast*>(f.get())) + { + myData.bayesTreeNode->children.push_back(asSubtree->clique); + asSubtree->clique->parent_ = myData.bayesTreeNode; + } + } + + // Do dense elimination step + std::pair, boost::shared_ptr > eliminationResult = + eliminationFunction(gatheredFactors, Ordering(node->keys)); + + // Store conditional in BayesTree clique, and in the case of ISAM2Clique also store the remaining factor + myData.bayesTreeNode->setEliminationResult(eliminationResult); + + // Fill nodes index - we do this here instead of calling insertRoot at the end to avoid + // putting orphan subtrees in the index - they'll already be in the index of the ISAM2 + // object they're added to. + BOOST_FOREACH(const Key& j, myData.bayesTreeNode->conditional()->frontals()) + nodesIndex.insert(std::make_pair(j, myData.bayesTreeNode)).second; + + // Store remaining factor in parent's gathered factors + if(!eliminationResult.second->empty()) + myData.parentData->childFactors[myData.myIndexInParent] = eliminationResult.second; + } + }; + } + + /* ************************************************************************* */ + template + void JunctionTree::Node::print( + const std::string& s, const KeyFormatter& keyFormatter) const + { + std::cout << s; + BOOST_FOREACH(Key j, keys) + std::cout << j << " "; + std::cout << "problemSize = " << problemSize_ << std::endl; + } + + /* ************************************************************************* */ + template + void JunctionTree::print( + const std::string& s, const KeyFormatter& keyFormatter) const + { + treeTraversal::PrintForest(*this, s, keyFormatter); + } + + /* ************************************************************************* */ + template + template + JunctionTree + JunctionTree::FromEliminationTree(const ETREE& eliminationTree) + { + gttic(JunctionTree_FromEliminationTree); + // Here we rely on the BayesNet having been produced by this elimination tree, such that the + // conditionals are arranged in DFS post-order. We traverse the elimination tree, and inspect + // the symbolic conditional corresponding to each node. The elimination tree node is added to + // the same clique with its parent if it has exactly one more Bayes net conditional parent than + // does its elimination tree parent. + + // Traverse the elimination tree, doing symbolic elimination and merging nodes as we go. Gather + // the created junction tree roots in a dummy Node. + typedef typename ETREE::Node ETreeNode; + ConstructorTraversalData rootData(0); + rootData.myJTNode = boost::make_shared(); // Make a dummy node to gather the junction tree roots + treeTraversal::DepthFirstForest(eliminationTree, rootData, + ConstructorTraversalVisitorPre, ConstructorTraversalVisitorPost); + + // Assign roots from the dummy node + This result; + result.roots_ = rootData.myJTNode->children; + + // Transfer remaining factors from elimination tree + result.remainingFactors_ = eliminationTree.remainingFactors(); + + return result; + } + + /* ************************************************************************* */ + template + JunctionTree& JunctionTree::operator=(const This& other) + { + // Start by duplicating the tree. + roots_ = treeTraversal::CloneForest(other); + + // Assign the remaining factors - these are pointers to factors in the original factor graph and + // we do not clone them. + remainingFactors_ = other.remainingFactors_; + + return *this; + } + + /* ************************************************************************* */ + template + std::pair, boost::shared_ptr > + JunctionTree::eliminate(const Eliminate& function) const + { + gttic(JunctionTree_eliminate); + // Do elimination (depth-first traversal). The rootsContainer stores a 'dummy' BayesTree node + // that contains all of the roots as its children. rootsContainer also stores the remaining + // uneliminated factors passed up from the roots. + boost::shared_ptr result = boost::make_shared(); + EliminationData rootsContainer(0, roots_.size()); + EliminationPostOrderVisitor visitorPost(function, result->nodes_); + //tbb::task_scheduler_init init(1); + treeTraversal::DepthFirstForest/*Parallel*/(*this, rootsContainer, + eliminationPreOrderVisitor, visitorPost/*, 10*/); + + // Create BayesTree from roots stored in the dummy BayesTree node. + result->roots_.insert(result->roots_.end(), rootsContainer.bayesTreeNode->children.begin(), rootsContainer.bayesTreeNode->children.end()); + + // Add remaining factors that were not involved with eliminated variables + boost::shared_ptr allRemainingFactors = boost::make_shared(); + allRemainingFactors->reserve(remainingFactors_.size() + rootsContainer.childFactors.size()); + allRemainingFactors->push_back(remainingFactors_.begin(), remainingFactors_.end()); + BOOST_FOREACH(const sharedFactor& factor, rootsContainer.childFactors) + if(factor) + allRemainingFactors->push_back(factor); + + // Return result + return std::make_pair(result, allRemainingFactors); + } + +} //namespace gtsam diff --git a/gtsam/inference/JunctionTree.h b/gtsam/inference/JunctionTree.h index c8ecc0576..e7df4e32d 100644 --- a/gtsam/inference/JunctionTree.h +++ b/gtsam/inference/JunctionTree.h @@ -14,18 +14,14 @@ * @date Feb 4, 2010 * @author Kai Ni * @author Frank Dellaert - * @brief: The junction tree + * @author Richard Roberts + * @brief The junction tree */ #pragma once -#include -#include -#include -#include -#include -#include -#include +#include +#include namespace gtsam { @@ -46,90 +42,103 @@ namespace gtsam { * The tree structure and elimination method are exactly analagous to the EliminationTree, * except that in the JunctionTree, at each node multiple variables are eliminated at a time. * - * * \addtogroup Multifrontal * \nosubgrouping */ - template::Clique> - class JunctionTree: public ClusterTree { + template + class JunctionTree { public: - /// In a junction tree each cluster is associated with a clique - typedef typename ClusterTree::Cluster Clique; - typedef typename Clique::shared_ptr sharedClique; ///< Shared pointer to a clique + typedef GRAPH FactorGraphType; ///< The factor graph type + typedef typename GRAPH::FactorType FactorType; ///< The type of factors + typedef JunctionTree This; ///< This class + typedef boost::shared_ptr shared_ptr; ///< Shared pointer to this class + typedef boost::shared_ptr sharedFactor; ///< Shared pointer to a factor + typedef BAYESTREE BayesTreeType; ///< The BayesTree type produced by elimination + typedef typename BayesTreeType::ConditionalType ConditionalType; ///< The type of conditionals + typedef boost::shared_ptr sharedConditional; ///< Shared pointer to a conditional + typedef typename FactorGraphType::Eliminate Eliminate; ///< Typedef for an eliminate subroutine - /// The BayesTree type produced by elimination - typedef BTCLIQUE BTClique; + struct Node { + typedef std::vector Keys; + typedef std::vector Factors; + typedef std::vector > Children; - /// Shared pointer to this class - typedef boost::shared_ptr > shared_ptr; + Keys keys; ///< Frontal keys of this node + Factors factors; ///< Factors associated with this node + Children children; ///< sub-trees + int problemSize_; - /// We will frequently refer to a symbolic Bayes tree, used to find the clique structure - typedef gtsam::BayesTree SymbolicBayesTree; + int problemSize() const { return problemSize_; } + + /** print this node */ + void print(const std::string& s = "", const KeyFormatter& keyFormatter = DefaultKeyFormatter) const; + }; + + typedef boost::shared_ptr sharedNode; ///< Shared pointer to Node private: - /// @name Advanced Interface - /// @{ + /** concept check */ + GTSAM_CONCEPT_TESTABLE_TYPE(FactorType); - /// distribute the factors along the cluster tree - sharedClique distributeFactors(const FG& fg, - const SymbolicBayesTree::sharedClique& clique); + std::vector roots_; + std::vector remainingFactors_; - /// distribute the factors along the cluster tree - sharedClique distributeFactors(const FG& fg, const std::vector >& targets, - const SymbolicBayesTree::sharedClique& clique); - - /// recursive elimination function - std::pair - eliminateOneClique(typename FG::Eliminate function, - const boost::shared_ptr& clique) const; - - /// internal constructor - void construct(const FG& fg, const VariableIndex& variableIndex); - - /// @} - - public: + protected: /// @name Standard Constructors /// @{ - /** Default constructor */ - JunctionTree() {} + /** Build the junction tree from an elimination tree. */ + template + static This FromEliminationTree(const ETREE& eliminationTree); + + /** Copy constructor - makes a deep copy of the tree structure, but only pointers to factors are + * copied, factors are not cloned. */ + JunctionTree(const This& other) { *this = other; } - /** Named constructor to build the junction tree of a factor graph. Note - * that this has to compute the column structure as a VariableIndex, so if you - * already have this precomputed, use the JunctionTree(const FG&, const VariableIndex&) - * constructor instead. - * @param factorGraph The factor graph for which to build the elimination tree - */ - JunctionTree(const FG& factorGraph); - - /** Construct from a factor graph and pre-computed variable index. - * @param fg The factor graph for which to build the junction tree - * @param structure The set of factors involving each variable. If this is not - * precomputed, you can call the JunctionTree(const FG&) - * constructor instead. - */ - JunctionTree(const FG& fg, const VariableIndex& variableIndex); + /** Assignment operator - makes a deep copy of the tree structure, but only pointers to factors + * are copied, factors are not cloned. */ + This& operator=(const This& other); /// @} + + public: + /// @name Standard Interface /// @{ - /** Eliminate the factors in the subgraphs to produce a BayesTree. - * @param function The function used to eliminate, see the namespace functions - * in GaussianFactorGraph.h - * @return The BayesTree resulting from elimination - */ - typename BTClique::shared_ptr eliminate(typename FG::Eliminate function) const; + /** Eliminate the factors to a Bayes net and remaining factor graph + * @param function The function to use to eliminate, see the namespace functions + * in GaussianFactorGraph.h + * @return The Bayes net and factor graph resulting from elimination + */ + std::pair, boost::shared_ptr > + eliminate(const Eliminate& function) const; + + /** Print the junction tree */ + void print(const std::string& s = "", const KeyFormatter& keyFormatter = DefaultKeyFormatter) const; /// @} - }; // JunctionTree + /// @name Advanced Interface + /// @{ + + /** Return the set of roots (one for a tree, multiple for a forest) */ + const std::vector& roots() const { return roots_; } -} // namespace gtsam + /** Return the remaining factors that are not pulled into elimination */ + const std::vector& remainingFactors() const { return remainingFactors_; } -#include + /// @} + + private: + + // Private default constructor (used in static construction methods) + JunctionTree() {} + + }; + +} \ No newline at end of file diff --git a/gtsam/nonlinear/Key.cpp b/gtsam/inference/Key.cpp similarity index 83% rename from gtsam/nonlinear/Key.cpp rename to gtsam/inference/Key.cpp index c4b21e681..b986154df 100644 --- a/gtsam/nonlinear/Key.cpp +++ b/gtsam/inference/Key.cpp @@ -22,19 +22,11 @@ #include #include -#include +#include #include namespace gtsam { -/* ************************************************************************* */ -std::string _defaultKeyFormatter(Key key) { - const Symbol asSymbol(key); - if(asSymbol.chr() > 0) - return (std::string)asSymbol; - else - return boost::lexical_cast(key); - } /* ************************************************************************* */ std::string _multirobotKeyFormatter(gtsam::Key key) { diff --git a/gtsam/nonlinear/Key.h b/gtsam/inference/Key.h similarity index 74% rename from gtsam/nonlinear/Key.h rename to gtsam/inference/Key.h index edeea396f..21f75e8d4 100644 --- a/gtsam/nonlinear/Key.h +++ b/gtsam/inference/Key.h @@ -27,19 +27,6 @@ namespace gtsam { - /// Integer nonlinear key type - typedef size_t Key; - - /// Typedef for a function to format a key, i.e. to convert it to a string - typedef boost::function KeyFormatter; - - // Helper function for DefaultKeyFormatter - GTSAM_EXPORT std::string _defaultKeyFormatter(Key key); - - /// The default KeyFormatter, which is used if no KeyFormatter is passed to - /// a nonlinear 'print' function. Automatically detects plain integer keys - /// and Symbol keys. - static const KeyFormatter DefaultKeyFormatter = &_defaultKeyFormatter; // Helper function for Multi-robot Key Formatter GTSAM_EXPORT std::string _multirobotKeyFormatter(gtsam::Key key); diff --git a/gtsam/inference/Ordering.cpp b/gtsam/inference/Ordering.cpp new file mode 100644 index 000000000..b0ac9669f --- /dev/null +++ b/gtsam/inference/Ordering.cpp @@ -0,0 +1,232 @@ +/* ---------------------------------------------------------------------------- + + * GTSAM Copyright 2010, Georgia Tech Research Corporation, + * Atlanta, Georgia 30332-0415 + * All Rights Reserved + * Authors: Frank Dellaert, et al. (see THANKS for the full author list) + + * See LICENSE for the license information + + * -------------------------------------------------------------------------- */ + +/** + * @file Ordering.cpp + * @author Richard Roberts + * @date Sep 2, 2010 + */ + +#include +#include + +#include + +#include +#include + +using namespace std; + +namespace gtsam { + + /* ************************************************************************* */ + FastMap Ordering::invert() const + { + FastMap inverted; + for(size_t pos = 0; pos < this->size(); ++pos) + inverted.insert(make_pair((*this)[pos], pos)); + return inverted; + } + + /* ************************************************************************* */ + Ordering Ordering::COLAMD(const VariableIndex& variableIndex) + { + // Call constrained version with all groups set to zero + vector dummy_groups(variableIndex.size(), 0); + return Ordering::COLAMDConstrained(variableIndex, dummy_groups); + } + + /* ************************************************************************* */ + Ordering Ordering::COLAMDConstrained( + const VariableIndex& variableIndex, std::vector& cmember) + { + gttic(Ordering_COLAMDConstrained); + + gttic(Prepare); + size_t nEntries = variableIndex.nEntries(), nFactors = variableIndex.nFactors(), nVars = variableIndex.size(); + // Convert to compressed column major format colamd wants it in (== MATLAB format!) + size_t Alen = ccolamd_recommended((int)nEntries, (int)nFactors, (int)nVars); /* colamd arg 3: size of the array A */ + vector A = vector(Alen); /* colamd arg 4: row indices of A, of size Alen */ + vector p = vector(nVars + 1); /* colamd arg 5: column pointers of A, of size n_col+1 */ + + // Fill in input data for COLAMD + p[0] = 0; + int count = 0; + vector keys(nVars); // Array to store the keys in the order we add them so we can retrieve them in permuted order + size_t index = 0; + BOOST_FOREACH(const VariableIndex::value_type key_factors, variableIndex) { + // Arrange factor indices into COLAMD format + const VariableIndex::Factors& column = key_factors.second; + size_t lastFactorId = numeric_limits::max(); + BOOST_FOREACH(size_t factorIndex, column) { + if(lastFactorId != numeric_limits::max()) + assert(factorIndex > lastFactorId); + A[count++] = (int)factorIndex; // copy sparse column + } + p[index+1] = count; // column j (base 1) goes from A[j-1] to A[j]-1 + // Store key in array and increment index + keys[index] = key_factors.first; + ++ index; + } + + assert((size_t)count == variableIndex.nEntries()); + + //double* knobs = NULL; /* colamd arg 6: parameters (uses defaults if NULL) */ + double knobs[CCOLAMD_KNOBS]; + ccolamd_set_defaults(knobs); + knobs[CCOLAMD_DENSE_ROW]=-1; + knobs[CCOLAMD_DENSE_COL]=-1; + + int stats[CCOLAMD_STATS]; /* colamd arg 7: colamd output statistics and error codes */ + + gttoc(Prepare); + + // call colamd, result will be in p + /* returns (1) if successful, (0) otherwise*/ + if(nVars > 0) { + gttic(ccolamd); + int rv = ccolamd((int)nFactors, (int)nVars, (int)Alen, &A[0], &p[0], knobs, stats, &cmember[0]); + if(rv != 1) + throw runtime_error((boost::format("ccolamd failed with return value %1%")%rv).str()); + } + + // ccolamd_report(stats); + + gttic(Fill_Ordering); + // Convert elimination ordering in p to an ordering + Ordering result; + result.resize(nVars); + for(size_t j = 0; j < nVars; ++j) + result[j] = keys[p[j]]; + gttoc(Fill_Ordering); + + return result; + } + + /* ************************************************************************* */ + Ordering Ordering::COLAMDConstrainedLast( + const VariableIndex& variableIndex, const std::vector& constrainLast, bool forceOrder) + { + gttic(Ordering_COLAMDConstrainedLast); + + size_t n = variableIndex.size(); + std::vector cmember(n, 0); + + // Build a mapping to look up sorted Key indices by Key + FastMap keyIndices; + size_t j = 0; + BOOST_FOREACH(const VariableIndex::value_type key_factors, variableIndex) + keyIndices.insert(keyIndices.end(), make_pair(key_factors.first, j++)); + + // If at least some variables are not constrained to be last, constrain the + // ones that should be constrained. + int group = (constrainLast.size() != n ? 1 : 0); + BOOST_FOREACH(Key key, constrainLast) { + cmember[keyIndices.at(key)] = group; + if(forceOrder) + ++ group; + } + + return Ordering::COLAMDConstrained(variableIndex, cmember); + } + + /* ************************************************************************* */ + Ordering Ordering::COLAMDConstrainedFirst( + const VariableIndex& variableIndex, const std::vector& constrainFirst, bool forceOrder) + { + gttic(Ordering_COLAMDConstrainedFirst); + + const int none = -1; + size_t n = variableIndex.size(); + std::vector cmember(n, none); + + // Build a mapping to look up sorted Key indices by Key + FastMap keyIndices; + size_t j = 0; + BOOST_FOREACH(const VariableIndex::value_type key_factors, variableIndex) + keyIndices.insert(keyIndices.end(), make_pair(key_factors.first, j++)); + + // If at least some variables are not constrained to be last, constrain the + // ones that should be constrained. + int group = 0; + BOOST_FOREACH(Key key, constrainFirst) { + cmember[keyIndices.at(key)] = group; + if(forceOrder) + ++ group; + } + + if(!forceOrder) + ++ group; + BOOST_FOREACH(int& c, cmember) + if(c == none) + c = group; + + return Ordering::COLAMDConstrained(variableIndex, cmember); + } + + /* ************************************************************************* */ + Ordering Ordering::COLAMDConstrained(const VariableIndex& variableIndex, + const FastMap& groups) + { + gttic(Ordering_COLAMDConstrained); + size_t n = variableIndex.size(); + std::vector cmember(n, 0); + + // Build a mapping to look up sorted Key indices by Key + FastMap keyIndices; + size_t j = 0; + BOOST_FOREACH(const VariableIndex::value_type key_factors, variableIndex) + keyIndices.insert(keyIndices.end(), make_pair(key_factors.first, j++)); + + // Assign groups + typedef FastMap::value_type key_group; + BOOST_FOREACH(const key_group& p, groups) { + // FIXME: check that no groups are skipped + cmember[keyIndices.at(p.first)] = p.second; + } + + return Ordering::COLAMDConstrained(variableIndex, cmember); + } + + /* ************************************************************************* */ + void Ordering::print(const std::string& str, const KeyFormatter& keyFormatter) const + { + cout << str; + // Print ordering in index order + // Print the ordering with varsPerLine ordering entries printed on each line, + // for compactness. + static const size_t varsPerLine = 10; + bool endedOnNewline = false; + for(size_t i = 0; i < size(); ++i) { + if(i % varsPerLine == 0) + cout << "Position " << i << ": "; + if(i % varsPerLine != 0) + cout << ", "; + cout << keyFormatter(at(i)); + if(i % varsPerLine == varsPerLine - 1) { + cout << "\n"; + endedOnNewline = true; + } else { + endedOnNewline = false; + } + } + if(!endedOnNewline) + cout << "\n"; + cout.flush(); + } + + /* ************************************************************************* */ + bool Ordering::equals(const Ordering& other, double tol) const + { + return (*this) == other; + } + +} diff --git a/gtsam/inference/Ordering.h b/gtsam/inference/Ordering.h new file mode 100644 index 000000000..ea2ad7eda --- /dev/null +++ b/gtsam/inference/Ordering.h @@ -0,0 +1,161 @@ +/* ---------------------------------------------------------------------------- + + * GTSAM Copyright 2010, Georgia Tech Research Corporation, + * Atlanta, Georgia 30332-0415 + * All Rights Reserved + * Authors: Frank Dellaert, et al. (see THANKS for the full author list) + + * See LICENSE for the license information + + * -------------------------------------------------------------------------- */ + +/** + * @file Ordering.h + * @author Richard Roberts + * @date Sep 2, 2010 + */ + +#pragma once + +#include +#include + +#include +#include +#include + +namespace gtsam { + class Ordering : public std::vector { + protected: + typedef std::vector Base; + + public: + typedef Ordering This; ///< Typedef to this class + typedef boost::shared_ptr shared_ptr; ///< shared_ptr to this class + + /// Create an empty ordering + GTSAM_EXPORT Ordering() {} + + /// Create from a container + template + explicit Ordering(const KEYS& keys) : Base(keys.begin(), keys.end()) {} + + /// Create an ordering using iterators over keys + template + Ordering(ITERATOR firstKey, ITERATOR lastKey) : Base(firstKey, lastKey) {} + + /// Add new variables to the ordering as ordering += key1, key2, ... Equivalent to calling + /// push_back. + boost::assign::list_inserter > + operator+=(Key key) { + return boost::assign::make_list_inserter(boost::assign_detail::call_push_back(*this))(key); + } + + /// Invert (not reverse) the ordering - returns a map from key to order position + FastMap invert() const; + + /// @name Fill-reducing Orderings @{ + + /// Compute a fill-reducing ordering using COLAMD from a factor graph (see details for note on + /// performance). This internally builds a VariableIndex so if you already have a VariableIndex, + /// it is faster to use COLAMD(const VariableIndex&) + template + static Ordering COLAMD(const FactorGraph& graph) { + return COLAMD(VariableIndex(graph)); } + + /// Compute a fill-reducing ordering using COLAMD from a VariableIndex. + static GTSAM_EXPORT Ordering COLAMD(const VariableIndex& variableIndex); + + /// Compute a fill-reducing ordering using constrained COLAMD from a factor graph (see details + /// for note on performance). This internally builds a VariableIndex so if you already have a + /// VariableIndex, it is faster to use COLAMD(const VariableIndex&). This function constrains + /// the variables in \c constrainLast to the end of the ordering, and orders all other variables + /// before in a fill-reducing ordering. If \c forceOrder is true, the variables in \c + /// constrainLast will be ordered in the same order specified in the vector \c + /// constrainLast. If \c forceOrder is false, the variables in \c constrainLast will be + /// ordered after all the others, but will be rearranged by CCOLAMD to reduce fill-in as well. + template + static Ordering COLAMDConstrainedLast(const FactorGraph& graph, + const std::vector& constrainLast, bool forceOrder = false) { + return COLAMDConstrainedLast(VariableIndex(graph), constrainLast, forceOrder); } + + /// Compute a fill-reducing ordering using constrained COLAMD from a VariableIndex. This + /// function constrains the variables in \c constrainLast to the end of the ordering, and orders + /// all other variables before in a fill-reducing ordering. If \c forceOrder is true, the + /// variables in \c constrainLast will be ordered in the same order specified in the vector + /// \c constrainLast. If \c forceOrder is false, the variables in \c constrainLast will be + /// ordered after all the others, but will be rearranged by CCOLAMD to reduce fill-in as well. + static GTSAM_EXPORT Ordering COLAMDConstrainedLast(const VariableIndex& variableIndex, + const std::vector& constrainLast, bool forceOrder = false); + + /// Compute a fill-reducing ordering using constrained COLAMD from a factor graph (see details + /// for note on performance). This internally builds a VariableIndex so if you already have a + /// VariableIndex, it is faster to use COLAMD(const VariableIndex&). This function constrains + /// the variables in \c constrainLast to the end of the ordering, and orders all other variables + /// before in a fill-reducing ordering. If \c forceOrder is true, the variables in \c + /// constrainLast will be ordered in the same order specified in the vector \c + /// constrainLast. If \c forceOrder is false, the variables in \c constrainFirst will be + /// ordered after all the others, but will be rearranged by CCOLAMD to reduce fill-in as well. + template + static Ordering COLAMDConstrainedFirst(const FactorGraph& graph, + const std::vector& constrainFirst, bool forceOrder = false) { + return COLAMDConstrainedFirst(VariableIndex(graph), constrainFirst, forceOrder); } + + /// Compute a fill-reducing ordering using constrained COLAMD from a VariableIndex. This + /// function constrains the variables in \c constrainFirst to the front of the ordering, and + /// orders all other variables after in a fill-reducing ordering. If \c forceOrder is true, the + /// variables in \c constrainFirst will be ordered in the same order specified in the + /// vector \c constrainFirst. If \c forceOrder is false, the variables in \c + /// constrainFirst will be ordered after all the others, but will be rearranged by CCOLAMD to + /// reduce fill-in as well. + static GTSAM_EXPORT Ordering COLAMDConstrainedFirst(const VariableIndex& variableIndex, + const std::vector& constrainFirst, bool forceOrder = false); + + /// Compute a fill-reducing ordering using constrained COLAMD from a factor graph (see details + /// for note on performance). This internally builds a VariableIndex so if you already have a + /// VariableIndex, it is faster to use COLAMD(const VariableIndex&). In this function, a group + /// for each variable should be specified in \c groups, and each group of variables will appear + /// in the ordering in group index order. \c groups should be a map from Key to group index. + /// The group indices used should be consecutive starting at 0, but may appear in \c groups in + /// arbitrary order. Any variables not present in \c groups will be assigned to group 0. This + /// function simply fills the \c cmember argument to CCOLAMD with the supplied indices, see the + /// CCOLAMD documentation for more information. + template + static Ordering COLAMDConstrained(const FactorGraph& graph, + const FastMap& groups) { + return COLAMDConstrained(VariableIndex(graph), groups); } + + /// Compute a fill-reducing ordering using constrained COLAMD from a VariableIndex. In this + /// function, a group for each variable should be specified in \c groups, and each group of + /// variables will appear in the ordering in group index order. \c groups should be a map from + /// Key to group index. The group indices used should be consecutive starting at 0, but may + /// appear in \c groups in arbitrary order. Any variables not present in \c groups will be + /// assigned to group 0. This function simply fills the \c cmember argument to CCOLAMD with the + /// supplied indices, see the CCOLAMD documentation for more information. + static GTSAM_EXPORT Ordering COLAMDConstrained(const VariableIndex& variableIndex, + const FastMap& groups); + + /// @} + + /// @name Testable @{ + + GTSAM_EXPORT void print(const std::string& str = "", const KeyFormatter& keyFormatter = DefaultKeyFormatter) const; + + GTSAM_EXPORT bool equals(const Ordering& other, double tol = 1e-9) const; + + /// @} + + private: + /// Internal COLAMD function + static GTSAM_EXPORT Ordering COLAMDConstrained( + const VariableIndex& variableIndex, std::vector& cmember); + + /** Serialization function */ + friend class boost::serialization::access; + template + void serialize(ARCHIVE & ar, const unsigned int version) { + ar & BOOST_SERIALIZATION_BASE_OBJECT_NVP(Base); + } + }; +} + diff --git a/gtsam/inference/Permutation.cpp b/gtsam/inference/Permutation.cpp deleted file mode 100644 index 94528f64f..000000000 --- a/gtsam/inference/Permutation.cpp +++ /dev/null @@ -1,226 +0,0 @@ -/* ---------------------------------------------------------------------------- - - * GTSAM Copyright 2010, Georgia Tech Research Corporation, - * Atlanta, Georgia 30332-0415 - * All Rights Reserved - * Authors: Frank Dellaert, et al. (see THANKS for the full author list) - - * See LICENSE for the license information - - * -------------------------------------------------------------------------- */ - -/** - * @file Permutation.cpp - * @author Richard Roberts - * @date Oct 9, 2010 - */ - -#include - -#include -#include -#include -#include - -using namespace std; - -namespace gtsam { - -/* ************************************************************************* */ -Permutation Permutation::Identity(Index nVars) { - Permutation ret(nVars); - for(Index i=0; i& toFront, size_t size, bool filterDuplicates) { - - Permutation ret(size); - - // Mask of which variables have been pulled, used to reorder - vector pulled(size, false); - - // Put the pulled variables at the front of the permutation and set up the - // pulled flags. - size_t toFrontUniqueSize = 0; - for(Index j=0; j& js) const { - BOOST_FOREACH(Index& j, js) { - j = this->find(j)->second; - } - } - - /* ************************************************************************* */ - Permutation Reduction::inverse() const { - Index maxIndex = 0; - BOOST_FOREACH(const value_type& target_source, *this) { - if(target_source.second > maxIndex) - maxIndex = target_source.second; - } - Permutation result(maxIndex + 1); - BOOST_FOREACH(const value_type& target_source, *this) { - result[target_source.second] = target_source.first; - } - return result; - } - - /* ************************************************************************* */ - const Index& Reduction::operator[](const Index& j) { - iterator it = this->find(j); - if(it == this->end()) - return j; - else - return it->second; - } - - /* ************************************************************************* */ - const Index& Reduction::operator[](const Index& j) const { - const_iterator it = this->find(j); - if(it == this->end()) - return j; - else - return it->second; - } - - /* ************************************************************************* */ - void Reduction::print(const std::string& s) const { - cout << s << " reduction:" << endl; - BOOST_FOREACH(const value_type& p, *this) - cout << " " << p.first << " : " << p.second << endl; - } - - /* ************************************************************************* */ - bool Reduction::equals(const Reduction& other, double tol) const { - return (const Base&)(*this) == (const Base&)other; - } - - /* ************************************************************************* */ - Permutation createReducingPermutation(const std::set& indices) { - Permutation p(indices.size()); - Index newJ = 0; - BOOST_FOREACH(Index j, indices) { - p[newJ] = j; - ++ newJ; - } - return p; - } -} // \namespace internal - -/* ************************************************************************* */ -} // \namespace gtsam diff --git a/gtsam/inference/Permutation.h b/gtsam/inference/Permutation.h deleted file mode 100644 index 213055781..000000000 --- a/gtsam/inference/Permutation.h +++ /dev/null @@ -1,215 +0,0 @@ -/* ---------------------------------------------------------------------------- - - * GTSAM Copyright 2010, Georgia Tech Research Corporation, - * Atlanta, Georgia 30332-0415 - * All Rights Reserved - * Authors: Frank Dellaert, et al. (see THANKS for the full author list) - - * See LICENSE for the license information - - * -------------------------------------------------------------------------- */ - -/** - * @file Permutation.h - * @author Richard Roberts - * @date Sep 12, 2010 - */ - -#pragma once - -#include -#include -#include -#include -#include - -#include -#include - -namespace gtsam { - - /** - * A permutation reorders variables, for example to reduce fill-in during - * elimination. To save computation, the permutation can be applied to - * the necessary data structures only once, then multiple computations - * performed on the permuted problem. For example, in an iterative - * non-linear setting, a permutation can be created from the symbolic graph - * structure and applied to the ordering of nonlinear variables once, so - * that linearized factor graphs are already correctly ordered and need - * not be permuted. - * - * Consistent with their mathematical definition, permutations are functions - * that accept the destination index and return the source index. For example, - * to permute data structure A into a new data structure B using permutation P, - * the mapping is B[i] = A[P[i]]. This is the behavior implemented by - * B = A.permute(P). - * - * For some data structures in GTSAM, for efficiency it is necessary to have - * the inverse of the desired permutation. In this case, the data structure - * will implement permuteWithInverse(P) instead of permute(P). You may already - * have the inverse permutation by construction, or may instead compute it with - * Pinv = P.inverse(). - * - * Permutations can be composed and inverted - * in order to create new indexing for a structure. - * \nosubgrouping - */ -class GTSAM_EXPORT Permutation { -protected: - std::vector rangeIndices_; - -public: - typedef boost::shared_ptr shared_ptr; - typedef std::vector::const_iterator const_iterator; - typedef std::vector::iterator iterator; - - /// @name Standard Constructors - /// @{ - - /** - * Create an empty permutation. This cannot do anything, but you can later - * assign to it. - */ - Permutation() {} - - /** - * Create an uninitialized permutation. You must assign all values using the - * square bracket [] operator or they will be undefined! - */ - Permutation(Index nVars) : rangeIndices_(nVars) {} - - /// @} - /// @name Testable - /// @{ - - /** Print */ - void print(const std::string& str = "Permutation: ") const; - - /** Check equality */ - bool equals(const Permutation& rhs, double tol=0.0) const { return rangeIndices_ == rhs.rangeIndices_; } - - - /// @} - /// @name Standard Interface - /// @{ - - /** - * Return the source index of the supplied destination index. - */ - Index operator[](Index variable) const { check(variable); return rangeIndices_[variable]; } - - /** - * Return the source index of the supplied destination index. This version allows modification. - */ - Index& operator[](Index variable) { check(variable); return rangeIndices_[variable]; } - - /** - * Return the source index of the supplied destination index. Synonym for operator[](Index). - */ - Index at(Index variable) const { return operator[](variable); } - - /** - * Return the source index of the supplied destination index. This version allows modification. Synonym for operator[](Index). - */ - Index& at(Index variable) { return operator[](variable); } - - /** - * The number of variables in the range of this permutation, i.e. the - * destination space. - */ - Index size() const { return rangeIndices_.size(); } - - /** Whether the permutation contains any entries */ - bool empty() const { return rangeIndices_.empty(); } - - /** - * Resize the permutation. You must fill in the new entries if new new size - * is larger than the old one. If the new size is smaller, entries from the - * end are discarded. - */ - void resize(size_t newSize) { rangeIndices_.resize(newSize); } - - /** - * Return an identity permutation. - */ - static Permutation Identity(Index nVars); - - /** - * Create a permutation that pulls the given variables to the front while - * pushing the rest to the back. - */ - static Permutation PullToFront(const std::vector& toFront, size_t size, bool filterDuplicates = false); - - /** - * Create a permutation that pushes the given variables to the back while - * pulling the rest to the front. - */ - static Permutation PushToBack(const std::vector& toBack, size_t size, bool filterDuplicates = false); - - - /** - * Permute the permutation, p1.permute(p2)[i] is equivalent to p1[p2[i]]. - */ - Permutation::shared_ptr permute(const Permutation& permutation) const; - - /** - * Return the inverse permutation. This is only possible if this is a non- - * reducing permutation, that is, (*this)[i] < this->size() for all - * i < size(). If NDEBUG is not defined, this conditional will be checked. - */ - Permutation::shared_ptr inverse() const; - - const_iterator begin() const { return rangeIndices_.begin(); } ///< Iterate through the indices - const_iterator end() const { return rangeIndices_.end(); } ///< Iterate through the indices - - /** Apply the permutation to a collection, which must have operator[] defined. - * Note that permutable gtsam data structures typically have their own - * permute function to apply a permutation. Permutation::applyToCollection is - * a generic function, e.g. for STL classes. - * @param input The input collection. - * @param output The preallocated output collection, which is assigned output[i] = input[permutation[i]] - */ - template - void applyToCollection(OUTPUT_COLLECTION& output, const INPUT_COLLECTION& input) const { - for(size_t i = 0; i < output.size(); ++i) output[i] = input[(*this)[i]]; } - - - /// @} - /// @name Advanced Interface - /// @{ - - iterator begin() { return rangeIndices_.begin(); } ///< Iterate through the indices - iterator end() { return rangeIndices_.end(); } ///< Iterate through the indices - -protected: - void check(Index variable) const { assert(variable < rangeIndices_.size()); } - - /// @} -}; - - -namespace internal { - /** - * An internal class used for storing and applying a permutation from a map - */ - class Reduction : public gtsam::FastMap { - public: - typedef gtsam::FastMap Base; - - GTSAM_EXPORT static Reduction CreateAsInverse(const Permutation& p); - GTSAM_EXPORT static Reduction CreateFromPartialPermutation(const Permutation& selector, const Permutation& p); - GTSAM_EXPORT void applyInverse(std::vector& js) const; - GTSAM_EXPORT Permutation inverse() const; - GTSAM_EXPORT const Index& operator[](const Index& j); - GTSAM_EXPORT const Index& operator[](const Index& j) const; - - GTSAM_EXPORT void print(const std::string& s="") const; - GTSAM_EXPORT bool equals(const Reduction& other, double tol = 1e-9) const; - }; - - /** - * Reduce the variable indices so that those in the set are mapped to start at zero - */ - GTSAM_EXPORT Permutation createReducingPermutation(const std::set& indices); -} // \namespace internal -} // \namespace gtsam diff --git a/gtsam/inference/SymbolicFactorGraph.cpp b/gtsam/inference/SymbolicFactorGraph.cpp deleted file mode 100644 index 7f71ab54a..000000000 --- a/gtsam/inference/SymbolicFactorGraph.cpp +++ /dev/null @@ -1,132 +0,0 @@ -/* ---------------------------------------------------------------------------- - - * GTSAM Copyright 2010, Georgia Tech Research Corporation, - * Atlanta, Georgia 30332-0415 - * All Rights Reserved - * Authors: Frank Dellaert, et al. (see THANKS for the full author list) - - * See LICENSE for the license information - - * -------------------------------------------------------------------------- */ - -/** - * @file SymbolicFactorGraph.cpp - * @date Oct 29, 2009 - * @author Frank Dellaert - */ - -#include -#include -#include -#include - -#include - -namespace gtsam { - - using namespace std; - - /* ************************************************************************* */ - SymbolicFactorGraph::SymbolicFactorGraph(const SymbolicBayesNet& bayesNet) : - FactorGraph(bayesNet) {} - - /* ************************************************************************* */ - SymbolicFactorGraph::SymbolicFactorGraph(const SymbolicBayesTree& bayesTree) : - FactorGraph(bayesTree) {} - - /* ************************************************************************* */ - void SymbolicFactorGraph::push_factor(Index key) { - push_back(boost::make_shared(key)); - } - - /** Push back binary factor */ - void SymbolicFactorGraph::push_factor(Index key1, Index key2) { - push_back(boost::make_shared(key1,key2)); - } - - /** Push back ternary factor */ - void SymbolicFactorGraph::push_factor(Index key1, Index key2, Index key3) { - push_back(boost::make_shared(key1,key2,key3)); - } - - /** Push back 4-way factor */ - void SymbolicFactorGraph::push_factor(Index key1, Index key2, Index key3, Index key4) { - push_back(boost::make_shared(key1,key2,key3,key4)); - } - - /* ************************************************************************* */ - FastSet - SymbolicFactorGraph::keys() const { - FastSet keys; - BOOST_FOREACH(const sharedFactor& factor, *this) { - if(factor) keys.insert(factor->begin(), factor->end()); } - return keys; - } - - /* ************************************************************************* */ - std::pair - SymbolicFactorGraph::eliminateFrontals(size_t nFrontals) const - { - return FactorGraph::eliminateFrontals(nFrontals, EliminateSymbolic); - } - - /* ************************************************************************* */ - std::pair - SymbolicFactorGraph::eliminate(const std::vector& variables) const - { - return FactorGraph::eliminate(variables, EliminateSymbolic); - } - - /* ************************************************************************* */ - std::pair - SymbolicFactorGraph::eliminateOne(Index variable) const - { - return FactorGraph::eliminateOne(variable, EliminateSymbolic); - } - - /* ************************************************************************* */ - void SymbolicFactorGraph::permuteWithInverse( - const Permutation& inversePermutation) { - BOOST_FOREACH(const sharedFactor& factor, factors_) { - if(factor) - factor->permuteWithInverse(inversePermutation); - } - } - - /* ************************************************************************* */ - void SymbolicFactorGraph::reduceWithInverse( - const internal::Reduction& inverseReduction) { - BOOST_FOREACH(const sharedFactor& factor, factors_) { - if(factor) - factor->reduceWithInverse(inverseReduction); - } - } - - /* ************************************************************************* */ - IndexFactor::shared_ptr CombineSymbolic( - const FactorGraph& factors, const FastMap >& variableSlots) { - IndexFactor::shared_ptr combined(Combine (factors, variableSlots)); -// combined->assertInvariants(); - return combined; - } - - /* ************************************************************************* */ - pair // - EliminateSymbolic(const FactorGraph& factors, size_t nrFrontals) { - - FastSet keys; - BOOST_FOREACH(const IndexFactor::shared_ptr& factor, factors) - BOOST_FOREACH(Index var, *factor) - keys.insert(var); - - if (keys.size() < nrFrontals) throw invalid_argument( - "EliminateSymbolic requested to eliminate more variables than exist in graph."); - - vector newKeys(keys.begin(), keys.end()); - return make_pair(boost::make_shared(newKeys, nrFrontals), - boost::make_shared(newKeys.begin() + nrFrontals, newKeys.end())); - } - - /* ************************************************************************* */ -} diff --git a/gtsam/inference/SymbolicFactorGraph.h b/gtsam/inference/SymbolicFactorGraph.h deleted file mode 100644 index 31e2b2b88..000000000 --- a/gtsam/inference/SymbolicFactorGraph.h +++ /dev/null @@ -1,151 +0,0 @@ -/* ---------------------------------------------------------------------------- - - * GTSAM Copyright 2010, Georgia Tech Research Corporation, - * Atlanta, Georgia 30332-0415 - * All Rights Reserved - * Authors: Frank Dellaert, et al. (see THANKS for the full author list) - - * See LICENSE for the license information - - * -------------------------------------------------------------------------- */ - -/** - * @file SymbolicFactorGraph.h - * @date Oct 29, 2009 - * @author Frank Dellaert - */ - -#pragma once - -#include -#include -#include - -namespace gtsam { template class EliminationTree; } -namespace gtsam { template class BayesNet; } -namespace gtsam { template class BayesTree; } -namespace gtsam { class IndexConditional; } - -namespace gtsam { - - typedef EliminationTree SymbolicEliminationTree; - typedef BayesNet SymbolicBayesNet; - typedef BayesTree SymbolicBayesTree; - - /** Symbolic IndexFactor Graph - * \nosubgrouping - */ - class SymbolicFactorGraph: public FactorGraph { - - public: - - /// @name Standard Constructors - /// @{ - - /** Construct empty factor graph */ - SymbolicFactorGraph() { - } - - /** Construct from a BayesNet */ - GTSAM_EXPORT SymbolicFactorGraph(const SymbolicBayesNet& bayesNet); - - /** Construct from a BayesTree */ - GTSAM_EXPORT SymbolicFactorGraph(const SymbolicBayesTree& bayesTree); - - /** - * Construct from a factor graph of any type - */ - template - SymbolicFactorGraph(const FactorGraph& fg); - - /** Eliminate the first \c n frontal variables, returning the resulting - * conditional and remaining factor graph - this is very inefficient for - * eliminating all variables, to do that use EliminationTree or - * JunctionTree. Note that this version simply calls - * FactorGraph::eliminateFrontals with EliminateSymbolic - * as the eliminate function argument. - */ - GTSAM_EXPORT std::pair eliminateFrontals(size_t nFrontals) const; - - /** Factor the factor graph into a conditional and a remaining factor graph. - * Given the factor graph \f$ f(X) \f$, and \c variables to factorize out - * \f$ V \f$, this function factorizes into \f$ f(X) = f(V;Y)f(Y) \f$, where - * \f$ Y := X\V \f$ are the remaining variables. If \f$ f(X) = p(X) \f$ is - * a probability density or likelihood, the factorization produces a - * conditional probability density and a marginal \f$ p(X) = p(V|Y)p(Y) \f$. - * - * For efficiency, this function treats the variables to eliminate - * \c variables as fully-connected, so produces a dense (fully-connected) - * conditional on all of the variables in \c variables, instead of a sparse - * BayesNet. If the variables are not fully-connected, it is more efficient - * to sequentially factorize multiple times. - * Note that this version simply calls - * FactorGraph::eliminate with EliminateSymbolic as the eliminate - * function argument. - */ - GTSAM_EXPORT std::pair eliminate(const std::vector& variables) const; - - /** Eliminate a single variable, by calling SymbolicFactorGraph::eliminate. */ - GTSAM_EXPORT std::pair eliminateOne(Index variable) const; - - /// @} - /// @name Standard Interface - /// @{ - - /** - * Return the set of variables involved in the factors (computes a set - * union). - */ - GTSAM_EXPORT FastSet keys() const; - - /// @} - /// @name Advanced Interface - /// @{ - - /** Push back unary factor */ - GTSAM_EXPORT void push_factor(Index key); - - /** Push back binary factor */ - GTSAM_EXPORT void push_factor(Index key1, Index key2); - - /** Push back ternary factor */ - GTSAM_EXPORT void push_factor(Index key1, Index key2, Index key3); - - /** Push back 4-way factor */ - GTSAM_EXPORT void push_factor(Index key1, Index key2, Index key3, Index key4); - - /** Permute the variables in the factors */ - GTSAM_EXPORT void permuteWithInverse(const Permutation& inversePermutation); - - /** Apply a reduction, which is a remapping of variable indices. */ - GTSAM_EXPORT void reduceWithInverse(const internal::Reduction& inverseReduction); - - }; - - /** Create a combined joint factor (new style for EliminationTree). */ - GTSAM_EXPORT IndexFactor::shared_ptr CombineSymbolic(const FactorGraph& factors, - const FastMap >& variableSlots); - - /** - * CombineAndEliminate provides symbolic elimination. - * Combine and eliminate can also be called separately, but for this and - * derived classes calling them separately generally does extra work. - */ - GTSAM_EXPORT std::pair, boost::shared_ptr > - EliminateSymbolic(const FactorGraph&, size_t nrFrontals = 1); - - /// @} - - /** Template function implementation */ - template - SymbolicFactorGraph::SymbolicFactorGraph(const FactorGraph& fg) { - for (size_t i = 0; i < fg.size(); i++) { - if (fg[i]) { - IndexFactor::shared_ptr factor(new IndexFactor(*fg[i])); - push_back(factor); - } else - push_back(IndexFactor::shared_ptr()); - } - } - -} // namespace gtsam diff --git a/gtsam/inference/SymbolicMultifrontalSolver.cpp b/gtsam/inference/SymbolicMultifrontalSolver.cpp deleted file mode 100644 index 7072954a9..000000000 --- a/gtsam/inference/SymbolicMultifrontalSolver.cpp +++ /dev/null @@ -1,25 +0,0 @@ -/* ---------------------------------------------------------------------------- - - * GTSAM Copyright 2010, Georgia Tech Research Corporation, - * Atlanta, Georgia 30332-0415 - * All Rights Reserved - * Authors: Frank Dellaert, et al. (see THANKS for the full author list) - - * See LICENSE for the license information - - * -------------------------------------------------------------------------- */ - -/** - * @file SymbolicMultifrontalSolver.cpp - * @author Richard Roberts - * @date Oct 22, 2010 - */ - -#include -#include - -namespace gtsam { - -//template class GenericMultifrontalSolver > >; - -} diff --git a/gtsam/inference/SymbolicMultifrontalSolver.h b/gtsam/inference/SymbolicMultifrontalSolver.h deleted file mode 100644 index 802b10d72..000000000 --- a/gtsam/inference/SymbolicMultifrontalSolver.h +++ /dev/null @@ -1,71 +0,0 @@ -/* ---------------------------------------------------------------------------- - - * GTSAM Copyright 2010, Georgia Tech Research Corporation, - * Atlanta, Georgia 30332-0415 - * All Rights Reserved - * Authors: Frank Dellaert, et al. (see THANKS for the full author list) - - * See LICENSE for the license information - - * -------------------------------------------------------------------------- */ - -/** - * @file SymbolicMultifrontalSolver.h - * @author Richard Roberts - * @date Oct 22, 2010 - */ - -#pragma once - -#include -#include - -namespace gtsam { - - class SymbolicMultifrontalSolver : GenericMultifrontalSolver > > { - - protected: - typedef GenericMultifrontalSolver > > Base; - - public: - /** - * Construct the solver for a factor graph. This builds the junction - * tree, which does the symbolic elimination, identifies the cliques, - * and distributes all the factors to the right cliques. - */ - SymbolicMultifrontalSolver(const SymbolicFactorGraph& factorGraph) : Base(factorGraph) {}; - - /** - * Construct the solver with a shared pointer to a factor graph and to a - * VariableIndex. The solver will store these pointers, so this constructor - * is the fastest. - */ - SymbolicMultifrontalSolver(const SymbolicFactorGraph::shared_ptr& factorGraph, - const VariableIndex::shared_ptr& variableIndex) : Base(factorGraph, variableIndex) {}; - - /** Print to cout */ - void print(const std::string& name = "SymbolicMultifrontalSolver: ") const { Base::print(name); }; - - /** Test whether is equal to another */ - bool equals(const SymbolicMultifrontalSolver& other, double tol = 1e-9) const { return Base::equals(other, tol); }; - - /** - * Eliminate the factor graph sequentially. Uses a column elimination tree - * to recursively eliminate. - */ - SymbolicBayesTree::shared_ptr eliminate() const { return Base::eliminate(&EliminateSymbolic); }; - - /** - * Compute the marginal joint over a set of variables, by integrating out - * all of the other variables. Returns the result as a factor graph. - */ - SymbolicFactorGraph::shared_ptr jointFactorGraph(const std::vector& js) const { return Base::jointFactorGraph(js, &EliminateSymbolic); }; - - /** - * Compute the marginal Gaussian density over a variable, by integrating out - * all of the other variables. This function returns the result as a factor. - */ - IndexFactor::shared_ptr marginalFactor(Index j) const { return Base::marginalFactor(j, &EliminateSymbolic); }; - }; - -} diff --git a/gtsam/inference/SymbolicSequentialSolver.cpp b/gtsam/inference/SymbolicSequentialSolver.cpp deleted file mode 100644 index 937623886..000000000 --- a/gtsam/inference/SymbolicSequentialSolver.cpp +++ /dev/null @@ -1,26 +0,0 @@ -/* ---------------------------------------------------------------------------- - - * GTSAM Copyright 2010, Georgia Tech Research Corporation, - * Atlanta, Georgia 30332-0415 - * All Rights Reserved - * Authors: Frank Dellaert, et al. (see THANKS for the full author list) - - * See LICENSE for the license information - - * -------------------------------------------------------------------------- */ - -/** - * @file SymbolicSequentialSolver.cpp - * @author Richard Roberts - * @date Oct 21, 2010 - */ - -#include -#include - -namespace gtsam { - -// An explicit instantiation to be compiled into the library -//template class GenericSequentialSolver; - -} diff --git a/gtsam/inference/SymbolicSequentialSolver.h b/gtsam/inference/SymbolicSequentialSolver.h deleted file mode 100644 index 5bff88430..000000000 --- a/gtsam/inference/SymbolicSequentialSolver.h +++ /dev/null @@ -1,88 +0,0 @@ -/* ---------------------------------------------------------------------------- - - * GTSAM Copyright 2010, Georgia Tech Research Corporation, - * Atlanta, Georgia 30332-0415 - * All Rights Reserved - * Authors: Frank Dellaert, et al. (see THANKS for the full author list) - - * See LICENSE for the license information - - * -------------------------------------------------------------------------- */ - -/** - * @file SymbolicSequentialSolver.h - * @author Richard Roberts - * @date Oct 21, 2010 - */ -#pragma once - -#include -#include - -namespace gtsam { - - class SymbolicSequentialSolver : GenericSequentialSolver { - - protected: - typedef GenericSequentialSolver Base; - - public: - /** - * Construct the solver for a factor graph. This builds the junction - * tree, which does the symbolic elimination, identifies the cliques, - * and distributes all the factors to the right cliques. - */ - SymbolicSequentialSolver(const SymbolicFactorGraph& factorGraph) : Base(factorGraph) {}; - - /** - * Construct the solver with a shared pointer to a factor graph and to a - * VariableIndex. The solver will store these pointers, so this constructor - * is the fastest. - */ - SymbolicSequentialSolver(const SymbolicFactorGraph::shared_ptr& factorGraph, - const VariableIndex::shared_ptr& variableIndex) : Base(factorGraph, variableIndex) {}; - - /** Print to cout */ - void print(const std::string& name = "SymbolicSequentialSolver: ") const { Base::print(name); }; - - /** Test whether is equal to another */ - bool equals(const SymbolicSequentialSolver& other, double tol = 1e-9) const { return Base::equals(other, tol); }; - - /** - * Eliminate the factor graph sequentially. Uses a column elimination tree - * to recursively eliminate. - */ - SymbolicBayesNet::shared_ptr eliminate() const - { return Base::eliminate(&EliminateSymbolic); }; - - /** - * Compute a conditional density P(F|S) while marginalizing out variables J - * P(F|S) is obtained by P(J,F,S)=P(J|F,S)P(F|S)P(S) and dropping P(S) - * Returns the result as a Bayes net. - */ - SymbolicBayesNet::shared_ptr conditionalBayesNet(const std::vector& js, size_t nrFrontals) const - { return Base::conditionalBayesNet(js, nrFrontals, &EliminateSymbolic); }; - - /** - * Compute the marginal joint over a set of variables, by integrating out - * all of the other variables. Returns the result as a Bayes net. - */ - SymbolicBayesNet::shared_ptr jointBayesNet(const std::vector& js) const - { return Base::jointBayesNet(js, &EliminateSymbolic); }; - - /** - * Compute the marginal joint over a set of variables, by integrating out - * all of the other variables. Returns the result as a factor graph. - */ - SymbolicFactorGraph::shared_ptr jointFactorGraph(const std::vector& js) const - { return Base::jointFactorGraph(js, &EliminateSymbolic); }; - - /** - * Compute the marginal Gaussian density over a variable, by integrating out - * all of the other variables. This function returns the result as a factor. - */ - IndexFactor::shared_ptr marginalFactor(Index j) const { return Base::marginalFactor(j, &EliminateSymbolic); }; - }; - -} - diff --git a/gtsam/inference/VariableIndex-inl.h b/gtsam/inference/VariableIndex-inl.h new file mode 100644 index 000000000..1ce108321 --- /dev/null +++ b/gtsam/inference/VariableIndex-inl.h @@ -0,0 +1,83 @@ +/* ---------------------------------------------------------------------------- + + * GTSAM Copyright 2010, Georgia Tech Research Corporation, + * Atlanta, Georgia 30332-0415 + * All Rights Reserved + * Authors: Frank Dellaert, et al. (see THANKS for the full author list) + + * See LICENSE for the license information + + * -------------------------------------------------------------------------- */ + +/** + * @file VariableIndex-inl.h + * @author Richard Roberts + * @date March 26, 2013 + */ + +#pragma once + +#include + +namespace gtsam { + +/* ************************************************************************* */ +template +void VariableIndex::augment(const FG& factors) +{ + gttic(VariableIndex_augment); + + // Save original number of factors for keeping track of indices + const size_t originalNFactors = nFactors_; + + // Augment index for each factor + for(size_t i = 0; i < factors.size(); ++i) { + if(factors[i]) { + const size_t globalI = originalNFactors + i; + BOOST_FOREACH(const Key key, *factors[i]) { + index_[key].push_back(globalI); + ++ nEntries_; + } + } + ++ nFactors_; // Increment factor count even if factors are null, to keep indices consistent + } +} + +/* ************************************************************************* */ +template +void VariableIndex::remove(ITERATOR firstFactor, ITERATOR lastFactor, const FG& factors) +{ + gttic(VariableIndex_remove); + + // NOTE: We intentionally do not decrement nFactors_ because the factor + // indices need to remain consistent. Removing factors from a factor graph + // does not shift the indices of other factors. Also, we keep nFactors_ + // one greater than the highest-numbered factor referenced in a VariableIndex. + ITERATOR factorIndex = firstFactor; + size_t i = 0; + for( ; factorIndex != lastFactor; ++factorIndex, ++i) { + if(i >= factors.size()) + throw std::invalid_argument("Internal error, requested inconsistent number of factor indices and factors in VariableIndex::remove"); + if(factors[i]) { + BOOST_FOREACH(Key j, *factors[i]) { + Factors& factorEntries = internalAt(j); + Factors::iterator entry = std::find(factorEntries.begin(), factorEntries.end(), *factorIndex); + if(entry == factorEntries.end()) + throw std::invalid_argument("Internal error, indices and factors passed into VariableIndex::remove are not consistent with the existing variable index"); + factorEntries.erase(entry); + -- nEntries_; + } + } + } +} + +/* ************************************************************************* */ +template +void VariableIndex::removeUnusedVariables(ITERATOR firstKey, ITERATOR lastKey) { + for(ITERATOR key = firstKey; key != lastKey; ++key) { + assert(internalAt(*key).empty()); + index_.erase(*key); + } +} + +} diff --git a/gtsam/inference/VariableIndex.cpp b/gtsam/inference/VariableIndex.cpp index 91036e82a..26ba191dd 100644 --- a/gtsam/inference/VariableIndex.cpp +++ b/gtsam/inference/VariableIndex.cpp @@ -1,104 +1,58 @@ -/* ---------------------------------------------------------------------------- - - * GTSAM Copyright 2010, Georgia Tech Research Corporation, - * Atlanta, Georgia 30332-0415 - * All Rights Reserved - * Authors: Frank Dellaert, et al. (see THANKS for the full author list) - - * See LICENSE for the license information - - * -------------------------------------------------------------------------- */ - -/** - * @file VariableIndex.cpp - * @author Richard Roberts - * @date Oct 22, 2010 - */ - -#include - -#include -#include - -namespace gtsam { - -using namespace std; - -/* ************************************************************************* */ -bool VariableIndex::equals(const VariableIndex& other, double tol) const { - if(this->nEntries_ == other.nEntries_ && this->nFactors_ == other.nFactors_) { - for(size_t var=0; var < max(this->index_.size(), other.index_.size()); ++var) - if(var >= this->index_.size() || var >= other.index_.size()) { - if(!((var >= this->index_.size() && other.index_[var].empty()) || - (var >= other.index_.size() && this->index_[var].empty()))) - return false; - } else if(this->index_[var] != other.index_[var]) - return false; - } else - return false; - return true; -} - -/* ************************************************************************* */ -void VariableIndex::print(const string& str) const { - cout << str; - cout << "nEntries = " << nEntries() << ", nFactors = " << nFactors() << "\n"; - for(Index var = 0; var < size(); ++var) { - cout << "var " << var << ":"; - BOOST_FOREACH(const size_t factor, index_[var]) - cout << " " << factor; - cout << "\n"; - } - cout << flush; -} - -/* ************************************************************************* */ -void VariableIndex::outputMetisFormat(ostream& os) const { - os << size() << " " << nFactors() << "\n"; - // run over variables, which will be hyper-edges. - BOOST_FOREACH(const Factors& variable, index_) { - // every variable is a hyper-edge covering its factors - BOOST_FOREACH(const size_t factor, variable) - os << (factor+1) << " "; // base 1 - os << "\n"; - } - os << flush; -} - -/* ************************************************************************* */ -void VariableIndex::permuteInPlace(const Permutation& permutation) { - // Create new index and move references to data into it in permuted order - vector newIndex(this->size()); - for(Index i = 0; i < newIndex.size(); ++i) - newIndex[i].swap(this->index_[permutation[i]]); - - // Move reference to entire index into the VariableIndex - index_.swap(newIndex); -} - -/* ************************************************************************* */ -void VariableIndex::permuteInPlace(const Permutation& selector, const Permutation& permutation) { - if(selector.size() != permutation.size()) - throw invalid_argument("VariableIndex::permuteInPlace (partial permutation version) called with selector and permutation of different sizes."); - // Create new index the size of the permuted entries - vector newIndex(selector.size()); - // Permute the affected entries into the new index - for(size_t dstSlot = 0; dstSlot < selector.size(); ++dstSlot) - newIndex[dstSlot].swap(this->index_[selector[permutation[dstSlot]]]); - // Put the affected entries back in the new order - for(size_t slot = 0; slot < selector.size(); ++slot) - this->index_[selector[slot]].swap(newIndex[slot]); -} - -/* ************************************************************************* */ -void VariableIndex::removeUnusedAtEnd(size_t nToRemove) { -#ifdef GTSAM_EXTRA_CONSISTENCY_CHECKS - for(size_t i = this->size() - nToRemove; i < this->size(); ++i) - if(!(*this)[i].empty()) - throw std::invalid_argument("Attempting to remove non-empty variables with VariableIndex::removeUnusedAtEnd()"); -#endif - - index_.resize(this->size() - nToRemove); -} - -} +/* ---------------------------------------------------------------------------- + + * GTSAM Copyright 2010, Georgia Tech Research Corporation, + * Atlanta, Georgia 30332-0415 + * All Rights Reserved + * Authors: Frank Dellaert, et al. (see THANKS for the full author list) + + * See LICENSE for the license information + + * -------------------------------------------------------------------------- */ + +/** + * @file VariableIndex.cpp + * @author Richard Roberts + * @date March 26, 2013 + */ + +#include + +#include + +namespace gtsam { + +using namespace std; + +/* ************************************************************************* */ +bool VariableIndex::equals(const VariableIndex& other, double tol) const { + return this->nEntries_ == other.nEntries_ && this->nFactors_ == other.nFactors_ + && this->index_ == other.index_; +} + +/* ************************************************************************* */ +void VariableIndex::print(const string& str) const { + cout << str; + cout << "nEntries = " << nEntries() << ", nFactors = " << nFactors() << "\n"; + BOOST_FOREACH(KeyMap::value_type key_factors, index_) { + cout << "var " << key_factors.first << ":"; + BOOST_FOREACH(const size_t factor, key_factors.second) + cout << " " << factor; + cout << "\n"; + } + cout.flush(); +} + +/* ************************************************************************* */ +void VariableIndex::outputMetisFormat(ostream& os) const { + os << size() << " " << nFactors() << "\n"; + // run over variables, which will be hyper-edges. + BOOST_FOREACH(KeyMap::value_type key_factors, index_) { + // every variable is a hyper-edge covering its factors + BOOST_FOREACH(const size_t factor, key_factors.second) + os << (factor+1) << " "; // base 1 + os << "\n"; + } + os << flush; +} + +} diff --git a/gtsam/inference/VariableIndex.h b/gtsam/inference/VariableIndex.h index 570369e0a..14fbc2072 100644 --- a/gtsam/inference/VariableIndex.h +++ b/gtsam/inference/VariableIndex.h @@ -1,292 +1,179 @@ -/* ---------------------------------------------------------------------------- - - * GTSAM Copyright 2010, Georgia Tech Research Corporation, - * Atlanta, Georgia 30332-0415 - * All Rights Reserved - * Authors: Frank Dellaert, et al. (see THANKS for the full author list) - - * See LICENSE for the license information - - * -------------------------------------------------------------------------- */ - -/** - * @file VariableIndex.h - * @author Richard Roberts - * @date Sep 12, 2010 - */ - -#pragma once - -#include -#include -#include -#include - -#include -#include -#include - -namespace gtsam { - - class Permutation; - -/** - * The VariableIndex class computes and stores the block column structure of a - * factor graph. The factor graph stores a collection of factors, each of - * which involves a set of variables. In contrast, the VariableIndex is built - * from a factor graph prior to elimination, and stores the list of factors - * that involve each variable. This information is stored as a deque of - * lists of factor indices. - * \nosubgrouping - */ -class GTSAM_EXPORT VariableIndex { -public: - - typedef boost::shared_ptr shared_ptr; - typedef FastList Factors; - typedef Factors::iterator Factor_iterator; - typedef Factors::const_iterator Factor_const_iterator; - -protected: - std::vector index_; - size_t nFactors_; // Number of factors in the original factor graph. - size_t nEntries_; // Sum of involved variable counts of each factor. - -public: - - /// @name Standard Constructors - /// @{ - - /** Default constructor, creates an empty VariableIndex */ - VariableIndex() : nFactors_(0), nEntries_(0) {} - - /** - * Create a VariableIndex that computes and stores the block column structure - * of a factor graph. This constructor is used when the number of variables - * is known beforehand. - */ - template VariableIndex(const FG& factorGraph, Index nVariables); - - /** - * Create a VariableIndex that computes and stores the block column structure - * of a factor graph. - */ - template VariableIndex(const FG& factorGraph); - - /// @} - /// @name Standard Interface - /// @{ - - /** - * The number of variable entries. This is one greater than the variable - * with the highest index. - */ - Index size() const { return index_.size(); } - - /** The number of factors in the original factor graph */ - size_t nFactors() const { return nFactors_; } - - /** The number of nonzero blocks, i.e. the number of variable-factor entries */ - size_t nEntries() const { return nEntries_; } - - /** Access a list of factors by variable */ - const Factors& operator[](Index variable) const { checkVar(variable); return index_[variable]; } - - /// @} - /// @name Testable - /// @{ - - /** Test for equality (for unit tests and debug assertions). */ - bool equals(const VariableIndex& other, double tol=0.0) const; - - /** Print the variable index (for unit tests and debugging). */ - void print(const std::string& str = "VariableIndex: ") const; - - /** - * Output dual hypergraph to Metis file format for use with hmetis - * In the dual graph, variables are hyperedges, factors are nodes. - */ - void outputMetisFormat(std::ostream& os) const; - - - /// @} - /// @name Advanced Interface - /// @{ - - /** - * Augment the variable index with new factors. This can be used when - * solving problems incrementally. - */ - template void augment(const FG& factors); - - /** - * Remove entries corresponding to the specified factors. - * NOTE: We intentionally do not decrement nFactors_ because the factor - * indices need to remain consistent. Removing factors from a factor graph - * does not shift the indices of other factors. Also, we keep nFactors_ - * one greater than the highest-numbered factor referenced in a VariableIndex. - * - * @param indices The indices of the factors to remove, which must match \c factors - * @param factors The factors being removed, which must symbolically correspond - * exactly to the factors with the specified \c indices that were added. - */ - template void remove(const CONTAINER& indices, const FG& factors); - - /// Permute the variables in the VariableIndex according to the given permutation - void permuteInPlace(const Permutation& permutation); - - /// Permute the variables in the VariableIndex according to the given partial permutation - void permuteInPlace(const Permutation& selector, const Permutation& permutation); - - /** Remove unused empty variables at the end of the ordering (in debug mode - * verifies they are empty). - * @param nToRemove The number of unused variables at the end to remove - */ - void removeUnusedAtEnd(size_t nToRemove); - -protected: - Factor_iterator factorsBegin(Index variable) { checkVar(variable); return index_[variable].begin(); } - Factor_iterator factorsEnd(Index variable) { checkVar(variable); return index_[variable].end(); } - - Factor_const_iterator factorsBegin(Index variable) const { checkVar(variable); return index_[variable].begin(); } - Factor_const_iterator factorsEnd(Index variable) const { checkVar(variable); return index_[variable].end(); } - - /// Internal constructor to allocate a VariableIndex of the requested size - VariableIndex(size_t nVars) : index_(nVars), nFactors_(0), nEntries_(0) {} - - /// Internal check of the validity of a variable - void checkVar(Index variable) const { assert(variable < index_.size()); } - - /// Internal function to populate the variable index from a factor graph - template void fill(const FG& factorGraph); - - /// @} - -private: - /** Serialization function */ - friend class boost::serialization::access; - template - void serialize(ARCHIVE & ar, const unsigned int version) { - ar & BOOST_SERIALIZATION_NVP(index_); - ar & BOOST_SERIALIZATION_NVP(nFactors_); - ar & BOOST_SERIALIZATION_NVP(nEntries_); - } -}; - -/* ************************************************************************* */ -template -void VariableIndex::fill(const FG& factorGraph) { - gttic(VariableIndex_fill); - // Build index mapping from variable id to factor index - for(size_t fi=0; fikeys()) { - if(key < index_.size()) { - index_[key].push_back(fi); - ++ nEntries_; - } - } - } - ++ nFactors_; // Increment factor count even if factors are null, to keep indices consistent - } -} - -/* ************************************************************************* */ -template -VariableIndex::VariableIndex(const FG& factorGraph) : - nFactors_(0), nEntries_(0) -{ - gttic(VariableIndex_constructor); - // If the factor graph is empty, return an empty index because inside this - // if block we assume at least one factor. - if(factorGraph.size() > 0) { - gttic(VariableIndex_constructor_find_highest); - // Find highest-numbered variable - Index maxVar = 0; - BOOST_FOREACH(const typename FG::sharedFactor& factor, factorGraph) { - if(factor) { - BOOST_FOREACH(const Index key, factor->keys()) { - if(key > maxVar) - maxVar = key; - } - } - } - gttoc(VariableIndex_constructor_find_highest); - - // Allocate array - gttic(VariableIndex_constructor_allocate); - index_.resize(maxVar+1); - gttoc(VariableIndex_constructor_allocate); - - fill(factorGraph); - } -} - -/* ************************************************************************* */ -template -VariableIndex::VariableIndex(const FG& factorGraph, Index nVariables) : - nFactors_(0), nEntries_(0) -{ - gttic(VariableIndex_constructor_allocate); - index_.resize(nVariables); - gttoc(VariableIndex_constructor_allocate); - fill(factorGraph); -} - -/* ************************************************************************* */ -template -void VariableIndex::augment(const FG& factors) { - gttic(VariableIndex_augment); - // If the factor graph is empty, return an empty index because inside this - // if block we assume at least one factor. - if(factors.size() > 0) { - // Find highest-numbered variable - Index maxVar = 0; - BOOST_FOREACH(const typename FG::sharedFactor& factor, factors) { - if(factor) { - BOOST_FOREACH(const Index key, factor->keys()) { - if(key > maxVar) - maxVar = key; - } - } - } - - // Allocate index - index_.resize(std::max(index_.size(), maxVar+1)); - - // Augment index mapping from variable id to factor index - size_t orignFactors = nFactors_; - for(size_t fi=0; fikeys()) { - index_[key].push_back(orignFactors + fi); - ++ nEntries_; - } - } - ++ nFactors_; // Increment factor count even if factors are null, to keep indices consistent - } - } -} - -/* ************************************************************************* */ -template -void VariableIndex::remove(const CONTAINER& indices, const FG& factors) { - gttic(VariableIndex_remove); - // NOTE: We intentionally do not decrement nFactors_ because the factor - // indices need to remain consistent. Removing factors from a factor graph - // does not shift the indices of other factors. Also, we keep nFactors_ - // one greater than the highest-numbered factor referenced in a VariableIndex. - for(size_t fi=0; fikeys().size(); ++ji) { - Factors& factorEntries = index_[factors[fi]->keys()[ji]]; - Factors::iterator entry = std::find(factorEntries.begin(), factorEntries.end(), indices[fi]); - if(entry == factorEntries.end()) - throw std::invalid_argument("Internal error, indices and factors passed into VariableIndex::remove are not consistent with the existing variable index"); - factorEntries.erase(entry); - -- nEntries_; - } - } -} - -} +/* ---------------------------------------------------------------------------- + + * GTSAM Copyright 2010, Georgia Tech Research Corporation, + * Atlanta, Georgia 30332-0415 + * All Rights Reserved + * Authors: Frank Dellaert, et al. (see THANKS for the full author list) + + * See LICENSE for the license information + + * -------------------------------------------------------------------------- */ + +/** + * @file VariableIndex.h + * @author Richard Roberts + * @date March 26, 2013 + */ + +#pragma once + +#include +#include +#include +#include + +#include +#include +#include +#include +#include + +namespace gtsam { + +/** + * The VariableIndex class computes and stores the block column structure of a + * factor graph. The factor graph stores a collection of factors, each of + * which involves a set of variables. In contrast, the VariableIndex is built + * from a factor graph prior to elimination, and stores the list of factors + * that involve each variable. This information is stored as a deque of + * lists of factor indices. + * \nosubgrouping + */ +class GTSAM_EXPORT VariableIndex { +public: + + typedef boost::shared_ptr shared_ptr; + typedef FastList Factors; + typedef Factors::iterator Factor_iterator; + typedef Factors::const_iterator Factor_const_iterator; + +protected: + typedef FastMap KeyMap; + KeyMap index_; + size_t nFactors_; // Number of factors in the original factor graph. + size_t nEntries_; // Sum of involved variable counts of each factor. + +public: + typedef KeyMap::const_iterator const_iterator; + typedef KeyMap::const_iterator iterator; + typedef KeyMap::value_type value_type; + +public: + + /// @name Standard Constructors + /// @{ + + /** Default constructor, creates an empty VariableIndex */ + VariableIndex() : nFactors_(0), nEntries_(0) {} + + /** + * Create a VariableIndex that computes and stores the block column structure + * of a factor graph. + */ + template + VariableIndex(const FG& factorGraph) : nFactors_(0), nEntries_(0) { augment(factorGraph); } + + /// @} + /// @name Standard Interface + /// @{ + + /** + * The number of variable entries. This is one greater than the variable + * with the highest index. + */ + Index size() const { return index_.size(); } + + /** The number of factors in the original factor graph */ + size_t nFactors() const { return nFactors_; } + + /** The number of nonzero blocks, i.e. the number of variable-factor entries */ + size_t nEntries() const { return nEntries_; } + + /** Access a list of factors by variable */ + const Factors& operator[](Key variable) const { + KeyMap::const_iterator item = index_.find(variable); + if(item == index_.end()) + throw std::invalid_argument("Requested non-existent variable from VariableIndex"); + else + return item->second; + } + + /// @} + /// @name Testable + /// @{ + + /** Test for equality (for unit tests and debug assertions). */ + bool equals(const VariableIndex& other, double tol=0.0) const; + + /** Print the variable index (for unit tests and debugging). */ + void print(const std::string& str = "VariableIndex: ") const; + + /** + * Output dual hypergraph to Metis file format for use with hmetis + * In the dual graph, variables are hyperedges, factors are nodes. + */ + void outputMetisFormat(std::ostream& os) const; + + + /// @} + /// @name Advanced Interface + /// @{ + + /** + * Augment the variable index with new factors. This can be used when + * solving problems incrementally. + */ + template + void augment(const FG& factors); + + /** + * Remove entries corresponding to the specified factors. NOTE: We intentionally do not decrement + * nFactors_ because the factor indices need to remain consistent. Removing factors from a factor + * graph does not shift the indices of other factors. Also, we keep nFactors_ one greater than + * the highest-numbered factor referenced in a VariableIndex. + * + * @param indices The indices of the factors to remove, which must match \c factors + * @param factors The factors being removed, which must symbolically correspond exactly to the + * factors with the specified \c indices that were added. + */ + template + void remove(ITERATOR firstFactor, ITERATOR lastFactor, const FG& factors); + + /** Remove unused empty variables (in debug mode verifies they are empty). */ + template + void removeUnusedVariables(ITERATOR firstKey, ITERATOR lastKey); + + /** Iterator to the first variable entry */ + const_iterator begin() const { return index_.begin(); } + + /** Iterator to the first variable entry */ + const_iterator end() const { return index_.end(); } + + /** Find the iterator for the requested variable entry */ + const_iterator find(Key key) const { return index_.find(key); } + +protected: + Factor_iterator factorsBegin(Index variable) { return internalAt(variable).begin(); } + Factor_iterator factorsEnd(Index variable) { return internalAt(variable).end(); } + + Factor_const_iterator factorsBegin(Index variable) const { return internalAt(variable).begin(); } + Factor_const_iterator factorsEnd(Index variable) const { return internalAt(variable).end(); } + + /// Internal version of 'at' that asserts existence + const Factors& internalAt(Key variable) const { + const KeyMap::const_iterator item = index_.find(variable); + assert(item != index_.end()); + return item->second; } + + /// Internal version of 'at' that asserts existence + Factors& internalAt(Key variable) { + const KeyMap::iterator item = index_.find(variable); + assert(item != index_.end()); + return item->second; } + + /// @} +}; + +} + +#include diff --git a/gtsam/inference/VariableSlots.cpp b/gtsam/inference/VariableSlots.cpp index a06aad6cb..b1519c120 100644 --- a/gtsam/inference/VariableSlots.cpp +++ b/gtsam/inference/VariableSlots.cpp @@ -37,7 +37,7 @@ void VariableSlots::print(const std::string& str) const { for(size_t i=0; ibegin()->second.size(); ++i) { cout << " \t"; BOOST_FOREACH(const value_type& slot, *this) { - if(slot.second[i] == numeric_limits::max()) + if(slot.second[i] == numeric_limits::max()) cout << "x" << "\t"; else cout << slot.second[i] << "\t"; diff --git a/gtsam/inference/VariableSlots.h b/gtsam/inference/VariableSlots.h index 28367a647..7e80e86cd 100644 --- a/gtsam/inference/VariableSlots.h +++ b/gtsam/inference/VariableSlots.h @@ -11,15 +11,16 @@ /** * @file VariableSlots.h - * @brief VariableSlots describes the structure of a combined factor in terms of where each block comes from in the source factors. + * @brief VariableSlots describes the structure of a combined factor in terms of where each block + * comes from in the source factors. * @author Richard Roberts - * @date Oct 4, 2010 - */ + * @date Oct 4, 2010 */ #pragma once #include #include +#include #include @@ -31,31 +32,29 @@ namespace gtsam { -/** - * A combined factor is assembled as one block of rows for each component - * factor. In each row-block (factor), some of the column-blocks (variables) - * may be empty since factors involving different sets of variables are - * interleaved. - * - * VariableSlots describes the 2D block structure of the combined factor. It - * is essentially a map >. The Index is the real - * variable index of the combined factor slot. The vector tells, for - * each row-block (factor), which column-block (variable slot) from the - * component factor appears in this block of the combined factor. - * - * As an example, if the combined factor contains variables 1, 3, and 5, then - * "variableSlots[3][2] == 0" indicates that column-block 1 (corresponding to - * variable index 3), row-block 2 (also meaning component factor 2), comes from - * column-block 0 of component factor 2. - * - * \nosubgrouping - */ +/** A combined factor is assembled as one block of rows for each component +* factor. In each row-block (factor), some of the column-blocks (variables) +* may be empty since factors involving different sets of variables are +* interleaved. +* +* VariableSlots describes the 2D block structure of the combined factor. It +* is a map >. The Index is the real +* variable index of the combined factor slot. The vector tells, for +* each row-block (factor), which column-block (variable slot) from the +* component factor appears in this block of the combined factor. +* +* As an example, if the combined factor contains variables 1, 3, and 5, then +* "variableSlots[3][2] == 0" indicates that column-block 1 (corresponding to +* variable index 3), row-block 2 (also meaning component factor 2), comes from +* column-block 0 of component factor 2. +* +* \nosubgrouping */ -class VariableSlots : public FastMap > { +class VariableSlots : public FastMap > { public: - typedef FastMap > Base; + typedef FastMap > Base; /// @name Standard Constructors /// @{ @@ -69,6 +68,7 @@ public: VariableSlots(const FG& factorGraph); /// @} + /// @name Testable /// @{ @@ -79,12 +79,13 @@ public: GTSAM_EXPORT bool equals(const VariableSlots& rhs, double tol = 0.0) const; /// @} - }; /* ************************************************************************* */ template -VariableSlots::VariableSlots(const FG& factorGraph) { +VariableSlots::VariableSlots(const FG& factorGraph) +{ + gttic(VariableSlots_constructor); static const bool debug = false; // Compute a mapping (called variableSlots) *from* each involved @@ -96,7 +97,7 @@ VariableSlots::VariableSlots(const FG& factorGraph) { size_t jointFactorPos = 0; BOOST_FOREACH(const typename FG::sharedFactor& factor, factorGraph) { assert(factor); - Index factorVarSlot = 0; + size_t factorVarSlot = 0; BOOST_FOREACH(const Index involvedVariable, *factor) { // Set the slot in this factor for this variable. If the // variable was not already discovered, create an array for it @@ -105,9 +106,9 @@ VariableSlots::VariableSlots(const FG& factorGraph) { // the array entry for each factor that will indicate the factor // does not involve the variable. iterator thisVarSlots; bool inserted; - boost::tie(thisVarSlots, inserted) = this->insert(make_pair(involvedVariable, std::vector())); + boost::tie(thisVarSlots, inserted) = this->insert(make_pair(involvedVariable, std::vector())); if(inserted) - thisVarSlots->second.resize(factorGraph.size(), std::numeric_limits::max()); + thisVarSlots->second.resize(factorGraph.size(), std::numeric_limits::max()); thisVarSlots->second[jointFactorPos] = factorVarSlot; if(debug) std::cout << " var " << involvedVariable << " rowblock " << jointFactorPos << " comes from factor's slot " << factorVarSlot << std::endl; ++ factorVarSlot; diff --git a/gtsam/inference/inference-inl.h b/gtsam/inference/inference-inl.h deleted file mode 100644 index 5c5cedbc3..000000000 --- a/gtsam/inference/inference-inl.h +++ /dev/null @@ -1,88 +0,0 @@ -/* ---------------------------------------------------------------------------- - - * GTSAM Copyright 2010, Georgia Tech Research Corporation, - * Atlanta, Georgia 30332-0415 - * All Rights Reserved - * Authors: Frank Dellaert, et al. (see THANKS for the full author list) - - * See LICENSE for the license information - - * -------------------------------------------------------------------------- */ - -/** - * @file inference-inl.h - * @brief - * @author Richard Roberts - * @date Mar 3, 2012 - */ - -#pragma once - -#include - -// Only for Eclipse parser, inference-inl.h (this file) is included at the bottom of inference.h -#include - -#include - -namespace gtsam { - -namespace inference { - -/* ************************************************************************* */ -template -Permutation::shared_ptr PermutationCOLAMD( - const VariableIndex& variableIndex, const CONSTRAINED& constrainLast, bool forceOrder) { - gttic(PermutationCOLAMD_constrained); - - size_t n = variableIndex.size(); - std::vector cmember(n, 0); - - // If at least some variables are not constrained to be last, constrain the - // ones that should be constrained. - if(constrainLast.size() < n) { - BOOST_FOREACH(Index var, constrainLast) { - assert(var < n); - cmember[var] = 1; - } - } - - Permutation::shared_ptr permutation = PermutationCOLAMD_(variableIndex, cmember); - if (forceOrder) { - Index j=n; - BOOST_REVERSE_FOREACH(Index c, constrainLast) - permutation->operator[](--j) = c; - } - return permutation; -} - -/* ************************************************************************* */ -template -Permutation::shared_ptr PermutationCOLAMDGrouped( - const VariableIndex& variableIndex, const CONSTRAINED_MAP& constraints) { - gttic(PermutationCOLAMD_grouped); - size_t n = variableIndex.size(); - std::vector cmember(n, 0); - - typedef typename CONSTRAINED_MAP::value_type constraint_pair; - BOOST_FOREACH(const constraint_pair& p, constraints) { - assert(p.first < n); - // FIXME: check that no groups are skipped - cmember[p.first] = p.second; - } - - return PermutationCOLAMD_(variableIndex, cmember); -} - -/* ************************************************************************* */ -inline Permutation::shared_ptr PermutationCOLAMD(const VariableIndex& variableIndex) { - gttic(PermutationCOLAMD_unconstrained); - size_t n = variableIndex.size(); - std::vector cmember(n, 0); - return PermutationCOLAMD_(variableIndex, cmember); -} - -} // namespace inference -} // namespace gtsam - - diff --git a/gtsam/inference/inference-inst.h b/gtsam/inference/inference-inst.h new file mode 100644 index 000000000..1d4e7c0dc --- /dev/null +++ b/gtsam/inference/inference-inst.h @@ -0,0 +1,96 @@ +/* ---------------------------------------------------------------------------- + +* GTSAM Copyright 2010, Georgia Tech Research Corporation, +* Atlanta, Georgia 30332-0415 +* All Rights Reserved +* Authors: Frank Dellaert, et al. (see THANKS for the full author list) + +* See LICENSE for the license information + +* -------------------------------------------------------------------------- */ + +/** +* @file inference-inst.h +* @brief Contains *generic* inference algorithms that convert between templated +* graphical models, i.e., factor graphs, Bayes nets, and Bayes trees +* @author Frank Dellaert +* @author Richard Roberts +*/ + +#pragma once + +#include +#include +#include + +#include + +namespace gtsam { + namespace inference { + + namespace { + /* ************************************************************************* */ + template + struct EliminationData { + EliminationData* const parentData; + std::vector childFactors; + EliminationData(EliminationData* _parentData, size_t nChildren) : + parentData(_parentData) { childFactors.reserve(nChildren); } + }; + + /* ************************************************************************* */ + template + EliminationData eliminationPreOrderVisitor( + const typename TREE::sharedNode& node, EliminationData& parentData) + { + // This function is called before visiting the children. Here, we create this node's data, + // which includes a pointer to the parent data and space for the factors of the children. + return EliminationData(&parentData, node->children.size()); + } + + /* ************************************************************************* */ + template + struct EliminationPostOrderVisitor + { + RESULT& result; + const typename TREE::Eliminate& eliminationFunction; + EliminationPostOrderVisitor(RESULT& result, const typename TREE::Eliminate& eliminationFunction) : + result(result), eliminationFunction(eliminationFunction) {} + void operator()(const typename TREE::sharedNode& node, EliminationData& myData) + { + // Call eliminate on the node and add the result to the parent's gathered factors + typename TREE::sharedFactor childFactor = node->eliminate(result, eliminationFunction, myData.childFactors); + if(!childFactor->empty()) + myData.parentData->childFactors.push_back(childFactor); + } + }; + } + + /* ************************************************************************* */ + /** Eliminate an elimination tree or a Bayes tree (used internally). Requires + * TREE::BayesNetType, TREE::FactorGraphType, TREE::sharedConditional, TREE::sharedFactor, + * TREE::Node, TREE::sharedNode, TREE::Node::factors, TREE::Node::children. */ + template + std::vector + EliminateTree(RESULT& result, const TREE& tree, const typename TREE::Eliminate& function) + { + // Typedefs + typedef typename TREE::sharedNode sharedNode; + typedef typename TREE::sharedFactor sharedFactor; + + // Do elimination using a depth-first traversal. During the pre-order visit (see + // eliminationPreOrderVisitor), we store a pointer to the parent data (where we'll put the + // remaining factor) and reserve a vector of factors to store the children elimination + // results. During the post-order visit (see eliminationPostOrderVisitor), we call dense + // elimination (using the gathered child factors) and store the result in the parent's + // gathered factors. + EliminationData rootData(0, tree.roots().size()); + EliminationPostOrderVisitor visitorPost(result, function); + treeTraversal::DepthFirstForest(tree, rootData, eliminationPreOrderVisitor, visitorPost); + + // Return remaining factors + return rootData.childFactors; + } + + } +} diff --git a/gtsam/inference/inference.cpp b/gtsam/inference/inference.cpp deleted file mode 100644 index 2704e8c26..000000000 --- a/gtsam/inference/inference.cpp +++ /dev/null @@ -1,106 +0,0 @@ -/* ---------------------------------------------------------------------------- - - * GTSAM Copyright 2010, Georgia Tech Research Corporation, - * Atlanta, Georgia 30332-0415 - * All Rights Reserved - * Authors: Frank Dellaert, et al. (see THANKS for the full author list) - - * See LICENSE for the license information - - * -------------------------------------------------------------------------- */ - -/** - * @file inference.cpp - * @brief inference definitions - * @author Frank Dellaert - * @author Richard Roberts - */ - -#include -#include - -#include -#include -#include -#include - -#include - -using namespace std; - -namespace gtsam { -namespace inference { - -/* ************************************************************************* */ -Permutation::shared_ptr PermutationCOLAMD_(const VariableIndex& variableIndex, std::vector& cmember) { - gttic(PermutationCOLAMD_internal); - - gttic(Prepare); - size_t nEntries = variableIndex.nEntries(), nFactors = variableIndex.nFactors(), nVars = variableIndex.size(); - // Convert to compressed column major format colamd wants it in (== MATLAB format!) - int Alen = ccolamd_recommended((int)nEntries, (int)nFactors, (int)nVars); /* colamd arg 3: size of the array A */ - vector A = vector(Alen); /* colamd arg 4: row indices of A, of size Alen */ - vector p = vector(nVars + 1); /* colamd arg 5: column pointers of A, of size n_col+1 */ - - static const bool debug = false; - - p[0] = 0; - int count = 0; - for(Index var = 0; var < variableIndex.size(); ++var) { - const VariableIndex::Factors& column(variableIndex[var]); - size_t lastFactorId = numeric_limits::max(); - BOOST_FOREACH(size_t factorIndex, column) { - if(lastFactorId != numeric_limits::max()) - assert(factorIndex > lastFactorId); - A[count++] = (int)factorIndex; // copy sparse column - if(debug) cout << "A[" << count-1 << "] = " << factorIndex << endl; - } - p[var+1] = count; // column j (base 1) goes from A[j-1] to A[j]-1 - } - - assert((size_t)count == variableIndex.nEntries()); - - if(debug) - for(size_t i=0; ioperator[](j) = j; - // else - permutation->operator[](j) = p[j]; - if(debug) cout << "COLAMD: " << j << "->" << p[j] << endl; - } - if(debug) cout << "COLAMD: p[" << nVars << "] = " << p[nVars] << endl; - gttoc(Create_permutation); - - return permutation; -} - -/* ************************************************************************* */ -} // \namespace inference -} // \namespace gtsam diff --git a/gtsam/inference/inference.h b/gtsam/inference/inference.h deleted file mode 100644 index 099887ab6..000000000 --- a/gtsam/inference/inference.h +++ /dev/null @@ -1,82 +0,0 @@ -/* ---------------------------------------------------------------------------- - - * GTSAM Copyright 2010, Georgia Tech Research Corporation, - * Atlanta, Georgia 30332-0415 - * All Rights Reserved - * Authors: Frank Dellaert, et al. (see THANKS for the full author list) - - * See LICENSE for the license information - - * -------------------------------------------------------------------------- */ - -/** - * @file inference.h - * @brief Contains *generic* inference algorithms that convert between templated - * graphical models, i.e., factor graphs, Bayes nets, and Bayes trees - * @author Frank Dellaert - * @author Richard Roberts - */ - -#pragma once - -#include -#include - -#include -#include - -#include - -namespace gtsam { - - namespace inference { - - /** - * Compute a permutation (variable ordering) using colamd - */ - GTSAM_EXPORT Permutation::shared_ptr PermutationCOLAMD( - const VariableIndex& variableIndex); - - /** - * Compute a permutation (variable ordering) using constrained colamd to move - * a set of variables to the end of the ordering - * @param variableIndex is the variable index lookup from a graph - * @param constrainlast is a vector of keys that should be constrained - * @tparam constrainLast is a std::vector (or similar structure) - * @param forceOrder if true, will not allow re-ordering of constrained variables - */ - template - Permutation::shared_ptr PermutationCOLAMD( - const VariableIndex& variableIndex, const CONSTRAINED& constrainLast, bool forceOrder=false); - - /** - * Compute a permutation of variable ordering using constrained colamd to - * move variables to the end in groups (0 = unconstrained, higher numbers at - * the end). - * @param variableIndex is the variable index lookup from a graph - * @param constraintMap is a map from variable index -> group number for constrained variables - * @tparam CONSTRAINED_MAP is an associative structure (like std::map), from size_t->int - */ - template - Permutation::shared_ptr PermutationCOLAMDGrouped( - const VariableIndex& variableIndex, const CONSTRAINED_MAP& constraints); - - /** - * Compute a CCOLAMD permutation using the constraint groups in cmember. - * The format for cmember is a part of ccolamd. - * - * @param variableIndex is the variable structure from a graph - * @param cmember is the constraint group list for each variable, where - * 0 is the default, unconstrained group, and higher numbers move further to - * the back of the list - * - * AGC: does cmember change? - */ - GTSAM_EXPORT Permutation::shared_ptr PermutationCOLAMD_( - const VariableIndex& variableIndex, std::vector& cmember); - - } // \namespace inference - -} // \namespace gtsam - -#include diff --git a/gtsam/inference/inferenceExceptions.h b/gtsam/inference/inferenceExceptions.h new file mode 100644 index 000000000..edd0e0aa5 --- /dev/null +++ b/gtsam/inference/inferenceExceptions.h @@ -0,0 +1,42 @@ +/* ---------------------------------------------------------------------------- + + * GTSAM Copyright 2010, Georgia Tech Research Corporation, + * Atlanta, Georgia 30332-0415 + * All Rights Reserved + * Authors: Frank Dellaert, et al. (see THANKS for the full author list) + + * See LICENSE for the license information + + * -------------------------------------------------------------------------- */ + +/** + * @file inferenceExceptions.h + * @brief Exceptions that may be thrown by inference algorithms + * @author Richard Roberts + * @date Apr 25, 2013 + */ +#pragma once + +#include +#include +#include + +namespace gtsam { + + /** An inference algorithm was called with inconsistent arguments. The factor graph, ordering, or + * variable index were inconsistent with each other, or a full elimination routine was called + * with an ordering that does not include all of the variables. */ + class InconsistentEliminationRequested : public std::exception { + public: + InconsistentEliminationRequested() throw() {} + virtual ~InconsistentEliminationRequested() throw() {} + virtual const char* what() const throw() { + return + "An inference algorithm was called with inconsistent arguments. The\n" + "factor graph, ordering, or variable index were inconsistent with each\n" + "other, or a full elimination routine was called with an ordering that\n" + "does not include all of the variables."; + } + }; + +} diff --git a/gtsam/inference/tests/testBayesTree.cpp b/gtsam/inference/tests/testBayesTree.cpp deleted file mode 100644 index 4943dedb4..000000000 --- a/gtsam/inference/tests/testBayesTree.cpp +++ /dev/null @@ -1,561 +0,0 @@ -/* ---------------------------------------------------------------------------- - - * GTSAM Copyright 2010, Georgia Tech Research Corporation, - * Atlanta, Georgia 30332-0415 - * All Rights Reserved - * Authors: Frank Dellaert, et al. (see THANKS for the full author list) - - * See LICENSE for the license information - - * -------------------------------------------------------------------------- */ - -/** - * @file testBayesTree.cpp - * @brief Unit tests for Bayes Tree - * @author Frank Dellaert - * @author Michael Kaess - * @author Viorela Ila - */ - -#include // for operator += -#include -#include -#include -using namespace boost::assign; - -#include -#include - -#include -#include -#include -#include - -using namespace std; -using namespace gtsam; - -///* ************************************************************************* */ -//// SLAM example from RSS sqrtSAM paper -static const Index _x3_=0, _x2_=1; -//static const Index _x1_=2, _l2_=3, _l1_=4; // unused -//IndexConditional::shared_ptr -// x3(new IndexConditional(_x3_)), -// x2(new IndexConditional(_x2_,_x3_)), -// x1(new IndexConditional(_x1_,_x2_,_x3_)), -// l1(new IndexConditional(_l1_,_x1_,_x2_)), -// l2(new IndexConditional(_l2_,_x1_,_x3_)); -// -//// Bayes Tree for sqrtSAM example -//SymbolicBayesTree createSlamSymbolicBayesTree(){ -// // Create using insert -//// Ordering slamOrdering; slamOrdering += _x3_, _x2_, _x1_, _l2_, _l1_; -// SymbolicBayesTree bayesTree_slam; -// bayesTree_slam.insert(x3); -// bayesTree_slam.insert(x2); -// bayesTree_slam.insert(x1); -// bayesTree_slam.insert(l2); -// bayesTree_slam.insert(l1); -// return bayesTree_slam; -//} - -/* ************************************************************************* */ -// Conditionals for ASIA example from the tutorial with A and D evidence -static const Index _X_=0, _T_=1, _S_=2, _E_=3, _L_=4, _B_=5; -static IndexConditional::shared_ptr - B(new IndexConditional(_B_)), - L(new IndexConditional(_L_, _B_)), - E(new IndexConditional(_E_, _L_, _B_)), - S(new IndexConditional(_S_, _L_, _B_)), - T(new IndexConditional(_T_, _E_, _L_)), - X(new IndexConditional(_X_, _E_)); - -// Cliques -static IndexConditional::shared_ptr - ELB(IndexConditional::FromKeys(cref_list_of<3>(_E_)(_L_)(_B_), 3)); - -// Bayes Tree for Asia example -static SymbolicBayesTree createAsiaSymbolicBayesTree() { - SymbolicBayesTree bayesTree; -// Ordering asiaOrdering; asiaOrdering += _X_, _T_, _S_, _E_, _L_, _B_; - SymbolicBayesTree::insert(bayesTree, B); - SymbolicBayesTree::insert(bayesTree, L); - SymbolicBayesTree::insert(bayesTree, E); - SymbolicBayesTree::insert(bayesTree, S); - SymbolicBayesTree::insert(bayesTree, T); - SymbolicBayesTree::insert(bayesTree, X); - return bayesTree; -} - -/* ************************************************************************* */ -TEST( BayesTree, constructor ) -{ - // Create using insert - SymbolicBayesTree bayesTree = createAsiaSymbolicBayesTree(); - - // Check Size - LONGS_EQUAL(4,bayesTree.size()); - EXPECT(!bayesTree.empty()); - - // Check root - boost::shared_ptr actual_root = bayesTree.root()->conditional(); - CHECK(assert_equal(*ELB,*actual_root)); - - // Create from symbolic Bayes chain in which we want to discover cliques - BayesNet ASIA; - ASIA.push_back(X); - ASIA.push_back(T); - ASIA.push_back(S); - ASIA.push_back(E); - ASIA.push_back(L); - ASIA.push_back(B); - SymbolicBayesTree bayesTree2(ASIA); - - // Check whether the same - CHECK(assert_equal(bayesTree,bayesTree2)); - - // CHECK findParentClique, should *not depend on order of parents* -// Ordering ordering; ordering += _X_, _T_, _S_, _E_, _L_, _B_; -// IndexTable index(ordering); - - list parents1; parents1 += _E_, _L_; - CHECK(assert_equal(_E_, bayesTree.findParentClique(parents1))); - - list parents2; parents2 += _L_, _E_; - CHECK(assert_equal(_E_, bayesTree.findParentClique(parents2))); - - list parents3; parents3 += _L_, _B_; - CHECK(assert_equal(_L_, bayesTree.findParentClique(parents3))); -} - -/* ************************************************************************* */ -TEST(BayesTree, clear) -{ -// SymbolicBayesTree bayesTree = createAsiaSymbolicBayesTree(); -// bayesTree.clear(); -// -// SymbolicBayesTree expected; -// -// // Check whether cleared BayesTree is equal to a new BayesTree -// CHECK(assert_equal(expected, bayesTree)); -} - -/* ************************************************************************* * -Bayes Tree for testing conversion to a forest of orphans needed for incremental. - A,B - C|A E|B - D|C F|E - */ -/* ************************************************************************* */ -TEST( BayesTree, removePath ) -{ - const Index _A_=5, _B_=4, _C_=3, _D_=2, _E_=1, _F_=0; - IndexConditional::shared_ptr - A(new IndexConditional(_A_)), - B(new IndexConditional(_B_, _A_)), - C(new IndexConditional(_C_, _A_)), - D(new IndexConditional(_D_, _C_)), - E(new IndexConditional(_E_, _B_)), - F(new IndexConditional(_F_, _E_)); - SymbolicBayesTree bayesTree; - EXPECT(bayesTree.empty()); -// Ordering ord; ord += _A_,_B_,_C_,_D_,_E_,_F_; - SymbolicBayesTree::insert(bayesTree, A); - SymbolicBayesTree::insert(bayesTree, B); - SymbolicBayesTree::insert(bayesTree, C); - SymbolicBayesTree::insert(bayesTree, D); - SymbolicBayesTree::insert(bayesTree, E); - SymbolicBayesTree::insert(bayesTree, F); - - // remove C, expected outcome: factor graph with ABC, - // Bayes Tree now contains two orphan trees: D|C and E|B,F|E - SymbolicFactorGraph expected; - expected.push_factor(_B_,_A_); -// expected.push_factor(_A_); - expected.push_factor(_C_,_A_); - SymbolicBayesTree::Cliques expectedOrphans; - expectedOrphans += bayesTree[_D_], bayesTree[_E_]; - - BayesNet bn; - SymbolicBayesTree::Cliques orphans; - bayesTree.removePath(bayesTree[_C_], bn, orphans); - SymbolicFactorGraph factors(bn); - CHECK(assert_equal((SymbolicFactorGraph)expected, factors)); - CHECK(assert_equal(expectedOrphans, orphans)); - - // remove E: factor graph with EB; E|B removed from second orphan tree - SymbolicFactorGraph expected2; - expected2.push_factor(_E_,_B_); - SymbolicBayesTree::Cliques expectedOrphans2; - expectedOrphans2 += bayesTree[_F_]; - - BayesNet bn2; - SymbolicBayesTree::Cliques orphans2; - bayesTree.removePath(bayesTree[_E_], bn2, orphans2); - SymbolicFactorGraph factors2(bn2); - CHECK(assert_equal((SymbolicFactorGraph)expected2, factors2)); - CHECK(assert_equal(expectedOrphans2, orphans2)); -} - -/* ************************************************************************* */ -TEST( BayesTree, removePath2 ) -{ - SymbolicBayesTree bayesTree = createAsiaSymbolicBayesTree(); - - // Call remove-path with clique B - BayesNet bn; - SymbolicBayesTree::Cliques orphans; - bayesTree.removePath(bayesTree[_B_], bn, orphans); - SymbolicFactorGraph factors(bn); - - // Check expected outcome - SymbolicFactorGraph expected; - expected.push_factor(_E_,_L_,_B_); -// expected.push_factor(_L_,_B_); -// expected.push_factor(_B_); - CHECK(assert_equal(expected, factors)); - SymbolicBayesTree::Cliques expectedOrphans; - expectedOrphans += bayesTree[_S_], bayesTree[_T_], bayesTree[_X_]; - CHECK(assert_equal(expectedOrphans, orphans)); -} - -/* ************************************************************************* */ -TEST( BayesTree, removePath3 ) -{ - SymbolicBayesTree bayesTree = createAsiaSymbolicBayesTree(); - - // Call remove-path with clique S - BayesNet bn; - SymbolicBayesTree::Cliques orphans; - bayesTree.removePath(bayesTree[_S_], bn, orphans); - SymbolicFactorGraph factors(bn); - - // Check expected outcome - SymbolicFactorGraph expected; - expected.push_factor(_E_,_L_,_B_); -// expected.push_factor(_L_,_B_); -// expected.push_factor(_B_); - expected.push_factor(_S_,_L_,_B_); - CHECK(assert_equal(expected, factors)); - SymbolicBayesTree::Cliques expectedOrphans; - expectedOrphans += bayesTree[_T_], bayesTree[_X_]; - CHECK(assert_equal(expectedOrphans, orphans)); -} - -void getAllCliques(const SymbolicBayesTree::sharedClique& subtree, SymbolicBayesTree::Cliques& cliques) { - // Check if subtree exists - if (subtree) { - cliques.push_back(subtree); - // Recursive call over all child cliques - BOOST_FOREACH(SymbolicBayesTree::sharedClique& childClique, subtree->children()) { - getAllCliques(childClique,cliques); - } - } -} - -/* ************************************************************************* */ -TEST( BayesTree, shortcutCheck ) -{ - const Index _A_=6, _B_=5, _C_=4, _D_=3, _E_=2, _F_=1, _G_=0; - IndexConditional::shared_ptr - A(new IndexConditional(_A_)), - B(new IndexConditional(_B_, _A_)), - C(new IndexConditional(_C_, _A_)), - D(new IndexConditional(_D_, _C_)), - E(new IndexConditional(_E_, _B_)), - F(new IndexConditional(_F_, _E_)), - G(new IndexConditional(_G_, _F_)); - SymbolicBayesTree bayesTree; -// Ordering ord; ord += _A_,_B_,_C_,_D_,_E_,_F_; - SymbolicBayesTree::insert(bayesTree, A); - SymbolicBayesTree::insert(bayesTree, B); - SymbolicBayesTree::insert(bayesTree, C); - SymbolicBayesTree::insert(bayesTree, D); - SymbolicBayesTree::insert(bayesTree, E); - SymbolicBayesTree::insert(bayesTree, F); - SymbolicBayesTree::insert(bayesTree, G); - - //bayesTree.print("BayesTree"); - //bayesTree.saveGraph("BT1.dot"); - - SymbolicBayesTree::sharedClique rootClique= bayesTree.root(); - //rootClique->printTree(); - SymbolicBayesTree::Cliques allCliques; - getAllCliques(rootClique,allCliques); - - BayesNet bn; - BOOST_FOREACH(SymbolicBayesTree::sharedClique& clique, allCliques) { - //clique->print("Clique#"); - bn = clique->shortcut(rootClique, &EliminateSymbolic); - //bn.print("Shortcut:\n"); - //cout << endl; - } - - // Check if all the cached shortcuts are cleared - rootClique->deleteCachedShortcuts(); - BOOST_FOREACH(SymbolicBayesTree::sharedClique& clique, allCliques) { - bool notCleared = clique->cachedSeparatorMarginal(); - CHECK( notCleared == false); - } - EXPECT_LONGS_EQUAL(0, rootClique->numCachedSeparatorMarginals()); - -// BOOST_FOREACH(SymbolicBayesTree::sharedClique& clique, allCliques) { -// clique->print("Clique#"); -// if(clique->cachedShortcut()){ -// bn = clique->cachedShortcut().get(); -// bn.print("Shortcut:\n"); -// } -// else -// cout << "Not Initialized" << endl; -// cout << endl; -// } -} - -/* ************************************************************************* */ -TEST( BayesTree, removeTop ) -{ - SymbolicBayesTree bayesTree = createAsiaSymbolicBayesTree(); - - // create a new factor to be inserted - boost::shared_ptr newFactor(new IndexFactor(_S_,_B_)); - - // Remove the contaminated part of the Bayes tree - BayesNet bn; - SymbolicBayesTree::Cliques orphans; - list keys; keys += _B_,_S_; - bayesTree.removeTop(keys, bn, orphans); - SymbolicFactorGraph factors(bn); - - // Check expected outcome - SymbolicFactorGraph expected; - expected.push_factor(_E_,_L_,_B_); -// expected.push_factor(_L_,_B_); -// expected.push_factor(_B_); - expected.push_factor(_S_,_L_,_B_); - CHECK(assert_equal(expected, factors)); - SymbolicBayesTree::Cliques expectedOrphans; - expectedOrphans += bayesTree[_T_], bayesTree[_X_]; - CHECK(assert_equal(expectedOrphans, orphans)); - - // Try removeTop again with a factor that should not change a thing - boost::shared_ptr newFactor2(new IndexFactor(_B_)); - BayesNet bn2; - SymbolicBayesTree::Cliques orphans2; - keys.clear(); keys += _B_; - bayesTree.removeTop(keys, bn2, orphans2); - SymbolicFactorGraph factors2(bn2); - SymbolicFactorGraph expected2; - CHECK(assert_equal(expected2, factors2)); - SymbolicBayesTree::Cliques expectedOrphans2; - CHECK(assert_equal(expectedOrphans2, orphans2)); -} - -/* ************************************************************************* */ -TEST( BayesTree, removeTop2 ) -{ - SymbolicBayesTree bayesTree = createAsiaSymbolicBayesTree(); - - // create two factors to be inserted - SymbolicFactorGraph newFactors; - newFactors.push_factor(_B_); - newFactors.push_factor(_S_); - - // Remove the contaminated part of the Bayes tree - BayesNet bn; - SymbolicBayesTree::Cliques orphans; - list keys; keys += _B_,_S_; - bayesTree.removeTop(keys, bn, orphans); - SymbolicFactorGraph factors(bn); - - // Check expected outcome - SymbolicFactorGraph expected; - expected.push_factor(_E_,_L_,_B_); -// expected.push_factor(_L_,_B_); -// expected.push_factor(_B_); - expected.push_factor(_S_,_L_,_B_); - CHECK(assert_equal(expected, factors)); - SymbolicBayesTree::Cliques expectedOrphans; - expectedOrphans += bayesTree[_T_], bayesTree[_X_]; - CHECK(assert_equal(expectedOrphans, orphans)); -} - -/* ************************************************************************* */ -TEST( BayesTree, removeTop3 ) -{ - const Index _x4_=5, _l5_=6; - // simple test case that failed after COLAMD was fixed/activated - IndexConditional::shared_ptr - X(new IndexConditional(_l5_)), - A(new IndexConditional(_x4_, _l5_)), - B(new IndexConditional(_x2_, _x4_)), - C(new IndexConditional(_x3_, _x2_)); - -// Ordering newOrdering; -// newOrdering += _x3_, _x2_, _x1_, _l2_, _l1_, _x4_, _l5_; - SymbolicBayesTree bayesTree; - SymbolicBayesTree::insert(bayesTree, X); - SymbolicBayesTree::insert(bayesTree, A); - SymbolicBayesTree::insert(bayesTree, B); - SymbolicBayesTree::insert(bayesTree, C); - - // remove all - list keys; - keys += _l5_, _x2_, _x3_, _x4_; - BayesNet bn; - SymbolicBayesTree::Cliques orphans; - bayesTree.removeTop(keys, bn, orphans); - SymbolicFactorGraph factors(bn); - - CHECK(orphans.size() == 0); -} - -/* ************************************************************************* */ -TEST( BayesTree, permute ) -{ - // creates a permutation and ensures that the nodes listing is updated - - // initial keys - more than just 6 variables - for a system with 9 variables - const Index _A0_=8, _B0_=7, _C0_=6, _D0_=5, _E0_=4, _F0_=0; - - // reduced keys - back to just 6 variables - const Index _A_=5, _B_=4, _C_=3, _D_=2, _E_=1, _F_=0; - - // Create and verify the permutation - std::set indices; indices += _A0_, _B0_, _C0_, _D0_, _E0_, _F0_; - Permutation actReducingPermutation = gtsam::internal::createReducingPermutation(indices); - Permutation expReducingPermutation(6); - expReducingPermutation[_A_] = _A0_; - expReducingPermutation[_B_] = _B0_; - expReducingPermutation[_C_] = _C0_; - expReducingPermutation[_D_] = _D0_; - expReducingPermutation[_E_] = _E0_; - expReducingPermutation[_F_] = _F0_; - EXPECT(assert_equal(expReducingPermutation, actReducingPermutation)); - - // Invert the permutation - gtsam::internal::Reduction inv_reduction = gtsam::internal::Reduction::CreateAsInverse(expReducingPermutation); - - // Build a bayes tree around reduced keys as if just eliminated from subset of factors/variables - IndexConditional::shared_ptr - A(new IndexConditional(_A_)), - B(new IndexConditional(_B_, _A_)), - C(new IndexConditional(_C_, _A_)), - D(new IndexConditional(_D_, _C_)), - E(new IndexConditional(_E_, _B_)), - F(new IndexConditional(_F_, _E_)); - SymbolicBayesTree bayesTreeReduced; - SymbolicBayesTree::insert(bayesTreeReduced, A); - SymbolicBayesTree::insert(bayesTreeReduced, B); - SymbolicBayesTree::insert(bayesTreeReduced, C); - SymbolicBayesTree::insert(bayesTreeReduced, D); - SymbolicBayesTree::insert(bayesTreeReduced, E); - SymbolicBayesTree::insert(bayesTreeReduced, F); - -// bayesTreeReduced.print("Reduced bayes tree"); -// P( 4 5) -// P( 3 | 5) -// P( 2 | 3) -// P( 1 | 4) -// P( 0 | 1) - - // Apply the permutation - should add placeholders for variables not present in nodes - SymbolicBayesTree actBayesTree = *bayesTreeReduced.clone(); - actBayesTree.permuteWithInverse(expReducingPermutation); - -// actBayesTree.print("Full bayes tree"); -// P( 7 8) -// P( 6 | 8) -// P( 5 | 6) -// P( 4 | 7) -// P( 0 | 4) - - // check keys in cliques - std::vector expRootIndices; expRootIndices += _B0_, _A0_; - IndexConditional::shared_ptr - expRoot(new IndexConditional(expRootIndices, 2)), // root - A0(new IndexConditional(_A0_)), - B0(new IndexConditional(_B0_, _A0_)), - C0(new IndexConditional(_C0_, _A0_)), // leaf level 1 - D0(new IndexConditional(_D0_, _C0_)), // leaf level 2 - E0(new IndexConditional(_E0_, _B0_)), // leaf level 2 - F0(new IndexConditional(_F0_, _E0_)); // leaf level 3 - - CHECK(actBayesTree.root()); - EXPECT(assert_equal(*expRoot, *actBayesTree.root()->conditional())); - EXPECT(assert_equal(*C0, *actBayesTree.root()->children().front()->conditional())); - EXPECT(assert_equal(*D0, *actBayesTree.root()->children().front()->children().front()->conditional())); - EXPECT(assert_equal(*E0, *actBayesTree.root()->children().back()->conditional())); - EXPECT(assert_equal(*F0, *actBayesTree.root()->children().back()->children().front()->conditional())); - - // check nodes structure - LONGS_EQUAL(9, actBayesTree.nodes().size()); - - SymbolicBayesTree expFullTree; - SymbolicBayesTree::insert(expFullTree, A0); - SymbolicBayesTree::insert(expFullTree, B0); - SymbolicBayesTree::insert(expFullTree, C0); - SymbolicBayesTree::insert(expFullTree, D0); - SymbolicBayesTree::insert(expFullTree, E0); - SymbolicBayesTree::insert(expFullTree, F0); - - EXPECT(assert_equal(expFullTree, actBayesTree)); -} - -///* ************************************************************************* */ -///** -// * x2 - x3 - x4 - x5 -// * | / \ | -// * x1 / \ x6 -// */ -//TEST( BayesTree, insert ) -//{ -// // construct bayes tree by split the graph along the separator x3 - x4 -// const Index _x1_=0, _x2_=1, _x6_=2, _x5_=3, _x3_=4, _x4_=5; -// SymbolicFactorGraph fg1, fg2, fg3; -// fg1.push_factor(_x3_, _x4_); -// fg2.push_factor(_x1_, _x2_); -// fg2.push_factor(_x2_, _x3_); -// fg2.push_factor(_x1_, _x3_); -// fg3.push_factor(_x5_, _x4_); -// fg3.push_factor(_x6_, _x5_); -// fg3.push_factor(_x6_, _x4_); -// -//// Ordering ordering1; ordering1 += _x3_, _x4_; -//// Ordering ordering2; ordering2 += _x1_, _x2_; -//// Ordering ordering3; ordering3 += _x6_, _x5_; -// -// BayesNet bn1, bn2, bn3; -// bn1 = *SymbolicSequentialSolver::EliminateUntil(fg1, _x4_+1); -// bn2 = *SymbolicSequentialSolver::EliminateUntil(fg2, _x2_+1); -// bn3 = *SymbolicSequentialSolver::EliminateUntil(fg3, _x5_+1); -// -// // insert child cliques -// SymbolicBayesTree actual; -// list children; -// SymbolicBayesTree::sharedClique r1 = actual.insert(bn2, children); -// SymbolicBayesTree::sharedClique r2 = actual.insert(bn3, children); -// -// // insert root clique -// children.push_back(r1); -// children.push_back(r2); -// actual.insert(bn1, children, true); -// -// // traditional way -// SymbolicFactorGraph fg; -// fg.push_factor(_x3_, _x4_); -// fg.push_factor(_x1_, _x2_); -// fg.push_factor(_x2_, _x3_); -// fg.push_factor(_x1_, _x3_); -// fg.push_factor(_x5_, _x4_); -// fg.push_factor(_x6_, _x5_); -// fg.push_factor(_x6_, _x4_); -// -//// Ordering ordering; ordering += _x1_, _x2_, _x6_, _x5_, _x3_, _x4_; -// BayesNet bn(*SymbolicSequentialSolver(fg).eliminate()); -// SymbolicBayesTree expected(bn); -// CHECK(assert_equal(expected, actual)); -// -//} - -/* ************************************************************************* */ -int main() { TestResult tr; return TestRegistry::runAllTests(tr); } -/* ************************************************************************* */ diff --git a/gtsam/inference/tests/testClusterTree.cpp b/gtsam/inference/tests/testClusterTree.cpp deleted file mode 100644 index 0464aa546..000000000 --- a/gtsam/inference/tests/testClusterTree.cpp +++ /dev/null @@ -1,43 +0,0 @@ -/* ---------------------------------------------------------------------------- - - * GTSAM Copyright 2010, Georgia Tech Research Corporation, - * Atlanta, Georgia 30332-0415 - * All Rights Reserved - * Authors: Frank Dellaert, et al. (see THANKS for the full author list) - - * See LICENSE for the license information - - * -------------------------------------------------------------------------- */ - -/** - * @file testClusterTree.cpp - * @brief Unit tests for Bayes Tree - * @author Kai Ni - * @author Frank Dellaert - */ - -#include // for operator += -using namespace boost::assign; - -#include - -#include -#include - -using namespace gtsam; - -// explicit instantiation and typedef -namespace gtsam { template class ClusterTree; } -typedef ClusterTree SymbolicClusterTree; - -/* ************************************************************************* */ -TEST(ClusterTree, constructor) { - SymbolicClusterTree tree; -} - -/* ************************************************************************* */ -int main() { - TestResult tr; - return TestRegistry::runAllTests(tr); -} -/* ************************************************************************* */ diff --git a/gtsam/inference/tests/testEliminationTree.cpp b/gtsam/inference/tests/testEliminationTree.cpp deleted file mode 100644 index 00679083a..000000000 --- a/gtsam/inference/tests/testEliminationTree.cpp +++ /dev/null @@ -1,119 +0,0 @@ -/* ---------------------------------------------------------------------------- - - * GTSAM Copyright 2010, Georgia Tech Research Corporation, - * Atlanta, Georgia 30332-0415 - * All Rights Reserved - * Authors: Frank Dellaert, et al. (see THANKS for the full author list) - - * See LICENSE for the license information - - * -------------------------------------------------------------------------- */ - -/** - * @file testEliminationTree.cpp - * @brief - * @author Richard Roberts - * @date Oct 14, 2010 - */ - -#include -#include - -#include -#include - -using namespace gtsam; -using namespace std; - -class EliminationTreeTester { -public: - // build hardcoded tree - static SymbolicEliminationTree::shared_ptr buildHardcodedTree(const SymbolicFactorGraph& fg) { - - SymbolicEliminationTree::shared_ptr leaf0(new SymbolicEliminationTree); - leaf0->add(fg[0]); - leaf0->add(fg[1]); - - SymbolicEliminationTree::shared_ptr node1(new SymbolicEliminationTree(1)); - node1->add(fg[2]); - node1->add(leaf0); - - SymbolicEliminationTree::shared_ptr node2(new SymbolicEliminationTree(2)); - node2->add(fg[3]); - node2->add(node1); - - SymbolicEliminationTree::shared_ptr leaf3(new SymbolicEliminationTree(3)); - leaf3->add(fg[4]); - - SymbolicEliminationTree::shared_ptr etree(new SymbolicEliminationTree(4)); - etree->add(leaf3); - etree->add(node2); - - return etree; - } -}; - -TEST(EliminationTree, Create) -{ - // create example factor graph - SymbolicFactorGraph fg; - fg.push_factor(0, 1); - fg.push_factor(0, 2); - fg.push_factor(1, 4); - fg.push_factor(2, 4); - fg.push_factor(3, 4); - - SymbolicEliminationTree::shared_ptr expected = EliminationTreeTester::buildHardcodedTree(fg); - - // Build from factor graph - SymbolicEliminationTree::shared_ptr actual = SymbolicEliminationTree::Create(fg); - - CHECK(assert_equal(*expected,*actual)); -} - -/* ************************************************************************* */ -// Test to drive elimination tree development -// graph: f(0,1) f(0,2) f(1,4) f(2,4) f(3,4) -/* ************************************************************************* */ - -TEST(EliminationTree, eliminate ) -{ - // create expected Chordal bayes Net - SymbolicBayesNet expected; - expected.push_front(boost::make_shared(4)); - expected.push_front(boost::make_shared(3,4)); - expected.push_front(boost::make_shared(2,4)); - expected.push_front(boost::make_shared(1,2,4)); - expected.push_front(boost::make_shared(0,1,2)); - - // Create factor graph - SymbolicFactorGraph fg; - fg.push_factor(0, 1); - fg.push_factor(0, 2); - fg.push_factor(1, 4); - fg.push_factor(2, 4); - fg.push_factor(3, 4); - - // eliminate - SymbolicBayesNet actual = *SymbolicSequentialSolver(fg).eliminate(); - - CHECK(assert_equal(expected,actual)); -} - -/* ************************************************************************* */ -TEST(EliminationTree, disconnected_graph) { - SymbolicFactorGraph fg; - fg.push_factor(0, 1); - fg.push_factor(0, 2); - fg.push_factor(1, 2); - fg.push_factor(3, 4); - - CHECK_EXCEPTION(SymbolicEliminationTree::Create(fg), DisconnectedGraphException); -} - -/* ************************************************************************* */ -int main() { - TestResult tr; - return TestRegistry::runAllTests(tr); -} -/* ************************************************************************* */ diff --git a/gtsam/inference/tests/testFactorGraph.cpp b/gtsam/inference/tests/testFactorGraph.cpp deleted file mode 100644 index 43402d2cb..000000000 --- a/gtsam/inference/tests/testFactorGraph.cpp +++ /dev/null @@ -1,65 +0,0 @@ -/* ---------------------------------------------------------------------------- - - * GTSAM Copyright 2010, Georgia Tech Research Corporation, - * Atlanta, Georgia 30332-0415 - * All Rights Reserved - * Authors: Frank Dellaert, et al. (see THANKS for the full author list) - - * See LICENSE for the license information - - * -------------------------------------------------------------------------- */ - -/** - * @file testFactorgraph.cpp - * @brief Unit tests for IndexFactor Graphs - * @author Christian Potthast - **/ - -/*STL/C++*/ -#include -#include -#include -#include -#include // for operator += -#include -using namespace boost::assign; - -#include - -#include -#include - -using namespace std; -using namespace gtsam; - -/* ************************************************************************* */ -TEST(FactorGraph, eliminateFrontals) { - - SymbolicFactorGraph sfgOrig; - sfgOrig.push_factor(0,1); - sfgOrig.push_factor(0,2); - sfgOrig.push_factor(1,3); - sfgOrig.push_factor(1,4); - sfgOrig.push_factor(2,3); - sfgOrig.push_factor(4,5); - - IndexConditional::shared_ptr actualCond; - SymbolicFactorGraph actualSfg; - boost::tie(actualCond, actualSfg) = sfgOrig.eliminateFrontals(2); - - vector condIndices; - condIndices += 0,1,2,3,4; - IndexConditional expectedCond(condIndices, 2); - - SymbolicFactorGraph expectedSfg; - expectedSfg.push_factor(2,3); - expectedSfg.push_factor(4,5); - expectedSfg.push_factor(2,3,4); - - EXPECT(assert_equal(expectedSfg, actualSfg)); - EXPECT(assert_equal(expectedCond, *actualCond)); -} - -/* ************************************************************************* */ -int main() { TestResult tr; return TestRegistry::runAllTests(tr); } -/* ************************************************************************* */ diff --git a/gtsam/inference/tests/testInference.cpp b/gtsam/inference/tests/testInference.cpp deleted file mode 100644 index b25703cb6..000000000 --- a/gtsam/inference/tests/testInference.cpp +++ /dev/null @@ -1,118 +0,0 @@ -/* ---------------------------------------------------------------------------- - - * GTSAM Copyright 2010, Georgia Tech Research Corporation, - * Atlanta, Georgia 30332-0415 - * All Rights Reserved - * Authors: Frank Dellaert, et al. (see THANKS for the full author list) - - * See LICENSE for the license information - - * -------------------------------------------------------------------------- */ - -/** - * @file testInference.cpp - * @author Richard Roberts - * @author Alex Cunningham - * @date Dec 6, 2010 - */ - -#include - -#include -#include -#include - -using namespace gtsam; - -/* ************************************************************************* */ -TEST(inference, UnobservedVariables) { - SymbolicFactorGraph sfg; - - // Create a factor graph that skips some variables - sfg.push_factor(0,1); - sfg.push_factor(1,3); - sfg.push_factor(3,5); - - VariableIndex variableIndex(sfg); - - // Computes a permutation with known variables first, skipped variables last - // Actual 0 1 3 5 2 4 - Permutation::shared_ptr actual(inference::PermutationCOLAMD(variableIndex)); - Permutation expected(6); - expected[0] = 0; - expected[1] = 1; - expected[2] = 3; - expected[3] = 5; - expected[4] = 2; - expected[5] = 4; - EXPECT(assert_equal(expected, *actual)); -} - -/* ************************************************************************* */ -TEST(inference, constrained_ordering) { - SymbolicFactorGraph sfg; - - // create graph with wanted variable set = 2, 4 - sfg.push_factor(0,1); - sfg.push_factor(1,2); - sfg.push_factor(2,3); - sfg.push_factor(3,4); - sfg.push_factor(4,5); - - VariableIndex variableIndex(sfg); - - // unconstrained version - Permutation::shared_ptr actUnconstrained(inference::PermutationCOLAMD(variableIndex)); - Permutation expUnconstrained = Permutation::Identity(6); - EXPECT(assert_equal(expUnconstrained, *actUnconstrained)); - - // constrained version - push one set to the end - std::vector constrainLast; - constrainLast.push_back(2); - constrainLast.push_back(4); - Permutation::shared_ptr actConstrained(inference::PermutationCOLAMD(variableIndex, constrainLast)); - Permutation expConstrained(6); - expConstrained[0] = 0; - expConstrained[1] = 1; - expConstrained[2] = 5; - expConstrained[3] = 3; - expConstrained[4] = 4; - expConstrained[5] = 2; - EXPECT(assert_equal(expConstrained, *actConstrained)); -} - -/* ************************************************************************* */ -TEST(inference, grouped_constrained_ordering) { - SymbolicFactorGraph sfg; - - // create graph with constrained groups: - // 1: 2, 4 - // 2: 5 - sfg.push_factor(0,1); - sfg.push_factor(1,2); - sfg.push_factor(2,3); - sfg.push_factor(3,4); - sfg.push_factor(4,5); - - VariableIndex variableIndex(sfg); - - // constrained version - push one set to the end - FastMap constraints; - constraints[2] = 1; - constraints[4] = 1; - constraints[5] = 2; - - Permutation::shared_ptr actConstrained(inference::PermutationCOLAMDGrouped(variableIndex, constraints)); - Permutation expConstrained(6); - expConstrained[0] = 0; - expConstrained[1] = 1; - expConstrained[2] = 3; - expConstrained[3] = 2; - expConstrained[4] = 4; - expConstrained[5] = 5; - EXPECT(assert_equal(expConstrained, *actConstrained)); -} - -/* ************************************************************************* */ -int main() { TestResult tr; return TestRegistry::runAllTests(tr); } -/* ************************************************************************* */ diff --git a/gtsam/inference/tests/testJunctionTree.cpp b/gtsam/inference/tests/testJunctionTree.cpp deleted file mode 100644 index ba7175e24..000000000 --- a/gtsam/inference/tests/testJunctionTree.cpp +++ /dev/null @@ -1,99 +0,0 @@ -/* ---------------------------------------------------------------------------- - - * GTSAM Copyright 2010, Georgia Tech Research Corporation, - * Atlanta, Georgia 30332-0415 - * All Rights Reserved - * Authors: Frank Dellaert, et al. (see THANKS for the full author list) - - * See LICENSE for the license information - - * -------------------------------------------------------------------------- */ - -/** - * @file testJunctionTree.cpp - * @brief Unit tests for Junction Tree - * @author Kai Ni - * @author Frank Dellaert - */ - -#include // for operator += -#include // for operator += -#include // for operator += -using namespace boost::assign; - -#include -#include - -#include -#include -#include -#include -#include -#include - -using namespace gtsam; -using namespace std; - -typedef JunctionTree SymbolicJunctionTree; - -/* ************************************************************************* * - * x1 - x2 - x3 - x4 - * x3 x4 - * x2 x1 : x3 - ****************************************************************************/ -TEST( JunctionTree, constructor ) -{ - const Index x2=0, x1=1, x3=2, x4=3; - SymbolicFactorGraph fg; - fg.push_factor(x2,x1); - fg.push_factor(x2,x3); - fg.push_factor(x3,x4); - - SymbolicJunctionTree actual(fg); - - vector frontal1; frontal1 += x3, x4; - vector frontal2; frontal2 += x2, x1; - vector sep1; - vector sep2; sep2 += x3; - CHECK(assert_equal(frontal1, actual.root()->frontal)); - CHECK(assert_equal(sep1, actual.root()->separator)); - LONGS_EQUAL(1, actual.root()->size()); - CHECK(assert_equal(frontal2, actual.root()->children().front()->frontal)); - CHECK(assert_equal(sep2, actual.root()->children().front()->separator)); - LONGS_EQUAL(2, actual.root()->children().front()->size()); - CHECK(assert_equal(*fg[2], *(*actual.root())[0])); - CHECK(assert_equal(*fg[0], *(*actual.root()->children().front())[0])); - CHECK(assert_equal(*fg[1], *(*actual.root()->children().front())[1])); -} - -/* ************************************************************************* * - * x1 - x2 - x3 - x4 - * x3 x4 - * x2 x1 : x3 - ****************************************************************************/ -TEST( JunctionTree, eliminate) -{ - const Index x2=0, x1=1, x3=2, x4=3; - SymbolicFactorGraph fg; - fg.push_factor(x2,x1); - fg.push_factor(x2,x3); - fg.push_factor(x3,x4); - - SymbolicJunctionTree jt(fg); - SymbolicBayesTree::sharedClique actual = jt.eliminate(&EliminateSymbolic); - - BayesNet bn(*SymbolicSequentialSolver(fg).eliminate()); - SymbolicBayesTree expected(bn); - -// cout << "BT from JT:\n"; -// actual->printTree(""); - - CHECK(assert_equal(*expected.root(), *actual)); -} - -/* ************************************************************************* */ -int main() { - TestResult tr; - return TestRegistry::runAllTests(tr); -} -/* ************************************************************************* */ diff --git a/gtsam/inference/tests/testOrdering.cpp b/gtsam/inference/tests/testOrdering.cpp new file mode 100644 index 000000000..2443dba2a --- /dev/null +++ b/gtsam/inference/tests/testOrdering.cpp @@ -0,0 +1,82 @@ +/* ---------------------------------------------------------------------------- + + * GTSAM Copyright 2010, Georgia Tech Research Corporation, + * Atlanta, Georgia 30332-0415 + * All Rights Reserved + * Authors: Frank Dellaert, et al. (see THANKS for the full author list) + + * See LICENSE for the license information + + * -------------------------------------------------------------------------- */ + +/** + * @file testOrdering + * @author Alex Cunningham + */ + +#include +#include +#include +#include +#include + +#include + +using namespace std; +using namespace gtsam; +using namespace boost::assign; + +/* ************************************************************************* */ +TEST(Ordering, constrained_ordering) { + SymbolicFactorGraph sfg; + + // create graph with wanted variable set = 2, 4 + sfg.push_factor(0,1); + sfg.push_factor(1,2); + sfg.push_factor(2,3); + sfg.push_factor(3,4); + sfg.push_factor(4,5); + + // unconstrained version + Ordering actUnconstrained = Ordering::COLAMD(sfg); + Ordering expUnconstrained = Ordering(list_of(0)(1)(2)(3)(4)(5)); + EXPECT(assert_equal(expUnconstrained, actUnconstrained)); + + // constrained version - push one set to the end + Ordering actConstrained = Ordering::COLAMDConstrainedLast(sfg, list_of(2)(4)); + Ordering expConstrained = Ordering(list_of(0)(1)(5)(3)(4)(2)); + EXPECT(assert_equal(expConstrained, actConstrained)); + + // constrained version - push one set to the start + Ordering actConstrained2 = Ordering::COLAMDConstrainedFirst(sfg, list_of(2)(4)); + Ordering expConstrained2 = Ordering(list_of(2)(4)(0)(1)(3)(5)); + EXPECT(assert_equal(expConstrained2, actConstrained2)); +} + +/* ************************************************************************* */ +TEST(Ordering, grouped_constrained_ordering) { + SymbolicFactorGraph sfg; + + // create graph with constrained groups: + // 1: 2, 4 + // 2: 5 + sfg.push_factor(0,1); + sfg.push_factor(1,2); + sfg.push_factor(2,3); + sfg.push_factor(3,4); + sfg.push_factor(4,5); + + // constrained version - push one set to the end + FastMap constraints; + constraints[2] = 1; + constraints[4] = 1; + constraints[5] = 2; + + Ordering actConstrained = Ordering::COLAMDConstrained(sfg, constraints); + Ordering expConstrained = list_of(0)(1)(3)(2)(4)(5); + EXPECT(assert_equal(expConstrained, actConstrained)); +} + +/* ************************************************************************* */ +int main() { TestResult tr; return TestRegistry::runAllTests(tr); } +/* ************************************************************************* */ diff --git a/gtsam/inference/tests/testPermutation.cpp b/gtsam/inference/tests/testPermutation.cpp deleted file mode 100644 index 5aa83422f..000000000 --- a/gtsam/inference/tests/testPermutation.cpp +++ /dev/null @@ -1,134 +0,0 @@ -/* ---------------------------------------------------------------------------- - - * GTSAM Copyright 2010, Georgia Tech Research Corporation, - * Atlanta, Georgia 30332-0415 - * All Rights Reserved - * Authors: Frank Dellaert, et al. (see THANKS for the full author list) - - * See LICENSE for the license information - - * -------------------------------------------------------------------------- */ - -/** - * @file testPermutation.cpp - * @date Sep 22, 2011 - * @author richard - */ - -#include - -#include -#include - -#include - -using namespace gtsam; -using namespace std; - -/* ************************************************************************* */ -TEST(Permutation, Identity) { - Permutation expected(5); - expected[0] = 0; - expected[1] = 1; - expected[2] = 2; - expected[3] = 3; - expected[4] = 4; - - Permutation actual = Permutation::Identity(5); - - EXPECT(assert_equal(expected, actual)); -} - -/* ************************************************************************* */ -TEST(Permutation, PullToFront) { - Permutation expected(5); - expected[0] = 4; - expected[1] = 0; - expected[2] = 2; - expected[3] = 1; - expected[4] = 3; - - std::vector toFront; - toFront.push_back(4); - toFront.push_back(0); - toFront.push_back(2); - Permutation actual = Permutation::PullToFront(toFront, 5); - - EXPECT(assert_equal(expected, actual)); -} - -/* ************************************************************************* */ -TEST(Permutation, PushToBack) { - Permutation expected(5); - expected[0] = 1; - expected[1] = 3; - expected[2] = 4; - expected[3] = 0; - expected[4] = 2; - - std::vector toBack; - toBack.push_back(4); - toBack.push_back(0); - toBack.push_back(2); - Permutation actual = Permutation::PushToBack(toBack, 5); - - EXPECT(assert_equal(expected, actual)); -} - -/* ************************************************************************* */ -TEST(Permutation, compose) { - Permutation p1(5); - p1[0] = 1; - p1[1] = 2; - p1[2] = 3; - p1[3] = 4; - p1[4] = 0; - - Permutation p2(5); - p2[0] = 1; - p2[1] = 2; - p2[2] = 4; - p2[3] = 3; - p2[4] = 0; - - Permutation expected(5); - expected[0] = 2; - expected[1] = 3; - expected[2] = 0; - expected[3] = 4; - expected[4] = 1; - - Permutation actual = *p1.permute(p2); - - EXPECT(assert_equal(expected, actual)); - LONGS_EQUAL(p1[p2[0]], actual[0]); - LONGS_EQUAL(p1[p2[1]], actual[1]); - LONGS_EQUAL(p1[p2[2]], actual[2]); - LONGS_EQUAL(p1[p2[3]], actual[3]); - LONGS_EQUAL(p1[p2[4]], actual[4]); -} - -/* ************************************************************************* */ -TEST(Reduction, CreateFromPartialPermutation) { - Permutation selector(3); - selector[0] = 2; - selector[1] = 4; - selector[2] = 6; - Permutation p(3); - p[0] = 2; - p[1] = 0; - p[2] = 1; - - internal::Reduction expected; - expected.insert(make_pair(2,6)); - expected.insert(make_pair(4,2)); - expected.insert(make_pair(6,4)); - - internal::Reduction actual = internal::Reduction::CreateFromPartialPermutation(selector, p); - EXPECT(assert_equal(expected, actual)); -} - -/* ************************************************************************* */ -int main() { TestResult tr; return TestRegistry::runAllTests(tr); } - - diff --git a/gtsam/inference/tests/testSerializationInference.cpp b/gtsam/inference/tests/testSerializationInference.cpp deleted file mode 100644 index b2b300440..000000000 --- a/gtsam/inference/tests/testSerializationInference.cpp +++ /dev/null @@ -1,88 +0,0 @@ -/* ---------------------------------------------------------------------------- - - * GTSAM Copyright 2010, Georgia Tech Research Corporation, - * Atlanta, Georgia 30332-0415 - * All Rights Reserved - * Authors: Frank Dellaert, et al. (see THANKS for the full author list) - - * See LICENSE for the license information - - * -------------------------------------------------------------------------- */ - -/** - * @file testSerializationInference.cpp - * @brief - * @author Richard Roberts - * @date Feb 7, 2012 - */ - -#include -#include -#include - -#include -#include - -using namespace std; -using namespace gtsam; -using namespace gtsam::serializationTestHelpers; - -/* ************************************************************************* */ -TEST (Serialization, symbolic_graph) { - // construct expected symbolic graph - SymbolicFactorGraph sfg; - sfg.push_factor(0); - sfg.push_factor(0,1); - sfg.push_factor(0,2); - sfg.push_factor(2,1); - - EXPECT(equalsObj(sfg)); - EXPECT(equalsXML(sfg)); - EXPECT(equalsBinary(sfg)); -} - -/* ************************************************************************* */ -TEST (Serialization, symbolic_bn) { - IndexConditional::shared_ptr x2(new IndexConditional(1, 2, 0)); - IndexConditional::shared_ptr l1(new IndexConditional(2, 0)); - IndexConditional::shared_ptr x1(new IndexConditional(0)); - - SymbolicBayesNet sbn; - sbn.push_back(x2); - sbn.push_back(l1); - sbn.push_back(x1); - - EXPECT(equalsObj(sbn)); - EXPECT(equalsXML(sbn)); - EXPECT(equalsBinary(sbn)); -} - -/* ************************************************************************* */ -TEST (Serialization, symbolic_bayes_tree ) { - typedef BayesTree SymbolicBayesTree; - static const Index _X_=0, _T_=1, _S_=2, _E_=3, _L_=4, _B_=5; - IndexConditional::shared_ptr - B(new IndexConditional(_B_)), - L(new IndexConditional(_L_, _B_)), - E(new IndexConditional(_E_, _L_, _B_)), - S(new IndexConditional(_S_, _L_, _B_)), - T(new IndexConditional(_T_, _E_, _L_)), - X(new IndexConditional(_X_, _E_)); - - // Bayes Tree for Asia example - SymbolicBayesTree bayesTree; - SymbolicBayesTree::insert(bayesTree, B); - SymbolicBayesTree::insert(bayesTree, L); - SymbolicBayesTree::insert(bayesTree, E); - SymbolicBayesTree::insert(bayesTree, S); - SymbolicBayesTree::insert(bayesTree, T); - SymbolicBayesTree::insert(bayesTree, X); - - EXPECT(equalsObj(bayesTree)); - EXPECT(equalsXML(bayesTree)); - EXPECT(equalsBinary(bayesTree)); -} - -/* ************************************************************************* */ -int main() { TestResult tr; return TestRegistry::runAllTests(tr); } -/* ************************************************************************* */ diff --git a/gtsam/inference/tests/testSymbolicBayesNet.cpp b/gtsam/inference/tests/testSymbolicBayesNet.cpp deleted file mode 100644 index 44def4c24..000000000 --- a/gtsam/inference/tests/testSymbolicBayesNet.cpp +++ /dev/null @@ -1,210 +0,0 @@ -/* ---------------------------------------------------------------------------- - - * GTSAM Copyright 2010, Georgia Tech Research Corporation, - * Atlanta, Georgia 30332-0415 - * All Rights Reserved - * Authors: Frank Dellaert, et al. (see THANKS for the full author list) - - * See LICENSE for the license information - - * -------------------------------------------------------------------------- */ - -/** - * @file testSymbolicBayesNet.cpp - * @brief Unit tests for a symbolic Bayes chain - * @author Frank Dellaert - */ - -#include // for 'insert()' -#include // for operator += -using namespace boost::assign; - -#include - -#include -#include -#include - -using namespace std; -using namespace gtsam; - -static const Index _L_ = 0; -static const Index _A_ = 1; -static const Index _B_ = 2; -static const Index _C_ = 3; -static const Index _D_ = 4; -static const Index _E_ = 5; - -static IndexConditional::shared_ptr - B(new IndexConditional(_B_)), - L(new IndexConditional(_L_, _B_)); - -/* ************************************************************************* */ -TEST( SymbolicBayesNet, equals ) -{ - BayesNet f1; - f1.push_back(B); - f1.push_back(L); - BayesNet f2; - f2.push_back(L); - f2.push_back(B); - CHECK(f1.equals(f1)); - CHECK(!f1.equals(f2)); -} - -/* ************************************************************************* */ -TEST( SymbolicBayesNet, pop_front ) -{ - IndexConditional::shared_ptr - A(new IndexConditional(_A_,_B_,_C_)), - B(new IndexConditional(_B_,_C_)), - C(new IndexConditional(_C_)); - - // Expected after pop_front - BayesNet expected; - expected.push_back(B); - expected.push_back(C); - - // Actual - BayesNet actual; - actual.push_back(A); - actual.push_back(B); - actual.push_back(C); - actual.pop_front(); - - CHECK(assert_equal(expected,actual)); -} - -/* ************************************************************************* */ -TEST( SymbolicBayesNet, combine ) -{ - IndexConditional::shared_ptr - A(new IndexConditional(_A_,_B_,_C_)), - B(new IndexConditional(_B_,_C_)), - C(new IndexConditional(_C_)); - - // p(A|BC) - BayesNet p_ABC; - p_ABC.push_back(A); - - // P(BC)=P(B|C)P(C) - BayesNet p_BC; - p_BC.push_back(B); - p_BC.push_back(C); - - // P(ABC) = P(A|BC) P(BC) - p_ABC.push_back(p_BC); - - BayesNet expected; - expected.push_back(A); - expected.push_back(B); - expected.push_back(C); - - CHECK(assert_equal(expected,p_ABC)); -} - -/* ************************************************************************* */ -TEST(SymbolicBayesNet, find) { - SymbolicBayesNet bn; - bn += IndexConditional::shared_ptr(new IndexConditional(_A_, _B_)); - std::vector keys; - keys.push_back(_B_); - keys.push_back(_C_); - keys.push_back(_D_); - bn += IndexConditional::shared_ptr(new IndexConditional(keys,2)); - bn += IndexConditional::shared_ptr(new IndexConditional(_D_)); - - SymbolicBayesNet::iterator expected = bn.begin(); ++ expected; - SymbolicBayesNet::iterator actual = bn.find(_C_); - EXPECT(assert_equal(**expected, **actual)); -} - -/* ************************************************************************* */ -TEST_UNSAFE(SymbolicBayesNet, popLeaf) { - IndexConditional::shared_ptr - A(new IndexConditional(_A_,_E_)), - B(new IndexConditional(_B_,_E_)), - C(new IndexConditional(_C_,_D_)), - D(new IndexConditional(_D_,_E_)), - E(new IndexConditional(_E_)); - - // BayesNet after popping A - SymbolicBayesNet expected1; - expected1 += B, C, D, E; - - // BayesNet after popping C - SymbolicBayesNet expected2; - expected2 += A, B, D, E; - - // BayesNet after popping C and D - SymbolicBayesNet expected3; - expected3 += A, B, E; - - // BayesNet after popping C and A - SymbolicBayesNet expected4; - expected4 += B, D, E; - - - // BayesNet after popping A - SymbolicBayesNet actual1; - actual1 += A, B, C, D, E; - actual1.popLeaf(actual1.find(_A_)); - - // BayesNet after popping C - SymbolicBayesNet actual2; - actual2 += A, B, C, D, E; - actual2.popLeaf(actual2.find(_C_)); - - // BayesNet after popping C and D - SymbolicBayesNet actual3; - actual3 += A, B, C, D, E; - actual3.popLeaf(actual3.find(_C_)); - actual3.popLeaf(actual3.find(_D_)); - - // BayesNet after popping C and A - SymbolicBayesNet actual4; - actual4 += A, B, C, D, E; - actual4.popLeaf(actual4.find(_C_)); - actual4.popLeaf(actual4.find(_A_)); - - EXPECT(assert_equal(expected1, actual1)); - EXPECT(assert_equal(expected2, actual2)); - EXPECT(assert_equal(expected3, actual3)); - EXPECT(assert_equal(expected4, actual4)); - - // Try to remove a non-leaf node (this test is not working in non-debug mode) -//#undef NDEBUG_SAVED -//#ifdef NDEBUG -//#define NDEBUG_SAVED -//#endif -// -//#undef NDEBUG -// SymbolicBayesNet actual5; -// actual5 += A, B, C, D, E; -// CHECK_EXCEPTION(actual5.popLeaf(actual5.find(_D_)), std::invalid_argument); -// -//#ifdef NDEBUG_SAVED -//#define NDEBUG -//#endif -} - -/* ************************************************************************* */ -TEST(SymbolicBayesNet, saveGraph) { - SymbolicBayesNet bn; - bn += IndexConditional::shared_ptr(new IndexConditional(_A_, _B_)); - std::vector keys; - keys.push_back(_B_); - keys.push_back(_C_); - keys.push_back(_D_); - bn += IndexConditional::shared_ptr(new IndexConditional(keys,2)); - bn += IndexConditional::shared_ptr(new IndexConditional(_D_)); - - bn.saveGraph("SymbolicBayesNet.dot"); -} - -/* ************************************************************************* */ -int main() { - TestResult tr; - return TestRegistry::runAllTests(tr); -} -/* ************************************************************************* */ diff --git a/gtsam/inference/tests/testSymbolicBayesTree.cpp b/gtsam/inference/tests/testSymbolicBayesTree.cpp deleted file mode 100644 index 68693817b..000000000 --- a/gtsam/inference/tests/testSymbolicBayesTree.cpp +++ /dev/null @@ -1,323 +0,0 @@ -/* ---------------------------------------------------------------------------- - - * GTSAM Copyright 2010, Georgia Tech Research Corporation, - * Atlanta, Georgia 30332-0415 - * All Rights Reserved - * Authors: Frank Dellaert, et al. (see THANKS for the full author list) - - * See LICENSE for the license information - - * -------------------------------------------------------------------------- */ - -/* - * @file testSymbolicBayesTree.cpp - * @date sept 15, 2012 - * @author Frank Dellaert - */ - -#include -#include -#include - -#include -#include -using namespace boost::assign; - -#include - -using namespace std; -using namespace gtsam; - -static bool debug = false; - -/* ************************************************************************* */ - -TEST_UNSAFE( SymbolicBayesTree, thinTree ) { - - // create a thin-tree Bayesnet, a la Jean-Guillaume - SymbolicBayesNet bayesNet; - bayesNet.push_front(boost::make_shared(14)); - - bayesNet.push_front(boost::make_shared(13, 14)); - bayesNet.push_front(boost::make_shared(12, 14)); - - bayesNet.push_front(boost::make_shared(11, 13, 14)); - bayesNet.push_front(boost::make_shared(10, 13, 14)); - bayesNet.push_front(boost::make_shared(9, 12, 14)); - bayesNet.push_front(boost::make_shared(8, 12, 14)); - - bayesNet.push_front(boost::make_shared(7, 11, 13)); - bayesNet.push_front(boost::make_shared(6, 11, 13)); - bayesNet.push_front(boost::make_shared(5, 10, 13)); - bayesNet.push_front(boost::make_shared(4, 10, 13)); - - bayesNet.push_front(boost::make_shared(3, 9, 12)); - bayesNet.push_front(boost::make_shared(2, 9, 12)); - bayesNet.push_front(boost::make_shared(1, 8, 12)); - bayesNet.push_front(boost::make_shared(0, 8, 12)); - - if (debug) { - GTSAM_PRINT(bayesNet); - bayesNet.saveGraph("/tmp/symbolicBayesNet.dot"); - } - - // create a BayesTree out of a Bayes net - SymbolicBayesTree bayesTree(bayesNet); - if (debug) { - GTSAM_PRINT(bayesTree); - bayesTree.saveGraph("/tmp/symbolicBayesTree.dot"); - } - - SymbolicBayesTree::Clique::shared_ptr R = bayesTree.root(); - - { - // check shortcut P(S9||R) to root - SymbolicBayesTree::Clique::shared_ptr c = bayesTree[9]; - SymbolicBayesNet shortcut = c->shortcut(R, EliminateSymbolic); - SymbolicBayesNet expected; - EXPECT(assert_equal(expected, shortcut)); - } - - { - // check shortcut P(S8||R) to root - SymbolicBayesTree::Clique::shared_ptr c = bayesTree[8]; - SymbolicBayesNet shortcut = c->shortcut(R, EliminateSymbolic); - SymbolicBayesNet expected; - expected.push_front(boost::make_shared(12, 14)); - EXPECT(assert_equal(expected, shortcut)); - } - - { - // check shortcut P(S4||R) to root - SymbolicBayesTree::Clique::shared_ptr c = bayesTree[4]; - SymbolicBayesNet shortcut = c->shortcut(R, EliminateSymbolic); - SymbolicBayesNet expected; - expected.push_front(boost::make_shared(10, 13, 14)); - EXPECT(assert_equal(expected, shortcut)); - } - - { - // check shortcut P(S2||R) to root - SymbolicBayesTree::Clique::shared_ptr c = bayesTree[2]; - SymbolicBayesNet shortcut = c->shortcut(R, EliminateSymbolic); - SymbolicBayesNet expected; - expected.push_front(boost::make_shared(12, 14)); - expected.push_front(boost::make_shared(9, 12, 14)); - EXPECT(assert_equal(expected, shortcut)); - } - - { - // check shortcut P(S0||R) to root - SymbolicBayesTree::Clique::shared_ptr c = bayesTree[0]; - SymbolicBayesNet shortcut = c->shortcut(R, EliminateSymbolic); - SymbolicBayesNet expected; - expected.push_front(boost::make_shared(12, 14)); - expected.push_front(boost::make_shared(8, 12, 14)); - EXPECT(assert_equal(expected, shortcut)); - } - - SymbolicBayesNet::shared_ptr actualJoint; - - // Check joint P(8,2) - if (false) { // TODO, not disjoint - actualJoint = bayesTree.jointBayesNet(8, 2, EliminateSymbolic); - SymbolicBayesNet expected; - expected.push_front(boost::make_shared(8)); - expected.push_front(boost::make_shared(2, 8)); - EXPECT(assert_equal(expected, *actualJoint)); - } - - // Check joint P(1,2) - if (false) { // TODO, not disjoint - actualJoint = bayesTree.jointBayesNet(1, 2, EliminateSymbolic); - SymbolicBayesNet expected; - expected.push_front(boost::make_shared(2)); - expected.push_front(boost::make_shared(1, 2)); - EXPECT(assert_equal(expected, *actualJoint)); - } - - // Check joint P(2,6) - if (true) { - actualJoint = bayesTree.jointBayesNet(2, 6, EliminateSymbolic); - SymbolicBayesNet expected; - expected.push_front(boost::make_shared(6)); - expected.push_front(boost::make_shared(2, 6)); - EXPECT(assert_equal(expected, *actualJoint)); - } - - // Check joint P(4,6) - if (false) { // TODO, not disjoint - actualJoint = bayesTree.jointBayesNet(4, 6, EliminateSymbolic); - SymbolicBayesNet expected; - expected.push_front(boost::make_shared(6)); - expected.push_front(boost::make_shared(4, 6)); - EXPECT(assert_equal(expected, *actualJoint)); - } -} - -/* ************************************************************************* * - Bayes tree for smoother with "natural" ordering: - C1 5 6 - C2 4 : 5 - C3 3 : 4 - C4 2 : 3 - C5 1 : 2 - C6 0 : 1 - **************************************************************************** */ - -TEST_UNSAFE( SymbolicBayesTree, linear_smoother_shortcuts ) { - // Create smoother with 7 nodes - SymbolicFactorGraph smoother; - smoother.push_factor(0); - smoother.push_factor(0, 1); - smoother.push_factor(1, 2); - smoother.push_factor(2, 3); - smoother.push_factor(3, 4); - smoother.push_factor(4, 5); - smoother.push_factor(5, 6); - - BayesNet bayesNet = - *SymbolicSequentialSolver(smoother).eliminate(); - - if (debug) { - GTSAM_PRINT(bayesNet); - bayesNet.saveGraph("/tmp/symbolicBayesNet.dot"); - } - - // create a BayesTree out of a Bayes net - SymbolicBayesTree bayesTree(bayesNet); - if (debug) { - GTSAM_PRINT(bayesTree); - bayesTree.saveGraph("/tmp/symbolicBayesTree.dot"); - } - - SymbolicBayesTree::Clique::shared_ptr R = bayesTree.root(); - - { - // check shortcut P(S2||R) to root - SymbolicBayesTree::Clique::shared_ptr c = bayesTree[4]; // 4 is frontal in C2 - SymbolicBayesNet shortcut = c->shortcut(R, EliminateSymbolic); - SymbolicBayesNet expected; - EXPECT(assert_equal(expected, shortcut)); - } - - { - // check shortcut P(S3||R) to root - SymbolicBayesTree::Clique::shared_ptr c = bayesTree[3]; // 3 is frontal in C3 - SymbolicBayesNet shortcut = c->shortcut(R, EliminateSymbolic); - SymbolicBayesNet expected; - expected.push_front(boost::make_shared(4, 5)); - EXPECT(assert_equal(expected, shortcut)); - } - - { - // check shortcut P(S4||R) to root - SymbolicBayesTree::Clique::shared_ptr c = bayesTree[2]; // 2 is frontal in C4 - SymbolicBayesNet shortcut = c->shortcut(R, EliminateSymbolic); - SymbolicBayesNet expected; - expected.push_front(boost::make_shared(3, 5)); - EXPECT(assert_equal(expected, shortcut)); - } -} - -/* ************************************************************************* */ -// from testSymbolicJunctionTree, which failed at one point -TEST(SymbolicBayesTree, complicatedMarginal) { - - // Create the conditionals to go in the BayesTree - list L; - L = list_of(1)(2)(5); - IndexConditional::shared_ptr R_1_2(new IndexConditional(L, 2)); - L = list_of(3)(4)(6); - IndexConditional::shared_ptr R_3_4(new IndexConditional(L, 2)); - L = list_of(5)(6)(7)(8); - IndexConditional::shared_ptr R_5_6(new IndexConditional(L, 2)); - L = list_of(7)(8)(11); - IndexConditional::shared_ptr R_7_8(new IndexConditional(L, 2)); - L = list_of(9)(10)(11)(12); - IndexConditional::shared_ptr R_9_10(new IndexConditional(L, 2)); - L = list_of(11)(12); - IndexConditional::shared_ptr R_11_12(new IndexConditional(L, 2)); - - // Symbolic Bayes Tree - typedef SymbolicBayesTree::Clique Clique; - typedef SymbolicBayesTree::sharedClique sharedClique; - - // Create Bayes Tree - SymbolicBayesTree bt; - bt.insert(sharedClique(new Clique(R_11_12))); - bt.insert(sharedClique(new Clique(R_9_10))); - bt.insert(sharedClique(new Clique(R_7_8))); - bt.insert(sharedClique(new Clique(R_5_6))); - bt.insert(sharedClique(new Clique(R_3_4))); - bt.insert(sharedClique(new Clique(R_1_2))); - if (debug) { - GTSAM_PRINT(bt); - bt.saveGraph("/tmp/symbolicBayesTree.dot"); - } - - SymbolicBayesTree::Clique::shared_ptr R = bt.root(); - SymbolicBayesNet empty; - - // Shortcut on 9 - { - SymbolicBayesTree::Clique::shared_ptr c = bt[9]; - SymbolicBayesNet shortcut = c->shortcut(R, EliminateSymbolic); - EXPECT(assert_equal(empty, shortcut)); - } - - // Shortcut on 7 - { - SymbolicBayesTree::Clique::shared_ptr c = bt[7]; - SymbolicBayesNet shortcut = c->shortcut(R, EliminateSymbolic); - EXPECT(assert_equal(empty, shortcut)); - } - - // Shortcut on 5 - { - SymbolicBayesTree::Clique::shared_ptr c = bt[5]; - SymbolicBayesNet shortcut = c->shortcut(R, EliminateSymbolic); - SymbolicBayesNet expected; - expected.push_front(boost::make_shared(8, 11)); - expected.push_front(boost::make_shared(7, 8, 11)); - EXPECT(assert_equal(expected, shortcut)); - } - - // Shortcut on 3 - { - SymbolicBayesTree::Clique::shared_ptr c = bt[3]; - SymbolicBayesNet shortcut = c->shortcut(R, EliminateSymbolic); - SymbolicBayesNet expected; - expected.push_front(boost::make_shared(6, 11)); - EXPECT(assert_equal(expected, shortcut)); - } - - // Shortcut on 1 - { - SymbolicBayesTree::Clique::shared_ptr c = bt[1]; - SymbolicBayesNet shortcut = c->shortcut(R, EliminateSymbolic); - SymbolicBayesNet expected; - expected.push_front(boost::make_shared(5, 11)); - EXPECT(assert_equal(expected, shortcut)); - } - - // Marginal on 5 - { - IndexFactor::shared_ptr actual = bt.marginalFactor(5, EliminateSymbolic); - EXPECT(assert_equal(IndexFactor(5), *actual, 1e-1)); - } - - // Shortcut on 6 - { - IndexFactor::shared_ptr actual = bt.marginalFactor(6, EliminateSymbolic); - EXPECT(assert_equal(IndexFactor(6), *actual, 1e-1)); - } - -} -/* ************************************************************************* */ -int main() { - TestResult tr; - return TestRegistry::runAllTests(tr); -} -/* ************************************************************************* */ - diff --git a/gtsam/inference/tests/testSymbolicFactor.cpp b/gtsam/inference/tests/testSymbolicFactor.cpp deleted file mode 100644 index a474c09a1..000000000 --- a/gtsam/inference/tests/testSymbolicFactor.cpp +++ /dev/null @@ -1,113 +0,0 @@ -/* ---------------------------------------------------------------------------- - - * GTSAM Copyright 2010, Georgia Tech Research Corporation, - * Atlanta, Georgia 30332-0415 - * All Rights Reserved - * Authors: Frank Dellaert, et al. (see THANKS for the full author list) - - * See LICENSE for the license information - - * -------------------------------------------------------------------------- */ - -/** - * @file testSymbolicFactor.cpp - * @brief Unit tests for a symbolic IndexFactor - * @author Frank Dellaert - */ - -#include -#include -#include -#include -#include - -#include -#include - -using namespace std; -using namespace gtsam; -using namespace boost::assign; - -/* ************************************************************************* */ -TEST(SymbolicFactor, constructor) { - - // Frontals sorted, parents not sorted - vector keys1; keys1 += 3, 4, 5, 9, 7, 8; - (void)IndexConditional(keys1, 3); - -// // Frontals not sorted -// vector keys2; keys2 += 3, 5, 4, 9, 7, 8; -// (void)IndexConditional::FromRange(keys2.begin(), keys2.end(), 3); - -// // Frontals not before parents -// vector keys3; keys3 += 3, 4, 5, 1, 7, 8; -// (void)IndexConditional::FromRange(keys3.begin(), keys3.end(), 3); -} - -/* ************************************************************************* */ -#ifdef TRACK_ELIMINATE -TEST(SymbolicFactor, eliminate) { - vector keys; keys += 2, 3, 4, 6, 7, 9, 10, 11; - IndexFactor actual(keys.begin(), keys.end()); - BayesNet fragment = *actual.eliminate(3); - - IndexFactor expected(keys.begin()+3, keys.end()); - IndexConditional::shared_ptr expected0 = IndexConditional::FromRange(keys.begin(), keys.end(), 1); - IndexConditional::shared_ptr expected1 = IndexConditional::FromRange(keys.begin()+1, keys.end(), 1); - IndexConditional::shared_ptr expected2 = IndexConditional::FromRange(keys.begin()+2, keys.end(), 1); - - CHECK(assert_equal(fragment.size(), size_t(3))); - CHECK(assert_equal(expected, actual)); - BayesNet::const_iterator fragmentCond = fragment.begin(); - CHECK(assert_equal(**fragmentCond++, *expected0)); - CHECK(assert_equal(**fragmentCond++, *expected1)); - CHECK(assert_equal(**fragmentCond++, *expected2)); -} -#endif -/* ************************************************************************* */ -TEST(SymbolicFactor, EliminateSymbolic) { - SymbolicFactorGraph factors; - factors.push_factor(2,4,6); - factors.push_factor(1,2,5); - factors.push_factor(0,3); - - IndexFactor expectedFactor(4,5,6); - std::vector keys; keys += 0,1,2,3,4,5,6; - IndexConditional::shared_ptr expectedConditional(new IndexConditional(keys, 4)); - - IndexFactor::shared_ptr actualFactor; - IndexConditional::shared_ptr actualConditional; - boost::tie(actualConditional, actualFactor) = EliminateSymbolic(factors, 4); - - CHECK(assert_equal(*expectedConditional, *actualConditional)); - CHECK(assert_equal(expectedFactor, *actualFactor)); - -// BayesNet expected_bn; -// vector parents; -// -// parents.clear(); parents += 1,2,3,4,5,6; -// expected_bn.push_back(IndexConditional::shared_ptr(new IndexConditional(0, parents))); -// -// parents.clear(); parents += 2,3,4,5,6; -// expected_bn.push_back(IndexConditional::shared_ptr(new IndexConditional(1, parents))); -// -// parents.clear(); parents += 3,4,5,6; -// expected_bn.push_back(IndexConditional::shared_ptr(new IndexConditional(2, parents))); -// -// parents.clear(); parents += 4,5,6; -// expected_bn.push_back(IndexConditional::shared_ptr(new IndexConditional(3, parents))); -// -// BayesNet::shared_ptr actual_bn; -// IndexFactor::shared_ptr actual_factor; -// boost::tie(actual_bn, actual_factor) = EliminateSymbolic(factors, 4); -// -// CHECK(assert_equal(expected_bn, *actual_bn)); -// CHECK(assert_equal(expected_factor, *actual_factor)); -} - -/* ************************************************************************* */ -int main() { - TestResult tr; - return TestRegistry::runAllTests(tr); -} -/* ************************************************************************* */ diff --git a/gtsam/inference/tests/testSymbolicFactorGraph.cpp b/gtsam/inference/tests/testSymbolicFactorGraph.cpp deleted file mode 100644 index 35c50eb9e..000000000 --- a/gtsam/inference/tests/testSymbolicFactorGraph.cpp +++ /dev/null @@ -1,107 +0,0 @@ -/* ---------------------------------------------------------------------------- - - * GTSAM Copyright 2010, Georgia Tech Research Corporation, - * Atlanta, Georgia 30332-0415 - * All Rights Reserved - * Authors: Frank Dellaert, et al. (see THANKS for the full author list) - - * See LICENSE for the license information - - * -------------------------------------------------------------------------- */ - -/** - * @file testSymbolicFactorGraph.cpp - * @brief Unit tests for a symbolic IndexFactor Graph - * @author Frank Dellaert - */ - -#include // for operator += -using namespace boost::assign; - -#include - -#include -#include -#include -#include -#include - -using namespace std; -using namespace gtsam; - -static const Index vx2 = 0; -static const Index vx1 = 1; -static const Index vl1 = 2; - -///* ************************************************************************* */ -//TEST( SymbolicFactorGraph, EliminateOne ) -//{ -// // create a test graph -// SymbolicFactorGraph fg; -// fg.push_factor(vx2, vx1); -// -// SymbolicSequentialSolver::EliminateUntil(fg, vx2+1); -// SymbolicFactorGraph expected; -// expected.push_back(boost::shared_ptr()); -// expected.push_factor(vx1); -// -// CHECK(assert_equal(expected, fg)); -//} - -/* ************************************************************************* */ -TEST( SymbolicFactorGraph, constructFromBayesNet ) -{ - // create expected factor graph - SymbolicFactorGraph expected; - expected.push_factor(vx2, vx1, vl1); - expected.push_factor(vx1, vl1); - expected.push_factor(vx1); - - // create Bayes Net - IndexConditional::shared_ptr x2(new IndexConditional(vx2, vx1, vl1)); - IndexConditional::shared_ptr l1(new IndexConditional(vx1, vl1)); - IndexConditional::shared_ptr x1(new IndexConditional(vx1)); - - BayesNet bayesNet; - bayesNet.push_back(x2); - bayesNet.push_back(l1); - bayesNet.push_back(x1); - - // create actual factor graph from a Bayes Net - SymbolicFactorGraph actual(bayesNet); - - CHECK(assert_equal((SymbolicFactorGraph)expected,actual)); -} - -/* ************************************************************************* */ -TEST( SymbolicFactorGraph, push_back ) -{ - // Create two factor graphs and expected combined graph - SymbolicFactorGraph fg1, fg2, expected; - - fg1.push_factor(vx1); - fg1.push_factor(vx2, vx1); - - fg2.push_factor(vx1, vl1); - fg2.push_factor(vx2, vl1); - - expected.push_factor(vx1); - expected.push_factor(vx2, vx1); - expected.push_factor(vx1, vl1); - expected.push_factor(vx2, vl1); - - // combine - SymbolicFactorGraph actual = combine(fg1, fg2); - CHECK(assert_equal(expected, actual)); - - // combine using push_back - fg1.push_back(fg2); - CHECK(assert_equal(expected, fg1)); -} - -/* ************************************************************************* */ -int main() { - TestResult tr; - return TestRegistry::runAllTests(tr); -} -/* ************************************************************************* */ diff --git a/gtsam/inference/tests/testSymbolicSequentialSolver.cpp b/gtsam/inference/tests/testSymbolicSequentialSolver.cpp deleted file mode 100644 index c95bbdab5..000000000 --- a/gtsam/inference/tests/testSymbolicSequentialSolver.cpp +++ /dev/null @@ -1,130 +0,0 @@ -/* ---------------------------------------------------------------------------- - - * GTSAM Copyright 2010, Georgia Tech Research Corporation, - * Atlanta, Georgia 30332-0415 - * All Rights Reserved - * Authors: Frank Dellaert, et al. (see THANKS for the full author list) - - * See LICENSE for the license information - - * -------------------------------------------------------------------------- */ - -/** - * @file testSymbolicSequentialSolver.cpp - * @brief Unit tests for a symbolic sequential solver routines - * @author Frank Dellaert - * @date Sept 16, 2012 - */ - -#include - -#include - -#include // for operator += -using namespace boost::assign; - -using namespace std; -using namespace gtsam; - -/* ************************************************************************* */ - -TEST( SymbolicSequentialSolver, SymbolicSequentialSolver ) { - // create factor graph - SymbolicFactorGraph g; - g.push_factor(2, 1, 0); - g.push_factor(2, 0); - g.push_factor(2); - // test solver is Testable - SymbolicSequentialSolver solver(g); -// GTSAM_PRINT(solver); - EXPECT(assert_equal(solver,solver)); -} - -/* ************************************************************************* */ - -TEST( SymbolicSequentialSolver, inference ) { - // Create factor graph - SymbolicFactorGraph fg; - fg.push_factor(0, 1); - fg.push_factor(0, 2); - fg.push_factor(1, 4); - fg.push_factor(2, 4); - fg.push_factor(3, 4); - - // eliminate - SymbolicSequentialSolver solver(fg); - SymbolicBayesNet::shared_ptr actual = solver.eliminate(); - SymbolicBayesNet expected; - expected.push_front(boost::make_shared(4)); - expected.push_front(boost::make_shared(3, 4)); - expected.push_front(boost::make_shared(2, 4)); - expected.push_front(boost::make_shared(1, 2, 4)); - expected.push_front(boost::make_shared(0, 1, 2)); - EXPECT(assert_equal(expected,*actual)); - - { - // jointBayesNet - vector js; - js.push_back(0); - js.push_back(4); - js.push_back(3); - SymbolicBayesNet::shared_ptr actualBN = solver.jointBayesNet(js); - SymbolicBayesNet expectedBN; - expectedBN.push_front(boost::make_shared(3)); - expectedBN.push_front(boost::make_shared(4, 3)); - expectedBN.push_front(boost::make_shared(0, 4)); - EXPECT( assert_equal(expectedBN,*actualBN)); - - // jointFactorGraph - SymbolicFactorGraph::shared_ptr actualFG = solver.jointFactorGraph(js); - SymbolicFactorGraph expectedFG; - expectedFG.push_factor(0, 4); - expectedFG.push_factor(4, 3); - expectedFG.push_factor(3); - EXPECT( assert_equal(expectedFG,(SymbolicFactorGraph)(*actualFG))); - } - - { - // jointBayesNet - vector js; - js.push_back(0); - js.push_back(2); - js.push_back(3); - SymbolicBayesNet::shared_ptr actualBN = solver.jointBayesNet(js); - SymbolicBayesNet expectedBN; - expectedBN.push_front(boost::make_shared(2)); - expectedBN.push_front(boost::make_shared(3, 2)); - expectedBN.push_front(boost::make_shared(0, 3, 2)); - EXPECT( assert_equal(expectedBN,*actualBN)); - - // jointFactorGraph - SymbolicFactorGraph::shared_ptr actualFG = solver.jointFactorGraph(js); - SymbolicFactorGraph expectedFG; - expectedFG.push_factor(0, 3, 2); - expectedFG.push_factor(3, 2); - expectedFG.push_factor(2); - EXPECT( assert_equal(expectedFG,(SymbolicFactorGraph)(*actualFG))); - } - - { - // conditionalBayesNet - vector js; - js.push_back(0); - js.push_back(2); - js.push_back(3); - size_t nrFrontals = 2; - SymbolicBayesNet::shared_ptr actualBN = // - solver.conditionalBayesNet(js, nrFrontals); - SymbolicBayesNet expectedBN; - expectedBN.push_front(boost::make_shared(2, 3)); - expectedBN.push_front(boost::make_shared(0, 2, 3)); - EXPECT( assert_equal(expectedBN,*actualBN)); - } -} - -/* ************************************************************************* */ -int main() { - TestResult tr; - return TestRegistry::runAllTests(tr); -} -/* ************************************************************************* */ diff --git a/gtsam/inference/tests/testVariableSlots.cpp b/gtsam/inference/tests/testVariableSlots.cpp index 1dd422908..f61b49bdd 100644 --- a/gtsam/inference/tests/testVariableSlots.cpp +++ b/gtsam/inference/tests/testVariableSlots.cpp @@ -20,7 +20,7 @@ #include #include -#include +#include #include diff --git a/gtsam/linear/Errors.cpp b/gtsam/linear/Errors.cpp index 1aae91869..c0740f756 100644 --- a/gtsam/linear/Errors.cpp +++ b/gtsam/linear/Errors.cpp @@ -18,7 +18,9 @@ */ #include +#include #include +#include using namespace std; @@ -28,11 +30,9 @@ namespace gtsam { Errors::Errors(){} /* ************************************************************************* */ -Errors::Errors(const VectorValues &V) { - this->resize(V.size()) ; - int i = 0 ; - BOOST_FOREACH( Vector &e, *this) { - e = V[i++]; +Errors::Errors(const VectorValues& V) { + BOOST_FOREACH(const Vector& e, V | boost::adaptors::map_values) { + push_back(e); } } diff --git a/gtsam/linear/Errors.h b/gtsam/linear/Errors.h index f3ccffb14..13da360a5 100644 --- a/gtsam/linear/Errors.h +++ b/gtsam/linear/Errors.h @@ -19,19 +19,25 @@ #pragma once -#include +#include +#include + +#include namespace gtsam { - + + // Forward declarations + class VectorValues; + /** vector of errors */ - class Errors : public std::list { + class Errors : public FastList { public: GTSAM_EXPORT Errors() ; - /** break V into pieces according to its start indices */ - GTSAM_EXPORT Errors(const VectorValues &V) ; + /** break V into pieces according to its start indices */ + GTSAM_EXPORT Errors(const VectorValues&V); /** print */ GTSAM_EXPORT void print(const std::string& s = "Errors") const; @@ -51,13 +57,13 @@ namespace gtsam { }; // Errors /** - * dot product - */ + * dot product + */ GTSAM_EXPORT double dot(const Errors& a, const Errors& b); /** - * BLAS level 2 style - */ + * BLAS level 2 style + */ template <> GTSAM_EXPORT void axpy(double alpha, const Errors& x, Errors& y); diff --git a/gtsam/linear/GaussianBayesNet.cpp b/gtsam/linear/GaussianBayesNet.cpp index 8e1520235..8b3dc0084 100644 --- a/gtsam/linear/GaussianBayesNet.cpp +++ b/gtsam/linear/GaussianBayesNet.cpp @@ -17,239 +17,160 @@ #include #include -#include -#include +#include +#include #include -#include -#include - -#include using namespace std; using namespace gtsam; -// trick from some reading group #define FOREACH_PAIR( KEY, VAL, COL) BOOST_FOREACH (boost::tie(KEY,VAL),COL) #define REVERSE_FOREACH_PAIR( KEY, VAL, COL) BOOST_REVERSE_FOREACH (boost::tie(KEY,VAL),COL) namespace gtsam { -// Explicitly instantiate so we don't have to include everywhere -template class BayesNet; + // Instantiate base class + template class FactorGraph; -/* ************************************************************************* */ -GaussianBayesNet scalarGaussian(Index key, double mu, double sigma) { - GaussianBayesNet bn; - GaussianConditional::shared_ptr - conditional(new GaussianConditional(key, Vector_(1,mu)/sigma, eye(1)/sigma, ones(1))); - bn.push_back(conditional); - return bn; -} - -/* ************************************************************************* */ -GaussianBayesNet simpleGaussian(Index key, const Vector& mu, double sigma) { - GaussianBayesNet bn; - size_t n = mu.size(); - GaussianConditional::shared_ptr - conditional(new GaussianConditional(key, mu/sigma, eye(n)/sigma, ones(n))); - bn.push_back(conditional); - return bn; -} - -/* ************************************************************************* */ -void push_front(GaussianBayesNet& bn, Index key, Vector d, Matrix R, - Index name1, Matrix S, Vector sigmas) { - GaussianConditional::shared_ptr cg(new GaussianConditional(key, d, R, name1, S, sigmas)); - bn.push_front(cg); -} - -/* ************************************************************************* */ -void push_front(GaussianBayesNet& bn, Index key, Vector d, Matrix R, - Index name1, Matrix S, Index name2, Matrix T, Vector sigmas) { - GaussianConditional::shared_ptr cg(new GaussianConditional(key, d, R, name1, S, name2, T, sigmas)); - bn.push_front(cg); -} - -/* ************************************************************************* */ -boost::shared_ptr allocateVectorValues(const GaussianBayesNet& bn) { - vector dimensions(bn.size()); - Index var = 0; - BOOST_FOREACH(const boost::shared_ptr conditional, bn) { - dimensions[var++] = conditional->dim(); - } - return boost::shared_ptr(new VectorValues(dimensions)); -} - -/* ************************************************************************* */ -VectorValues optimize(const GaussianBayesNet& bn) { - VectorValues x = *allocateVectorValues(bn); - optimizeInPlace(bn, x); - return x; -} - -/* ************************************************************************* */ -// (R*x)./sigmas = y by solving x=inv(R)*(y.*sigmas) -void optimizeInPlace(const GaussianBayesNet& bn, VectorValues& x) { - /** solve each node in turn in topological sort order (parents first)*/ - BOOST_REVERSE_FOREACH(const boost::shared_ptr cg, bn) { - // i^th part of R*x=y, x=inv(R)*y - // (Rii*xi + R_i*x(i+1:))./si = yi <-> xi = inv(Rii)*(yi.*si - R_i*x(i+1:)) - cg->solveInPlace(x); - } -} - -/* ************************************************************************* */ -VectorValues backSubstitute(const GaussianBayesNet& bn, const VectorValues& input) { - VectorValues output = input; - BOOST_REVERSE_FOREACH(const boost::shared_ptr cg, bn) { - const Index key = *(cg->beginFrontals()); - Vector xS = internal::extractVectorValuesSlices(output, cg->beginParents(), cg->endParents()); - xS = input[key] - cg->get_S() * xS; - output[key] = cg->get_R().triangularView().solve(xS); + /* ************************************************************************* */ + bool GaussianBayesNet::equals(const This& bn, double tol) const + { + return Base::equals(bn, tol); } - BOOST_FOREACH(const boost::shared_ptr cg, bn) { - cg->scaleFrontalsBySigma(output); - } - - return output; -} - - -/* ************************************************************************* */ -// gy=inv(L)*gx by solving L*gy=gx. -// gy=inv(R'*inv(Sigma))*gx -// gz'*R'=gx', gy = gz.*sigmas -VectorValues backSubstituteTranspose(const GaussianBayesNet& bn, - const VectorValues& gx) { - - // Initialize gy from gx - // TODO: used to insert zeros if gx did not have an entry for a variable in bn - VectorValues gy = gx; - - // we loop from first-eliminated to last-eliminated - // i^th part of L*gy=gx is done block-column by block-column of L - BOOST_FOREACH(const boost::shared_ptr cg, bn) - cg->solveTransposeInPlace(gy); - - // Scale gy - BOOST_FOREACH(GaussianConditional::shared_ptr cg, bn) - cg->scaleFrontalsBySigma(gy); - - return gy; -} - -/* ************************************************************************* */ -VectorValues optimizeGradientSearch(const GaussianBayesNet& Rd) { - gttic(Allocate_VectorValues); - VectorValues grad = *allocateVectorValues(Rd); - gttoc(Allocate_VectorValues); - - optimizeGradientSearchInPlace(Rd, grad); - - return grad; -} - -/* ************************************************************************* */ -void optimizeGradientSearchInPlace(const GaussianBayesNet& Rd, VectorValues& grad) { - gttic(Compute_Gradient); - // Compute gradient (call gradientAtZero function, which is defined for various linear systems) - gradientAtZero(Rd, grad); - double gradientSqNorm = grad.dot(grad); - gttoc(Compute_Gradient); - - gttic(Compute_Rg); - // Compute R * g - FactorGraph Rd_jfg(Rd); - Errors Rg = Rd_jfg * grad; - gttoc(Compute_Rg); - - gttic(Compute_minimizing_step_size); - // Compute minimizing step size - double step = -gradientSqNorm / dot(Rg, Rg); - gttoc(Compute_minimizing_step_size); - - gttic(Compute_point); - // Compute steepest descent point - scal(step, grad); - gttoc(Compute_point); -} - -/* ************************************************************************* */ -pair matrix(const GaussianBayesNet& bn) { - - // add the dimensions of all variables to get matrix dimension - // and at the same time create a mapping from keys to indices - size_t N=0; map mapping; - BOOST_FOREACH(GaussianConditional::shared_ptr cg,bn) { - GaussianConditional::const_iterator it = cg->beginFrontals(); - for (; it != cg->endFrontals(); ++it) { - mapping.insert(make_pair(*it,N)); - N += cg->dim(it); + /* ************************************************************************* */ + VectorValues GaussianBayesNet::optimize() const + { + VectorValues soln; + // (R*x)./sigmas = y by solving x=inv(R)*(y.*sigmas) + /** solve each node in turn in topological sort order (parents first)*/ + BOOST_REVERSE_FOREACH(const sharedConditional& cg, *this) { + // i^th part of R*x=y, x=inv(R)*y + // (Rii*xi + R_i*x(i+1:))./si = yi <-> xi = inv(Rii)*(yi.*si - R_i*x(i+1:)) + soln.insert(cg->solve(soln)); } + return soln; } - // create matrix and copy in values - Matrix R = zeros(N,N); - Vector d(N); - Index key; size_t I; - FOREACH_PAIR(key,I,mapping) { - // find corresponding conditional - boost::shared_ptr cg = bn[key]; - - // get sigmas - Vector sigmas = cg->get_sigmas(); - - // get RHS and copy to d - GaussianConditional::const_d_type d_ = cg->get_d(); - const size_t n = d_.size(); - for (size_t i=0;iget_R(); - for (size_t i=0;ibeginParents(); - for (; keyS!=cg->endParents(); keyS++) { - Matrix S = cg->get_S(keyS); // get S matrix - const size_t m = S.rows(), n = S.cols(); // find S size - const size_t J = mapping[*keyS]; // find column index - for (size_t i=0;i cg, bayesNet){ - logDet += cg->get_R().diagonal().unaryExpr(ptr_fun(log)).sum(); + /* ************************************************************************* */ + VectorValues GaussianBayesNet::optimizeGradientSearch() const + { + gttic(GaussianBayesTree_optimizeGradientSearch); + return GaussianFactorGraph(*this).optimizeGradientSearch(); } - return exp(logDet); -} + /* ************************************************************************* */ + VectorValues GaussianBayesNet::gradient(const VectorValues& x0) const { + return GaussianFactorGraph(*this).gradient(x0); + } -/* ************************************************************************* */ -VectorValues gradient(const GaussianBayesNet& bayesNet, const VectorValues& x0) { - return gradient(GaussianFactorGraph(bayesNet), x0); -} + /* ************************************************************************* */ + VectorValues GaussianBayesNet::gradientAtZero() const { + return GaussianFactorGraph(*this).gradientAtZero(); + } -/* ************************************************************************* */ -void gradientAtZero(const GaussianBayesNet& bayesNet, VectorValues& g) { - gradientAtZero(GaussianFactorGraph(bayesNet), g); -} + /* ************************************************************************* */ + double GaussianBayesNet::error(const VectorValues& x) const { + return GaussianFactorGraph(*this).error(x); + } -/* ************************************************************************* */ + /* ************************************************************************* */ + VectorValues GaussianBayesNet::backSubstitute(const VectorValues& rhs) const + { + VectorValues result; + BOOST_REVERSE_FOREACH(const sharedConditional& cg, *this) { + result.insert(cg->solveOtherRHS(result, rhs)); + } + return result; + } + + + /* ************************************************************************* */ + // gy=inv(L)*gx by solving L*gy=gx. + // gy=inv(R'*inv(Sigma))*gx + // gz'*R'=gx', gy = gz.*sigmas + VectorValues GaussianBayesNet::backSubstituteTranspose(const VectorValues& gx) const + { + // Initialize gy from gx + // TODO: used to insert zeros if gx did not have an entry for a variable in bn + VectorValues gy = gx; + + // we loop from first-eliminated to last-eliminated + // i^th part of L*gy=gx is done block-column by block-column of L + BOOST_FOREACH(const sharedConditional& cg, *this) + cg->solveTransposeInPlace(gy); + + return gy; + } + + ///* ************************************************************************* */ + //VectorValues GaussianBayesNet::optimizeGradientSearch() const + //{ + // gttic(Compute_Gradient); + // // Compute gradient (call gradientAtZero function, which is defined for various linear systems) + // VectorValues grad = gradientAtZero(); + // double gradientSqNorm = grad.dot(grad); + // gttoc(Compute_Gradient); + + // gttic(Compute_Rg); + // // Compute R * g + // Errors Rg = GaussianFactorGraph(*this) * grad; + // gttoc(Compute_Rg); + + // gttic(Compute_minimizing_step_size); + // // Compute minimizing step size + // double step = -gradientSqNorm / dot(Rg, Rg); + // gttoc(Compute_minimizing_step_size); + + // gttic(Compute_point); + // // Compute steepest descent point + // scal(step, grad); + // gttoc(Compute_point); + + // return grad; + //} + + /* ************************************************************************* */ + pair GaussianBayesNet::matrix() const + { + return GaussianFactorGraph(*this).jacobian(); + } + + ///* ************************************************************************* */ + //VectorValues GaussianBayesNet::gradient(const VectorValues& x0) const + //{ + // return GaussianFactorGraph(*this).gradient(x0); + //} + + ///* ************************************************************************* */ + //VectorValues GaussianBayesNet::gradientAtZero() const + //{ + // return GaussianFactorGraph(*this).gradientAtZero(); + //} + + /* ************************************************************************* */ + double GaussianBayesNet::determinant() const + { + return exp(logDeterminant()); + } + + /* ************************************************************************* */ + double GaussianBayesNet::logDeterminant() const + { + double logDet = 0.0; + BOOST_FOREACH(const sharedConditional& cg, *this) { + if(cg->get_model()) { + Vector diag = cg->get_R().diagonal(); + cg->get_model()->whitenInPlace(diag); + logDet += diag.unaryExpr(ptr_fun(log)).sum(); + } else { + logDet += cg->get_R().diagonal().unaryExpr(ptr_fun(log)).sum(); + } + } + return logDet; + } + + /* ************************************************************************* */ } // namespace gtsam diff --git a/gtsam/linear/GaussianBayesNet.h b/gtsam/linear/GaussianBayesNet.h index 3052e3926..26b187369 100644 --- a/gtsam/linear/GaussianBayesNet.h +++ b/gtsam/linear/GaussianBayesNet.h @@ -20,142 +20,155 @@ #pragma once -#include #include -#include +#include +#include namespace gtsam { /** A Bayes net made from linear-Gaussian densities */ - typedef BayesNet GaussianBayesNet; + class GTSAM_EXPORT GaussianBayesNet: public FactorGraph + { + public: - /** Create a scalar Gaussian */ - GTSAM_EXPORT GaussianBayesNet scalarGaussian(Index key, double mu=0.0, double sigma=1.0); + typedef FactorGraph Base; + typedef GaussianBayesNet This; + typedef GaussianConditional ConditionalType; + typedef boost::shared_ptr shared_ptr; + typedef boost::shared_ptr sharedConditional; - /** Create a simple Gaussian on a single multivariate variable */ - GTSAM_EXPORT GaussianBayesNet simpleGaussian(Index key, const Vector& mu, double sigma=1.0); + /// @name Standard Constructors + /// @{ - /** - * Add a conditional node with one parent - * |Rx+Sy-d| - */ - GTSAM_EXPORT void push_front(GaussianBayesNet& bn, Index key, Vector d, Matrix R, - Index name1, Matrix S, Vector sigmas); + /** Construct empty factor graph */ + GaussianBayesNet() {} - /** - * Add a conditional node with two parents - * |Rx+Sy+Tz-d| - */ - GTSAM_EXPORT void push_front(GaussianBayesNet& bn, Index key, Vector d, Matrix R, - Index name1, Matrix S, Index name2, Matrix T, Vector sigmas); + /** Construct from iterator over conditionals */ + template + GaussianBayesNet(ITERATOR firstConditional, ITERATOR lastConditional) : Base(firstConditional, lastConditional) {} - /** - * Allocate a VectorValues for the variables in a BayesNet - */ - GTSAM_EXPORT boost::shared_ptr allocateVectorValues(const GaussianBayesNet& bn); + /** Construct from container of factors (shared_ptr or plain objects) */ + template + explicit GaussianBayesNet(const CONTAINER& conditionals) : Base(conditionals) {} - /** - * Solve the GaussianBayesNet, i.e. return \f$ x = R^{-1}*d \f$, computed by - * back-substitution. - */ - GTSAM_EXPORT VectorValues optimize(const GaussianBayesNet& bn); + /** Implicit copy/downcast constructor to override explicit template container constructor */ + template + GaussianBayesNet(const FactorGraph& graph) : Base(graph) {} - /** - * Solve the GaussianBayesNet, i.e. return \f$ x = R^{-1}*d \f$, computed by - * back-substitution, writes the solution \f$ x \f$ into a pre-allocated - * VectorValues. You can use allocateVectorValues(const GaussianBayesNet&) - * allocate it. See also optimize(const GaussianBayesNet&), which does not - * require pre-allocation. - */ - GTSAM_EXPORT void optimizeInPlace(const GaussianBayesNet& bn, VectorValues& x); + /// @} - /** - * Optimize along the gradient direction, with a closed-form computation to - * perform the line search. The gradient is computed about \f$ \delta x=0 \f$. - * - * This function returns \f$ \delta x \f$ that minimizes a reparametrized - * problem. The error function of a GaussianBayesNet is - * - * \f[ f(\delta x) = \frac{1}{2} |R \delta x - d|^2 = \frac{1}{2}d^T d - d^T R \delta x + \frac{1}{2} \delta x^T R^T R \delta x \f] - * - * with gradient and Hessian - * - * \f[ g(\delta x) = R^T(R\delta x - d), \qquad G(\delta x) = R^T R. \f] - * - * This function performs the line search in the direction of the - * gradient evaluated at \f$ g = g(\delta x = 0) \f$ with step size - * \f$ \alpha \f$ that minimizes \f$ f(\delta x = \alpha g) \f$: - * - * \f[ f(\alpha) = \frac{1}{2} d^T d + g^T \delta x + \frac{1}{2} \alpha^2 g^T G g \f] - * - * Optimizing by setting the derivative to zero yields - * \f$ \hat \alpha = (-g^T g) / (g^T G g) \f$. For efficiency, this function - * evaluates the denominator without computing the Hessian \f$ G \f$, returning - * - * \f[ \delta x = \hat\alpha g = \frac{-g^T g}{(R g)^T(R g)} \f] - * - * @param bn The GaussianBayesNet on which to perform this computation - * @return The resulting \f$ \delta x \f$ as described above - */ - GTSAM_EXPORT VectorValues optimizeGradientSearch(const GaussianBayesNet& bn); + /// @name Testable + /// @{ - /** In-place version of optimizeGradientSearch(const GaussianBayesNet&) requiring pre-allocated VectorValues \c grad - * - * @param bn The GaussianBayesNet on which to perform this computation - * @param [out] grad The resulting \f$ \delta x \f$ as described in optimizeGradientSearch(const GaussianBayesNet&) - * */ - GTSAM_EXPORT void optimizeGradientSearchInPlace(const GaussianBayesNet& bn, VectorValues& grad); + /** Check equality */ + bool equals(const This& bn, double tol = 1e-9) const; - /** - * Backsubstitute - * gy=inv(R*inv(Sigma))*gx - */ - GTSAM_EXPORT VectorValues backSubstitute(const GaussianBayesNet& bn, const VectorValues& gx); + /// @} - /** - * Transpose Backsubstitute - * gy=inv(L)*gx by solving L*gy=gx. - * gy=inv(R'*inv(Sigma))*gx - * gz'*R'=gx', gy = gz.*sigmas - */ - GTSAM_EXPORT VectorValues backSubstituteTranspose(const GaussianBayesNet& bn, const VectorValues& gx); + /// @name Standard Interface + /// @{ - /** - * Return (dense) upper-triangular matrix representation - */ - GTSAM_EXPORT std::pair matrix(const GaussianBayesNet&); + /** + * Solve the GaussianBayesNet, i.e. return \f$ x = R^{-1}*d \f$, computed by + * back-substitution. + */ + VectorValues optimize() const; - /** - * Computes the determinant of a GassianBayesNet - * A GaussianBayesNet is an upper triangular matrix and for an upper triangular matrix - * determinant is the product of the diagonal elements. Instead of actually multiplying - * we add the logarithms of the diagonal elements and take the exponent at the end - * because this is more numerically stable. - * @param bayesNet The input GaussianBayesNet - * @return The determinant - */ - GTSAM_EXPORT double determinant(const GaussianBayesNet& bayesNet); + ///@} - /** - * Compute the gradient of the energy function, - * \f$ \nabla_{x=x_0} \left\Vert \Sigma^{-1} R x - d \right\Vert^2 \f$, - * centered around \f$ x = x_0 \f$. - * The gradient is \f$ R^T(Rx-d) \f$. - * @param bayesNet The Gaussian Bayes net $(R,d)$ - * @param x0 The center about which to compute the gradient - * @return The gradient as a VectorValues - */ - GTSAM_EXPORT VectorValues gradient(const GaussianBayesNet& bayesNet, const VectorValues& x0); + ///@name Linear Algebra + ///@{ + + /** + * Return (dense) upper-triangular matrix representation + */ + std::pair matrix() const; - /** - * Compute the gradient of the energy function, - * \f$ \nabla_{x=0} \left\Vert \Sigma^{-1} R x - d \right\Vert^2 \f$, - * centered around zero. - * The gradient about zero is \f$ -R^T d \f$. See also gradient(const GaussianBayesNet&, const VectorValues&). - * @param bayesNet The Gaussian Bayes net $(R,d)$ - * @param [output] g A VectorValues to store the gradient, which must be preallocated, see allocateVectorValues - * @return The gradient as a VectorValues - */ - GTSAM_EXPORT void gradientAtZero(const GaussianBayesNet& bayesNet, VectorValues& g); + /** + * Optimize along the gradient direction, with a closed-form computation to perform the line + * search. The gradient is computed about \f$ \delta x=0 \f$. + * + * This function returns \f$ \delta x \f$ that minimizes a reparametrized problem. The error + * function of a GaussianBayesNet is + * + * \f[ f(\delta x) = \frac{1}{2} |R \delta x - d|^2 = \frac{1}{2}d^T d - d^T R \delta x + + * \frac{1}{2} \delta x^T R^T R \delta x \f] + * + * with gradient and Hessian + * + * \f[ g(\delta x) = R^T(R\delta x - d), \qquad G(\delta x) = R^T R. \f] + * + * This function performs the line search in the direction of the gradient evaluated at \f$ g = + * g(\delta x = 0) \f$ with step size \f$ \alpha \f$ that minimizes \f$ f(\delta x = \alpha g) + * \f$: + * + * \f[ f(\alpha) = \frac{1}{2} d^T d + g^T \delta x + \frac{1}{2} \alpha^2 g^T G g \f] + * + * Optimizing by setting the derivative to zero yields \f$ \hat \alpha = (-g^T g) / (g^T G g) + * \f$. For efficiency, this function evaluates the denominator without computing the Hessian + * \f$ G \f$, returning + * + * \f[ \delta x = \hat\alpha g = \frac{-g^T g}{(R g)^T(R g)} \f] */ + VectorValues optimizeGradientSearch() const; + + /** Compute the gradient of the energy function, \f$ \nabla_{x=x_0} \left\Vert \Sigma^{-1} R x - + * d \right\Vert^2 \f$, centered around \f$ x = x_0 \f$. The gradient is \f$ R^T(Rx-d) \f$. + * + * @param x0 The center about which to compute the gradient + * @return The gradient as a VectorValues */ + VectorValues gradient(const VectorValues& x0) const; + + /** Compute the gradient of the energy function, \f$ \nabla_{x=0} \left\Vert \Sigma^{-1} R x - d + * \right\Vert^2 \f$, centered around zero. The gradient about zero is \f$ -R^T d \f$. See also + * gradient(const GaussianBayesNet&, const VectorValues&). + * + * @param [output] g A VectorValues to store the gradient, which must be preallocated, see + * allocateVectorValues */ + VectorValues gradientAtZero() const; + + /** Mahalanobis norm error. */ + double error(const VectorValues& x) const; + + /** + * Computes the determinant of a GassianBayesNet. A GaussianBayesNet is an upper triangular + * matrix and for an upper triangular matrix determinant is the product of the diagonal + * elements. Instead of actually multiplying we add the logarithms of the diagonal elements and + * take the exponent at the end because this is more numerically stable. + * @param bayesNet The input GaussianBayesNet + * @return The determinant */ + double determinant() const; + + /** + * Computes the log of the determinant of a GassianBayesNet. A GaussianBayesNet is an upper + * triangular matrix and for an upper triangular matrix determinant is the product of the + * diagonal elements. + * @param bayesNet The input GaussianBayesNet + * @return The determinant */ + double logDeterminant() const; + + /** + * Backsubstitute with a different RHS vector than the one stored in this BayesNet. + * gy=inv(R*inv(Sigma))*gx + */ + VectorValues backSubstitute(const VectorValues& gx) const; + + /** + * Transpose backsubstitute with a different RHS vector than the one stored in this BayesNet. + * gy=inv(L)*gx by solving L*gy=gx. + * gy=inv(R'*inv(Sigma))*gx + * gz'*R'=gx', gy = gz.*sigmas + */ + VectorValues backSubstituteTranspose(const VectorValues& gx) const; + + /// @} + + private: + /** Serialization function */ + friend class boost::serialization::access; + template + void serialize(ARCHIVE & ar, const unsigned int version) { + ar & BOOST_SERIALIZATION_BASE_OBJECT_NVP(Base); + } + }; } /// namespace gtsam diff --git a/gtsam/linear/GaussianBayesTree.cpp b/gtsam/linear/GaussianBayesTree.cpp index 17d54635c..0ceecdfd2 100644 --- a/gtsam/linear/GaussianBayesTree.cpp +++ b/gtsam/linear/GaussianBayesTree.cpp @@ -17,82 +17,90 @@ * @author Richard Roberts */ +#include +#include +#include +#include #include -#include +#include +#include namespace gtsam { -/* ************************************************************************* */ -VectorValues optimize(const GaussianBayesTree& bayesTree) { - VectorValues result = *allocateVectorValues(bayesTree); - optimizeInPlace(bayesTree, result); - return result; -} + // Instantiate base class + template class BayesTreeCliqueBase; + template class BayesTree; -/* ************************************************************************* */ -void optimizeInPlace(const GaussianBayesTree& bayesTree, VectorValues& result) { - internal::optimizeInPlace(bayesTree.root(), result); -} + /* ************************************************************************* */ + namespace internal + { + /* ************************************************************************* */ + double logDeterminant(const GaussianBayesTreeClique::shared_ptr& clique, double& parentSum) + { + parentSum += clique->conditional()->get_R().diagonal().unaryExpr(std::ptr_fun(log)).sum(); + assert(false); + return 0; + } + } -/* ************************************************************************* */ -VectorValues optimizeGradientSearch(const GaussianBayesTree& bayesTree) { - gttic(Allocate_VectorValues); - VectorValues grad = *allocateVectorValues(bayesTree); - gttoc(Allocate_VectorValues); + /* ************************************************************************* */ + bool GaussianBayesTree::equals(const This& other, double tol) const + { + return Base::equals(other, tol); + } - optimizeGradientSearchInPlace(bayesTree, grad); + /* ************************************************************************* */ + VectorValues GaussianBayesTree::optimize() const + { + return internal::linearAlgorithms::optimizeBayesTree(*this); + } - return grad; -} + /* ************************************************************************* */ + VectorValues GaussianBayesTree::optimizeGradientSearch() const + { + gttic(GaussianBayesTree_optimizeGradientSearch); + return GaussianFactorGraph(*this).optimizeGradientSearch(); + } -/* ************************************************************************* */ -void optimizeGradientSearchInPlace(const GaussianBayesTree& bayesTree, VectorValues& grad) { - gttic(Compute_Gradient); - // Compute gradient (call gradientAtZero function, which is defined for various linear systems) - gradientAtZero(bayesTree, grad); - double gradientSqNorm = grad.dot(grad); - gttoc(Compute_Gradient); + /* ************************************************************************* */ + VectorValues GaussianBayesTree::gradient(const VectorValues& x0) const { + return GaussianFactorGraph(*this).gradient(x0); + } - gttic(Compute_Rg); - // Compute R * g - FactorGraph Rd_jfg(bayesTree); - Errors Rg = Rd_jfg * grad; - gttoc(Compute_Rg); + /* ************************************************************************* */ + VectorValues GaussianBayesTree::gradientAtZero() const { + return GaussianFactorGraph(*this).gradientAtZero(); + } - gttic(Compute_minimizing_step_size); - // Compute minimizing step size - double step = -gradientSqNorm / dot(Rg, Rg); - gttoc(Compute_minimizing_step_size); + /* ************************************************************************* */ + double GaussianBayesTree::error(const VectorValues& x) const { + return GaussianFactorGraph(*this).error(x); + } - gttic(Compute_point); - // Compute steepest descent point - scal(step, grad); - gttoc(Compute_point); -} + /* ************************************************************************* */ + double GaussianBayesTree::logDeterminant() const + { + if(this->roots_.empty()) { + return 0.0; + } else { + double sum = 0.0; + treeTraversal::DepthFirstForest(*this, sum, internal::logDeterminant); + return sum; + } + } -/* ************************************************************************* */ -VectorValues gradient(const GaussianBayesTree& bayesTree, const VectorValues& x0) { - return gradient(FactorGraph(bayesTree), x0); -} + /* ************************************************************************* */ + double GaussianBayesTree::determinant() const + { + return exp(logDeterminant()); + } -/* ************************************************************************* */ -void gradientAtZero(const GaussianBayesTree& bayesTree, VectorValues& g) { - gradientAtZero(FactorGraph(bayesTree), g); -} + /* ************************************************************************* */ + Matrix GaussianBayesTree::marginalCovariance(Key key) const + { + return marginalFactor(key)->information().inverse(); + } -/* ************************************************************************* */ -double determinant(const GaussianBayesTree& bayesTree) { - return exp(logDeterminant(bayesTree)); -} - -/* ************************************************************************* */ -double logDeterminant(const GaussianBayesTree& bayesTree) { - if (!bayesTree.root()) - return 0.0; - - return internal::logDeterminant(bayesTree.root()); -} -/* ************************************************************************* */ } // \namespace gtsam diff --git a/gtsam/linear/GaussianBayesTree.h b/gtsam/linear/GaussianBayesTree.h index 63411b610..86d88e95b 100644 --- a/gtsam/linear/GaussianBayesTree.h +++ b/gtsam/linear/GaussianBayesTree.h @@ -19,106 +19,114 @@ #pragma once +#include +#include #include -#include -#include +#include namespace gtsam { -/// A Bayes Tree representing a Gaussian density -GTSAM_EXPORT typedef BayesTree GaussianBayesTree; + // Forward declarations + class GaussianConditional; + class VectorValues; -/// optimize the BayesTree, starting from the root -GTSAM_EXPORT VectorValues optimize(const GaussianBayesTree& bayesTree); + /* ************************************************************************* */ + /** A clique in a GaussianBayesTree */ + class GTSAM_EXPORT GaussianBayesTreeClique : + public BayesTreeCliqueBase + { + public: + typedef GaussianBayesTreeClique This; + typedef BayesTreeCliqueBase Base; + typedef boost::shared_ptr shared_ptr; + typedef boost::weak_ptr weak_ptr; + GaussianBayesTreeClique() {} + GaussianBayesTreeClique(const boost::shared_ptr& conditional) : Base(conditional) {} + }; -/// recursively optimize this conditional and all subtrees -GTSAM_EXPORT void optimizeInPlace(const GaussianBayesTree& bayesTree, VectorValues& result); + /* ************************************************************************* */ + /** A Bayes tree representing a Gaussian density */ + class GTSAM_EXPORT GaussianBayesTree : + public BayesTree + { + private: + typedef BayesTree Base; -namespace internal { -template -void optimizeInPlace(const typename BAYESTREE::sharedClique& clique, VectorValues& result); -} + public: + typedef GaussianBayesTree This; + typedef boost::shared_ptr shared_ptr; -/** - * Optimize along the gradient direction, with a closed-form computation to - * perform the line search. The gradient is computed about \f$ \delta x=0 \f$. - * - * This function returns \f$ \delta x \f$ that minimizes a reparametrized - * problem. The error function of a GaussianBayesNet is - * - * \f[ f(\delta x) = \frac{1}{2} |R \delta x - d|^2 = \frac{1}{2}d^T d - d^T R \delta x + \frac{1}{2} \delta x^T R^T R \delta x \f] - * - * with gradient and Hessian - * - * \f[ g(\delta x) = R^T(R\delta x - d), \qquad G(\delta x) = R^T R. \f] - * - * This function performs the line search in the direction of the - * gradient evaluated at \f$ g = g(\delta x = 0) \f$ with step size - * \f$ \alpha \f$ that minimizes \f$ f(\delta x = \alpha g) \f$: - * - * \f[ f(\alpha) = \frac{1}{2} d^T d + g^T \delta x + \frac{1}{2} \alpha^2 g^T G g \f] - * - * Optimizing by setting the derivative to zero yields - * \f$ \hat \alpha = (-g^T g) / (g^T G g) \f$. For efficiency, this function - * evaluates the denominator without computing the Hessian \f$ G \f$, returning - * - * \f[ \delta x = \hat\alpha g = \frac{-g^T g}{(R g)^T(R g)} \f] - */ -GTSAM_EXPORT VectorValues optimizeGradientSearch(const GaussianBayesTree& bayesTree); + /** Default constructor, creates an empty Bayes tree */ + GaussianBayesTree() {} -/** In-place version of optimizeGradientSearch requiring pre-allocated VectorValues \c x */ -GTSAM_EXPORT void optimizeGradientSearchInPlace(const GaussianBayesTree& bayesTree, VectorValues& grad); + /** Check equality */ + bool equals(const This& other, double tol = 1e-9) const; -/** - * Compute the gradient of the energy function, - * \f$ \nabla_{x=x_0} \left\Vert \Sigma^{-1} R x - d \right\Vert^2 \f$, - * centered around \f$ x = x_0 \f$. - * The gradient is \f$ R^T(Rx-d) \f$. - * @param bayesTree The Gaussian Bayes Tree $(R,d)$ - * @param x0 The center about which to compute the gradient - * @return The gradient as a VectorValues - */ -GTSAM_EXPORT VectorValues gradient(const GaussianBayesTree& bayesTree, const VectorValues& x0); + /** Recursively optimize the BayesTree to produce a vector solution. */ + VectorValues optimize() const; -/** - * Compute the gradient of the energy function, - * \f$ \nabla_{x=0} \left\Vert \Sigma^{-1} R x - d \right\Vert^2 \f$, - * centered around zero. - * The gradient about zero is \f$ -R^T d \f$. See also gradient(const GaussianBayesNet&, const VectorValues&). - * @param bayesTree The Gaussian Bayes Tree $(R,d)$ - * @param [output] g A VectorValues to store the gradient, which must be preallocated, see allocateVectorValues - * @return The gradient as a VectorValues - */ -GTSAM_EXPORT void gradientAtZero(const GaussianBayesTree& bayesTree, VectorValues& g); + /** + * Optimize along the gradient direction, with a closed-form computation to perform the line + * search. The gradient is computed about \f$ \delta x=0 \f$. + * + * This function returns \f$ \delta x \f$ that minimizes a reparametrized problem. The error + * function of a GaussianBayesNet is + * + * \f[ f(\delta x) = \frac{1}{2} |R \delta x - d|^2 = \frac{1}{2}d^T d - d^T R \delta x + + * \frac{1}{2} \delta x^T R^T R \delta x \f] + * + * with gradient and Hessian + * + * \f[ g(\delta x) = R^T(R\delta x - d), \qquad G(\delta x) = R^T R. \f] + * + * This function performs the line search in the direction of the gradient evaluated at \f$ g = + * g(\delta x = 0) \f$ with step size \f$ \alpha \f$ that minimizes \f$ f(\delta x = \alpha g) + * \f$: + * + * \f[ f(\alpha) = \frac{1}{2} d^T d + g^T \delta x + \frac{1}{2} \alpha^2 g^T G g \f] + * + * Optimizing by setting the derivative to zero yields \f$ \hat \alpha = (-g^T g) / (g^T G g) + * \f$. For efficiency, this function evaluates the denominator without computing the Hessian + * \f$ G \f$, returning + * + * \f[ \delta x = \hat\alpha g = \frac{-g^T g}{(R g)^T(R g)} \f] */ + VectorValues optimizeGradientSearch() const; -/** - * Computes the determinant of a GassianBayesTree - * A GassianBayesTree is an upper triangular matrix and for an upper triangular matrix - * determinant is the product of the diagonal elements. Instead of actually multiplying - * we add the logarithms of the diagonal elements and take the exponent at the end - * because this is more numerically stable. - * @param bayesTree The input GassianBayesTree - * @return The determinant - */ -GTSAM_EXPORT double determinant(const GaussianBayesTree& bayesTree); + /** Compute the gradient of the energy function, \f$ \nabla_{x=x_0} \left\Vert \Sigma^{-1} R x - + * d \right\Vert^2 \f$, centered around \f$ x = x_0 \f$. The gradient is \f$ R^T(Rx-d) \f$. + * + * @param x0 The center about which to compute the gradient + * @return The gradient as a VectorValues */ + VectorValues gradient(const VectorValues& x0) const; -/** - * Computes the log determinant of a GassianBayesTree - * A GassianBayesTree is an upper triangular matrix and for an upper triangular matrix - * determinant is the product of the diagonal elements. Instead of actually multiplying - * we add the logarithms of the diagonal elements because this is more numerically stable. - * @param bayesTree The input GassianBayesTree - * @return The log determinant - */ -GTSAM_EXPORT double logDeterminant(const GaussianBayesTree& bayesTree); + /** Compute the gradient of the energy function, \f$ \nabla_{x=0} \left\Vert \Sigma^{-1} R x - d + * \right\Vert^2 \f$, centered around zero. The gradient about zero is \f$ -R^T d \f$. See also + * gradient(const GaussianBayesNet&, const VectorValues&). + * + * @param [output] g A VectorValues to store the gradient, which must be preallocated, see + * allocateVectorValues */ + VectorValues gradientAtZero() const; + /** Mahalanobis norm error. */ + double error(const VectorValues& x) const; -namespace internal { -template -double logDeterminant(const typename BAYESTREE::sharedClique& clique); -} + /** Computes the determinant of a GassianBayesTree, as if the Bayes tree is reorganized into a + * matrix. A GassianBayesTree is equivalent to an upper triangular matrix, and for an upper + * triangular matrix determinant is the product of the diagonal elements. Instead of actually + * multiplying we add the logarithms of the diagonal elements and take the exponent at the end + * because this is more numerically stable. */ + double determinant() const; + + /** Computes the determinant of a GassianBayesTree, as if the Bayes tree is reorganized into a + * matrix. A GassianBayesTree is equivalent to an upper triangular matrix, and for an upper + * triangular matrix determinant is the product of the diagonal elements. Instead of actually + * multiplying we add the logarithms of the diagonal elements and take the exponent at the end + * because this is more numerically stable. */ + double logDeterminant() const; + + /** Return the marginal on the requested variable as a covariance matrix. See also + * marginalFactor(). */ + Matrix marginalCovariance(Key key) const; + }; } - -#include - diff --git a/gtsam/linear/GaussianBayesTreeOrdered-inl.h b/gtsam/linear/GaussianBayesTreeOrdered-inl.h new file mode 100644 index 000000000..1f6c8e9f5 --- /dev/null +++ b/gtsam/linear/GaussianBayesTreeOrdered-inl.h @@ -0,0 +1,59 @@ +/* ---------------------------------------------------------------------------- + + * GTSAM Copyright 2010, Georgia Tech Research Corporation, + * Atlanta, Georgia 30332-0415 + * All Rights Reserved + * Authors: Frank Dellaert, et al. (see THANKS for the full author list) + + * See LICENSE for the license information + + * -------------------------------------------------------------------------- */ + +/** + * @file GaussianBayesTree-inl.h + * @brief Gaussian Bayes Tree, the result of eliminating a GaussianJunctionTree + * @brief GaussianBayesTree + * @author Frank Dellaert + * @author Richard Roberts + */ + +#pragma once + +#include + +#include // Only to help Eclipse + +#include + +namespace gtsam { + +/* ************************************************************************* */ +namespace internal { +template +void optimizeInPlace(const typename BAYESTREE::sharedClique& clique, VectorValues& result) { + // parents are assumed to already be solved and available in result + clique->conditional()->solveInPlace(result); + + // starting from the root, call optimize on each conditional + BOOST_FOREACH(const typename BAYESTREE::sharedClique& child, clique->children_) + optimizeInPlace(child, result); +} + +/* ************************************************************************* */ +template +double logDeterminant(const typename BAYESTREE::sharedClique& clique) { + double result = 0.0; + + // this clique + result += clique->conditional()->get_R().diagonal().unaryExpr(std::ptr_fun(log)).sum(); + + // sum of children + BOOST_FOREACH(const typename BAYESTREE::sharedClique& child, clique->children_) + result += logDeterminant(child); + + return result; +} + +/* ************************************************************************* */ +} // \namespace internal +} // \namespace gtsam diff --git a/gtsam/linear/GaussianConditional-inl.h b/gtsam/linear/GaussianConditional-inl.h new file mode 100644 index 000000000..6fe7bd642 --- /dev/null +++ b/gtsam/linear/GaussianConditional-inl.h @@ -0,0 +1,48 @@ +/* ---------------------------------------------------------------------------- + + * GTSAM Copyright 2010, Georgia Tech Research Corporation, + * Atlanta, Georgia 30332-0415 + * All Rights Reserved + * Authors: Frank Dellaert, et al. (see THANKS for the full author list) + + * See LICENSE for the license information + + * -------------------------------------------------------------------------- */ + +/** + * @file GaussianConditional-inl.h + * @brief Conditional Gaussian Base class + * @author Christian Potthast + */ + +// \callgraph + +#pragma once + +#include +#include + +namespace gtsam { + + /* ************************************************************************* */ + template + GaussianConditional::GaussianConditional(Index key, const Vector& d, + const Matrix& R, const PARENTS& parents, const SharedDiagonal& sigmas, const typename PARENTS::value_type*) : + BaseFactor(boost::join( + ListOfOne(std::make_pair(key, R)), + parents), d, sigmas), + BaseConditional(1) {} + + /* ************************************************************************* */ + template + GaussianConditional::GaussianConditional(const TERMS& terms, + size_t nrFrontals, const Vector& d, const SharedDiagonal& sigmas) : + BaseFactor(terms, d, sigmas), BaseConditional(nrFrontals) {} + + /* ************************************************************************* */ + template + GaussianConditional::GaussianConditional( + const KEYS& keys, size_t nrFrontals, const VerticalBlockMatrix& augmentedMatrix, const SharedDiagonal& sigmas) : + BaseFactor(keys, augmentedMatrix, sigmas), BaseConditional(nrFrontals) {} + +} // gtsam diff --git a/gtsam/linear/GaussianConditional.cpp b/gtsam/linear/GaussianConditional.cpp index 16b04af14..e8dbbacb0 100644 --- a/gtsam/linear/GaussianConditional.cpp +++ b/gtsam/linear/GaussianConditional.cpp @@ -16,6 +16,7 @@ */ #include +#include #include #ifdef __GNUC__ #pragma GCC diagnostic push @@ -29,210 +30,164 @@ #include #include -#include -#include +#include using namespace std; namespace gtsam { -/* ************************************************************************* */ -GaussianConditional::GaussianConditional() : rsd_(matrix_) {} + /* ************************************************************************* */ + GaussianConditional::GaussianConditional( + Key key, const Vector& d, const Matrix& R, const SharedDiagonal& sigmas) : + BaseFactor(key, R, d, sigmas), BaseConditional(1) {} -/* ************************************************************************* */ -GaussianConditional::GaussianConditional(Index key) : IndexConditional(key), rsd_(matrix_) {} + /* ************************************************************************* */ + GaussianConditional::GaussianConditional( + Key key, const Vector& d, const Matrix& R, + Key name1, const Matrix& S, const SharedDiagonal& sigmas) : + BaseFactor(key, R, name1, S, d, sigmas), BaseConditional(1) {} -/* ************************************************************************* */ -GaussianConditional::GaussianConditional(Index key,const Vector& d, const Matrix& R, const Vector& sigmas) : - IndexConditional(key), rsd_(matrix_), sigmas_(sigmas) { - assert(R.rows() <= R.cols()); - size_t dims[] = { R.cols(), 1 }; - rsd_.copyStructureFrom(rsd_type(matrix_, dims, dims+2, d.size())); - rsd_(0) = R.triangularView(); - get_d_() = d; -} + /* ************************************************************************* */ + GaussianConditional::GaussianConditional( + Key key, const Vector& d, const Matrix& R, + Key name1, const Matrix& S, Key name2, const Matrix& T, const SharedDiagonal& sigmas) : + BaseFactor(key, R, name1, S, name2, T, d, sigmas), BaseConditional(1) {} -/* ************************************************************************* */ -GaussianConditional::GaussianConditional(Index key, const Vector& d, const Matrix& R, - Index name1, const Matrix& S, const Vector& sigmas) : - IndexConditional(key,name1), rsd_(matrix_), sigmas_(sigmas) { - assert(R.rows() <= R.cols()); - size_t dims[] = { R.cols(), S.cols(), 1 }; - rsd_.copyStructureFrom(rsd_type(matrix_, dims, dims+3, d.size())); - rsd_(0) = R.triangularView(); - rsd_(1) = S; - get_d_() = d; -} + /* ************************************************************************* */ + void GaussianConditional::print(const string &s, const IndexFormatter& formatter) const + { + cout << s << " Conditional density "; + for(const_iterator it = beginFrontals(); it != endFrontals(); ++it) { + cout << (boost::format("[%1%]")%(formatter(*it))).str() << " "; + } + cout << endl; + cout << formatMatrixIndented(" R = ", get_R()) << endl; + for(const_iterator it = beginParents() ; it != endParents() ; ++it ) { + cout << formatMatrixIndented((boost::format(" S[%1%] = ")%(formatter(*it))).str(), getA(it)) + << endl; + } + cout << formatMatrixIndented(" d = ", getb(), true) << "\n"; + if(model_) + model_->print(" Noise model: "); + else + cout << " No noise model" << endl; + } -/* ************************************************************************* */ -GaussianConditional::GaussianConditional(Index key, const Vector& d, const Matrix& R, - Index name1, const Matrix& S, Index name2, const Matrix& T, const Vector& sigmas) : - IndexConditional(key,name1,name2), rsd_(matrix_), sigmas_(sigmas) { - assert(R.rows() <= R.cols()); - size_t dims[] = { R.cols(), S.cols(), T.cols(), 1 }; - rsd_.copyStructureFrom(rsd_type(matrix_, dims, dims+4, d.size())); - rsd_(0) = R.triangularView(); - rsd_(1) = S; - rsd_(2) = T; - get_d_() = d; -} + /* ************************************************************************* */ + bool GaussianConditional::equals(const GaussianConditional &c, double tol) const + { + // check if the size of the parents_ map is the same + if (parents().size() != c.parents().size()) + return false; -/* ************************************************************************* */ - GaussianConditional::GaussianConditional(Index key, const Vector& d, - const Matrix& R, const list >& parents, const Vector& sigmas) : - IndexConditional(key, GetKeys(parents.size(), parents.begin(), parents.end())), rsd_(matrix_), sigmas_(sigmas) { - assert(R.rows() <= R.cols()); - size_t* dims = (size_t*)alloca(sizeof(size_t)*(1+parents.size()+1)); // FIXME: alloca is bad, just ask Google. - dims[0] = R.cols(); - size_t j=1; - std::list >::const_iterator parent=parents.begin(); - for(; parent!=parents.end(); ++parent,++j) - dims[j] = parent->second.cols(); - dims[j] = 1; - rsd_.copyStructureFrom(rsd_type(matrix_, dims, dims+1+parents.size()+1, d.size())); - rsd_(0) = R.triangularView(); - j = 1; - for(std::list >::const_iterator parent=parents.begin(); parent!=parents.end(); ++parent) { - rsd_(j).noalias() = parent->second; - ++ j; - } - get_d_() = d; -} + // check if R_ and d_ are linear independent + for (DenseIndex i=0; i rows1; rows1.push_back(Vector(get_R().row(i))); + list rows2; rows2.push_back(Vector(c.get_R().row(i))); -/* ************************************************************************* */ -GaussianConditional::GaussianConditional(const std::list >& terms, - const size_t nrFrontals, const Vector& d, const Vector& sigmas) : - IndexConditional(GetKeys(terms.size(), terms.begin(), terms.end()), nrFrontals), - rsd_(matrix_), sigmas_(sigmas) { - size_t* dims = (size_t*)alloca(sizeof(size_t)*(terms.size()+1)); // FIXME: alloca is bad, just ask Google. - size_t j=0; - typedef pair Index_Matrix; - BOOST_FOREACH(const Index_Matrix& term, terms) { - dims[j] = term.second.cols(); - ++ j; - } - dims[j] = 1; - rsd_.copyStructureFrom(rsd_type(matrix_, dims, dims+terms.size()+1, d.size())); - j=0; - BOOST_FOREACH(const Index_Matrix& term, terms) { - rsd_(j) = term.second; - ++ j; - } - get_d_() = d; -} + // check if the matrices are the same + // iterate over the parents_ map + for (const_iterator it = beginParents(); it != endParents(); ++it) { + const_iterator it2 = c.beginParents() + (it-beginParents()); + if(*it != *(it2)) + return false; + rows1.push_back(row(getA(it), i)); + rows2.push_back(row(c.getA(it2), i)); + } -/* ************************************************************************* */ -GaussianConditional::GaussianConditional(const GaussianConditional& rhs) : - rsd_(matrix_) { - *this = rhs; -} - -/* ************************************************************************* */ -GaussianConditional& GaussianConditional::operator=(const GaussianConditional& rhs) { - if(this != &rhs) { - this->Base::operator=(rhs); // Copy keys - rsd_.assignNoalias(rhs.rsd_); // Copy matrix and block configuration - sigmas_ = rhs.sigmas_; // Copy sigmas - } - return *this; -} - -/* ************************************************************************* */ -void GaussianConditional::print(const string &s, const IndexFormatter& formatter) const -{ - cout << s << ": density on "; - for(const_iterator it = beginFrontals(); it != endFrontals(); ++it) { - cout << (boost::format("[%1%]")%(formatter(*it))).str() << " "; - } - cout << endl; - gtsam::print(Matrix(get_R()),"R"); - for(const_iterator it = beginParents() ; it != endParents() ; ++it ) { - gtsam::print(Matrix(get_S(it)), (boost::format("A[%1%]")%(formatter(*it))).str()); - } - gtsam::print(Vector(get_d()),"d"); - gtsam::print(sigmas_,"sigmas"); -} - -/* ************************************************************************* */ -bool GaussianConditional::equals(const GaussianConditional &c, double tol) const { - // check if the size of the parents_ map is the same - if (parents().size() != c.parents().size()) - return false; - - // check if R_ and d_ are linear independent - for (size_t i=0; i rows1; rows1.push_back(Vector(get_R().row(i))); - list rows2; rows2.push_back(Vector(c.get_R().row(i))); - - // check if the matrices are the same - // iterate over the parents_ map - for (const_iterator it = beginParents(); it != endParents(); ++it) { - const_iterator it2 = c.beginParents() + (it-beginParents()); - if(*it != *(it2)) + Vector row1 = concatVectors(rows1); + Vector row2 = concatVectors(rows2); + if (!linear_dependent(row1, row2, tol)) return false; - rows1.push_back(row(get_S(it), i)); - rows2.push_back(row(c.get_S(it2), i)); } - Vector row1 = concatVectors(rows1); - Vector row2 = concatVectors(rows2); - if (!linear_dependent(row1, row2, tol)) + // check if sigmas are equal + if ((model_ && !c.model_) || (!model_ && c.model_) + || (model_ && c.model_ && !model_->equals(*c.model_, tol))) return false; + + return true; } - // check if sigmas are equal - if (!(equal_with_abs_tol(sigmas_, c.sigmas_, tol))) return false; + /* ************************************************************************* */ + VectorValues GaussianConditional::solve(const VectorValues& x) const + { + // Solve matrix + Vector xS = x.vector(vector(beginParents(), endParents())); + xS = getb() - get_S() * xS; + Vector soln = get_R().triangularView().solve(xS); - return true; -} + // Check for indeterminant solution + if(soln.hasNaN()) throw IndeterminantLinearSystemException(keys().front()); -/* ************************************************************************* */ -JacobianFactor::shared_ptr GaussianConditional::toFactor() const { - return JacobianFactor::shared_ptr(new JacobianFactor(*this)); -} + // Insert solution into a VectorValues + VectorValues result; + DenseIndex vectorPosition = 0; + for(const_iterator frontal = beginFrontals(); frontal != endFrontals(); ++frontal) { + result.insert(*frontal, soln.segment(vectorPosition, getDim(frontal))); + vectorPosition += getDim(frontal); + } -/* ************************************************************************* */ -void GaussianConditional::solveInPlace(VectorValues& x) const { - static const bool debug = false; - if(debug) this->print("Solving conditional in place"); - Vector xS = internal::extractVectorValuesSlices(x, this->beginParents(), this->endParents()); - xS = this->get_d() - this->get_S() * xS; - Vector soln = this->get_R().triangularView().solve(xS); - - // Check for indeterminant solution - if(soln.unaryExpr(!boost::lambda::bind(ptr_fun(isfinite), boost::lambda::_1)).any()) - throw IndeterminantLinearSystemException(this->keys().front()); - - if(debug) { - gtsam::print(Matrix(this->get_R()), "Calling backSubstituteUpper on "); - gtsam::print(soln, "full back-substitution solution: "); + return result; } - internal::writeVectorValuesSlices(soln, x, this->beginFrontals(), this->endFrontals()); -} -/* ************************************************************************* */ -void GaussianConditional::solveTransposeInPlace(VectorValues& gy) const { - Vector frontalVec = internal::extractVectorValuesSlices(gy, beginFrontals(), endFrontals()); - frontalVec = gtsam::backSubstituteUpper(frontalVec,Matrix(get_R())); + /* ************************************************************************* */ + VectorValues GaussianConditional::solveOtherRHS( + const VectorValues& parents, const VectorValues& rhs) const + { + Vector xS = parents.vector(vector(beginParents(), endParents())); + const Vector rhsR = rhs.vector(vector(beginFrontals(), endFrontals())); + xS = rhsR - get_S() * xS; + Vector soln = get_R().triangularView().solve(xS); - // Check for indeterminant solution - if(frontalVec.unaryExpr(!boost::lambda::bind(ptr_fun(isfinite), boost::lambda::_1)).any()) - throw IndeterminantLinearSystemException(this->keys().front()); + // Scale by sigmas + if(model_) + soln.array() *= model_->sigmas().array(); - GaussianConditional::const_iterator it; - for (it = beginParents(); it!= endParents(); it++) { - const Index i = *it; - transposeMultiplyAdd(-1.0,get_S(it),frontalVec,gy[i]); + // Insert solution into a VectorValues + VectorValues result; + DenseIndex vectorPosition = 0; + for(const_iterator frontal = beginFrontals(); frontal != endFrontals(); ++frontal) { + result.insert(*frontal, soln.segment(vectorPosition, getDim(frontal))); + vectorPosition += getDim(frontal); + } + + return result; } - internal::writeVectorValuesSlices(frontalVec, gy, beginFrontals(), endFrontals()); -} -/* ************************************************************************* */ -void GaussianConditional::scaleFrontalsBySigma(VectorValues& gy) const { - Vector frontalVec = internal::extractVectorValuesSlices(gy, beginFrontals(), endFrontals()); - frontalVec = emul(frontalVec, get_sigmas()); - internal::writeVectorValuesSlices(frontalVec, gy, beginFrontals(), endFrontals()); -} + /* ************************************************************************* */ + void GaussianConditional::solveTransposeInPlace(VectorValues& gy) const + { + Vector frontalVec = gy.vector(vector(beginFrontals(), endFrontals())); + frontalVec = gtsam::backSubstituteUpper(frontalVec, Matrix(get_R())); + + // Check for indeterminant solution + if (frontalVec.hasNaN()) throw IndeterminantLinearSystemException(this->keys().front()); + + for (const_iterator it = beginParents(); it!= endParents(); it++) + gtsam::transposeMultiplyAdd(-1.0, Matrix(getA(it)), frontalVec, gy[*it]); + + // Scale by sigmas + if(model_) + frontalVec.array() *= model_->sigmas().array(); + + // Write frontal solution into a VectorValues + DenseIndex vectorPosition = 0; + for(const_iterator frontal = beginFrontals(); frontal != endFrontals(); ++frontal) { + gy[*frontal] = frontalVec.segment(vectorPosition, getDim(frontal)); + vectorPosition += getDim(frontal); + } + } + + /* ************************************************************************* */ + void GaussianConditional::scaleFrontalsBySigma(VectorValues& gy) const + { + DenseIndex vectorPosition = 0; + for(const_iterator frontal = beginFrontals(); frontal != endFrontals(); ++frontal) { + gy[*frontal].array() *= model_->sigmas().segment(vectorPosition, getDim(frontal)).array(); + vectorPosition += getDim(frontal); + } + } } diff --git a/gtsam/linear/GaussianConditional.h b/gtsam/linear/GaussianConditional.h index f20d10ef9..a6532f291 100644 --- a/gtsam/linear/GaussianConditional.h +++ b/gtsam/linear/GaussianConditional.h @@ -22,279 +22,135 @@ #include #include -#include -#include +#include +#include #include -// Forward declaration to friend unit tests -class JacobianFactoreliminate2Test; -class GaussianConditionalconstructorTest; -class GaussianFactorGrapheliminationTest; -class GaussianJunctionTreecomplicatedMarginalTest; -class GaussianConditionalinformationTest; -class GaussianConditionalisGaussianFactorTest; - namespace gtsam { -// Forward declarations -class GaussianFactor; -class JacobianFactor; - -/** - * A conditional Gaussian functions as the node in a Bayes network - * It has a set of parents y,z, etc. and implements a probability density on x. - * The negative log-probability is given by \f$ \frac{1}{2} |Rx - (d - Sy - Tz - ...)|^2 \f$ - */ -class GTSAM_EXPORT GaussianConditional : public IndexConditional { - -public: - typedef GaussianFactor FactorType; - typedef boost::shared_ptr shared_ptr; - - /** Store the conditional matrix as upper-triangular column-major */ - typedef Matrix RdMatrix; - typedef VerticalBlockView rsd_type; - - typedef rsd_type::Block r_type; - typedef rsd_type::constBlock const_r_type; - typedef rsd_type::Column d_type; - typedef rsd_type::constColumn const_d_type; - -protected: - - /** Store the conditional as one big upper-triangular wide matrix, arranged - * as \f$ [ R S1 S2 ... d ] \f$. Access these blocks using a VerticalBlockView. - * */ - RdMatrix matrix_; - rsd_type rsd_; - - /** vector of standard deviations */ - Vector sigmas_; - - /** typedef to base class */ - typedef IndexConditional Base; - -public: - - /** default constructor needed for serialization */ - GaussianConditional(); - - /** constructor */ - explicit GaussianConditional(Index key); - - /** constructor with no parents - * |Rx-d| - */ - GaussianConditional(Index key, const Vector& d, const Matrix& R, const Vector& sigmas); - - /** constructor with only one parent - * |Rx+Sy-d| - */ - GaussianConditional(Index key, const Vector& d, const Matrix& R, - Index name1, const Matrix& S, const Vector& sigmas); - - /** constructor with two parents - * |Rx+Sy+Tz-d| - */ - GaussianConditional(Index key, const Vector& d, const Matrix& R, - Index name1, const Matrix& S, Index name2, const Matrix& T, const Vector& sigmas); - /** - * constructor with number of arbitrary parents (only used in unit tests, - * std::list is not efficient) - * \f$ |Rx+sum(Ai*xi)-d| \f$ - */ - GaussianConditional(Index key, const Vector& d, - const Matrix& R, const std::list >& parents, const Vector& sigmas); + * A conditional Gaussian functions as the node in a Bayes network + * It has a set of parents y,z, etc. and implements a probability density on x. + * The negative log-probability is given by \f$ \frac{1}{2} |Rx - (d - Sy - Tz - ...)|^2 \f$ + */ + class GTSAM_EXPORT GaussianConditional : + public JacobianFactor, + public Conditional + { + public: + typedef GaussianConditional This; ///< Typedef to this class + typedef boost::shared_ptr shared_ptr; ///< shared_ptr to this class + typedef JacobianFactor BaseFactor; ///< Typedef to our factor base class + typedef Conditional BaseConditional; ///< Typedef to our conditional base class - /** - * Constructor with arbitrary number of frontals and parents (only used in unit tests, - * std::list is not efficient) - */ - GaussianConditional(const std::list >& terms, - size_t nrFrontals, const Vector& d, const Vector& sigmas); + /** default constructor needed for serialization */ + GaussianConditional() {} - /** - * Constructor when matrices are already stored in a combined matrix, allows - * for multiple frontal variables. - */ - template - GaussianConditional(ITERATOR firstKey, ITERATOR lastKey, size_t nrFrontals, - const VerticalBlockView& matrices, const Vector& sigmas); + /** constructor with no parents |Rx-d| */ + GaussianConditional(Key key, const Vector& d, const Matrix& R, + const SharedDiagonal& sigmas = SharedDiagonal()); - /** Copy constructor */ - GaussianConditional(const GaussianConditional& rhs); + /** constructor with only one parent |Rx+Sy-d| */ + GaussianConditional(Key key, const Vector& d, const Matrix& R, + Key name1, const Matrix& S, const SharedDiagonal& sigmas = SharedDiagonal()); - /** Combine several GaussianConditional into a single dense GC. The - * conditionals enumerated by \c first and \c last must be in increasing - * order, meaning that the parents of any conditional may not include a - * conditional coming before it. - * @param firstConditional Iterator to the first conditional to combine, must dereference to a shared_ptr. - * @param lastConditional Iterator to after the last conditional to combine, must dereference to a shared_ptr. */ - template - static shared_ptr Combine(ITERATOR firstConditional, ITERATOR lastConditional); + /** constructor with two parents |Rx+Sy+Tz-d| */ + GaussianConditional(Key key, const Vector& d, const Matrix& R, + Key name1, const Matrix& S, Key name2, const Matrix& T, + const SharedDiagonal& sigmas = SharedDiagonal()); - /** Assignment operator */ - GaussianConditional& operator=(const GaussianConditional& rhs); + /** Constructor with number of arbitrary parents. \f$ |Rx+sum(Ai*xi)-d| \f$ + * @tparam PARENTS A container whose value type is std::pair, specifying the + * collection of parent keys and matrices. */ + template + GaussianConditional(Key key, const Vector& d, + const Matrix& R, const PARENTS& parents, + const SharedDiagonal& sigmas = SharedDiagonal(), + const typename PARENTS::value_type* = 0); - /** print */ - void print(const std::string& = "GaussianConditional", - const IndexFormatter& formatter = DefaultIndexFormatter) const; + /** Constructor with arbitrary number of frontals and parents. + * @tparam TERMS A container whose value type is std::pair, specifying the + * collection of keys and matrices making up the conditional. */ + template + GaussianConditional(const TERMS& terms, + size_t nrFrontals, const Vector& d, + const SharedDiagonal& sigmas = SharedDiagonal()); - /** equals function */ - bool equals(const GaussianConditional &cg, double tol = 1e-9) const; + /** Constructor with arbitrary number keys, and where the augmented matrix is given all together + * instead of in block terms. Note that only the active view of the provided augmented matrix + * is used, and that the matrix data is copied into a newly-allocated matrix in the constructed + * factor. */ + template + GaussianConditional( + const KEYS& keys, size_t nrFrontals, const VerticalBlockMatrix& augmentedMatrix, + const SharedDiagonal& sigmas = SharedDiagonal()); - /** dimension of multivariate variable (same as rows()) */ - size_t dim() const { return rsd_.rows(); } + /** Combine several GaussianConditional into a single dense GC. The conditionals enumerated by + * \c first and \c last must be in increasing order, meaning that the parents of any + * conditional may not include a conditional coming before it. + * @param firstConditional Iterator to the first conditional to combine, must dereference to a + * shared_ptr. + * @param lastConditional Iterator to after the last conditional to combine, must dereference + * to a shared_ptr. */ + template + static shared_ptr Combine(ITERATOR firstConditional, ITERATOR lastConditional); - /** dimension of multivariate variable (same as dim()) */ - size_t rows() const { return dim(); } + /** print */ + void print(const std::string& = "GaussianConditional", + const KeyFormatter& formatter = DefaultKeyFormatter) const; - /** Compute the augmented information matrix as - * \f$ [ R S d ]^T [ R S d ] \f$ - */ - Matrix augmentedInformation() const { - return rsd_.full().transpose() * rsd_.full().transpose(); - } + /** equals function */ + bool equals(const GaussianConditional&cg, double tol = 1e-9) const; - /** Compute the information matrix */ - Matrix information() const { - return get_R().transpose() * get_R(); - } + /** Return a view of the upper-triangular R block of the conditional */ + constABlock get_R() const { return Ab_.range(0, nrFrontals()); } - /** Return a view of the upper-triangular R block of the conditional */ - rsd_type::constBlock get_R() const { return rsd_.range(0, nrFrontals()); } + /** Get a view of the parent blocks. */ + constABlock get_S() const { return Ab_.range(nrFrontals(), size()); } - /** Return a view of the r.h.s. d vector */ - const_d_type get_d() const { return rsd_.column(nrFrontals()+nrParents(), 0); } + /** Get a view of the S matrix for the variable pointed to by the given key iterator */ + constABlock get_S(const_iterator variable) const { return BaseFactor::getA(variable); } - /** get the dimension of a variable */ - size_t dim(const_iterator variable) const { return rsd_(variable - this->begin()).cols(); } + /** Get a view of the r.h.s. vector d */ + const constBVector get_d() const { return BaseFactor::getb(); } - /** Get a view of the parent block corresponding to the variable pointed to by the given key iterator */ - rsd_type::constBlock get_S(const_iterator variable) const { return rsd_(variable - this->begin()); } - /** Get a view of the parent block corresponding to the variable pointed to by the given key iterator (non-const version) */ - rsd_type::constBlock get_S() const { return rsd_.range(nrFrontals(), size()); } - /** Get the Vector of sigmas */ - const Vector& get_sigmas() const {return sigmas_;} + /** + * Solves a conditional Gaussian and writes the solution into the entries of + * \c x for each frontal variable of the conditional. The parents are + * assumed to have already been solved in and their values are read from \c x. + * This function works for multiple frontal variables. + * + * Given the Gaussian conditional with log likelihood \f$ |R x_f - (d - S x_s)|^2, + * where \f$ f \f$ are the frontal variables and \f$ s \f$ are the separator + * variables of this conditional, this solve function computes + * \f$ x_f = R^{-1} (d - S x_s) \f$ using back-substitution. + * + * @param parents VectorValues containing solved parents \f$ x_s \f$. + */ + VectorValues solve(const VectorValues& parents) const; -protected: + VectorValues solveOtherRHS(const VectorValues& parents, const VectorValues& rhs) const; - const RdMatrix& matrix() const { return matrix_; } - const rsd_type& rsd() const { return rsd_; } + /** Performs transpose backsubstition in place on values */ + void solveTransposeInPlace(VectorValues& gy) const; -public: - /** - * Copy to a Factor (this creates a JacobianFactor and returns it as its - * base class GaussianFactor. - */ - boost::shared_ptr toFactor() const; + /** Scale the values in \c gy according to the sigmas for the frontal variables in this + * conditional. */ + void scaleFrontalsBySigma(VectorValues& gy) const; +// __declspec(deprecated) void scaleFrontalsBySigma(VectorValues& gy) const; // FIXME: depreciated flag doesn't appear to exist? - /** - * Solves a conditional Gaussian and writes the solution into the entries of - * \c x for each frontal variable of the conditional. The parents are - * assumed to have already been solved in and their values are read from \c x. - * This function works for multiple frontal variables. - * - * Given the Gaussian conditional with log likelihood \f$ |R x_f - (d - S x_s)|^2, - * where \f$ f \f$ are the frontal variables and \f$ s \f$ are the separator - * variables of this conditional, this solve function computes - * \f$ x_f = R^{-1} (d - S x_s) \f$ using back-substitution. - * - * @param x VectorValues structure with solved parents \f$ x_s \f$, and into which the - * solution \f$ x_f \f$ will be written. - */ - void solveInPlace(VectorValues& x) const; + private: - // functions for transpose backsubstitution - - /** - * Performs backsubstition in place on values - */ - void solveTransposeInPlace(VectorValues& gy) const; - void scaleFrontalsBySigma(VectorValues& gy) const; - -protected: - rsd_type::Column get_d_() { return rsd_.column(nrFrontals()+nrParents(), 0); } - rsd_type::Block get_R_() { return rsd_.range(0, nrFrontals()); } - rsd_type::Block get_S_(iterator variable) { return rsd_(variable - this->begin()); } - -private: - - // Friends - friend class JacobianFactor; - friend class ::JacobianFactoreliminate2Test; - friend class ::GaussianConditionalconstructorTest; - friend class ::GaussianFactorGrapheliminationTest; - friend class ::GaussianJunctionTreecomplicatedMarginalTest; - friend class ::GaussianConditionalinformationTest; - friend class ::GaussianConditionalisGaussianFactorTest; - - /** Serialization function */ - friend class boost::serialization::access; - template - void serialize(Archive & ar, const unsigned int version) { - ar & BOOST_SERIALIZATION_BASE_OBJECT_NVP(IndexConditional); - ar & BOOST_SERIALIZATION_NVP(matrix_); - ar & BOOST_SERIALIZATION_NVP(rsd_); - ar & BOOST_SERIALIZATION_NVP(sigmas_); - } -}; // GaussianConditional - -/* ************************************************************************* */ -template -GaussianConditional::GaussianConditional(ITERATOR firstKey, ITERATOR lastKey, - size_t nrFrontals, const VerticalBlockView& matrices, - const Vector& sigmas) : - IndexConditional(std::vector(firstKey, lastKey), nrFrontals), rsd_( - matrix_), sigmas_(sigmas) { - rsd_.assignNoalias(matrices); -} - -/* ************************************************************************* */ -template -GaussianConditional::shared_ptr GaussianConditional::Combine(ITERATOR firstConditional, ITERATOR lastConditional) { - - // TODO: check for being a clique - - // Get dimensions from first conditional - std::vector dims; dims.reserve((*firstConditional)->size() + 1); - for(const_iterator j = (*firstConditional)->begin(); j != (*firstConditional)->end(); ++j) - dims.push_back((*firstConditional)->dim(j)); - dims.push_back(1); - - // We assume the conditionals form clique, so the first n variables will be - // frontal variables in the new conditional. - size_t nFrontals = 0; - size_t nRows = 0; - for(ITERATOR c = firstConditional; c != lastConditional; ++c) { - nRows += dims[nFrontals]; - ++ nFrontals; - } - - // Allocate combined conditional, has same keys as firstConditional - Matrix tempCombined; - VerticalBlockView tempBlockView(tempCombined, dims.begin(), dims.end(), 0); - GaussianConditional::shared_ptr combinedConditional(new GaussianConditional((*firstConditional)->begin(), (*firstConditional)->end(), nFrontals, tempBlockView, zero(nRows))); - - // Resize to correct number of rows - combinedConditional->matrix_.resize(nRows, combinedConditional->matrix_.cols()); - combinedConditional->rsd_.rowEnd() = combinedConditional->matrix_.rows(); - - // Copy matrix and sigmas - const size_t totalDims = combinedConditional->matrix_.cols(); - size_t currentSlot = 0; - for(ITERATOR c = firstConditional; c != lastConditional; ++c) { - const size_t startRow = combinedConditional->rsd_.offset(currentSlot); // Start row is same as start column - combinedConditional->rsd_.range(0, currentSlot).block(startRow, 0, dims[currentSlot], combinedConditional->rsd_.offset(currentSlot)).operator=( - Matrix::Zero(dims[currentSlot], combinedConditional->rsd_.offset(currentSlot))); - combinedConditional->rsd_.range(currentSlot, dims.size()).block(startRow, 0, dims[currentSlot], totalDims - startRow).operator=( - (*c)->matrix_); - combinedConditional->sigmas_.segment(startRow, dims[currentSlot]) = (*c)->sigmas_; - ++ currentSlot; - } - - return combinedConditional; -} + /** Serialization function */ + friend class boost::serialization::access; + template + void serialize(Archive & ar, const unsigned int version) { + ar & BOOST_SERIALIZATION_BASE_OBJECT_NVP(BaseFactor); + ar & BOOST_SERIALIZATION_BASE_OBJECT_NVP(BaseConditional); + } + }; // GaussianConditional } // gtsam + +#include + diff --git a/gtsam/linear/GaussianDensity.cpp b/gtsam/linear/GaussianDensity.cpp index a8f2fc895..e05cbc60d 100644 --- a/gtsam/linear/GaussianDensity.cpp +++ b/gtsam/linear/GaussianDensity.cpp @@ -23,31 +23,33 @@ using namespace std; namespace gtsam { /* ************************************************************************* */ - void GaussianDensity::print(const string &s, const IndexFormatter& formatter) const + GaussianDensity GaussianDensity::FromMeanAndStddev(Key key, const Vector& mean, const double& sigma) + { + return GaussianDensity(key, mean / sigma, Matrix::Identity(mean.size(), mean.size()) / sigma); + } + + /* ************************************************************************* */ + void GaussianDensity::print(const string &s, const KeyFormatter& formatter) const { cout << s << ": density on "; for(const_iterator it = beginFrontals(); it != endFrontals(); ++it) cout << (boost::format("[%1%]")%(formatter(*it))).str() << " "; cout << endl; - gtsam::print(Matrix(get_R()),"R"); - gtsam::print(Vector(get_d()),"d"); - gtsam::print(sigmas_,"sigmas"); + gtsam::print(Matrix(get_R()), "R: "); + gtsam::print(Vector(get_d()), "d: "); + if(model_) + model_->print("Noise model: "); } /* ************************************************************************* */ Vector GaussianDensity::mean() const { - // Solve for mean - VectorValues x; - Index k = firstFrontalKey(); - // a VectorValues that only has a value for k: cannot be printed if k<>0 - x.insert(k, Vector(sigmas_.size())); - solveInPlace(x); - return x[k]; + VectorValues soln = this->solve(VectorValues()); + return soln[firstFrontalKey()]; } /* ************************************************************************* */ Matrix GaussianDensity::covariance() const { - return inverse(information()); + return information().inverse(); } } // gtsam diff --git a/gtsam/linear/GaussianDensity.h b/gtsam/linear/GaussianDensity.h index e7bc51f74..15db0ce35 100644 --- a/gtsam/linear/GaussianDensity.h +++ b/gtsam/linear/GaussianDensity.h @@ -24,13 +24,13 @@ namespace gtsam { /** - * A Gaussian density. - * - * It is implemented as a GaussianConditional without parents. - * The negative log-probability is given by \f$ |Rx - d|^2 \f$ - * with \f$ \Lambda = \Sigma^{-1} = R^T R \f$ and \f$ \mu = R^{-1} d \f$ - */ - class GTSAM_EXPORT GaussianDensity: public GaussianConditional { + * A Gaussian density. + * + * It is implemented as a GaussianConditional without parents. + * The negative log-probability is given by \f$ |Rx - d|^2 \f$ + * with \f$ \Lambda = \Sigma^{-1} = R^T R \f$ and \f$ \mu = R^{-1} d \f$ + */ + class GTSAM_EXPORT GaussianDensity : public GaussianConditional { public: @@ -38,24 +38,26 @@ namespace gtsam { /// default constructor needed for serialization GaussianDensity() : - GaussianConditional() { + GaussianConditional() { } /// Copy constructor from GaussianConditional GaussianDensity(const GaussianConditional& conditional) : - GaussianConditional(conditional) { - assert(conditional.nrParents() == 0); + GaussianConditional(conditional) { + if(conditional.nrParents() != 0) + throw std::invalid_argument("GaussianDensity can only be created from a conditional with no parents"); } /// constructor using d, R - GaussianDensity(Index key, const Vector& d, const Matrix& R, - const Vector& sigmas) : - GaussianConditional(key, d, R, sigmas) { - } + GaussianDensity(Index key, const Vector& d, const Matrix& R, const SharedDiagonal& noiseModel = SharedDiagonal()) : + GaussianConditional(key, d, R, noiseModel) {} + + /// Construct using a mean and variance + static GaussianDensity FromMeanAndStddev(Key key, const Vector& mean, const double& sigma); /// print void print(const std::string& = "GaussianDensity", - const IndexFormatter& formatter =DefaultIndexFormatter) const; + const KeyFormatter& formatter = DefaultKeyFormatter) const; /// Mean \f$ \mu = R^{-1} d \f$ Vector mean() const; @@ -64,6 +66,6 @@ namespace gtsam { Matrix covariance() const; }; -// GaussianDensity + // GaussianDensity }// gtsam diff --git a/gtsam/linear/GaussianEliminationTree.cpp b/gtsam/linear/GaussianEliminationTree.cpp new file mode 100644 index 000000000..0daaf0707 --- /dev/null +++ b/gtsam/linear/GaussianEliminationTree.cpp @@ -0,0 +1,44 @@ +/* ---------------------------------------------------------------------------- + + * GTSAM Copyright 2010, Georgia Tech Research Corporation, + * Atlanta, Georgia 30332-0415 + * All Rights Reserved + * Authors: Frank Dellaert, et al. (see THANKS for the full author list) + + * See LICENSE for the license information + + * -------------------------------------------------------------------------- */ + +/** + * @file GaussianEliminationTree.cpp + * @date Mar 29, 2013 + * @author Frank Dellaert + * @author Richard Roberts + */ + +#include +#include + +namespace gtsam { + + // Instantiate base class + template class EliminationTree; + + /* ************************************************************************* */ + GaussianEliminationTree::GaussianEliminationTree( + const GaussianFactorGraph& factorGraph, const VariableIndex& structure, + const Ordering& order) : + Base(factorGraph, structure, order) {} + + /* ************************************************************************* */ + GaussianEliminationTree::GaussianEliminationTree( + const GaussianFactorGraph& factorGraph, const Ordering& order) : + Base(factorGraph, order) {} + + /* ************************************************************************* */ + bool GaussianEliminationTree::equals(const This& other, double tol) const + { + return Base::equals(other, tol); + } + +} diff --git a/gtsam/linear/GaussianEliminationTree.h b/gtsam/linear/GaussianEliminationTree.h new file mode 100644 index 000000000..1a4ba7868 --- /dev/null +++ b/gtsam/linear/GaussianEliminationTree.h @@ -0,0 +1,63 @@ +/* ---------------------------------------------------------------------------- + + * GTSAM Copyright 2010, Georgia Tech Research Corporation, + * Atlanta, Georgia 30332-0415 + * All Rights Reserved + * Authors: Frank Dellaert, et al. (see THANKS for the full author list) + + * See LICENSE for the license information + + * -------------------------------------------------------------------------- */ + +/** + * @file GaussianEliminationTree.h + * @date Mar 29, 2013 + * @author Frank Dellaert + * @author Richard Roberts + */ + +#pragma once + +#include +#include +#include + +namespace gtsam { + + class GTSAM_EXPORT GaussianEliminationTree : + public EliminationTree + { + public: + typedef EliminationTree Base; ///< Base class + typedef GaussianEliminationTree This; ///< This class + typedef boost::shared_ptr shared_ptr; ///< Shared pointer to this class + + /** + * Build the elimination tree of a factor graph using pre-computed column structure. + * @param factorGraph The factor graph for which to build the elimination tree + * @param structure The set of factors involving each variable. If this is not + * precomputed, you can call the Create(const FactorGraph&) + * named constructor instead. + * @return The elimination tree + */ + GaussianEliminationTree(const GaussianFactorGraph& factorGraph, + const VariableIndex& structure, const Ordering& order); + + /** Build the elimination tree of a factor graph. Note that this has to compute the column + * structure as a VariableIndex, so if you already have this precomputed, use the other + * constructor instead. + * @param factorGraph The factor graph for which to build the elimination tree + */ + GaussianEliminationTree(const GaussianFactorGraph& factorGraph, + const Ordering& order); + + /** Test whether the tree is equal to another */ + bool equals(const This& other, double tol = 1e-9) const; + + private: + + friend class ::EliminationTreeTester; + + }; + +} diff --git a/gtsam/linear/GaussianFactor.h b/gtsam/linear/GaussianFactor.h index 3363880ef..1c9589ce2 100644 --- a/gtsam/linear/GaussianFactor.h +++ b/gtsam/linear/GaussianFactor.h @@ -21,77 +21,62 @@ #pragma once #include -#include - -#include - -#include -#include +#include namespace gtsam { // Forward declarations class VectorValues; - class Permutation; - class GaussianConditional; - template class BayesNet; /** - * An abstract virtual base class for JacobianFactor and HessianFactor. - * A GaussianFactor has a quadratic error function. - * GaussianFactor is non-mutable (all methods const!). - * The factor value is exp(-0.5*||Ax-b||^2) - */ - class GTSAM_EXPORT GaussianFactor: public IndexFactor { - - protected: - - /** Copy constructor */ - GaussianFactor(const This& f) : IndexFactor(f) {} - - /** Construct from derived type */ - GaussianFactor(const GaussianConditional& c); - - /** Constructor from a collection of keys */ - template GaussianFactor(KeyIterator beginKey, KeyIterator endKey) : - Base(beginKey, endKey) {} - - /** Default constructor for I/O */ - GaussianFactor() {} - - /** Construct unary factor */ - GaussianFactor(Index j) : IndexFactor(j) {} - - /** Construct binary factor */ - GaussianFactor(Index j1, Index j2) : IndexFactor(j1, j2) {} - - /** Construct ternary factor */ - GaussianFactor(Index j1, Index j2, Index j3) : IndexFactor(j1, j2, j3) {} - - /** Construct 4-way factor */ - GaussianFactor(Index j1, Index j2, Index j3, Index j4) : IndexFactor(j1, j2, j3, j4) {} - - /** Construct n-way factor */ - GaussianFactor(const std::set& js) : IndexFactor(js) {} - - /** Construct n-way factor */ - GaussianFactor(const std::vector& js) : IndexFactor(js) {} - + * An abstract virtual base class for JacobianFactor and HessianFactor. A GaussianFactor has a + * quadratic error function. GaussianFactor is non-mutable (all methods const!). The factor value + * is exp(-0.5*||Ax-b||^2) */ + class GTSAM_EXPORT GaussianFactor : public Factor + { public: + typedef GaussianFactor This; ///< This class + typedef boost::shared_ptr shared_ptr; ///< shared_ptr to this class + typedef Factor Base; ///< Our base class - typedef GaussianConditional ConditionalType; - typedef boost::shared_ptr shared_ptr; + /** Default constructor creates empty factor */ + GaussianFactor() {} + + /** Construct from container of keys. This constructor is used internally from derived factor + * constructors, either from a container of keys or from a boost::assign::list_of. */ + template + GaussianFactor(const CONTAINER& keys) : Base(keys) {} // Implementing Testable interface virtual void print(const std::string& s = "", - const IndexFormatter& formatter = DefaultIndexFormatter) const = 0; + const KeyFormatter& formatter = DefaultKeyFormatter) const = 0; + /** Equals for testable */ virtual bool equals(const GaussianFactor& lf, double tol = 1e-9) const = 0; + /** Print for testable */ virtual double error(const VectorValues& c) const = 0; /** 0.5*(A*x-b)'*D*(A*x-b) */ /** Return the dimension of the variable pointed to by the given key iterator */ - virtual size_t getDim(const_iterator variable) const = 0; + virtual DenseIndex getDim(const_iterator variable) const = 0; + + /** + * Return a dense \f$ [ \;A\;b\; ] \in \mathbb{R}^{m \times n+1} \f$ + * Jacobian matrix, augmented with b with the noise models baked + * into A and b. The negative log-likelihood is + * \f$ \frac{1}{2} \Vert Ax-b \Vert^2 \f$. See also + * GaussianFactorGraph::jacobian and GaussianFactorGraph::sparseJacobian. + */ + virtual Matrix augmentedJacobian() const = 0; + + /** + * Return the dense Jacobian \f$ A \f$ and right-hand-side \f$ b \f$, + * with the noise models baked into A and b. The negative log-likelihood + * is \f$ \frac{1}{2} \Vert Ax-b \Vert^2 \f$. See also + * GaussianFactorGraph::augmentedJacobian and + * GaussianFactorGraph::sparseJacobian. + */ + virtual std::pair jacobian() const = 0; /** Return the augmented information matrix represented by this GaussianFactor. * The augmented information matrix contains the information matrix with an @@ -111,21 +96,8 @@ namespace gtsam { /** Clone a factor (make a deep copy) */ virtual GaussianFactor::shared_ptr clone() const = 0; - /** - * Permutes the GaussianFactor, but for efficiency requires the permutation - * to already be inverted. This acts just as a change-of-name for each - * variable. The order of the variables within the factor is not changed. - */ - virtual void permuteWithInverse(const Permutation& inversePermutation) { - IndexFactor::permuteWithInverse(inversePermutation); - } - - /** - * Apply a reduction, which is a remapping of variable indices. - */ - virtual void reduceWithInverse(const internal::Reduction& inverseReduction) { - IndexFactor::reduceWithInverse(inverseReduction); - } + /** Test whether the factor is empty */ + virtual bool empty() const = 0; /** * Construct the corresponding anti-factor to negate information @@ -139,18 +111,9 @@ namespace gtsam { friend class boost::serialization::access; template void serialize(ARCHIVE & ar, const unsigned int version) { - ar & BOOST_SERIALIZATION_BASE_OBJECT_NVP(IndexFactor); + ar & BOOST_SERIALIZATION_BASE_OBJECT_NVP(Base); } }; // GaussianFactor - - /** make keys from list, vector, or map of matrices */ - template - static std::vector GetKeys(size_t n, ITERATOR begin, ITERATOR end) { - std::vector keys; - keys.reserve(n); - for (ITERATOR it=begin;it!=end;++it) keys.push_back(it->first); - return keys; - } - + } // namespace gtsam diff --git a/gtsam/linear/GaussianFactorGraph.cpp b/gtsam/linear/GaussianFactorGraph.cpp index b04287288..7e5f0b16c 100644 --- a/gtsam/linear/GaussianFactorGraph.cpp +++ b/gtsam/linear/GaussianFactorGraph.cpp @@ -18,15 +18,14 @@ * @author Frank Dellaert */ -#include -#include -#include -#include -#include -#include #include -#include -#include +#include +#include +#include +#include +#include +#include +#include #include #include #include @@ -36,62 +35,29 @@ using namespace gtsam; namespace gtsam { + // Instantiate base classes + template class FactorGraph; + template class EliminateableFactorGraph; + /* ************************************************************************* */ - GaussianFactorGraph::GaussianFactorGraph(const GaussianBayesNet& CBN) : Base(CBN) {} + bool GaussianFactorGraph::equals(const This& fg, double tol) const + { + return Base::equals(fg, tol); + } /* ************************************************************************* */ GaussianFactorGraph::Keys GaussianFactorGraph::keys() const { - FastSet keys; + FastSet keys; BOOST_FOREACH(const sharedFactor& factor, *this) - if (factor) keys.insert(factor->begin(), factor->end()); + if (factor) + keys.insert(factor->begin(), factor->end()); return keys; } - /* ************************************************************************* */ - void GaussianFactorGraph::permuteWithInverse( - const Permutation& inversePermutation) { - BOOST_FOREACH(const sharedFactor& factor, factors_) { - if(factor) - factor->permuteWithInverse(inversePermutation); - } - } - - /* ************************************************************************* */ - void GaussianFactorGraph::reduceWithInverse( - const internal::Reduction& inverseReduction) { - BOOST_FOREACH(const sharedFactor& factor, factors_) { - if(factor) - factor->reduceWithInverse(inverseReduction); - } - } - - /* ************************************************************************* */ - void GaussianFactorGraph::combine(const GaussianFactorGraph &lfg) { - for (const_iterator factor = lfg.factors_.begin(); factor - != lfg.factors_.end(); factor++) { - push_back(*factor); - } - } - - /* ************************************************************************* */ - GaussianFactorGraph GaussianFactorGraph::combine2( - const GaussianFactorGraph& lfg1, const GaussianFactorGraph& lfg2) { - - // create new linear factor graph equal to the first one - GaussianFactorGraph fg = lfg1; - - // add the second factors_ in the graph - for (const_iterator factor = lfg2.factors_.begin(); factor - != lfg2.factors_.end(); factor++) { - fg.push_back(*factor); - } - return fg; - } - /* ************************************************************************* */ std::vector > GaussianFactorGraph::sparseJacobian() const { // First find dimensions of each variable - FastVector dims; + vector dims; BOOST_FOREACH(const sharedFactor& factor, *this) { for(GaussianFactor::const_iterator pos = factor->begin(); pos != factor->end(); ++pos) { if(dims.size() <= *pos) @@ -107,17 +73,18 @@ namespace gtsam { // Iterate over all factors, adding sparse scalar entries typedef boost::tuple triplet; - FastVector entries; + vector entries; size_t row = 0; BOOST_FOREACH(const sharedFactor& factor, *this) { // Convert to JacobianFactor if necessary JacobianFactor::shared_ptr jacobianFactor( boost::dynamic_pointer_cast(factor)); if (!jacobianFactor) { - HessianFactor::shared_ptr hessian(boost::dynamic_pointer_cast(factor)); - if (hessian) - jacobianFactor.reset(new JacobianFactor(*hessian)); - else + //TODO : re-enable + //HessianFactor::shared_ptr hessian(boost::dynamic_pointer_cast(factor)); + //if (hessian) + // jacobianFactor.reset(new JacobianFactor(*hessian)); + //else throw invalid_argument( "GaussianFactorGraph contains a factor that is neither a JacobianFactor nor a HessianFactor."); } @@ -169,19 +136,9 @@ namespace gtsam { /* ************************************************************************* */ Matrix GaussianFactorGraph::augmentedJacobian() const { - // Convert to Jacobians - FactorGraph jfg; - jfg.reserve(this->size()); - BOOST_FOREACH(const sharedFactor& factor, *this) { - if(boost::shared_ptr jf = - boost::dynamic_pointer_cast(factor)) - jfg.push_back(jf); - else - jfg.push_back(boost::make_shared(*factor)); - } // combine all factors - JacobianFactor combined(*CombineJacobians(jfg, VariableSlots(*this))); - return combined.matrix_augmented(); + JacobianFactor combined(*this); + return combined.augmentedJacobian(); } /* ************************************************************************* */ @@ -192,140 +149,6 @@ namespace gtsam { augmented.col(augmented.cols()-1)); } - /* ************************************************************************* */ - // Helper functions for Combine - static boost::tuple, size_t, size_t> countDims(const FactorGraph& factors, const VariableSlots& variableSlots) { -#ifdef GTSAM_EXTRA_CONSISTENCY_CHECKS - vector varDims(variableSlots.size(), numeric_limits::max()); -#else - vector varDims(variableSlots.size()); -#endif - size_t m = 0; - size_t n = 0; - { - Index jointVarpos = 0; - BOOST_FOREACH(const VariableSlots::value_type& slots, variableSlots) { - - assert(slots.second.size() == factors.size()); - - Index sourceFactorI = 0; - BOOST_FOREACH(const Index sourceVarpos, slots.second) { - if(sourceVarpos < numeric_limits::max()) { - const JacobianFactor& sourceFactor = *factors[sourceFactorI]; - size_t vardim = sourceFactor.getDim(sourceFactor.begin() + sourceVarpos); -#ifdef GTSAM_EXTRA_CONSISTENCY_CHECKS -if(varDims[jointVarpos] == numeric_limits::max()) { - varDims[jointVarpos] = vardim; - n += vardim; -} else - assert(varDims[jointVarpos] == vardim); -#else - varDims[jointVarpos] = vardim; -n += vardim; -break; -#endif - } - ++ sourceFactorI; - } - ++ jointVarpos; - } - BOOST_FOREACH(const JacobianFactor::shared_ptr& factor, factors) { - m += factor->rows(); - } - } - return boost::make_tuple(varDims, m, n); - } - - /* ************************************************************************* */ - JacobianFactor::shared_ptr CombineJacobians( - const FactorGraph& factors, - const VariableSlots& variableSlots) { - - const bool debug = ISDEBUG("CombineJacobians"); - if (debug) factors.print("Combining factors: "); - if (debug) variableSlots.print(); - - if (debug) cout << "Determine dimensions" << endl; - gttic(countDims); - vector varDims; - size_t m, n; - boost::tie(varDims, m, n) = countDims(factors, variableSlots); - if (debug) { - cout << "Dims: " << m << " x " << n << "\n"; - BOOST_FOREACH(const size_t dim, varDims) cout << dim << " "; - cout << endl; - } - gttoc(countDims); - - if (debug) cout << "Allocate new factor" << endl; - gttic(allocate); - JacobianFactor::shared_ptr combined(new JacobianFactor()); - combined->allocate(variableSlots, varDims, m); - Vector sigmas(m); - gttoc(allocate); - - if (debug) cout << "Copy blocks" << endl; - gttic(copy_blocks); - // Loop over slots in combined factor - Index combinedSlot = 0; - BOOST_FOREACH(const VariableSlots::value_type& varslot, variableSlots) { - JacobianFactor::ABlock destSlot(combined->getA(combined->begin()+combinedSlot)); - // Loop over source factors - size_t nextRow = 0; - for(size_t factorI = 0; factorI < factors.size(); ++factorI) { - // Slot in source factor - const Index sourceSlot = varslot.second[factorI]; - const size_t sourceRows = factors[factorI]->rows(); - JacobianFactor::ABlock::RowsBlockXpr destBlock(destSlot.middleRows(nextRow, sourceRows)); - // Copy if exists in source factor, otherwise set zero - if(sourceSlot != numeric_limits::max()) - destBlock = factors[factorI]->getA(factors[factorI]->begin()+sourceSlot); - else - destBlock.setZero(); - nextRow += sourceRows; - } - ++combinedSlot; - } - gttoc(copy_blocks); - - if (debug) cout << "Copy rhs (b) and sigma" << endl; - gttic(copy_vectors); - bool anyConstrained = false; - // Loop over source factors - size_t nextRow = 0; - for(size_t factorI = 0; factorI < factors.size(); ++factorI) { - const size_t sourceRows = factors[factorI]->rows(); - combined->getb().segment(nextRow, sourceRows) = factors[factorI]->getb(); - sigmas.segment(nextRow, sourceRows) = factors[factorI]->get_model()->sigmas(); - if (factors[factorI]->isConstrained()) anyConstrained = true; - nextRow += sourceRows; - } - gttoc(copy_vectors); - - if (debug) cout << "Create noise model from sigmas" << endl; - gttic(noise_model); - combined->setModel(anyConstrained, sigmas); - gttoc(noise_model); - - if (debug) cout << "Assert Invariants" << endl; - combined->assertInvariants(); - - return combined; - } - - /* ************************************************************************* */ - GaussianFactorGraph::EliminationResult EliminateJacobians(const FactorGraph< - JacobianFactor>& factors, size_t nrFrontals) { - gttic(Combine); - JacobianFactor::shared_ptr jointFactor = - CombineJacobians(factors, VariableSlots(factors)); - gttoc(Combine); - gttic(eliminate); - GaussianConditional::shared_ptr gbn = jointFactor->eliminate(nrFrontals); - gttoc(eliminate); - return make_pair(gbn, jointFactor); - } - /* ************************************************************************* */ Matrix GaussianFactorGraph::augmentedHessian() const { // combine all factors and get upper-triangular part of Hessian @@ -343,247 +166,179 @@ break; augmented.topLeftCorner(augmented.rows()-1, augmented.rows()-1), augmented.col(augmented.rows()-1).head(augmented.rows()-1)); } - + /* ************************************************************************* */ - GaussianFactorGraph::EliminationResult EliminateCholesky(const FactorGraph< - GaussianFactor>& factors, size_t nrFrontals) { - - const bool debug = ISDEBUG("EliminateCholesky"); - - // Form Ab' * Ab - gttic(combine); - HessianFactor::shared_ptr combinedFactor(new HessianFactor(factors)); - gttoc(combine); - - // Do Cholesky, note that after this, the lower triangle still contains - // some untouched non-zeros that should be zero. We zero them while - // extracting submatrices next. - gttic(partial_Cholesky); - combinedFactor->partialCholesky(nrFrontals); - - gttoc(partial_Cholesky); - - // Extract conditional and fill in details of the remaining factor - gttic(split); - GaussianConditional::shared_ptr conditional = - combinedFactor->splitEliminatedFactor(nrFrontals); - if (debug) { - conditional->print("Extracted conditional: "); - combinedFactor->print("Eliminated factor (L piece): "); - } - gttoc(split); - - combinedFactor->assertInvariants(); - return make_pair(conditional, combinedFactor); + VectorValues GaussianFactorGraph::optimize(OptionalOrdering ordering, const Eliminate& function) const + { + gttic(GaussianFactorGraph_optimize); + return BaseEliminateable::eliminateMultifrontal(ordering, function)->optimize(); } /* ************************************************************************* */ - static FactorGraph convertToJacobians(const FactorGraph< - GaussianFactor>& factors) { + VectorValues GaussianFactorGraph::gradient(const VectorValues& x0) const + { + VectorValues g = VectorValues::Zero(x0); + Errors e = gaussianErrors(x0); + transposeMultiplyAdd(1.0, e, g); + return g; + } - typedef JacobianFactor J; - typedef HessianFactor H; - - const bool debug = ISDEBUG("convertToJacobians"); - - FactorGraph jacobians; - jacobians.reserve(factors.size()); - BOOST_FOREACH(const GaussianFactor::shared_ptr& factor, factors) - if (factor) { - J::shared_ptr jacobian(boost::dynamic_pointer_cast(factor)); - if (jacobian) { - jacobians.push_back(jacobian); - if (debug) jacobian->print("Existing JacobianFactor: "); - } else { - H::shared_ptr hessian(boost::dynamic_pointer_cast(factor)); - if (!hessian) throw std::invalid_argument( - "convertToJacobians: factor is neither a JacobianFactor nor a HessianFactor."); - J::shared_ptr converted(new J(*hessian)); - if (debug) { - cout << "Converted HessianFactor to JacobianFactor:\n"; - hessian->print("HessianFactor: "); - converted->print("JacobianFactor: "); - if (!assert_equal(*hessian, HessianFactor(*converted), 1e-3)) throw runtime_error( - "convertToJacobians: Conversion between Jacobian and Hessian incorrect"); - } - jacobians.push_back(converted); + /* ************************************************************************* */ + namespace { + JacobianFactor::shared_ptr convertToJacobianFactorPtr(const GaussianFactor::shared_ptr &gf) { + JacobianFactor::shared_ptr result = boost::dynamic_pointer_cast(gf); + if( !result ) { + result = boost::make_shared(*gf); // Convert any non-Jacobian factors to Jacobians (e.g. Hessian -> Jacobian with Cholesky) } + return result; } - return jacobians; } /* ************************************************************************* */ - GaussianFactorGraph::EliminationResult EliminateQR(const FactorGraph< - GaussianFactor>& factors, size_t nrFrontals) { - - const bool debug = ISDEBUG("EliminateQR"); - - // Convert all factors to the appropriate type and call the type-specific EliminateGaussian. - if (debug) cout << "Using QR" << endl; - - gttic(convert_to_Jacobian); - FactorGraph jacobians = convertToJacobians(factors); - gttoc(convert_to_Jacobian); - - gttic(Jacobian_EliminateGaussian); - GaussianConditional::shared_ptr conditional; - GaussianFactor::shared_ptr factor; - boost::tie(conditional, factor) = EliminateJacobians(jacobians, nrFrontals); - gttoc(Jacobian_EliminateGaussian); - - return make_pair(conditional, factor); - } // \EliminateQR - - /* ************************************************************************* */ - bool hasConstraints(const FactorGraph& factors) { - typedef JacobianFactor J; - BOOST_FOREACH(const GaussianFactor::shared_ptr& factor, factors) { - J::shared_ptr jacobian(boost::dynamic_pointer_cast(factor)); - if (jacobian && jacobian->get_model()->isConstrained()) { - return true; - } - } - return false; - } - - /* ************************************************************************* */ - GaussianFactorGraph::EliminationResult EliminatePreferCholesky( - const FactorGraph& factors, size_t nrFrontals) { - - // If any JacobianFactors have constrained noise models, we have to convert - // all factors to JacobianFactors. Otherwise, we can convert all factors - // to HessianFactors. This is because QR can handle constrained noise - // models but Cholesky cannot. - if (hasConstraints(factors)) - return EliminateQR(factors, nrFrontals); - else { - GaussianFactorGraph::EliminationResult ret; - gttic(EliminateCholesky); - ret = EliminateCholesky(factors, nrFrontals); - gttoc(EliminateCholesky); - return ret; - } - - } // \EliminatePreferCholesky - - - - /* ************************************************************************* */ - static JacobianFactor::shared_ptr convertToJacobianFactorPtr(const GaussianFactor::shared_ptr &gf) { - JacobianFactor::shared_ptr result = boost::dynamic_pointer_cast(gf); - if( !result ) { - result = boost::make_shared(*gf); // Convert any non-Jacobian factors to Jacobians (e.g. Hessian -> Jacobian with Cholesky) - } - return result; - } - - /* ************************************************************************* */ - Errors operator*(const GaussianFactorGraph& fg, const VectorValues& x) { + VectorValues GaussianFactorGraph::gradientAtZero() const + { + // Zero-out the gradient + VectorValues g; Errors e; - BOOST_FOREACH(const GaussianFactor::shared_ptr& Ai_G, fg) { + BOOST_FOREACH(const sharedFactor& Ai_G, *this) { JacobianFactor::shared_ptr Ai = convertToJacobianFactorPtr(Ai_G); - e.push_back((*Ai)*x); + e.push_back(-Ai->getb()); + } + transposeMultiplyAdd(1.0, e, g); + return g; + } + + /* ************************************************************************* */ + VectorValues GaussianFactorGraph::optimizeGradientSearch() const + { + gttic(GaussianFactorGraph_optimizeGradientSearch); + + gttic(Compute_Gradient); + // Compute gradient (call gradientAtZero function, which is defined for various linear systems) + VectorValues grad = gradientAtZero(); + double gradientSqNorm = grad.dot(grad); + gttoc(Compute_Gradient); + + gttic(Compute_Rg); + // Compute R * g + Errors Rg = *this * grad; + gttoc(Compute_Rg); + + gttic(Compute_minimizing_step_size); + // Compute minimizing step size + double step = -gradientSqNorm / dot(Rg, Rg); + gttoc(Compute_minimizing_step_size); + + gttic(Compute_point); + // Compute steepest descent point + grad *= step; + gttoc(Compute_point); + + return grad; + } + + /* ************************************************************************* */ + Errors GaussianFactorGraph::operator*(const VectorValues& x) const { + Errors e; + BOOST_FOREACH(const GaussianFactor::shared_ptr& Ai_G, *this) { + JacobianFactor::shared_ptr Ai = convertToJacobianFactorPtr(Ai_G); + e.push_back((*Ai) * x); } return e; } /* ************************************************************************* */ - void multiplyInPlace(const GaussianFactorGraph& fg, const VectorValues& x, Errors& e) { - multiplyInPlace(fg,x,e.begin()); + void GaussianFactorGraph::multiplyInPlace(const VectorValues& x, Errors& e) const { + multiplyInPlace(x, e.begin()); } /* ************************************************************************* */ - void multiplyInPlace(const GaussianFactorGraph& fg, const VectorValues& x, const Errors::iterator& e) { + void GaussianFactorGraph::multiplyInPlace(const VectorValues& x, const Errors::iterator& e) const { Errors::iterator ei = e; - BOOST_FOREACH(const GaussianFactor::shared_ptr& Ai_G, fg) { + BOOST_FOREACH(const GaussianFactor::shared_ptr& Ai_G, *this) { JacobianFactor::shared_ptr Ai = convertToJacobianFactorPtr(Ai_G); *ei = (*Ai)*x; ei++; } } + /* ************************************************************************* */ + bool hasConstraints(const GaussianFactorGraph& factors) { + typedef JacobianFactor J; + BOOST_FOREACH(const GaussianFactor::shared_ptr& factor, factors) { + J::shared_ptr jacobian(boost::dynamic_pointer_cast(factor)); + if (jacobian && jacobian->get_model() && jacobian->get_model()->isConstrained()) { + return true; + } + } + return false; + } + /* ************************************************************************* */ // x += alpha*A'*e - void transposeMultiplyAdd(const GaussianFactorGraph& fg, double alpha, const Errors& e, VectorValues& x) { + void GaussianFactorGraph::transposeMultiplyAdd(double alpha, const Errors& e, VectorValues& x) const + { // For each factor add the gradient contribution Errors::const_iterator ei = e.begin(); - BOOST_FOREACH(const GaussianFactor::shared_ptr& Ai_G, fg) { + BOOST_FOREACH(const sharedFactor& Ai_G, *this) { JacobianFactor::shared_ptr Ai = convertToJacobianFactorPtr(Ai_G); - Ai->transposeMultiplyAdd(alpha,*(ei++),x); + Ai->transposeMultiplyAdd(alpha, *(ei++), x); } } - /* ************************************************************************* */ - VectorValues gradient(const GaussianFactorGraph& fg, const VectorValues& x0) { - // It is crucial for performance to make a zero-valued clone of x - VectorValues g = VectorValues::Zero(x0); - Errors e; - BOOST_FOREACH(const GaussianFactor::shared_ptr& Ai_G, fg) { - JacobianFactor::shared_ptr Ai = convertToJacobianFactorPtr(Ai_G); - e.push_back(Ai->error_vector(x0)); - } - transposeMultiplyAdd(fg, 1.0, e, g); - return g; - } + ///* ************************************************************************* */ + //void residual(const GaussianFactorGraph& fg, const VectorValues &x, VectorValues &r) { + // Key i = 0 ; + // BOOST_FOREACH(const GaussianFactor::shared_ptr& Ai_G, fg) { + // JacobianFactor::shared_ptr Ai = convertToJacobianFactorPtr(Ai_G); + // r[i] = Ai->getb(); + // i++; + // } + // VectorValues Ax = VectorValues::SameStructure(r); + // multiply(fg,x,Ax); + // axpy(-1.0,Ax,r); + //} + + ///* ************************************************************************* */ + //void multiply(const GaussianFactorGraph& fg, const VectorValues &x, VectorValues &r) { + // r.setZero(); + // Key i = 0; + // BOOST_FOREACH(const GaussianFactor::shared_ptr& Ai_G, fg) { + // JacobianFactor::shared_ptr Ai = convertToJacobianFactorPtr(Ai_G); + // Vector &y = r[i]; + // for(JacobianFactor::const_iterator j = Ai->begin(); j != Ai->end(); ++j) { + // y += Ai->getA(j) * x[*j]; + // } + // ++i; + // } + //} /* ************************************************************************* */ - void gradientAtZero(const GaussianFactorGraph& fg, VectorValues& g) { - // Zero-out the gradient - g.setZero(); - Errors e; - BOOST_FOREACH(const GaussianFactor::shared_ptr& Ai_G, fg) { - JacobianFactor::shared_ptr Ai = convertToJacobianFactorPtr(Ai_G); - e.push_back(-Ai->getb()); - } - transposeMultiplyAdd(fg, 1.0, e, g); - } - - /* ************************************************************************* */ - void residual(const GaussianFactorGraph& fg, const VectorValues &x, VectorValues &r) { - Index i = 0 ; - BOOST_FOREACH(const GaussianFactor::shared_ptr& Ai_G, fg) { - JacobianFactor::shared_ptr Ai = convertToJacobianFactorPtr(Ai_G); - r[i] = Ai->getb(); - i++; - } - VectorValues Ax = VectorValues::SameStructure(r); - multiply(fg,x,Ax); - axpy(-1.0,Ax,r); - } - - /* ************************************************************************* */ - void multiply(const GaussianFactorGraph& fg, const VectorValues &x, VectorValues &r) { - r.setZero(); - Index i = 0; - BOOST_FOREACH(const GaussianFactor::shared_ptr& Ai_G, fg) { - JacobianFactor::shared_ptr Ai = convertToJacobianFactorPtr(Ai_G); - Vector &y = r[i]; - for(JacobianFactor::const_iterator j = Ai->begin(); j != Ai->end(); ++j) { - y += Ai->getA(j) * x[*j]; - } - ++i; - } - } - - /* ************************************************************************* */ - void transposeMultiply(const GaussianFactorGraph& fg, const VectorValues &r, VectorValues &x) { - x.setZero(); - Index i = 0; - BOOST_FOREACH(const GaussianFactor::shared_ptr& Ai_G, fg) { + VectorValues GaussianFactorGraph::transposeMultiply(const Errors& e) const + { + VectorValues x; + Errors::const_iterator ei = e.begin(); + BOOST_FOREACH(const sharedFactor& Ai_G, *this) { JacobianFactor::shared_ptr Ai = convertToJacobianFactorPtr(Ai_G); for(JacobianFactor::const_iterator j = Ai->begin(); j != Ai->end(); ++j) { - x[*j] += Ai->getA(j).transpose() * r[i]; + // Create the value as a zero vector if it does not exist. + pair xi = x.tryInsert(*j, Vector()); + if(xi.second) + xi.first->second = Vector::Zero(Ai->getDim(j)); + xi.first->second += Ai->getA(j).transpose() * *ei; } - ++i; + ++ ei; } + return x; } /* ************************************************************************* */ - boost::shared_ptr gaussianErrors_(const GaussianFactorGraph& fg, const VectorValues& x) { - boost::shared_ptr e(new Errors); - BOOST_FOREACH(const GaussianFactor::shared_ptr& Ai_G, fg) { + Errors GaussianFactorGraph::gaussianErrors(const VectorValues& x) const + { + Errors e; + BOOST_FOREACH(const sharedFactor& Ai_G, *this) { JacobianFactor::shared_ptr Ai = convertToJacobianFactorPtr(Ai_G); - e->push_back(Ai->error_vector(x)); + e.push_back(Ai->error_vector(x)); } return e; } diff --git a/gtsam/linear/GaussianFactorGraph.h b/gtsam/linear/GaussianFactorGraph.h index 03fd04215..319b407ae 100644 --- a/gtsam/linear/GaussianFactorGraph.h +++ b/gtsam/linear/GaussianFactorGraph.h @@ -21,142 +21,119 @@ #pragma once -#include - -#include -#include -#include +#include +#include +#include #include -#include +#include +#include // Included here instead of fw-declared so we can use Errors::iterator namespace gtsam { - // Forward declaration to use as default argument, documented declaration below. - GTSAM_EXPORT FactorGraph::EliminationResult - EliminateQR(const FactorGraph& factors, size_t nrFrontals); + // Forward declarations + class GaussianFactorGraph; + class GaussianFactor; + class GaussianConditional; + class GaussianBayesNet; + class GaussianEliminationTree; + class GaussianBayesTree; + class GaussianJunctionTree; + /* ************************************************************************* */ + template<> struct EliminationTraits + { + typedef GaussianFactor FactorType; ///< Type of factors in factor graph + typedef GaussianFactorGraph FactorGraphType; ///< Type of the factor graph (e.g. GaussianFactorGraph) + typedef GaussianConditional ConditionalType; ///< Type of conditionals from elimination + typedef GaussianBayesNet BayesNetType; ///< Type of Bayes net from sequential elimination + typedef GaussianEliminationTree EliminationTreeType; ///< Type of elimination tree + typedef GaussianBayesTree BayesTreeType; ///< Type of Bayes tree + typedef GaussianJunctionTree JunctionTreeType; ///< Type of Junction tree + /// The default dense elimination function + static std::pair, boost::shared_ptr > + DefaultEliminate(const FactorGraphType& factors, const Ordering& keys) { + return EliminatePreferCholesky(factors, keys); } + }; + + /* ************************************************************************* */ /** * A Linear Factor Graph is a factor graph where all factors are Gaussian, i.e. * Factor == GaussianFactor * VectorValues = A values structure of vectors * Most of the time, linear factor graphs arise by linearizing a non-linear factor graph. */ - class GaussianFactorGraph : public FactorGraph { + class GTSAM_EXPORT GaussianFactorGraph : + public FactorGraph, + public EliminateableFactorGraph + { public: - typedef boost::shared_ptr shared_ptr; - typedef FactorGraph Base; + typedef GaussianFactorGraph This; ///< Typedef to this class + typedef FactorGraph Base; ///< Typedef to base factor graph type + typedef EliminateableFactorGraph BaseEliminateable; ///< Typedef to base elimination class + typedef boost::shared_ptr shared_ptr; ///< shared_ptr to this class - /** - * Default constructor - */ + /** Default constructor */ GaussianFactorGraph() {} - /** - * Constructor that receives a Chordal Bayes Net and returns a GaussianFactorGraph - */ - GTSAM_EXPORT GaussianFactorGraph(const GaussianBayesNet& CBN); + /** Construct from iterator over factors */ + template + GaussianFactorGraph(ITERATOR firstFactor, ITERATOR lastFactor) : Base(firstFactor, lastFactor) {} - /** - * Constructor that receives a BayesTree and returns a GaussianFactorGraph - */ - template - GaussianFactorGraph(const BayesTree& gbt) : Base(gbt) {} + /** Construct from container of factors (shared_ptr or plain objects) */ + template + explicit GaussianFactorGraph(const CONTAINER& factors) : Base(factors) {} - /** Constructor from a factor graph of GaussianFactor or a derived type */ + /** Implicit copy/downcast constructor to override explicit template container constructor */ template - GaussianFactorGraph(const FactorGraph& fg) { - push_back(fg); - } + GaussianFactorGraph(const FactorGraph& graph) : Base(graph) {} + + /// @name Testable + /// @{ + + bool equals(const This& fg, double tol = 1e-9) const; + + /// @} /** Add a factor by value - makes a copy */ - void add(const GaussianFactor& factor) { - factors_.push_back(factor.clone()); - } + void add(const GaussianFactor& factor) { factors_.push_back(factor.clone()); } /** Add a factor by pointer - stores pointer without copying the factor */ - void add(const sharedFactor& factor) { - factors_.push_back(factor); - } + void add(const sharedFactor& factor) { factors_.push_back(factor); } /** Add a null factor */ void add(const Vector& b) { - add(JacobianFactor(b)); - } + add(JacobianFactor(b)); } /** Add a unary factor */ - void add(Index key1, const Matrix& A1, + void add(Key key1, const Matrix& A1, const Vector& b, const SharedDiagonal& model) { - add(JacobianFactor(key1,A1,b,model)); - } + add(JacobianFactor(key1,A1,b,model)); } /** Add a binary factor */ - void add(Index key1, const Matrix& A1, - Index key2, const Matrix& A2, + void add(Key key1, const Matrix& A1, + Key key2, const Matrix& A2, const Vector& b, const SharedDiagonal& model) { - add(JacobianFactor(key1,A1,key2,A2,b,model)); - } + add(JacobianFactor(key1,A1,key2,A2,b,model)); } /** Add a ternary factor */ - void add(Index key1, const Matrix& A1, - Index key2, const Matrix& A2, - Index key3, const Matrix& A3, + void add(Key key1, const Matrix& A1, + Key key2, const Matrix& A2, + Key key3, const Matrix& A3, const Vector& b, const SharedDiagonal& model) { - add(JacobianFactor(key1,A1,key2,A2,key3,A3,b,model)); - } + add(JacobianFactor(key1,A1,key2,A2,key3,A3,b,model)); } /** Add an n-ary factor */ - void add(const std::vector > &terms, - const Vector &b, const SharedDiagonal& model) { - add(JacobianFactor(terms,b,model)); - } + template + void add(const TERMS& terms, const Vector &b, const SharedDiagonal& model) { + add(JacobianFactor(terms,b,model)); } /** * Return the set of variables involved in the factors (computes a set * union). */ - typedef FastSet Keys; - GTSAM_EXPORT Keys keys() const; - - - /** Eliminate the first \c n frontal variables, returning the resulting - * conditional and remaining factor graph - this is very inefficient for - * eliminating all variables, to do that use EliminationTree or - * JunctionTree. Note that this version simply calls - * FactorGraph::eliminateFrontals with EliminateQR as the - * eliminate function argument. - */ - std::pair eliminateFrontals(size_t nFrontals, const Eliminate& function = EliminateQR) const { - return Base::eliminateFrontals(nFrontals, function); } - - /** Factor the factor graph into a conditional and a remaining factor graph. - * Given the factor graph \f$ f(X) \f$, and \c variables to factorize out - * \f$ V \f$, this function factorizes into \f$ f(X) = f(V;Y)f(Y) \f$, where - * \f$ Y := X\V \f$ are the remaining variables. If \f$ f(X) = p(X) \f$ is - * a probability density or likelihood, the factorization produces a - * conditional probability density and a marginal \f$ p(X) = p(V|Y)p(Y) \f$. - * - * For efficiency, this function treats the variables to eliminate - * \c variables as fully-connected, so produces a dense (fully-connected) - * conditional on all of the variables in \c variables, instead of a sparse - * BayesNet. If the variables are not fully-connected, it is more efficient - * to sequentially factorize multiple times. - * Note that this version simply calls - * FactorGraph::eliminate with EliminateQR as the eliminate - * function argument. - */ - std::pair eliminate(const std::vector& variables, const Eliminate& function = EliminateQR) const { - return Base::eliminate(variables, function); } - - /** Eliminate a single variable, by calling GaussianFactorGraph::eliminate. */ - std::pair eliminateOne(Index variable, const Eliminate& function = EliminateQR) const { - return Base::eliminateOne(variable, function); } - - /** Permute the variables in the factors */ - GTSAM_EXPORT void permuteWithInverse(const Permutation& inversePermutation); - - /** Apply a reduction, which is a remapping of variable indices. */ - GTSAM_EXPORT void reduceWithInverse(const internal::Reduction& inverseReduction); + typedef FastSet Keys; + Keys keys() const; /** unnormalized error */ double error(const VectorValues& x) const { @@ -171,34 +148,22 @@ namespace gtsam { return exp(-0.5 * error(c)); } - /** - * Static function that combines two factor graphs. - * @param lfg1 Linear factor graph - * @param lfg2 Linear factor graph - * @return a new combined factor graph - */ - GTSAM_EXPORT static GaussianFactorGraph combine2(const GaussianFactorGraph& lfg1, - const GaussianFactorGraph& lfg2); - - /** - * combine two factor graphs - * @param *lfg Linear factor graph - */ - GTSAM_EXPORT void combine(const GaussianFactorGraph &lfg); + ///@name Linear Algebra + ///@{ /** * Return vector of i, j, and s to generate an m-by-n sparse Jacobian matrix, * where i(k) and j(k) are the base 0 row and column indices, s(k) a double. * The standard deviations are baked into A and b */ - GTSAM_EXPORT std::vector > sparseJacobian() const; + std::vector > sparseJacobian() const; /** * Matrix version of sparseJacobian: generates a 3*m matrix with [i,j,s] entries * such that S(i(k),j(k)) = s(k), which can be given to MATLAB's sparse. * The standard deviations are baked into A and b */ - GTSAM_EXPORT Matrix sparseJacobian_() const; + Matrix sparseJacobian_() const; /** * Return a dense \f$ [ \;A\;b\; ] \in \mathbb{R}^{m \times n+1} \f$ @@ -207,7 +172,7 @@ namespace gtsam { * \f$ \frac{1}{2} \Vert Ax-b \Vert^2 \f$. See also * GaussianFactorGraph::jacobian and GaussianFactorGraph::sparseJacobian. */ - GTSAM_EXPORT Matrix augmentedJacobian() const; + Matrix augmentedJacobian() const; /** * Return the dense Jacobian \f$ A \f$ and right-hand-side \f$ b \f$, @@ -216,7 +181,7 @@ namespace gtsam { * GaussianFactorGraph::augmentedJacobian and * GaussianFactorGraph::sparseJacobian. */ - GTSAM_EXPORT std::pair jacobian() const; + std::pair jacobian() const; /** * Return a dense \f$ \Lambda \in \mathbb{R}^{n+1 \times n+1} \f$ Hessian @@ -229,7 +194,7 @@ namespace gtsam { and the negative log-likelihood is \f$ \frac{1}{2} x^T \Lambda x + \eta^T x + c \f$. */ - GTSAM_EXPORT Matrix augmentedHessian() const; + Matrix augmentedHessian() const; /** * Return the dense Hessian \f$ \Lambda \f$ and information vector @@ -237,7 +202,80 @@ namespace gtsam { * is \frac{1}{2} x^T \Lambda x + \eta^T x + c. See also * GaussianFactorGraph::augmentedHessian. */ - GTSAM_EXPORT std::pair hessian() const; + std::pair hessian() const; + + /** Solve the factor graph by performing multifrontal variable elimination in COLAMD order using + * the dense elimination function specified in \c function (default EliminatePreferCholesky), + * followed by back-substitution in the Bayes tree resulting from elimination. Is equivalent + * to calling graph.eliminateMultifrontal()->optimize(). */ + VectorValues optimize(OptionalOrdering ordering = boost::none, + const Eliminate& function = EliminationTraitsType::DefaultEliminate) const; + + /** + * Compute the gradient of the energy function, + * \f$ \nabla_{x=x_0} \left\Vert \Sigma^{-1} A x - b \right\Vert^2 \f$, + * centered around \f$ x = x_0 \f$. + * The gradient is \f$ A^T(Ax-b) \f$. + * @param fg The Jacobian factor graph $(A,b)$ + * @param x0 The center about which to compute the gradient + * @return The gradient as a VectorValues + */ + VectorValues gradient(const VectorValues& x0) const; + + /** + * Compute the gradient of the energy function, \f$ \nabla_{x=0} \left\Vert \Sigma^{-1} A x - b + * \right\Vert^2 \f$, centered around zero. The gradient is \f$ A^T(Ax-b) \f$. + * @param fg The Jacobian factor graph $(A,b)$ + * @param [output] g A VectorValues to store the gradient, which must be preallocated, + * see allocateVectorValues + * @return The gradient as a VectorValues */ + VectorValues gradientAtZero() const; + + /** Optimize along the gradient direction, with a closed-form computation to perform the line + * search. The gradient is computed about \f$ \delta x=0 \f$. + * + * This function returns \f$ \delta x \f$ that minimizes a reparametrized problem. The error + * function of a GaussianBayesNet is + * + * \f[ f(\delta x) = \frac{1}{2} |R \delta x - d|^2 = \frac{1}{2}d^T d - d^T R \delta x + + * \frac{1}{2} \delta x^T R^T R \delta x \f] + * + * with gradient and Hessian + * + * \f[ g(\delta x) = R^T(R\delta x - d), \qquad G(\delta x) = R^T R. \f] + * + * This function performs the line search in the direction of the gradient evaluated at \f$ g = + * g(\delta x = 0) \f$ with step size \f$ \alpha \f$ that minimizes \f$ f(\delta x = \alpha g) + * \f$: + * + * \f[ f(\alpha) = \frac{1}{2} d^T d + g^T \delta x + \frac{1}{2} \alpha^2 g^T G g \f] + * + * Optimizing by setting the derivative to zero yields \f$ \hat \alpha = (-g^T g) / (g^T G g) + * \f$. For efficiency, this function evaluates the denominator without computing the Hessian + * \f$ G \f$, returning + * + * \f[ \delta x = \hat\alpha g = \frac{-g^T g}{(R g)^T(R g)} \f] */ + VectorValues optimizeGradientSearch() const; + + /** x = A'*e */ + VectorValues transposeMultiply(const Errors& e) const; + + /** x += alpha*A'*e */ + void transposeMultiplyAdd(double alpha, const Errors& e, VectorValues& x) const; + + /** return A*x-b */ + Errors gaussianErrors(const VectorValues& x) const; + + ///** return A*x */ + Errors operator*(const VectorValues& x) const; + + ///** In-place version e <- A*x that overwrites e. */ + void multiplyInPlace(const VectorValues& x, Errors& e) const; + + /** In-place version e <- A*x that takes an iterator. */ + void multiplyInPlace(const VectorValues& x, const Errors::iterator& e) const; + + /// @} private: /** Serialization function */ @@ -249,140 +287,16 @@ namespace gtsam { }; - /** - * Combine and eliminate several factors. - * \addtogroup LinearSolving - */ - GTSAM_EXPORT JacobianFactor::shared_ptr CombineJacobians( - const FactorGraph& factors, - const VariableSlots& variableSlots); - /** * Evaluates whether linear factors have any constrained noise models * @return true if any factor is constrained. */ - GTSAM_EXPORT bool hasConstraints(const FactorGraph& factors); - - /** - * Densely combine and partially eliminate JacobianFactors to produce a - * single conditional with nrFrontals frontal variables and a remaining - * factor. - * Variables are eliminated in the natural order of the variable indices of in the factors. - * @param factors Factors to combine and eliminate - * @param nrFrontals Number of frontal variables to eliminate. - * @return The conditional and remaining factor - - * \addtogroup LinearSolving - */ - GTSAM_EXPORT GaussianFactorGraph::EliminationResult EliminateJacobians(const FactorGraph< - JacobianFactor>& factors, size_t nrFrontals = 1); - - /** - * Densely partially eliminate with QR factorization. HessianFactors are - * first factored with Cholesky decomposition to produce JacobianFactors, - * by calling the conversion constructor JacobianFactor(const HessianFactor&). - * Variables are eliminated in the natural order of the variable indices of in - * the factors. - * @param factors Factors to combine and eliminate - * @param nrFrontals Number of frontal variables to eliminate. - * @return The conditional and remaining factor - - * \addtogroup LinearSolving - */ - GTSAM_EXPORT GaussianFactorGraph::EliminationResult EliminateQR(const FactorGraph< - GaussianFactor>& factors, size_t nrFrontals = 1); - - /** - * Densely partially eliminate with Cholesky factorization. JacobianFactors - * are left-multiplied with their transpose to form the Hessian using the - * conversion constructor HessianFactor(const JacobianFactor&). - * - * If any factors contain constrained noise models (any sigmas equal to - * zero), QR factorization will be performed instead, because our current - * implementation cannot handle constrained noise models in Cholesky - * factorization. EliminateCholesky(), on the other hand, will fail if any - * factors contain constrained noise models. - * - * Variables are eliminated in the natural order of the variable indices of in - * the factors. - * @param factors Factors to combine and eliminate - * @param nrFrontals Number of frontal variables to eliminate. - * @return The conditional and remaining factor - - * \addtogroup LinearSolving - */ - GTSAM_EXPORT GaussianFactorGraph::EliminationResult EliminatePreferCholesky(const FactorGraph< - GaussianFactor>& factors, size_t nrFrontals = 1); - - /** - * Densely partially eliminate with Cholesky factorization. JacobianFactors - * are left-multiplied with their transpose to form the Hessian using the - * conversion constructor HessianFactor(const JacobianFactor&). - * - * If any factors contain constrained noise models, this function will fail - * because our current implementation cannot handle constrained noise models - * in Cholesky factorization. The function EliminatePreferCholesky() - * automatically does QR instead when this is the case. - * - * Variables are eliminated in the natural order of the variable indices of in - * the factors. - * @param factors Factors to combine and eliminate - * @param nrFrontals Number of frontal variables to eliminate. - * @return The conditional and remaining factor - - * \addtogroup LinearSolving - */ - GTSAM_EXPORT GaussianFactorGraph::EliminationResult EliminateCholesky(const FactorGraph< - GaussianFactor>& factors, size_t nrFrontals = 1); + GTSAM_EXPORT bool hasConstraints(const GaussianFactorGraph& factors); /****** Linear Algebra Opeations ******/ - /** return A*x */ - GTSAM_EXPORT Errors operator*(const GaussianFactorGraph& fg, const VectorValues& x); - - /** In-place version e <- A*x that overwrites e. */ - GTSAM_EXPORT void multiplyInPlace(const GaussianFactorGraph& fg, const VectorValues& x, Errors& e); - - /** In-place version e <- A*x that takes an iterator. */ - GTSAM_EXPORT void multiplyInPlace(const GaussianFactorGraph& fg, const VectorValues& x, const Errors::iterator& e); - - /** x += alpha*A'*e */ - GTSAM_EXPORT void transposeMultiplyAdd(const GaussianFactorGraph& fg, double alpha, const Errors& e, VectorValues& x); - - /** - * Compute the gradient of the energy function, - * \f$ \nabla_{x=x_0} \left\Vert \Sigma^{-1} A x - b \right\Vert^2 \f$, - * centered around \f$ x = x_0 \f$. - * The gradient is \f$ A^T(Ax-b) \f$. - * @param fg The Jacobian factor graph $(A,b)$ - * @param x0 The center about which to compute the gradient - * @return The gradient as a VectorValues - */ - GTSAM_EXPORT VectorValues gradient(const GaussianFactorGraph& fg, const VectorValues& x0); - - /** - * Compute the gradient of the energy function, - * \f$ \nabla_{x=0} \left\Vert \Sigma^{-1} A x - b \right\Vert^2 \f$, - * centered around zero. - * The gradient is \f$ A^T(Ax-b) \f$. - * @param fg The Jacobian factor graph $(A,b)$ - * @param [output] g A VectorValues to store the gradient, which must be preallocated, see allocateVectorValues - * @return The gradient as a VectorValues - */ - GTSAM_EXPORT void gradientAtZero(const GaussianFactorGraph& fg, VectorValues& g); - - /* matrix-vector operations */ - GTSAM_EXPORT void residual(const GaussianFactorGraph& fg, const VectorValues &x, VectorValues &r); - GTSAM_EXPORT void multiply(const GaussianFactorGraph& fg, const VectorValues &x, VectorValues &r); - GTSAM_EXPORT void transposeMultiply(const GaussianFactorGraph& fg, const VectorValues &r, VectorValues &x); - - /** shared pointer version - * \todo Make this a member function - affects SubgraphPreconditioner */ - GTSAM_EXPORT boost::shared_ptr gaussianErrors_(const GaussianFactorGraph& fg, const VectorValues& x); - - /** return A*x-b - * \todo Make this a member function - affects SubgraphPreconditioner */ - inline Errors gaussianErrors(const GaussianFactorGraph& fg, const VectorValues& x) { - return *gaussianErrors_(fg, x); } + ///* matrix-vector operations */ + //GTSAM_EXPORT void residual(const GaussianFactorGraph& fg, const VectorValues &x, VectorValues &r); + //GTSAM_EXPORT void multiply(const GaussianFactorGraph& fg, const VectorValues &x, VectorValues &r); } // namespace gtsam diff --git a/gtsam/linear/GaussianISAM.cpp b/gtsam/linear/GaussianISAM.cpp index a358e5cab..ddbc5c1b3 100644 --- a/gtsam/linear/GaussianISAM.cpp +++ b/gtsam/linear/GaussianISAM.cpp @@ -10,58 +10,25 @@ * -------------------------------------------------------------------------- */ /** - * @file GaussianISAM.cpp - * @brief Linear ISAM only - * @author Michael Kaess + * @file SymbolicISAM.cpp + * @date July 29, 2013 + * @author Frank Dellaert + * @author Richard Roberts */ #include -#include - -#include - -using namespace std; -using namespace gtsam; +#include namespace gtsam { -// Explicitly instantiate so we don't have to include everywhere -template class ISAM; + // Instantiate base class + template class ISAM; + + /* ************************************************************************* */ + GaussianISAM::GaussianISAM() {} + + /* ************************************************************************* */ + GaussianISAM::GaussianISAM(const GaussianBayesTree& bayesTree) : + Base(bayesTree) {} -/* ************************************************************************* */ -GaussianFactor::shared_ptr GaussianISAM::marginalFactor(Index j) const { - return Super::marginalFactor(j, &EliminateQR); } - -/* ************************************************************************* */ -BayesNet::shared_ptr GaussianISAM::marginalBayesNet(Index j) const { - return Super::marginalBayesNet(j, &EliminateQR); -} - -/* ************************************************************************* */ -Matrix GaussianISAM::marginalCovariance(Index j) const { - GaussianConditional::shared_ptr conditional = marginalBayesNet(j)->front(); - return conditional->information().inverse(); -} - -/* ************************************************************************* */ - BayesNet::shared_ptr GaussianISAM::jointBayesNet( - Index key1, Index key2) const { - return Super::jointBayesNet(key1, key2, &EliminateQR); -} - -/* ************************************************************************* */ -VectorValues optimize(const GaussianISAM& isam) { - VectorValues result(isam.dims_); - // Call optimize for BayesTree - optimizeInPlace((const BayesTree&)isam, result); - return result; -} - -/* ************************************************************************* */ -BayesNet GaussianISAM::shortcut(sharedClique clique, sharedClique root) { - return clique->shortcut(root,&EliminateQR); -} -/* ************************************************************************* */ - -} // \namespace gtsam diff --git a/gtsam/linear/GaussianISAM.h b/gtsam/linear/GaussianISAM.h index fcee326e4..24cf2a95d 100644 --- a/gtsam/linear/GaussianISAM.h +++ b/gtsam/linear/GaussianISAM.h @@ -10,93 +10,37 @@ * -------------------------------------------------------------------------- */ /** - * @file GaussianISAM.h - * @brief Linear ISAM only - * @author Michael Kaess + * @file SymbolicISAM.h + * @date July 29, 2013 + * @author Frank Dellaert + * @author Richard Roberts */ -// \callgraph - #pragma once -#include - +#include #include -#include -#include namespace gtsam { -class GaussianISAM : public ISAM { + class GTSAM_EXPORT GaussianISAM : public ISAM + { + public: + typedef ISAM Base; + typedef GaussianISAM This; + typedef boost::shared_ptr shared_ptr; - typedef ISAM Super; - std::vector dims_; + /// @name Standard Constructors + /// @{ -public: + /** Create an empty Bayes Tree */ + GaussianISAM(); - typedef std::vector Dims; + /** Copy constructor */ + GaussianISAM(const GaussianBayesTree& bayesTree); - /** Create an empty Bayes Tree */ - GaussianISAM() : Super() {} + /// @} - /** Create a Bayes Tree from a Bayes Net */ -// GaussianISAM(const GaussianBayesNet& bayesNet) : Super(bayesNet) {} - GaussianISAM(const BayesTree& bayesTree) : Super(bayesTree) { - GaussianJunctionTree::countDims(bayesTree, dims_); - } + }; - /** Override update_internal to also keep track of variable dimensions. */ - template - void update_internal(const FACTORGRAPH& newFactors, Cliques& orphans) { - Super::update_internal(newFactors, orphans, &EliminateQR); // TODO: why does this force QR? - update_dimensions(newFactors); - } - - template - void update(const FACTORGRAPH& newFactors) { - Cliques orphans; - this->update_internal(newFactors, orphans); - } - - template - inline void update_dimensions(const FACTORGRAPH& newFactors) { - BOOST_FOREACH(const typename FACTORGRAPH::sharedFactor& factor, newFactors) { - for(typename FACTORGRAPH::FactorType::const_iterator key = factor->begin(); key != factor->end(); ++key) { - if(*key >= dims_.size()) - dims_.resize(*key + 1); - if(dims_[*key] == 0) - dims_[*key] = factor->getDim(key); - else - assert(dims_[*key] == factor->getDim(key)); - } - } - } - - void clear() { - Super::clear(); - dims_.clear(); - } - - // access - const Dims& dims() const { return dims_; } ///< Const access to dimensions structure - Dims& dims() { return dims_; } ///< non-const access to dimensions structure (advanced interface) - - GTSAM_EXPORT friend VectorValues optimize(const GaussianISAM&); - - /** return marginal on any variable as a factor, Bayes net, or mean/cov */ - GTSAM_EXPORT GaussianFactor::shared_ptr marginalFactor(Index j) const; - GTSAM_EXPORT BayesNet::shared_ptr marginalBayesNet(Index key) const; - GTSAM_EXPORT Matrix marginalCovariance(Index key) const; - - /** return joint between two variables, as a Bayes net */ - GTSAM_EXPORT BayesNet::shared_ptr jointBayesNet(Index key1, Index key2) const; - - /** return the conditional P(S|Root) on the separator given the root */ - GTSAM_EXPORT static BayesNet shortcut(sharedClique clique, sharedClique root); - -}; // \class GaussianISAM - -// optimize the BayesTree, starting from the root -GTSAM_EXPORT VectorValues optimize(const GaussianISAM& isam); - -} // \namespace gtsam +} diff --git a/gtsam/linear/GaussianJunctionTree.cpp b/gtsam/linear/GaussianJunctionTree.cpp index 381c35e3f..182375925 100644 --- a/gtsam/linear/GaussianJunctionTree.cpp +++ b/gtsam/linear/GaussianJunctionTree.cpp @@ -11,49 +11,23 @@ /** * @file GaussianJunctionTree.cpp - * @date Jul 12, 2010 - * @author Kai Ni + * @date Mar 29, 2013 * @author Frank Dellaert - * @brief: the Gaussian junction tree + * @author Richard Roberts */ -#include -#include +#include #include -#include - -#include - -#include +#include namespace gtsam { - // explicit template instantiation - template class JunctionTree; - template class ClusterTree; - - using namespace std; + // Instantiate base class + template class JunctionTree; /* ************************************************************************* */ - VectorValues GaussianJunctionTree::optimize(Eliminate function) const { - gttic(GJT_eliminate); - // eliminate from leaves to the root - BTClique::shared_ptr rootClique(this->eliminate(function)); - gttoc(GJT_eliminate); + GaussianJunctionTree::GaussianJunctionTree( + const GaussianEliminationTree& eliminationTree) : + Base(Base::FromEliminationTree(eliminationTree)) {} - // Allocate solution vector and copy RHS - gttic(allocate_VectorValues); - vector dims(rootClique->conditional()->back()+1, 0); - countDims(rootClique, dims); - VectorValues result(dims); - gttoc(allocate_VectorValues); - - // back-substitution - gttic(backsubstitute); - internal::optimizeInPlace(rootClique, result); - gttoc(backsubstitute); - return result; - } - - /* ************************************************************************* */ -} //namespace gtsam +} diff --git a/gtsam/linear/GaussianJunctionTree.h b/gtsam/linear/GaussianJunctionTree.h index ba67e6002..f944d417b 100644 --- a/gtsam/linear/GaussianJunctionTree.h +++ b/gtsam/linear/GaussianJunctionTree.h @@ -11,78 +11,56 @@ /** * @file GaussianJunctionTree.h - * @date Jul 12, 2010 - * @author Kai Ni + * @date Mar 29, 2013 * @author Frank Dellaert - * @brief: the Gaussian junction tree + * @author Richard Roberts */ -#pragma once - -#include -#include -#include -#include #include +#include +#include namespace gtsam { + // Forward declarations + class GaussianEliminationTree; + /** - * A JunctionTree where all the factors are of type GaussianFactor. + * A ClusterTree, i.e., a set of variable clusters with factors, arranged in a tree, with + * the additional property that it represents the clique tree associated with a Bayes net. * - * In GTSAM, typically, a GaussianJunctionTree is created directly from a GaussianFactorGraph, - * after which you call optimize() to solve for the mean, or JunctionTree::eliminate() to - * create a BayesTree. In both cases, you need to provide a basic - * GaussianFactorGraph::Eliminate function that will be used to + * In GTSAM a junction tree is an intermediate data structure in multifrontal + * variable elimination. Each node is a cluster of factors, along with a + * clique of variables that are eliminated all at once. In detail, every node k represents + * a clique (maximal fully connected subset) of an associated chordal graph, such as a + * chordal Bayes net resulting from elimination. + * + * The difference with the BayesTree is that a JunctionTree stores factors, whereas a + * BayesTree stores conditionals, that are the product of eliminating the factors in the + * corresponding JunctionTree cliques. + * + * The tree structure and elimination method are exactly analagous to the EliminationTree, + * except that in the JunctionTree, at each node multiple variables are eliminated at a time. * * \addtogroup Multifrontal + * \nosubgrouping */ - class GaussianJunctionTree: public JunctionTree { - + class GTSAM_EXPORT GaussianJunctionTree : + public JunctionTree { public: - typedef boost::shared_ptr shared_ptr; - typedef JunctionTree Base; - typedef Base::sharedClique sharedClique; - typedef GaussianFactorGraph::Eliminate Eliminate; - - public : - - /** Default constructor */ - GaussianJunctionTree() : Base() {} - - /** Constructor from a factor graph. Builds a VariableIndex. */ - GaussianJunctionTree(const GaussianFactorGraph& fg) : Base(fg) {} - - /** Construct from a factor graph and a pre-computed variable index. */ - GaussianJunctionTree(const GaussianFactorGraph& fg, const VariableIndex& variableIndex) - : Base(fg, variableIndex) {} - + typedef JunctionTree Base; ///< Base class + typedef GaussianJunctionTree This; ///< This class + typedef boost::shared_ptr shared_ptr; ///< Shared pointer to this class + /** - * optimize the linear graph - */ - GTSAM_EXPORT VectorValues optimize(Eliminate function) const; + * Build the elimination tree of a factor graph using pre-computed column structure. + * @param factorGraph The factor graph for which to build the elimination tree + * @param structure The set of factors involving each variable. If this is not + * precomputed, you can call the Create(const FactorGraph&) + * named constructor instead. + * @return The elimination tree + */ + GaussianJunctionTree(const GaussianEliminationTree& eliminationTree); + }; - // convenient function to return dimensions of all variables in the BayesTree - template - static void countDims(const BayesTree& bayesTree, DIM_CONTAINER& dims) { - dims = DIM_CONTAINER(bayesTree.root()->conditional()->back()+1, 0); - countDims(bayesTree.root(), dims); - } - - private: - template - static void countDims(const boost::shared_ptr& clique, DIM_CONTAINER& dims) { - GaussianConditional::const_iterator it = clique->conditional()->beginFrontals(); - for (; it != clique->conditional()->endFrontals(); ++it) { - assert(dims.at(*it) == 0); - dims.at(*it) = clique->conditional()->dim(it); - } - - BOOST_FOREACH(const typename CLIQUE::shared_ptr& child, clique->children()) { - countDims(child, dims); - } - } - - }; // GaussianJunctionTree - -} // namespace gtsam +} diff --git a/gtsam/linear/GaussianMultifrontalSolver.cpp b/gtsam/linear/GaussianMultifrontalSolver.cpp deleted file mode 100644 index af4fe90ec..000000000 --- a/gtsam/linear/GaussianMultifrontalSolver.cpp +++ /dev/null @@ -1,93 +0,0 @@ -/* ---------------------------------------------------------------------------- - - * GTSAM Copyright 2010, Georgia Tech Research Corporation, - * Atlanta, Georgia 30332-0415 - * All Rights Reserved - * Authors: Frank Dellaert, et al. (see THANKS for the full author list) - - * See LICENSE for the license information - - * -------------------------------------------------------------------------- */ - -/** - * @file GaussianMultifrontalSolver.cpp - * @author Richard Roberts - * @date Oct 21, 2010 - */ - -#include - -namespace gtsam { - -/* ************************************************************************* */ -GaussianMultifrontalSolver::GaussianMultifrontalSolver(const FactorGraph& factorGraph, bool useQR) : - Base(factorGraph), useQR_(useQR) {} - -/* ************************************************************************* */ -GaussianMultifrontalSolver::GaussianMultifrontalSolver(const FactorGraph::shared_ptr& factorGraph, - const VariableIndex::shared_ptr& variableIndex, bool useQR) : - Base(factorGraph, variableIndex), useQR_(useQR) {} - -/* ************************************************************************* */ -GaussianMultifrontalSolver::shared_ptr -GaussianMultifrontalSolver::Create(const FactorGraph::shared_ptr& factorGraph, - const VariableIndex::shared_ptr& variableIndex, bool useQR) { - return shared_ptr(new GaussianMultifrontalSolver(factorGraph, variableIndex, useQR)); -} - -/* ************************************************************************* */ -void GaussianMultifrontalSolver::replaceFactors(const FactorGraph::shared_ptr& factorGraph) { - Base::replaceFactors(factorGraph); -} - -/* ************************************************************************* */ -GaussianBayesTree::shared_ptr GaussianMultifrontalSolver::eliminate() const { - if (useQR_) - return Base::eliminate(&EliminateQR); - else - return Base::eliminate(&EliminatePreferCholesky); -} - -/* ************************************************************************* */ -VectorValues::shared_ptr GaussianMultifrontalSolver::optimize() const { - gttic(optimize); - VectorValues::shared_ptr values; - if (useQR_) - values = VectorValues::shared_ptr(new VectorValues(junctionTree_->optimize(&EliminateQR))); - else - values= VectorValues::shared_ptr(new VectorValues(junctionTree_->optimize(&EliminatePreferCholesky))); - gttoc(optimize); - return values; -} - -/* ************************************************************************* */ -GaussianFactorGraph::shared_ptr GaussianMultifrontalSolver::jointFactorGraph(const std::vector& js) const { - if (useQR_) - return GaussianFactorGraph::shared_ptr(new GaussianFactorGraph(*Base::jointFactorGraph(js,&EliminateQR))); - else - return GaussianFactorGraph::shared_ptr(new GaussianFactorGraph(*Base::jointFactorGraph(js,&EliminatePreferCholesky))); -} - -/* ************************************************************************* */ -GaussianFactor::shared_ptr GaussianMultifrontalSolver::marginalFactor(Index j) const { - if (useQR_) - return Base::marginalFactor(j,&EliminateQR); - else - return Base::marginalFactor(j,&EliminatePreferCholesky); -} - -/* ************************************************************************* */ -Matrix GaussianMultifrontalSolver::marginalCovariance(Index j) const { - FactorGraph fg; - GaussianConditional::shared_ptr conditional; - if (useQR_) { - fg.push_back(Base::marginalFactor(j,&EliminateQR)); - conditional = EliminateQR(fg,1).first; - } else { - fg.push_back(Base::marginalFactor(j,&EliminatePreferCholesky)); - conditional = EliminatePreferCholesky(fg,1).first; - } - return conditional->information().inverse(); -} - -} diff --git a/gtsam/linear/GaussianMultifrontalSolver.h b/gtsam/linear/GaussianMultifrontalSolver.h deleted file mode 100644 index 97f3de67c..000000000 --- a/gtsam/linear/GaussianMultifrontalSolver.h +++ /dev/null @@ -1,132 +0,0 @@ -/* ---------------------------------------------------------------------------- - - * GTSAM Copyright 2010, Georgia Tech Research Corporation, - * Atlanta, Georgia 30332-0415 - * All Rights Reserved - * Authors: Frank Dellaert, et al. (see THANKS for the full author list) - - * See LICENSE for the license information - - * -------------------------------------------------------------------------- */ - -/** - * @file GaussianMultifrontalSolver.h - * @author Richard Roberts - * @date Oct 21, 2010 - */ - -#pragma once - -#include -#include -#include -#include - -#include - -namespace gtsam { - -/** - * This solver uses multifrontal elimination to solve a GaussianFactorGraph, - * i.e. a sparse linear system. Underlying this is a junction tree, which is - * eliminated into a Bayes tree. - * - * The elimination ordering is "baked in" to the variable indices at this - * stage, i.e. elimination proceeds in order from '0'. A fill-reducing - * ordering is computed symbolically from the NonlinearFactorGraph, on the - * nonlinear side of gtsam. (It is actually also possible to permute an - * existing GaussianFactorGraph into a COLAMD ordering instead, this is done - * when computing marginals). - * - * The JunctionTree recursively produces a BayesTree, - * on which this class calls optimize(...) to perform back-substitution. - */ -class GaussianMultifrontalSolver : GenericMultifrontalSolver { - -protected: - - typedef GenericMultifrontalSolver Base; - typedef boost::shared_ptr shared_ptr; - - /** flag to determine whether to use Cholesky or QR */ - bool useQR_; - -public: - - /** - * Construct the solver for a factor graph. This builds the junction - * tree, which already does some of the work of elimination. - */ - GTSAM_EXPORT GaussianMultifrontalSolver(const FactorGraph& factorGraph, bool useQR = false); - - /** - * Construct the solver with a shared pointer to a factor graph and to a - * VariableIndex. The solver will store these pointers, so this constructor - * is the fastest. - */ - GTSAM_EXPORT GaussianMultifrontalSolver(const FactorGraph::shared_ptr& factorGraph, - const VariableIndex::shared_ptr& variableIndex, bool useQR = false); - - /** - * Named constructor to return a shared_ptr. This builds the elimination - * tree, which already does some of the symbolic work of elimination. - */ - GTSAM_EXPORT static shared_ptr Create(const FactorGraph::shared_ptr& factorGraph, - const VariableIndex::shared_ptr& variableIndex, bool useQR = false); - - /** - * Return a new solver that solves the given factor graph, which must have - * the *same structure* as the one this solver solves. For some solvers this - * is more efficient than constructing the solver from scratch. This can be - * used in cases where the numerical values of the linear problem change, - * e.g. during iterative nonlinear optimization. - */ - GTSAM_EXPORT void replaceFactors(const FactorGraph::shared_ptr& factorGraph); - - /** - * Eliminate the factor graph sequentially. Uses a column elimination tree - * to recursively eliminate. - */ - GTSAM_EXPORT GaussianBayesTree::shared_ptr eliminate() const; - - /** - * Compute the least-squares solution of the GaussianFactorGraph. This - * eliminates to create a BayesNet and then back-substitutes this BayesNet to - * obtain the solution. - */ - GTSAM_EXPORT VectorValues::shared_ptr optimize() const; - - /** - * Compute the marginal joint over a set of variables, by integrating out - * all of the other variables. This function returns the result as a factor - * graph. - * - * NOTE: This function is limited to computing a joint on 2 variables - */ - GTSAM_EXPORT GaussianFactorGraph::shared_ptr jointFactorGraph(const std::vector& js) const; - - /** - * Compute the marginal Gaussian density over a variable, by integrating out - * all of the other variables. This function returns the result as an upper- - * triangular R factor and right-hand-side, i.e. a GaussianConditional with - * R*x = d. To get a mean and covariance matrix, use marginalStandard(...) - */ - GTSAM_EXPORT GaussianFactor::shared_ptr marginalFactor(Index j) const; - - /** - * Compute the marginal Gaussian density over a variable, by integrating out - * all of the other variables. This function returns the result as a mean - * vector and covariance matrix. Compared to marginalCanonical, which - * returns a GaussianConditional, this function back-substitutes the R factor - * to obtain the mean, then computes \f$ \Sigma = (R^T * R)^{-1} \f$. - */ - GTSAM_EXPORT Matrix marginalCovariance(Index j) const; - - /** @return true if using QR */ - inline bool usingQR() const { return useQR_; } - -}; - -} - - diff --git a/gtsam/linear/GaussianSequentialSolver.cpp b/gtsam/linear/GaussianSequentialSolver.cpp deleted file mode 100644 index 75a8c3871..000000000 --- a/gtsam/linear/GaussianSequentialSolver.cpp +++ /dev/null @@ -1,120 +0,0 @@ -/* ---------------------------------------------------------------------------- - - * GTSAM Copyright 2010, Georgia Tech Research Corporation, - * Atlanta, Georgia 30332-0415 - * All Rights Reserved - * Authors: Frank Dellaert, et al. (see THANKS for the full author list) - - * See LICENSE for the license information - - * -------------------------------------------------------------------------- */ - -/** - * @file GaussianSequentialSolver.cpp - * @author Richard Roberts - * @date Oct 19, 2010 - */ - -#include -#include - -namespace gtsam { - -/* ************************************************************************* */ -GaussianSequentialSolver::GaussianSequentialSolver( - const FactorGraph& factorGraph, bool useQR) : - Base(factorGraph), useQR_(useQR) { -} - -/* ************************************************************************* */ -GaussianSequentialSolver::GaussianSequentialSolver( - const FactorGraph::shared_ptr& factorGraph, - const VariableIndex::shared_ptr& variableIndex, bool useQR) : - Base(factorGraph, variableIndex), useQR_(useQR) { -} - -/* ************************************************************************* */ -GaussianSequentialSolver::shared_ptr GaussianSequentialSolver::Create( - const FactorGraph::shared_ptr& factorGraph, - const VariableIndex::shared_ptr& variableIndex, bool useQR) { - return shared_ptr( - new GaussianSequentialSolver(factorGraph, variableIndex, useQR)); -} - -/* ************************************************************************* */ -void GaussianSequentialSolver::replaceFactors( - const FactorGraph::shared_ptr& factorGraph) { - Base::replaceFactors(factorGraph); -} - -/* ************************************************************************* */ -GaussianBayesNet::shared_ptr GaussianSequentialSolver::eliminate() const { - if (useQR_) - return Base::eliminate(&EliminateQR); - else - return Base::eliminate(&EliminatePreferCholesky); -} - -/* ************************************************************************* */ -VectorValues::shared_ptr GaussianSequentialSolver::optimize() const { - - static const bool debug = false; - - if(debug) this->factors_->print("GaussianSequentialSolver, eliminating "); - if(debug) this->eliminationTree_->print("GaussianSequentialSolver, elimination tree "); - - gttic(eliminate); - // Eliminate using the elimination tree - GaussianBayesNet::shared_ptr bayesNet(this->eliminate()); - gttoc(eliminate); - - if(debug) bayesNet->print("GaussianSequentialSolver, Bayes net "); - - // Allocate the solution vector if it is not already allocated -// VectorValues::shared_ptr solution = allocateVectorValues(*bayesNet); - - gttic(optimize); - // Back-substitute - VectorValues::shared_ptr solution( - new VectorValues(gtsam::optimize(*bayesNet))); - gttoc(optimize); - - if(debug) solution->print("GaussianSequentialSolver, solution "); - - return solution; -} - -/* ************************************************************************* */ -GaussianFactor::shared_ptr GaussianSequentialSolver::marginalFactor(Index j) const { - if (useQR_) - return Base::marginalFactor(j,&EliminateQR); - else - return Base::marginalFactor(j,&EliminatePreferCholesky); -} - -/* ************************************************************************* */ -Matrix GaussianSequentialSolver::marginalCovariance(Index j) const { - FactorGraph fg; - GaussianConditional::shared_ptr conditional; - if (useQR_) { - fg.push_back(Base::marginalFactor(j, &EliminateQR)); - conditional = EliminateQR(fg, 1).first; - } else { - fg.push_back(Base::marginalFactor(j, &EliminatePreferCholesky)); - conditional = EliminatePreferCholesky(fg, 1).first; - } - return conditional->information().inverse(); -} - -/* ************************************************************************* */ -GaussianFactorGraph::shared_ptr -GaussianSequentialSolver::jointFactorGraph(const std::vector& js) const { - if (useQR_) - return GaussianFactorGraph::shared_ptr(new GaussianFactorGraph( - *Base::jointFactorGraph(js, &EliminateQR))); - else - return GaussianFactorGraph::shared_ptr(new GaussianFactorGraph( - *Base::jointFactorGraph(js, &EliminatePreferCholesky))); -} - -} /// namespace gtsam diff --git a/gtsam/linear/GaussianSequentialSolver.h b/gtsam/linear/GaussianSequentialSolver.h deleted file mode 100644 index c3c54154e..000000000 --- a/gtsam/linear/GaussianSequentialSolver.h +++ /dev/null @@ -1,133 +0,0 @@ -/* ---------------------------------------------------------------------------- - - * GTSAM Copyright 2010, Georgia Tech Research Corporation, - * Atlanta, Georgia 30332-0415 - * All Rights Reserved - * Authors: Frank Dellaert, et al. (see THANKS for the full author list) - - * See LICENSE for the license information - - * -------------------------------------------------------------------------- */ - -/** - * @file GaussianSequentialSolver.h - * @brief Solves a GaussianFactorGraph (i.e. a sparse linear system) using sequential variable elimination. - * @author Richard Roberts - * @date Oct 19, 2010 - */ - -#pragma once - -#include -#include -#include - -#include - -namespace gtsam { - -/** This solver uses sequential variable elimination to solve a - * GaussianFactorGraph, i.e. a sparse linear system. Underlying this is a - * column elimination tree (inference/EliminationTree), see Gilbert 2001 BIT. - * - * The elimination ordering is "baked in" to the variable indices at this - * stage, i.e. elimination proceeds in order from '0'. A fill-reducing - * ordering is computed symbolically from the NonlinearFactorGraph, on the - * nonlinear side of gtsam. (To be precise, it is possible to permute an - * existing GaussianFactorGraph into a COLAMD ordering instead, this is done - * when computing marginals). - * - * This is not the most efficient algorithm we provide, most efficient is the - * MultifrontalSolver, which performs Multi-frontal QR factorization. However, - * sequential variable elimination is easier to understand so this is a good - * starting point to learn about these algorithms and our implementation. - * Additionally, the first step of MFQR is symbolic sequential elimination. - * - * The EliminationTree recursively produces a BayesNet, - * typedef'ed in linear/GaussianBayesNet, on which this class calls - * optimize(...) to perform back-substitution. - */ -class GaussianSequentialSolver : GenericSequentialSolver { - -protected: - - typedef GenericSequentialSolver Base; - typedef boost::shared_ptr shared_ptr; - - /** flag to determine whether to use Cholesky or QR */ - bool useQR_; - -public: - - /** - * Construct the solver for a factor graph. This builds the elimination - * tree, which already does some of the work of elimination. - */ - GTSAM_EXPORT GaussianSequentialSolver(const FactorGraph& factorGraph, bool useQR = false); - - /** - * Construct the solver with a shared pointer to a factor graph and to a - * VariableIndex. The solver will store these pointers, so this constructor - * is the fastest. - */ - GTSAM_EXPORT GaussianSequentialSolver(const FactorGraph::shared_ptr& factorGraph, - const VariableIndex::shared_ptr& variableIndex, bool useQR = false); - - /** - * Named constructor to return a shared_ptr. This builds the elimination - * tree, which already does some of the symbolic work of elimination. - */ - GTSAM_EXPORT static shared_ptr Create(const FactorGraph::shared_ptr& factorGraph, - const VariableIndex::shared_ptr& variableIndex, bool useQR = false); - - /** - * Return a new solver that solves the given factor graph, which must have - * the *same structure* as the one this solver solves. For some solvers this - * is more efficient than constructing the solver from scratch. This can be - * used in cases where the numerical values of the linear problem change, - * e.g. during iterative nonlinear optimization. - */ - GTSAM_EXPORT void replaceFactors(const FactorGraph::shared_ptr& factorGraph); - - /** - * Eliminate the factor graph sequentially. Uses a column elimination tree - * to recursively eliminate. - */ - GTSAM_EXPORT GaussianBayesNet::shared_ptr eliminate() const; - - /** - * Compute the least-squares solution of the GaussianFactorGraph. This - * eliminates to create a BayesNet and then back-substitutes this BayesNet to - * obtain the solution. - */ - GTSAM_EXPORT VectorValues::shared_ptr optimize() const; - - /** - * Compute the marginal Gaussian density over a variable, by integrating out - * all of the other variables. This function returns the result as an upper- - * triangular R factor and right-hand-side, i.e. a GaussianConditional with - * R*x = d. To get a mean and covariance matrix, use marginalStandard(...) - */ - GTSAM_EXPORT GaussianFactor::shared_ptr marginalFactor(Index j) const; - - /** - * Compute the marginal Gaussian density over a variable, by integrating out - * all of the other variables. This function returns the result as a mean - * vector and covariance matrix. Compared to marginalCanonical, which - * returns a GaussianConditional, this function back-substitutes the R factor - * to obtain the mean, then computes \f$ \Sigma = (R^T * R)^{-1} \f$. - */ - GTSAM_EXPORT Matrix marginalCovariance(Index j) const; - - /** - * Compute the marginal joint over a set of variables, by integrating out - * all of the other variables. This function returns the result as an upper- - * triangular R factor and right-hand-side, i.e. a GaussianBayesNet with - * R*x = d. To get a mean and covariance matrix, use jointStandard(...) - */ - GTSAM_EXPORT GaussianFactorGraph::shared_ptr jointFactorGraph(const std::vector& js) const; - -}; - -} - diff --git a/gtsam/linear/HessianFactor-inl.h b/gtsam/linear/HessianFactor-inl.h new file mode 100644 index 000000000..4a21ab0ff --- /dev/null +++ b/gtsam/linear/HessianFactor-inl.h @@ -0,0 +1,41 @@ +/* ---------------------------------------------------------------------------- + + * GTSAM Copyright 2010, Georgia Tech Research Corporation, + * Atlanta, Georgia 30332-0415 + * All Rights Reserved + * Authors: Frank Dellaert, et al. (see THANKS for the full author list) + + * See LICENSE for the license information + + * -------------------------------------------------------------------------- */ + +/** + * @file HessianFactor-inl.h + * @brief Contains the HessianFactor class, a general quadratic factor + * @author Richard Roberts + * @date Dec 8, 2010 + */ + +#pragma once + +namespace gtsam { + + /* ************************************************************************* */ + template + HessianFactor::HessianFactor(const KEYS& keys, const SymmetricBlockMatrix& augmentedInformation) : + GaussianFactor(keys), info_(augmentedInformation) + { + // Check number of variables + if((DenseIndex)Base::keys_.size() != augmentedInformation.nBlocks() - 1) + throw std::invalid_argument( + "Error in HessianFactor constructor input. Number of provided keys plus\n" + "one for the information vector must equal the number of provided matrix blocks."); + + // Check RHS dimension + if(augmentedInformation(0, augmentedInformation.nBlocks() - 1).cols() != 1) + throw std::invalid_argument( + "Error in HessianFactor constructor input. The last provided matrix block\n" + "must be the information vector, but the last provided block had more than one column."); + } + +} diff --git a/gtsam/linear/HessianFactor.cpp b/gtsam/linear/HessianFactor.cpp index 385ed2394..1fa94922b 100644 --- a/gtsam/linear/HessianFactor.cpp +++ b/gtsam/linear/HessianFactor.cpp @@ -15,7 +15,17 @@ * @date Dec 8, 2010 */ -#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include #include #include @@ -29,20 +39,18 @@ #ifdef __GNUC__ #pragma GCC diagnostic pop #endif +#include +#include +#include +#include +#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include +#include +#include using namespace std; +using namespace boost::assign; +namespace br { using namespace boost::range; using namespace boost::adaptors; } namespace gtsam { @@ -54,132 +62,124 @@ string SlotEntry::toString() const { } /* ************************************************************************* */ -Scatter::Scatter(const FactorGraph& gfg) { - // First do the set union. - BOOST_FOREACH(const GaussianFactor::shared_ptr& factor, gfg) { - if(factor) { - for(GaussianFactor::const_iterator variable = factor->begin(); variable != factor->end(); ++variable) { - this->insert(make_pair(*variable, SlotEntry(0, factor->getDim(variable)))); - } - } - } - - // Next fill in the slot indices (we can only get these after doing the set - // union. - size_t slot = 0; - BOOST_FOREACH(value_type& var_slot, *this) { - var_slot.second.slot = (slot ++); - } +Scatter::Scatter(const GaussianFactorGraph& gfg, boost::optional ordering) +{ + static const size_t none = std::numeric_limits::max(); + + // First do the set union. + BOOST_FOREACH(const GaussianFactor::shared_ptr& factor, gfg) { + if(factor) { + for(GaussianFactor::const_iterator variable = factor->begin(); variable != factor->end(); ++variable) { + // TODO: Fix this hack to cope with zero-row Jacobians that come from BayesTreeOrphanWrappers + const JacobianFactor* asJacobian = dynamic_cast(factor.get()); + if(!asJacobian || asJacobian->rows() > 0) + this->insert(make_pair(*variable, SlotEntry(none, factor->getDim(variable)))); + } + } + } + + // If we have an ordering, pre-fill the ordered variables first + size_t slot = 0; + if(ordering) { + BOOST_FOREACH(Key key, *ordering) { + const_iterator entry = find(key); + if(entry == end()) + throw std::invalid_argument( + "The ordering provided to the HessianFactor Scatter constructor\n" + "contained extra variables that did not appear in the factors to combine."); + at(key).slot = (slot ++); + } + } + + // Next fill in the slot indices (we can only get these after doing the set + // union. + BOOST_FOREACH(value_type& var_slot, *this) { + if(var_slot.second.slot == none) + var_slot.second.slot = (slot ++); + } } /* ************************************************************************* */ -void HessianFactor::assertInvariants() const { - GaussianFactor::assertInvariants(); // The base class checks for unique keys +HessianFactor::HessianFactor() : + info_(cref_list_of<1>(1)) +{ + linearTerm().setZero(); + constantTerm() = 0.0; } /* ************************************************************************* */ -HessianFactor::HessianFactor(const HessianFactor& gf) : - GaussianFactor(gf), info_(matrix_) { - info_.assignNoalias(gf.info_); - assertInvariants(); -} - -/* ************************************************************************* */ -HessianFactor::HessianFactor() : info_(matrix_) { - // The empty HessianFactor has only a constant error term of zero - FastVector dims; - dims.push_back(1); - info_.resize(dims.begin(), dims.end(), false); - info_(0,0)(0,0) = 0.0; - assertInvariants(); -} - -/* ************************************************************************* */ -HessianFactor::HessianFactor(Index j, const Matrix& G, const Vector& g, double f) : - GaussianFactor(j), info_(matrix_) { - if(G.rows() != G.cols() || G.rows() != g.size()) - throw invalid_argument("Inconsistent matrix and/or vector dimensions in HessianFactor constructor"); - size_t dims[] = { G.rows(), 1 }; - InfoMatrix fullMatrix(G.rows() + 1, G.rows() + 1); - BlockInfo infoMatrix(fullMatrix, dims, dims+2); - infoMatrix(0,0) = G; - infoMatrix.column(0,1,0) = g; - infoMatrix(1,1)(0,0) = f; - infoMatrix.swap(info_); - assertInvariants(); +HessianFactor::HessianFactor(Key j, const Matrix& G, const Vector& g, double f) : + GaussianFactor(cref_list_of<1>(j)), info_(cref_list_of<2>(G.cols())(1)) +{ + if(G.rows() != G.cols() || G.rows() != g.size()) throw invalid_argument( + "Attempting to construct HessianFactor with inconsistent matrix and/or vector dimensions"); + info_(0,0) = G; + info_(0,1) = g; + info_(1,1)(0,0) = f; } /* ************************************************************************* */ // error is 0.5*(x-mu)'*inv(Sigma)*(x-mu) = 0.5*(x'*G*x - 2*x'*G*mu + mu'*G*mu) // where G = inv(Sigma), g = G*mu, f = mu'*G*mu = mu'*g -HessianFactor::HessianFactor(Index j, const Vector& mu, const Matrix& Sigma) : - GaussianFactor(j), info_(matrix_) { +HessianFactor::HessianFactor(Key j, const Vector& mu, const Matrix& Sigma) : + GaussianFactor(cref_list_of<1>(j)), + info_(cref_list_of<2> (Sigma.cols()) (1) ) +{ if (Sigma.rows() != Sigma.cols() || Sigma.rows() != mu.size()) throw invalid_argument( - "Inconsistent matrix and/or vector dimensions in HessianFactor constructor"); - Matrix G = inverse(Sigma); - Vector g = G * mu; - double f = dot(mu, g); - size_t dims[] = { G.rows(), 1 }; - InfoMatrix fullMatrix(G.rows() + 1, G.rows() + 1); - BlockInfo infoMatrix(fullMatrix, dims, dims + 2); - infoMatrix(0, 0) = G; - infoMatrix.column(0, 1, 0) = g; - infoMatrix(1, 1)(0, 0) = f; - infoMatrix.swap(info_); - assertInvariants(); + "Attempting to construct HessianFactor with inconsistent matrix and/or vector dimensions"); + info_(0,0) = Sigma.inverse(); // G + info_(0,1) = info_(0,0) * mu; // g + info_(1,1)(0,0) = mu.dot(info_(0,1).col(0)); // f } /* ************************************************************************* */ -HessianFactor::HessianFactor(Index j1, Index j2, - const Matrix& G11, const Matrix& G12, const Vector& g1, - const Matrix& G22, const Vector& g2, double f) : - GaussianFactor(j1, j2), info_(matrix_) { - if(G11.rows() != G11.cols() || G11.rows() != G12.rows() || G11.rows() != g1.size() || - G22.cols() != G12.cols() || G22.cols() != g2.size()) - throw invalid_argument("Inconsistent matrix and/or vector dimensions in HessianFactor constructor"); - size_t dims[] = { G11.rows(), G22.rows(), 1 }; - InfoMatrix fullMatrix(G11.rows() + G22.rows() + 1, G11.rows() + G22.rows() + 1); - BlockInfo infoMatrix(fullMatrix, dims, dims+3); - infoMatrix(0,0) = G11; - infoMatrix(0,1) = G12; - infoMatrix.column(0,2,0) = g1; - infoMatrix(1,1) = G22; - infoMatrix.column(1,2,0) = g2; - infoMatrix(2,2)(0,0) = f; - infoMatrix.swap(info_); - assertInvariants(); +HessianFactor::HessianFactor(Key j1, Key j2, + const Matrix& G11, const Matrix& G12, const Vector& g1, + const Matrix& G22, const Vector& g2, double f) : +GaussianFactor(cref_list_of<2>(j1)(j2)), + info_(cref_list_of<3> (G11.cols()) (G22.cols()) (1) ) +{ + info_(0,0) = G11; + info_(0,1) = G12; + info_(0,2) = g1; + info_(1,1) = G22; + info_(1,2) = g2; + info_(2,2)(0,0) = f; } /* ************************************************************************* */ -HessianFactor::HessianFactor(Index j1, Index j2, Index j3, - const Matrix& G11, const Matrix& G12, const Matrix& G13, const Vector& g1, - const Matrix& G22, const Matrix& G23, const Vector& g2, - const Matrix& G33, const Vector& g3, double f) : - GaussianFactor(j1, j2, j3), info_(matrix_) { +HessianFactor::HessianFactor(Key j1, Key j2, Key j3, + const Matrix& G11, const Matrix& G12, const Matrix& G13, const Vector& g1, + const Matrix& G22, const Matrix& G23, const Vector& g2, + const Matrix& G33, const Vector& g3, double f) : +GaussianFactor(cref_list_of<3>(j1)(j2)(j3)), + info_(cref_list_of<4> (G11.cols()) (G22.cols()) (G33.cols()) (1) ) +{ if(G11.rows() != G11.cols() || G11.rows() != G12.rows() || G11.rows() != G13.rows() || G11.rows() != g1.size() || - G22.cols() != G12.cols() || G33.cols() != G13.cols() || G22.cols() != g2.size() || G33.cols() != g3.size()) + G22.cols() != G12.cols() || G33.cols() != G13.cols() || G22.cols() != g2.size() || G33.cols() != g3.size()) throw invalid_argument("Inconsistent matrix and/or vector dimensions in HessianFactor constructor"); - size_t dims[] = { G11.rows(), G22.rows(), G33.rows(), 1 }; - InfoMatrix fullMatrix(G11.rows() + G22.rows() + G33.rows() + 1, G11.rows() + G22.rows() + G33.rows() + 1); - BlockInfo infoMatrix(fullMatrix, dims, dims+4); - infoMatrix(0,0) = G11; - infoMatrix(0,1) = G12; - infoMatrix(0,2) = G13; - infoMatrix.column(0,3,0) = g1; - infoMatrix(1,1) = G22; - infoMatrix(1,2) = G23; - infoMatrix.column(1,3,0) = g2; - infoMatrix(2,2) = G33; - infoMatrix.column(2,3,0) = g3; - infoMatrix(3,3)(0,0) = f; - infoMatrix.swap(info_); - assertInvariants(); + info_(0,0) = G11; + info_(0,1) = G12; + info_(0,2) = G13; + info_(0,3) = g1; + info_(1,1) = G22; + info_(1,2) = G23; + info_(1,3) = g2; + info_(2,2) = G33; + info_(2,3) = g3; + info_(3,3)(0,0) = f; } /* ************************************************************************* */ -HessianFactor::HessianFactor(const std::vector& js, const std::vector& Gs, - const std::vector& gs, double f) : GaussianFactor(js), info_(matrix_) { +namespace { + DenseIndex _getSizeHF(const Vector& m) { return m.size(); } +} +/* ************************************************************************* */ +HessianFactor::HessianFactor(const std::vector& js, const std::vector& Gs, + const std::vector& gs, double f) : +GaussianFactor(js), info_(br::join(gs | br::transformed(&_getSizeHF), ListOfOne((DenseIndex)1))) +{ // Get the number of variables size_t variable_count = js.size(); @@ -191,7 +191,7 @@ HessianFactor::HessianFactor(const std::vector& js, const std::vector& js, const std::vector(&gf)) { - const JacobianFactor& jf(static_cast(gf)); - if(jf.model_->isConstrained()) - throw invalid_argument("Cannot construct HessianFactor from JacobianFactor with constrained noise model"); - else { - Vector invsigmas = jf.model_->invsigmas().cwiseProduct(jf.model_->invsigmas()); - info_.copyStructureFrom(jf.Ab_); - BlockInfo::constBlock A = jf.Ab_.full(); - matrix_.noalias() = A.transpose() * invsigmas.asDiagonal() * A; +namespace { + void _FromJacobianHelper(const JacobianFactor& jf, SymmetricBlockMatrix& info) + { + const SharedDiagonal& jfModel = jf.get_model(); + if(jfModel) + { + if(jf.get_model()->isConstrained()) + throw invalid_argument("Cannot construct HessianFactor from JacobianFactor with constrained noise model"); + info.full().noalias() = jf.matrixObject().full().transpose() * jfModel->invsigmas().asDiagonal() * + jfModel->invsigmas().asDiagonal() * jf.matrixObject().full(); + } else { + info.full().noalias() = jf.matrixObject().full().transpose() * jf.matrixObject().full(); } - } else if(dynamic_cast(&gf)) { - const HessianFactor& hf(static_cast(gf)); - info_.assignNoalias(hf.info_); - } else - throw std::invalid_argument("In HessianFactor(const GaussianFactor& gf), gf is neither a JacobianFactor nor a HessianFactor"); - assertInvariants(); + } } /* ************************************************************************* */ -HessianFactor::HessianFactor(const FactorGraph& factors) : info_(matrix_) +HessianFactor::HessianFactor(const JacobianFactor& jf) : + GaussianFactor(jf), info_(SymmetricBlockMatrix::LikeActiveViewOf(jf.matrixObject())) { - Scatter scatter(factors); + _FromJacobianHelper(jf, info_); +} - // Pull out keys and dimensions - gttic(keys); - vector dimensions(scatter.size() + 1); - BOOST_FOREACH(const Scatter::value_type& var_slot, scatter) { - dimensions[var_slot.second.slot] = var_slot.second.dimension; - } - // This is for the r.h.s. vector - dimensions.back() = 1; - gttoc(keys); +/* ************************************************************************* */ +HessianFactor::HessianFactor(const GaussianFactor& gf) : + GaussianFactor(gf) +{ + // Copy the matrix data depending on what type of factor we're copying from + if(const JacobianFactor* jf = dynamic_cast(&gf)) + { + info_ = SymmetricBlockMatrix::LikeActiveViewOf(jf->matrixObject()); + _FromJacobianHelper(*jf, info_); + } + else if(const HessianFactor* hf = dynamic_cast(&gf)) + { + info_ = hf->info_; + } + else + { + throw std::invalid_argument("In HessianFactor(const GaussianFactor& gf), gf is neither a JacobianFactor nor a HessianFactor"); + } +} - const bool debug = ISDEBUG("EliminateCholesky"); - // Form Ab' * Ab +/* ************************************************************************* */ +namespace { + DenseIndex _dimFromScatterEntry(const Scatter::value_type& key_slotentry) { + return key_slotentry.second.dimension; } } + +/* ************************************************************************* */ +HessianFactor::HessianFactor(const GaussianFactorGraph& factors, + boost::optional scatter) +{ + boost::optional computedScatter; + if(!scatter) { + computedScatter = Scatter(factors); + scatter = computedScatter; + } + + // Allocate and copy keys gttic(allocate); - info_.resize(dimensions.begin(), dimensions.end(), false); - // Fill in keys - keys_.resize(scatter.size()); - std::transform(scatter.begin(), scatter.end(), keys_.begin(), boost::bind(&Scatter::value_type::first, ::_1)); + // Allocate with dimensions for each variable plus 1 at the end for the information vector + keys_.resize(scatter->size()); + vector dims(scatter->size() + 1); + BOOST_FOREACH(const Scatter::value_type& key_slotentry, *scatter) { + keys_[key_slotentry.second.slot] = key_slotentry.first; + dims[key_slotentry.second.slot] = key_slotentry.second.dimension; + } + dims.back() = 1; + info_ = SymmetricBlockMatrix(dims); + info_.full().setZero(); gttoc(allocate); - gttic(zero); - matrix_.noalias() = Matrix::Zero(matrix_.rows(),matrix_.cols()); - gttoc(zero); + + // Form A' * A gttic(update); - if (debug) cout << "Combining " << factors.size() << " factors" << endl; BOOST_FOREACH(const GaussianFactor::shared_ptr& factor, factors) { if(factor) { - if(shared_ptr hessian = boost::dynamic_pointer_cast(factor)) - updateATA(*hessian, scatter); - else if(JacobianFactor::shared_ptr jacobianFactor = boost::dynamic_pointer_cast(factor)) - updateATA(*jacobianFactor, scatter); + if(const HessianFactor* hessian = dynamic_cast(factor.get())) + updateATA(*hessian, *scatter); + else if(const JacobianFactor* jacobian = dynamic_cast(factor.get())) + updateATA(*jacobian, *scatter); else throw invalid_argument("GaussianFactor is neither Hessian nor Jacobian"); } } gttoc(update); - - if (debug) gtsam::print(matrix_, "Ab' * Ab: "); - - assertInvariants(); } /* ************************************************************************* */ -HessianFactor& HessianFactor::operator=(const HessianFactor& rhs) { - this->Base::operator=(rhs); // Copy keys - info_.assignNoalias(rhs.info_); // Copy matrix and block structure - return *this; -} - -/* ************************************************************************* */ -void HessianFactor::print(const std::string& s, const IndexFormatter& formatter) const { +void HessianFactor::print(const std::string& s, const KeyFormatter& formatter) const { cout << s << "\n"; cout << " keys: "; for(const_iterator key=this->begin(); key!=this->end(); ++key) cout << formatter(*key) << "(" << this->getDim(key) << ") "; cout << "\n"; - gtsam::print(Matrix(info_.range(0,info_.nBlocks(), 0,info_.nBlocks()).selfadjointView()), "Ab^T * Ab: "); + gtsam::print(Matrix(info_.range(0,info_.nBlocks(), 0,info_.nBlocks()).selfadjointView()), "Augmented information matrix: "); } /* ************************************************************************* */ @@ -335,6 +323,8 @@ bool HessianFactor::equals(const GaussianFactor& lf, double tol) const { if(!dynamic_cast(&lf)) return false; else { + if(!Factor::equals(lf, tol)) + return false; Matrix thisMatrix = this->info_.full().selfadjointView(); thisMatrix(thisMatrix.rows()-1, thisMatrix.cols()-1) = 0.0; Matrix rhsMatrix = static_cast(lf).info_.full().selfadjointView(); @@ -344,15 +334,29 @@ bool HessianFactor::equals(const GaussianFactor& lf, double tol) const { } /* ************************************************************************* */ -Matrix HessianFactor::augmentedInformation() const { +Matrix HessianFactor::augmentedInformation() const +{ return info_.full().selfadjointView(); } /* ************************************************************************* */ -Matrix HessianFactor::information() const { +Matrix HessianFactor::information() const +{ return info_.range(0, this->size(), 0, this->size()).selfadjointView(); } +/* ************************************************************************* */ +Matrix HessianFactor::augmentedJacobian() const +{ + return JacobianFactor(*this).augmentedJacobian(); +} + +/* ************************************************************************* */ +std::pair HessianFactor::jacobian() const +{ + return JacobianFactor(*this).jacobian(); +} + /* ************************************************************************* */ double HessianFactor::error(const VectorValues& c) const { // error 0.5*(f - 2*x'*g + x'*G*x) @@ -367,194 +371,118 @@ double HessianFactor::error(const VectorValues& c) const { } /* ************************************************************************* */ -void HessianFactor::updateATA(const HessianFactor& update, const Scatter& scatter) { - - // This function updates 'combined' with the information in 'update'. - // 'scatter' maps variables in the update factor to slots in the combined - // factor. - - const bool debug = ISDEBUG("updateATA"); +void HessianFactor::updateATA(const HessianFactor& update, const Scatter& scatter) +{ + // This function updates 'combined' with the information in 'update'. 'scatter' maps variables in + // the update factor to slots in the combined factor. // First build an array of slots gttic(slots); - size_t* slots = (size_t*)alloca(sizeof(size_t)*update.size()); // FIXME: alloca is bad, just ask Google. - size_t slot = 0; - BOOST_FOREACH(Index j, update) { - slots[slot] = scatter.find(j)->second.slot; + //size_t* slots = (size_t*)alloca(sizeof(size_t)*update.size()); // FIXME: alloca is bad, just ask Google. + vector slots(update.size()); + DenseIndex slot = 0; + BOOST_FOREACH(Key j, update) { + slots[slot] = scatter.at(j).slot; ++ slot; } gttoc(slots); - if(debug) { - this->print("Updating this: "); - update.print("with (Hessian): "); - } - // Apply updates to the upper triangle gttic(update); - for(size_t j2=0; j2info_.nBlocks()-1 : slots[j2]; - for(size_t j1=0; j1<=j2; ++j1) { - size_t slot1 = (j1 == update.size()) ? this->info_.nBlocks()-1 : slots[j1]; - if(slot2 > slot1) { - if(debug) - cout << "Updating (" << slot1 << "," << slot2 << ") from (" << j1 << "," << j2 << ")" << endl; - matrix_.block(info_.offset(slot1), info_.offset(slot2), info_(slot1,slot2).rows(), info_(slot1,slot2).cols()).noalias() += - update.matrix_.block(update.info_.offset(j1), update.info_.offset(j2), update.info_(j1,j2).rows(), update.info_(j1,j2).cols()); - } else if(slot1 > slot2) { - if(debug) - cout << "Updating (" << slot2 << "," << slot1 << ") from (" << j1 << "," << j2 << ")" << endl; - matrix_.block(info_.offset(slot2), info_.offset(slot1), info_(slot2,slot1).rows(), info_(slot2,slot1).cols()).noalias() += - update.matrix_.block(update.info_.offset(j1), update.info_.offset(j2), update.info_(j1,j2).rows(), update.info_(j1,j2).cols()).transpose(); - } else { - if(debug) - cout << "Updating (" << slot1 << "," << slot2 << ") from (" << j1 << "," << j2 << ")" << endl; - matrix_.block(info_.offset(slot1), info_.offset(slot2), info_(slot1,slot2).rows(), info_(slot1,slot2).cols()).triangularView() += - update.matrix_.block(update.info_.offset(j1), update.info_.offset(j2), update.info_(j1,j2).rows(), update.info_(j1,j2).cols()); - } - if(debug) cout << "Updating block " << slot1 << "," << slot2 << " from block " << j1 << "," << j2 << "\n"; - if(debug) this->print(); + for(DenseIndex j2=0; j2info_.nBlocks()-1 : slots[j2]; + for(DenseIndex j1=0; j1<=j2; ++j1) { + DenseIndex slot1 = (j1 == (DenseIndex)update.size()) ? this->info_.nBlocks()-1 : slots[j1]; + if(slot2 > slot1) + info_(slot1, slot2).noalias() += update.info_(j1, j2); + else if(slot1 > slot2) + info_(slot2, slot1).noalias() += update.info_(j1, j2).transpose(); + else + info_(slot1, slot2).triangularView() += update.info_(j1, j2); } } gttoc(update); } /* ************************************************************************* */ -void HessianFactor::updateATA(const JacobianFactor& update, const Scatter& scatter) { - - // This function updates 'combined' with the information in 'update'. - // 'scatter' maps variables in the update factor to slots in the combined - // factor. - - const bool debug = ISDEBUG("updateATA"); - - // First build an array of slots - gttic(slots); - size_t* slots = (size_t*)alloca(sizeof(size_t)*update.size()); // FIXME: alloca is bad, just ask Google. - size_t slot = 0; - BOOST_FOREACH(Index j, update) { - slots[slot] = scatter.find(j)->second.slot; - ++ slot; - } - gttoc(slots); - - gttic(form_ATA); - if(update.model_->isConstrained()) - throw invalid_argument("Cannot update HessianFactor from JacobianFactor with constrained noise model"); - - if(debug) { - this->print("Updating this: "); - update.print("with (Jacobian): "); - } - - typedef Eigen::Block BlockUpdateMatrix; - BlockUpdateMatrix updateA(update.matrix_.block( - update.Ab_.rowStart(),update.Ab_.offset(0), update.Ab_.full().rows(), update.Ab_.full().cols())); - if (debug) cout << "updateA: \n" << updateA << endl; - - Matrix updateInform; - if(boost::dynamic_pointer_cast(update.model_)) { - updateInform.noalias() = updateA.transpose() * updateA; - } else { - noiseModel::Diagonal::shared_ptr diagonal(boost::dynamic_pointer_cast(update.model_)); - if(diagonal) { - Vector invsigmas2 = update.model_->invsigmas().cwiseProduct(update.model_->invsigmas()); - updateInform.noalias() = updateA.transpose() * invsigmas2.asDiagonal() * updateA; - } else - throw invalid_argument("In HessianFactor::updateATA, JacobianFactor noise model is neither Unit nor Diagonal"); - } - if (debug) cout << "updateInform: \n" << updateInform << endl; - gttoc(form_ATA); - - // Apply updates to the upper triangle - gttic(update); - for(size_t j2=0; j2info_.nBlocks()-1 : slots[j2]; - for(size_t j1=0; j1<=j2; ++j1) { - size_t slot1 = (j1 == update.size()) ? this->info_.nBlocks()-1 : slots[j1]; - size_t off0 = update.Ab_.offset(0); - if(slot2 > slot1) { - if(debug) - cout << "Updating (" << slot1 << "," << slot2 << ") from (" << j1 << "," << j2 << ")" << endl; - matrix_.block(info_.offset(slot1), info_.offset(slot2), info_(slot1,slot2).rows(), info_(slot1,slot2).cols()).noalias() += - updateInform.block(update.Ab_.offset(j1)-off0, update.Ab_.offset(j2)-off0, update.Ab_(j1).cols(), update.Ab_(j2).cols()); - } else if(slot1 > slot2) { - if(debug) - cout << "Updating (" << slot2 << "," << slot1 << ") from (" << j1 << "," << j2 << ")" << endl; - matrix_.block(info_.offset(slot2), info_.offset(slot1), info_(slot2,slot1).rows(), info_(slot2,slot1).cols()).noalias() += - updateInform.block(update.Ab_.offset(j1)-off0, update.Ab_.offset(j2)-off0, update.Ab_(j1).cols(), update.Ab_(j2).cols()).transpose(); - } else { - if(debug) - cout << "Updating (" << slot1 << "," << slot2 << ") from (" << j1 << "," << j2 << ")" << endl; - matrix_.block(info_.offset(slot1), info_.offset(slot2), info_(slot1,slot2).rows(), info_(slot1,slot2).cols()).triangularView() += - updateInform.block(update.Ab_.offset(j1)-off0, update.Ab_.offset(j2)-off0, update.Ab_(j1).cols(), update.Ab_(j2).cols()); - } - if(debug) cout << "Updating block " << slot1 << "," << slot2 << " from block " << j1 << "," << j2 << "\n"; - if(debug) this->print(); - } - } - gttoc(update); +void HessianFactor::updateATA(const JacobianFactor& update, const Scatter& scatter) +{ + if(update.rows() > 0) // Zero-row Jacobians are treated specially + updateATA(HessianFactor(update), scatter); } /* ************************************************************************* */ -void HessianFactor::partialCholesky(size_t nrFrontals) { - if(!choleskyPartial(matrix_, info_.offset(nrFrontals))) - throw IndeterminantLinearSystemException(this->keys().front()); -} - -/* ************************************************************************* */ -GaussianConditional::shared_ptr HessianFactor::splitEliminatedFactor(size_t nrFrontals) { - - static const bool debug = false; - - // Extract conditionals - gttic(extract_conditionals); - GaussianConditional::shared_ptr conditional(new GaussianConditional()); - typedef VerticalBlockView BlockAb; - BlockAb Ab(matrix_, info_); - - size_t varDim = info_.offset(nrFrontals); - Ab.rowEnd() = Ab.rowStart() + varDim; +GaussianConditional::shared_ptr HessianFactor::splitEliminatedFactor(size_t nrFrontals) +{ + gttic(HessianFactor_splitEliminatedFactor); // Create one big conditionals with many frontal variables. - gttic(construct_cond); - Vector sigmas = Vector::Ones(varDim); - conditional = boost::make_shared(keys_.begin(), keys_.end(), nrFrontals, Ab, sigmas); - gttoc(construct_cond); - if(debug) conditional->print("Extracted conditional: "); - - gttoc(extract_conditionals); + gttic(Construct_conditional); + const size_t varDim = info_.offset(nrFrontals); + VerticalBlockMatrix Ab = VerticalBlockMatrix::LikeActiveViewOf(info_, varDim); + Ab.full() = info_.range(0, nrFrontals, 0, info_.nBlocks()); + GaussianConditional::shared_ptr conditional = boost::make_shared( + keys_, nrFrontals, Ab); + gttoc(Construct_conditional); + gttic(Remaining_factor); // Take lower-right block of Ab_ to get the new factor - gttic(remaining_factor); info_.blockStart() = nrFrontals; // Assign the keys - vector remainingKeys(keys_.size() - nrFrontals); - remainingKeys.assign(keys_.begin() + nrFrontals, keys_.end()); - keys_.swap(remainingKeys); - gttoc(remaining_factor); + keys_.erase(begin(), begin() + nrFrontals); + gttoc(Remaining_factor); return conditional; } /* ************************************************************************* */ -GaussianFactor::shared_ptr HessianFactor::negate() const { - // Copy Hessian Blocks from Hessian factor and invert - std::vector js; - std::vector Gs; - std::vector gs; - double f; - js.insert(js.end(), begin(), end()); - for(size_t i = 0; i < js.size(); ++i){ - for(size_t j = i; j < js.size(); ++j){ - Gs.push_back( -info(begin()+i, begin()+j) ); - } - gs.push_back( -linearTerm(begin()+i) ); - } - f = -constantTerm(); +GaussianFactor::shared_ptr HessianFactor::negate() const +{ + shared_ptr result = boost::make_shared(*this); + result->info_.full() = -result->info_.full(); // Negate the information matrix of the result + return result; +} - // Create the Anti-Hessian Factor from the negated blocks - return HessianFactor::shared_ptr(new HessianFactor(js, Gs, gs, f)); +/* ************************************************************************* */ +std::pair, boost::shared_ptr > + EliminateCholesky(const GaussianFactorGraph& factors, const Ordering& keys) +{ + gttic(EliminateCholesky); + + // Build joint factor + HessianFactor::shared_ptr jointFactor; + try { + jointFactor = boost::make_shared(factors, Scatter(factors, keys)); + } catch(std::invalid_argument&) { + throw InvalidDenseElimination( + "EliminateCholesky was called with a request to eliminate variables that are not\n" + "involved in the provided factors."); + } + + // Do dense elimination + if(!choleskyPartial(jointFactor->info_.matrix(), jointFactor->info_.offset(keys.size()))) + throw IndeterminantLinearSystemException(keys.front()); + + // Split conditional + GaussianConditional::shared_ptr conditional = jointFactor->splitEliminatedFactor(keys.size()); + + return make_pair(conditional, jointFactor); +} + +/* ************************************************************************* */ +std::pair, boost::shared_ptr > + EliminatePreferCholesky(const GaussianFactorGraph& factors, const Ordering& keys) +{ + gttic(EliminatePreferCholesky); + + // If any JacobianFactors have constrained noise models, we have to convert + // all factors to JacobianFactors. Otherwise, we can convert all factors + // to HessianFactors. This is because QR can handle constrained noise + // models but Cholesky cannot. + if (hasConstraints(factors)) + return EliminateQR(factors, keys); + else + return EliminateCholesky(factors, keys); } } // gtsam diff --git a/gtsam/linear/HessianFactor.h b/gtsam/linear/HessianFactor.h index 9f44a0964..e380c5c38 100644 --- a/gtsam/linear/HessianFactor.h +++ b/gtsam/linear/HessianFactor.h @@ -1,377 +1,411 @@ -/* ---------------------------------------------------------------------------- - - * GTSAM Copyright 2010, Georgia Tech Research Corporation, - * Atlanta, Georgia 30332-0415 - * All Rights Reserved - * Authors: Frank Dellaert, et al. (see THANKS for the full author list) - - * See LICENSE for the license information - - * -------------------------------------------------------------------------- */ - -/** - * @file HessianFactor.h - * @brief Contains the HessianFactor class, a general quadratic factor - * @author Richard Roberts - * @date Dec 8, 2010 - */ - -#pragma once - -#include -#include -#include - -// Forward declarations for friend unit tests -class HessianFactorConversionConstructorTest; -class HessianFactorConstructor1Test; -class HessianFactorConstructor1bTest; -class HessianFactorcombineTest; - - -namespace gtsam { - - // Forward declarations - class JacobianFactor; - class GaussianConditional; - template class BayesNet; - - // Definition of Scatter, which is an intermediate data structure used when - // building a HessianFactor incrementally, to get the keys in the right - // order. - // The "scatter" is a map from global variable indices to slot indices in the - // union of involved variables. We also include the dimensionality of the - // variable. - struct GTSAM_EXPORT SlotEntry { - size_t slot; - size_t dimension; - SlotEntry(size_t _slot, size_t _dimension) - : slot(_slot), dimension(_dimension) {} - std::string toString() const; - }; - class Scatter : public FastMap { - public: - Scatter() {} - Scatter(const FactorGraph& gfg); - }; - - /** - * @brief A Gaussian factor using the canonical parameters (information form) - * - * HessianFactor implements a general quadratic factor of the form - * \f[ E(x) = 0.5 x^T G x - x^T g + 0.5 f \f] - * that stores the matrix \f$ G \f$, the vector \f$ g \f$, and the constant term \f$ f \f$. - * - * When \f$ G \f$ is positive semidefinite, this factor represents a Gaussian, - * in which case \f$ G \f$ is the information matrix \f$ \Lambda \f$, - * \f$ g \f$ is the information vector \f$ \eta \f$, and \f$ f \f$ is the residual - * sum-square-error at the mean, when \f$ x = \mu \f$. - * - * Indeed, the negative log-likelihood of a Gaussian is (up to a constant) - * @f$ E(x) = 0.5(x-\mu)^T P^{-1} (x-\mu) @f$ - * with @f$ \mu @f$ the mean and @f$ P @f$ the covariance matrix. Expanding the product we get - * @f[ - * E(x) = 0.5 x^T P^{-1} x - x^T P^{-1} \mu + 0.5 \mu^T P^{-1} \mu - * @f] - * We define the Information matrix (or Hessian) @f$ \Lambda = P^{-1} @f$ - * and the information vector @f$ \eta = P^{-1} \mu = \Lambda \mu @f$ - * to arrive at the canonical form of the Gaussian: - * @f[ - * E(x) = 0.5 x^T \Lambda x - x^T \eta + 0.5 \mu^T \Lambda \mu - * @f] - * - * This factor is one of the factors that can be in a GaussianFactorGraph. - * It may be returned from NonlinearFactor::linearize(), but is also - * used internally to store the Hessian during Cholesky elimination. - * - * This can represent a quadratic factor with characteristics that cannot be - * represented using a JacobianFactor (which has the form - * \f$ E(x) = \Vert Ax - b \Vert^2 \f$ and stores the Jacobian \f$ A \f$ - * and error vector \f$ b \f$, i.e. is a sum-of-squares factor). For example, - * a HessianFactor need not be positive semidefinite, it can be indefinite or - * even negative semidefinite. - * - * If a HessianFactor is indefinite or negative semi-definite, then in order - * for solving the linear system to be possible, - * the Hessian of the full system must be positive definite (i.e. when all - * small Hessians are combined, the result must be positive definite). If - * this is not the case, an error will occur during elimination. - * - * This class stores G, g, and f as an augmented matrix HessianFactor::matrix_. - * The upper-left n x n blocks of HessianFactor::matrix_ store the upper-right - * triangle of G, the upper-right-most column of length n of HessianFactor::matrix_ - * stores g, and the lower-right entry of HessianFactor::matrix_ stores f, i.e. - * \code - HessianFactor::matrix_ = [ G11 G12 G13 ... g1 - 0 G22 G23 ... g2 - 0 0 G33 ... g3 - : : : : - 0 0 0 ... f ] - \endcode - Blocks can be accessed as follows: - \code - G11 = info(begin(), begin()); - G12 = info(begin(), begin()+1); - G23 = info(begin()+1, begin()+2); - g2 = linearTerm(begin()+1); - f = constantTerm(); - ....... - \endcode - */ - class GTSAM_EXPORT HessianFactor : public GaussianFactor { - protected: - typedef Matrix InfoMatrix; ///< The full augmented Hessian - typedef SymmetricBlockView BlockInfo; ///< A blockwise view of the Hessian - typedef GaussianFactor Base; ///< Typedef to base class - - InfoMatrix matrix_; ///< The full augmented information matrix, s.t. the quadratic error is 0.5*[x -1]'*H*[x -1] - BlockInfo info_; ///< The block view of the full information matrix. - - public: - - typedef boost::shared_ptr shared_ptr; ///< A shared_ptr to this - typedef BlockInfo::Block Block; ///< A block from the Hessian matrix - typedef BlockInfo::constBlock constBlock; ///< A block from the Hessian matrix (const version) - typedef BlockInfo::Column Column; ///< A column containing the linear term h - typedef BlockInfo::constColumn constColumn; ///< A column containing the linear term h (const version) - - /** Copy constructor */ - HessianFactor(const HessianFactor& gf); - - /** default constructor for I/O */ - HessianFactor(); - - /** Construct a unary factor. G is the quadratic term (Hessian matrix), g - * the linear term (a vector), and f the constant term. The quadratic - * error is: - * 0.5*(f - 2*x'*g + x'*G*x) - */ - HessianFactor(Index j, const Matrix& G, const Vector& g, double f); - - /** Construct a unary factor, given a mean and covariance matrix. - * error is 0.5*(x-mu)'*inv(Sigma)*(x-mu) - */ - HessianFactor(Index j, const Vector& mu, const Matrix& Sigma); - - /** Construct a binary factor. Gxx are the upper-triangle blocks of the - * quadratic term (the Hessian matrix), gx the pieces of the linear vector - * term, and f the constant term. - * JacobianFactor error is \f[ 0.5* (Ax-b)' M (Ax-b) = 0.5*x'A'MAx - x'A'Mb + 0.5*b'Mb \f] - * HessianFactor error is \f[ 0.5*(x'Gx - 2x'g + f) = 0.5*x'Gx - x'*g + 0.5*f \f] - * So, with \f$ A = [A1 A2] \f$ and \f$ G=A*'M*A = [A1';A2']*M*[A1 A2] \f$ we have - \code - n1*n1 G11 = A1'*M*A1 - n1*n2 G12 = A1'*M*A2 - n2*n2 G22 = A2'*M*A2 - n1*1 g1 = A1'*M*b - n2*1 g2 = A2'*M*b - 1*1 f = b'*M*b - \endcode - */ - HessianFactor(Index j1, Index j2, - const Matrix& G11, const Matrix& G12, const Vector& g1, - const Matrix& G22, const Vector& g2, double f); - - /** Construct a ternary factor. Gxx are the upper-triangle blocks of the - * quadratic term (the Hessian matrix), gx the pieces of the linear vector - * term, and f the constant term. - */ - HessianFactor(Index j1, Index j2, Index j3, - const Matrix& G11, const Matrix& G12, const Matrix& G13, const Vector& g1, - const Matrix& G22, const Matrix& G23, const Vector& g2, - const Matrix& G33, const Vector& g3, double f); - - /** Construct an n-way factor. Gs contains the upper-triangle blocks of the - * quadratic term (the Hessian matrix) provided in row-order, gs the pieces - * of the linear vector term, and f the constant term. - */ - HessianFactor(const std::vector& js, const std::vector& Gs, - const std::vector& gs, double f); - - /** Construct from Conditional Gaussian */ - explicit HessianFactor(const GaussianConditional& cg); - - /** Convert from a JacobianFactor (computes A^T * A) or HessianFactor */ - explicit HessianFactor(const GaussianFactor& factor); - - /** Combine a set of factors into a single dense HessianFactor */ - HessianFactor(const FactorGraph& factors); - - /** Destructor */ - virtual ~HessianFactor() {} - - /** Aassignment operator */ - HessianFactor& operator=(const HessianFactor& rhs); - - /** Clone this JacobianFactor */ - virtual GaussianFactor::shared_ptr clone() const { - return boost::static_pointer_cast( - shared_ptr(new HessianFactor(*this))); - } - - /** Print the factor for debugging and testing (implementing Testable) */ - virtual void print(const std::string& s = "", - const IndexFormatter& formatter = DefaultIndexFormatter) const; - - /** Compare to another factor for testing (implementing Testable) */ - virtual bool equals(const GaussianFactor& lf, double tol = 1e-9) const; - - /** Evaluate the factor error f(x), see above. */ - virtual double error(const VectorValues& c) const; /** 0.5*[x -1]'*H*[x -1] (also see constructor documentation) */ - - /** Return the dimension of the variable pointed to by the given key iterator - * todo: Remove this in favor of keeping track of dimensions with variables? - * @param variable An iterator pointing to the slot in this factor. You can - * use, for example, begin() + 2 to get the 3rd variable in this factor. - */ - virtual size_t getDim(const_iterator variable) const { return info_(variable-this->begin(), 0).rows(); } - - /** Return the number of columns and rows of the Hessian matrix */ - size_t rows() const { return info_.rows(); } - - /** - * Construct the corresponding anti-factor to negate information - * stored stored in this factor. - * @return a HessianFactor with negated Hessian matrices - */ - virtual GaussianFactor::shared_ptr negate() const; - - /** Return a view of the block at (j1,j2) of the upper-triangular part of the - * information matrix \f$ H \f$, no data is copied. See HessianFactor class documentation - * above to explain that only the upper-triangular part of the information matrix is stored - * and returned by this function. - * @param j1 Which block row to get, as an iterator pointing to the slot in this factor. You can - * use, for example, begin() + 2 to get the 3rd variable in this factor. - * @param j2 Which block column to get, as an iterator pointing to the slot in this factor. You can - * use, for example, begin() + 2 to get the 3rd variable in this factor. - * @return A view of the requested block, not a copy. - */ - constBlock info(const_iterator j1, const_iterator j2) const { return info_(j1-begin(), j2-begin()); } - - /** Return a view of the block at (j1,j2) of the upper-triangular part of the - * information matrix \f$ H \f$, no data is copied. See HessianFactor class documentation - * above to explain that only the upper-triangular part of the information matrix is stored - * and returned by this function. - * @param j1 Which block row to get, as an iterator pointing to the slot in this factor. You can - * use, for example, begin() + 2 to get the 3rd variable in this factor. - * @param j2 Which block column to get, as an iterator pointing to the slot in this factor. You can - * use, for example, begin() + 2 to get the 3rd variable in this factor. - * @return A view of the requested block, not a copy. - */ - Block info(iterator j1, iterator j2) { return info_(j1-begin(), j2-begin()); } - - /** Return the upper-triangular part of the full *augmented* information matrix, - * as described above. See HessianFactor class documentation above to explain that only the - * upper-triangular part of the information matrix is stored and returned by this function. - */ - constBlock info() const { return info_.full(); } - - /** Return the upper-triangular part of the full *augmented* information matrix, - * as described above. See HessianFactor class documentation above to explain that only the - * upper-triangular part of the information matrix is stored and returned by this function. - */ - Block info() { return info_.full(); } - - /** Return the constant term \f$ f \f$ as described above - * @return The constant term \f$ f \f$ - */ - double constantTerm() const { return info_(this->size(), this->size())(0,0); } - - /** Return the constant term \f$ f \f$ as described above - * @return The constant term \f$ f \f$ - */ - double& constantTerm() { return info_(this->size(), this->size())(0,0); } - - /** Return the part of linear term \f$ g \f$ as described above corresponding to the requested variable. - * @param j Which block row to get, as an iterator pointing to the slot in this factor. You can - * use, for example, begin() + 2 to get the 3rd variable in this factor. - * @return The linear term \f$ g \f$ */ - constColumn linearTerm(const_iterator j) const { return info_.column(j-begin(), size(), 0); } - - /** Return the part of linear term \f$ g \f$ as described above corresponding to the requested variable. - * @param j Which block row to get, as an iterator pointing to the slot in this factor. You can - * use, for example, begin() + 2 to get the 3rd variable in this factor. - * @return The linear term \f$ g \f$ */ - Column linearTerm(iterator j) { return info_.column(j-begin(), size(), 0); } - - /** Return the complete linear term \f$ g \f$ as described above. - * @return The linear term \f$ g \f$ */ - constColumn linearTerm() const { return info_.rangeColumn(0, this->size(), this->size(), 0); } - - /** Return the complete linear term \f$ g \f$ as described above. - * @return The linear term \f$ g \f$ */ - Column linearTerm() { return info_.rangeColumn(0, this->size(), this->size(), 0); } - - /** Return the augmented information matrix represented by this GaussianFactor. - * The augmented information matrix contains the information matrix with an - * additional column holding the information vector, and an additional row - * holding the transpose of the information vector. The lower-right entry - * contains the constant error term (when \f$ \delta x = 0 \f$). The - * augmented information matrix is described in more detail in HessianFactor, - * which in fact stores an augmented information matrix. - * - * For HessianFactor, this is the same as info() except that this function - * returns a complete symmetric matrix whereas info() returns a matrix where - * only the upper triangle is valid, but should be interpreted as symmetric. - * This is because info() returns only a reference to the internal - * representation of the augmented information matrix, which stores only the - * upper triangle. - */ - virtual Matrix augmentedInformation() const; - - /** Return the non-augmented information matrix represented by this - * GaussianFactor. - */ - virtual Matrix information() const; - - // Friend unit test classes - friend class ::HessianFactorConversionConstructorTest; - friend class ::HessianFactorConstructor1Test; - friend class ::HessianFactorConstructor1bTest; - friend class ::HessianFactorcombineTest; - - // Friend JacobianFactor for conversion - friend class JacobianFactor; - - // used in eliminateCholesky: - - /** - * Do Cholesky. Note that after this, the lower triangle still contains - * some untouched non-zeros that should be zero. We zero them while - * extracting submatrices in splitEliminatedFactor. Frank says :-( - */ - void partialCholesky(size_t nrFrontals); - - /** split partially eliminated factor */ - boost::shared_ptr splitEliminatedFactor(size_t nrFrontals); - - /** Update the factor by adding the information from the JacobianFactor - * (used internally during elimination). - * @param update The JacobianFactor containing the new information to add - * @param scatter A mapping from variable index to slot index in this HessianFactor - */ - void updateATA(const JacobianFactor& update, const Scatter& scatter); - - /** Update the factor by adding the information from the HessianFactor - * (used internally during elimination). - * @param update The HessianFactor containing the new information to add - * @param scatter A mapping from variable index to slot index in this HessianFactor - */ - void updateATA(const HessianFactor& update, const Scatter& scatter); - - /** assert invariants */ - void assertInvariants() const; - - private: - /** Serialization function */ - friend class boost::serialization::access; - template - void serialize(ARCHIVE & ar, const unsigned int version) { - ar & BOOST_SERIALIZATION_BASE_OBJECT_NVP(GaussianFactor); - ar & BOOST_SERIALIZATION_NVP(info_); - ar & BOOST_SERIALIZATION_NVP(matrix_); - } - }; - -} - +/* ---------------------------------------------------------------------------- + + * GTSAM Copyright 2010, Georgia Tech Research Corporation, + * Atlanta, Georgia 30332-0415 + * All Rights Reserved + * Authors: Frank Dellaert, et al. (see THANKS for the full author list) + + * See LICENSE for the license information + + * -------------------------------------------------------------------------- */ + +/** + * @file HessianFactor.h + * @brief Contains the HessianFactor class, a general quadratic factor + * @author Richard Roberts + * @date Dec 8, 2010 + */ + +#pragma once + +#include +#include + +#include +#include + +namespace gtsam { + + // Forward declarations + class Ordering; + class JacobianFactor; + class HessianFactor; + class GaussianConditional; + class GaussianBayesNet; + class GaussianFactorGraph; + + GTSAM_EXPORT std::pair, boost::shared_ptr > + EliminatePreferCholesky(const GaussianFactorGraph& factors, const Ordering& keys); + + GTSAM_EXPORT std::pair, boost::shared_ptr > + EliminateCholesky(const GaussianFactorGraph& factors, const Ordering& keys); + + // Definition of Scatter, which is an intermediate data structure used when building a + // HessianFactor incrementally, to get the keys in the right order. The "scatter" is a map from + // global variable indices to slot indices in the union of involved variables. We also include + // the dimensionality of the variable. + struct GTSAM_EXPORT SlotEntry { + size_t slot; + size_t dimension; + SlotEntry(size_t _slot, size_t _dimension) + : slot(_slot), dimension(_dimension) {} + std::string toString() const; + }; + class Scatter : public FastMap { + public: + Scatter() {} + Scatter(const GaussianFactorGraph& gfg, boost::optional ordering = boost::none); + }; + + /** + * @brief A Gaussian factor using the canonical parameters (information form) + * + * HessianFactor implements a general quadratic factor of the form + * \f[ E(x) = 0.5 x^T G x - x^T g + 0.5 f \f] + * that stores the matrix \f$ G \f$, the vector \f$ g \f$, and the constant term \f$ f \f$. + * + * When \f$ G \f$ is positive semidefinite, this factor represents a Gaussian, + * in which case \f$ G \f$ is the information matrix \f$ \Lambda \f$, + * \f$ g \f$ is the information vector \f$ \eta \f$, and \f$ f \f$ is the residual + * sum-square-error at the mean, when \f$ x = \mu \f$. + * + * Indeed, the negative log-likelihood of a Gaussian is (up to a constant) + * @f$ E(x) = 0.5(x-\mu)^T P^{-1} (x-\mu) @f$ + * with @f$ \mu @f$ the mean and @f$ P @f$ the covariance matrix. Expanding the product we get + * @f[ + * E(x) = 0.5 x^T P^{-1} x - x^T P^{-1} \mu + 0.5 \mu^T P^{-1} \mu + * @f] + * We define the Information matrix (or Hessian) @f$ \Lambda = P^{-1} @f$ + * and the information vector @f$ \eta = P^{-1} \mu = \Lambda \mu @f$ + * to arrive at the canonical form of the Gaussian: + * @f[ + * E(x) = 0.5 x^T \Lambda x - x^T \eta + 0.5 \mu^T \Lambda \mu + * @f] + * + * This factor is one of the factors that can be in a GaussianFactorGraph. + * It may be returned from NonlinearFactor::linearize(), but is also + * used internally to store the Hessian during Cholesky elimination. + * + * This can represent a quadratic factor with characteristics that cannot be + * represented using a JacobianFactor (which has the form + * \f$ E(x) = \Vert Ax - b \Vert^2 \f$ and stores the Jacobian \f$ A \f$ + * and error vector \f$ b \f$, i.e. is a sum-of-squares factor). For example, + * a HessianFactor need not be positive semidefinite, it can be indefinite or + * even negative semidefinite. + * + * If a HessianFactor is indefinite or negative semi-definite, then in order + * for solving the linear system to be possible, + * the Hessian of the full system must be positive definite (i.e. when all + * small Hessians are combined, the result must be positive definite). If + * this is not the case, an error will occur during elimination. + * + * This class stores G, g, and f as an augmented matrix HessianFactor::matrix_. + * The upper-left n x n blocks of HessianFactor::matrix_ store the upper-right + * triangle of G, the upper-right-most column of length n of HessianFactor::matrix_ + * stores g, and the lower-right entry of HessianFactor::matrix_ stores f, i.e. + * \code + HessianFactor::matrix_ = [ G11 G12 G13 ... g1 + 0 G22 G23 ... g2 + 0 0 G33 ... g3 + : : : : + 0 0 0 ... f ] + \endcode + Blocks can be accessed as follows: + \code + G11 = info(begin(), begin()); + G12 = info(begin(), begin()+1); + G23 = info(begin()+1, begin()+2); + g2 = linearTerm(begin()+1); + f = constantTerm(); + ....... + \endcode + */ + class GTSAM_EXPORT HessianFactor : public GaussianFactor { + protected: + + SymmetricBlockMatrix info_; ///< The full augmented information matrix, s.t. the quadratic error is 0.5*[x -1]'*H*[x -1] + + public: + + typedef GaussianFactor Base; ///< Typedef to base class + typedef HessianFactor This; ///< Typedef to this class + typedef boost::shared_ptr shared_ptr; ///< A shared_ptr to this class + typedef SymmetricBlockMatrix::Block Block; ///< A block from the Hessian matrix + typedef SymmetricBlockMatrix::constBlock constBlock; ///< A block from the Hessian matrix (const version) + + /** default constructor for I/O */ + HessianFactor(); + + /** Construct a unary factor. G is the quadratic term (Hessian matrix), g + * the linear term (a vector), and f the constant term. The quadratic + * error is: + * 0.5*(f - 2*x'*g + x'*G*x) + */ + HessianFactor(Index j, const Matrix& G, const Vector& g, double f); + + /** Construct a unary factor, given a mean and covariance matrix. + * error is 0.5*(x-mu)'*inv(Sigma)*(x-mu) + */ + HessianFactor(Index j, const Vector& mu, const Matrix& Sigma); + + /** Construct a binary factor. Gxx are the upper-triangle blocks of the + * quadratic term (the Hessian matrix), gx the pieces of the linear vector + * term, and f the constant term. + * JacobianFactor error is \f[ 0.5* (Ax-b)' M (Ax-b) = 0.5*x'A'MAx - x'A'Mb + 0.5*b'Mb \f] + * HessianFactor error is \f[ 0.5*(x'Gx - 2x'g + f) = 0.5*x'Gx - x'*g + 0.5*f \f] + * So, with \f$ A = [A1 A2] \f$ and \f$ G=A*'M*A = [A1';A2']*M*[A1 A2] \f$ we have + \code + n1*n1 G11 = A1'*M*A1 + n1*n2 G12 = A1'*M*A2 + n2*n2 G22 = A2'*M*A2 + n1*1 g1 = A1'*M*b + n2*1 g2 = A2'*M*b + 1*1 f = b'*M*b + \endcode + */ + HessianFactor(Index j1, Index j2, + const Matrix& G11, const Matrix& G12, const Vector& g1, + const Matrix& G22, const Vector& g2, double f); + + /** Construct a ternary factor. Gxx are the upper-triangle blocks of the + * quadratic term (the Hessian matrix), gx the pieces of the linear vector + * term, and f the constant term. + */ + HessianFactor(Index j1, Index j2, Index j3, + const Matrix& G11, const Matrix& G12, const Matrix& G13, const Vector& g1, + const Matrix& G22, const Matrix& G23, const Vector& g2, + const Matrix& G33, const Vector& g3, double f); + + /** Construct an n-way factor. Gs contains the upper-triangle blocks of the + * quadratic term (the Hessian matrix) provided in row-order, gs the pieces + * of the linear vector term, and f the constant term. + */ + HessianFactor(const std::vector& js, const std::vector& Gs, + const std::vector& gs, double f); + + /** Constructor with an arbitrary number of keys and with the augmented information matrix + * specified as a block matrix. */ + template + HessianFactor(const KEYS& keys, const SymmetricBlockMatrix& augmentedInformation); + + /** Construct from a JacobianFactor (or from a GaussianConditional since it derives from it) */ + explicit HessianFactor(const JacobianFactor& cg); + + /** Attempt to construct from any GaussianFactor - currently supports JacobianFactor, + * HessianFactor, GaussianConditional, or any derived classes. */ + explicit HessianFactor(const GaussianFactor& factor); + + /** Combine a set of factors into a single dense HessianFactor */ + explicit HessianFactor(const GaussianFactorGraph& factors, + boost::optional scatter = boost::none); + + /** Destructor */ + virtual ~HessianFactor() {} + + /** Clone this HessianFactor */ + virtual GaussianFactor::shared_ptr clone() const { + return boost::make_shared(*this); } + + /** Print the factor for debugging and testing (implementing Testable) */ + virtual void print(const std::string& s = "", + const KeyFormatter& formatter = DefaultKeyFormatter) const; + + /** Compare to another factor for testing (implementing Testable) */ + virtual bool equals(const GaussianFactor& lf, double tol = 1e-9) const; + + /** Evaluate the factor error f(x), see above. */ + virtual double error(const VectorValues& c) const; /** 0.5*[x -1]'*H*[x -1] (also see constructor documentation) */ + + /** Return the dimension of the variable pointed to by the given key iterator + * todo: Remove this in favor of keeping track of dimensions with variables? + * @param variable An iterator pointing to the slot in this factor. You can + * use, for example, begin() + 2 to get the 3rd variable in this factor. + */ + virtual DenseIndex getDim(const_iterator variable) const { return info_(variable-this->begin(), 0).rows(); } + + /** Return the number of columns and rows of the Hessian matrix, including the information vector. */ + size_t rows() const { return info_.rows(); } + + /** + * Construct the corresponding anti-factor to negate information + * stored stored in this factor. + * @return a HessianFactor with negated Hessian matrices + */ + virtual GaussianFactor::shared_ptr negate() const; + + /** Check if the factor is empty. TODO: How should this be defined? */ + virtual bool empty() const { return size() == 0 /*|| rows() == 0*/; } + + /** Return a view of the block at (j1,j2) of the upper-triangular part of the + * information matrix \f$ H \f$, no data is copied. See HessianFactor class documentation + * above to explain that only the upper-triangular part of the information matrix is stored + * and returned by this function. + * @param j1 Which block row to get, as an iterator pointing to the slot in this factor. You can + * use, for example, begin() + 2 to get the 3rd variable in this factor. + * @param j2 Which block column to get, as an iterator pointing to the slot in this factor. You can + * use, for example, begin() + 2 to get the 3rd variable in this factor. + * @return A view of the requested block, not a copy. + */ + constBlock info(const_iterator j1, const_iterator j2) const { return info_(j1-begin(), j2-begin()); } + + /** Return a view of the block at (j1,j2) of the upper-triangular part of the + * information matrix \f$ H \f$, no data is copied. See HessianFactor class documentation + * above to explain that only the upper-triangular part of the information matrix is stored + * and returned by this function. + * @param j1 Which block row to get, as an iterator pointing to the slot in this factor. You can + * use, for example, begin() + 2 to get the 3rd variable in this factor. + * @param j2 Which block column to get, as an iterator pointing to the slot in this factor. You can + * use, for example, begin() + 2 to get the 3rd variable in this factor. + * @return A view of the requested block, not a copy. + */ + Block info(iterator j1, iterator j2) { return info_(j1-begin(), j2-begin()); } + + /** Return the upper-triangular part of the full *augmented* information matrix, + * as described above. See HessianFactor class documentation above to explain that only the + * upper-triangular part of the information matrix is stored and returned by this function. + */ + constBlock info() const { return info_.full(); } + + /** Return the upper-triangular part of the full *augmented* information matrix, + * as described above. See HessianFactor class documentation above to explain that only the + * upper-triangular part of the information matrix is stored and returned by this function. + */ + Block info() { return info_.full(); } + + /** Return the constant term \f$ f \f$ as described above + * @return The constant term \f$ f \f$ + */ + double constantTerm() const { return info_(this->size(), this->size())(0,0); } + + /** Return the constant term \f$ f \f$ as described above + * @return The constant term \f$ f \f$ + */ + double& constantTerm() { return info_(this->size(), this->size())(0,0); } + + /** Return the part of linear term \f$ g \f$ as described above corresponding to the requested variable. + * @param j Which block row to get, as an iterator pointing to the slot in this factor. You can + * use, for example, begin() + 2 to get the 3rd variable in this factor. + * @return The linear term \f$ g \f$ */ + constBlock::ColXpr linearTerm(const_iterator j) const { return info_(j-begin(), size()).col(0); } + + /** Return the part of linear term \f$ g \f$ as described above corresponding to the requested variable. + * @param j Which block row to get, as an iterator pointing to the slot in this factor. You can + * use, for example, begin() + 2 to get the 3rd variable in this factor. + * @return The linear term \f$ g \f$ */ + Block::ColXpr linearTerm(iterator j) { return info_(j-begin(), size()).col(0); } + + /** Return the complete linear term \f$ g \f$ as described above. + * @return The linear term \f$ g \f$ */ + constBlock::ColXpr linearTerm() const { return info_.range(0, this->size(), this->size(), this->size() + 1).col(0); } + + /** Return the complete linear term \f$ g \f$ as described above. + * @return The linear term \f$ g \f$ */ + Block::ColXpr linearTerm() { return info_.range(0, this->size(), this->size(), this->size() + 1).col(0); } + + /** Return the augmented information matrix represented by this GaussianFactor. + * The augmented information matrix contains the information matrix with an + * additional column holding the information vector, and an additional row + * holding the transpose of the information vector. The lower-right entry + * contains the constant error term (when \f$ \delta x = 0 \f$). The + * augmented information matrix is described in more detail in HessianFactor, + * which in fact stores an augmented information matrix. + * + * For HessianFactor, this is the same as info() except that this function + * returns a complete symmetric matrix whereas info() returns a matrix where + * only the upper triangle is valid, but should be interpreted as symmetric. + * This is because info() returns only a reference to the internal + * representation of the augmented information matrix, which stores only the + * upper triangle. + */ + virtual Matrix augmentedInformation() const; + + /** Return the non-augmented information matrix represented by this + * GaussianFactor. + */ + virtual Matrix information() const; + + /** + * Return (dense) matrix associated with factor + * @param ordering of variables needed for matrix column order + * @param set weight to true to bake in the weights + */ + virtual std::pair jacobian() const; + + /** + * Return (dense) matrix associated with factor + * The returned system is an augmented matrix: [A b] + * @param set weight to use whitening to bake in weights + */ + virtual Matrix augmentedJacobian() const; + + /** Return the full augmented Hessian matrix of this factor as a SymmetricBlockMatrix object. */ + const SymmetricBlockMatrix& matrixObject() const { return info_; } + + /** Update the factor by adding the information from the JacobianFactor + * (used internally during elimination). + * @param update The JacobianFactor containing the new information to add + * @param scatter A mapping from variable index to slot index in this HessianFactor + */ + void updateATA(const JacobianFactor& update, const Scatter& scatter); + + /** Update the factor by adding the information from the HessianFactor + * (used internally during elimination). + * @param update The HessianFactor containing the new information to add + * @param scatter A mapping from variable index to slot index in this HessianFactor + */ + void updateATA(const HessianFactor& update, const Scatter& scatter); + + /** + * Densely partially eliminate with Cholesky factorization. JacobianFactors are + * left-multiplied with their transpose to form the Hessian using the conversion constructor + * HessianFactor(const JacobianFactor&). + * + * If any factors contain constrained noise models, this function will fail because our current + * implementation cannot handle constrained noise models in Cholesky factorization. The + * function EliminatePreferCholesky() automatically does QR instead when this is the case. + * + * Variables are eliminated in the order specified in \c keys. + * + * @param factors Factors to combine and eliminate + * @param keys The variables to eliminate and their elimination ordering + * @return The conditional and remaining factor + * + * \addtogroup LinearSolving */ + friend GTSAM_EXPORT std::pair, boost::shared_ptr > + EliminateCholesky(const GaussianFactorGraph& factors, const Ordering& keys); + + /** + * Densely partially eliminate with Cholesky factorization. JacobianFactors are + * left-multiplied with their transpose to form the Hessian using the conversion constructor + * HessianFactor(const JacobianFactor&). + * + * This function will fall back on QR factorization for any cliques containing JacobianFactor's + * with constrained noise models. + * + * Variables are eliminated in the order specified in \c keys. + * + * @param factors Factors to combine and eliminate + * @param keys The variables to eliminate and their elimination ordering + * @return The conditional and remaining factor + * + * \addtogroup LinearSolving */ + friend GTSAM_EXPORT std::pair, boost::shared_ptr > + EliminatePreferCholesky(const GaussianFactorGraph& factors, const Ordering& keys); + + private: + + /** split partially eliminated factor */ + boost::shared_ptr splitEliminatedFactor(size_t nrFrontals); + + /** Serialization function */ + friend class boost::serialization::access; + template + void serialize(ARCHIVE & ar, const unsigned int version) { + ar & BOOST_SERIALIZATION_BASE_OBJECT_NVP(GaussianFactor); + ar & BOOST_SERIALIZATION_NVP(info_); + } + }; + +} + +#include diff --git a/gtsam/linear/IterativeSolver.h b/gtsam/linear/IterativeSolver.h index 38515952e..988293a4f 100644 --- a/gtsam/linear/IterativeSolver.h +++ b/gtsam/linear/IterativeSolver.h @@ -11,12 +11,17 @@ #pragma once -#include -#include +#include + #include +#include namespace gtsam { + // Forward declarations + class VectorValues; + class GaussianFactorGraph; + /** * parameters for iterative linear solvers */ @@ -73,7 +78,7 @@ namespace gtsam { virtual VectorValues optimize (const VectorValues &initial) = 0; /* update interface to the nonlinear optimizer */ - virtual void replaceFactors(const GaussianFactorGraph::shared_ptr &factorGraph, const double lambda) {} + virtual void replaceFactors(const boost::shared_ptr &factorGraph, const double lambda) {} }; } diff --git a/gtsam/linear/JacobianFactor-inl.h b/gtsam/linear/JacobianFactor-inl.h new file mode 100644 index 000000000..d703cde9f --- /dev/null +++ b/gtsam/linear/JacobianFactor-inl.h @@ -0,0 +1,110 @@ +/* ---------------------------------------------------------------------------- + + * GTSAM Copyright 2010, Georgia Tech Research Corporation, + * Atlanta, Georgia 30332-0415 + * All Rights Reserved + * Authors: Frank Dellaert, et al. (see THANKS for the full author list) + + * See LICENSE for the license information + + * -------------------------------------------------------------------------- */ + +/** + * @file JacobianFactor.h + * @author Richard Roberts + * @author Christian Potthast + * @author Frank Dellaert + * @date Dec 8, 2010 + */ +#pragma once + +#include +#include +#include +#include + +namespace gtsam { + + /* ************************************************************************* */ + template + JacobianFactor::JacobianFactor(const TERMS&terms, const Vector &b, const SharedDiagonal& model) + { + fillTerms(terms, b, model); + } + + /* ************************************************************************* */ + template + JacobianFactor::JacobianFactor( + const KEYS& keys, const VerticalBlockMatrix& augmentedMatrix, const SharedDiagonal& model) : + Base(keys), Ab_(augmentedMatrix) + { + // Check noise model dimension + if(model && (DenseIndex)model->dim() != augmentedMatrix.rows()) + throw InvalidNoiseModel(augmentedMatrix.rows(), model->dim()); + + // Check number of variables + if((DenseIndex)Base::keys_.size() != augmentedMatrix.nBlocks() - 1) + throw std::invalid_argument( + "Error in JacobianFactor constructor input. Number of provided keys plus\n" + "one for the RHS vector must equal the number of provided matrix blocks."); + + // Check RHS dimension + if(augmentedMatrix(augmentedMatrix.nBlocks() - 1).cols() != 1) + throw std::invalid_argument( + "Error in JacobianFactor constructor input. The last provided matrix block\n" + "must be the RHS vector, but the last provided block had more than one column."); + + // Take noise model + model_ = model; + } + + /* ************************************************************************* */ + namespace { + DenseIndex _getColsJF(const std::pair& p) { + return p.second.cols(); + } + } + + /* ************************************************************************* */ + template + void JacobianFactor::fillTerms(const TERMS& terms, const Vector& b, const SharedDiagonal& noiseModel) + { + // Check noise model dimension + if(noiseModel && (DenseIndex)noiseModel->dim() != b.size()) + throw InvalidNoiseModel(b.size(), noiseModel->dim()); + + // Resize base class key vector + Base::keys_.resize(terms.size()); + + // Gather dimensions - uses boost range adaptors to take terms, extract .second which are the + // matrices, then extract the number of columns e.g. dimensions in each matrix. Then joins with + // a single '1' to add a dimension for the b vector. + { + using boost::adaptors::transformed; + namespace br = boost::range; + Ab_ = VerticalBlockMatrix(br::join(terms | transformed(&_getColsJF), ListOfOne((DenseIndex)1)), b.size()); + } + + // Check and add terms + typedef std::pair Term; + DenseIndex i = 0; // For block index + BOOST_FOREACH(const Term& term, terms) { + // Check block rows + if(term.second.rows() != b.size()) + throw InvalidMatrixBlock(b.size(), term.second.rows()); + // Assign key and matrix + Base::keys_[i] = term.first; + Ab_(i) = term.second; + // Increment block index + ++ i; + } + + // Assign RHS vector + getb() = b; + + // Assign noise model + model_ = noiseModel; + } + +} // gtsam + diff --git a/gtsam/linear/JacobianFactor.cpp b/gtsam/linear/JacobianFactor.cpp index 5de446d25..be2636a16 100644 --- a/gtsam/linear/JacobianFactor.cpp +++ b/gtsam/linear/JacobianFactor.cpp @@ -1,528 +1,594 @@ -/* ---------------------------------------------------------------------------- - - * GTSAM Copyright 2010, Georgia Tech Research Corporation, - * Atlanta, Georgia 30332-0415 - * All Rights Reserved - * Authors: Frank Dellaert, et al. (see THANKS for the full author list) - - * See LICENSE for the license information - - * -------------------------------------------------------------------------- */ - -/** - * @file JacobianFactor.cpp - * @author Richard Roberts - * @date Dec 8, 2010 - */ - -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include - -#include -#include -#include -#ifdef __GNUC__ -#pragma GCC diagnostic push -#pragma GCC diagnostic ignored "-Wunused-variable" -#endif -#include -#ifdef __GNUC__ -#pragma GCC diagnostic pop -#endif - -#include -#include -#include - -using namespace std; - -namespace gtsam { - - /* ************************************************************************* */ - void JacobianFactor::assertInvariants() const { -#ifndef NDEBUG - GaussianFactor::assertInvariants(); // The base class checks for unique keys - assert((size() == 0 && Ab_.rows() == 0 && Ab_.nBlocks() == 0) || size()+1 == Ab_.nBlocks()); -#endif - } - - /* ************************************************************************* */ - JacobianFactor::JacobianFactor(const JacobianFactor& gf) : - GaussianFactor(gf), model_(gf.model_), Ab_(matrix_) { - Ab_.assignNoalias(gf.Ab_); - assertInvariants(); - } - - /* ************************************************************************* */ - JacobianFactor::JacobianFactor(const GaussianFactor& gf) : Ab_(matrix_) { - // Copy the matrix data depending on what type of factor we're copying from - if(const JacobianFactor* rhs = dynamic_cast(&gf)) - *this = JacobianFactor(*rhs); - else if(const HessianFactor* rhs = dynamic_cast(&gf)) - *this = JacobianFactor(*rhs); - else - throw std::invalid_argument("In JacobianFactor(const GaussianFactor& rhs), rhs is neither a JacobianFactor nor a HessianFactor"); - assertInvariants(); - } - - /* ************************************************************************* */ - JacobianFactor::JacobianFactor() : Ab_(matrix_) { assertInvariants(); } - - /* ************************************************************************* */ - JacobianFactor::JacobianFactor(const Vector& b_in) : Ab_(matrix_) { - size_t dims[] = { 1 }; - Ab_.copyStructureFrom(BlockAb(matrix_, dims, dims+1, b_in.size())); - getb() = b_in; - model_ = noiseModel::Unit::Create(this->rows()); - assertInvariants(); - } - - /* ************************************************************************* */ - JacobianFactor::JacobianFactor(Index i1, const Matrix& A1, - const Vector& b, const SharedDiagonal& model) : - GaussianFactor(i1), model_(model), Ab_(matrix_) { - - if(model->dim() != (size_t) b.size()) - throw InvalidNoiseModel(b.size(), model->dim()); - - size_t dims[] = { A1.cols(), 1}; - Ab_.copyStructureFrom(BlockAb(matrix_, dims, dims+2, b.size())); - Ab_(0) = A1; - getb() = b; - assertInvariants(); - } - - /* ************************************************************************* */ - JacobianFactor::JacobianFactor(Index i1, const Matrix& A1, Index i2, const Matrix& A2, - const Vector& b, const SharedDiagonal& model) : - GaussianFactor(i1,i2), model_(model), Ab_(matrix_) { - - if(model->dim() != (size_t) b.size()) - throw InvalidNoiseModel(b.size(), model->dim()); - - size_t dims[] = { A1.cols(), A2.cols(), 1}; - Ab_.copyStructureFrom(BlockAb(matrix_, dims, dims+3, b.size())); - Ab_(0) = A1; - Ab_(1) = A2; - getb() = b; - assertInvariants(); - } - - /* ************************************************************************* */ - JacobianFactor::JacobianFactor(Index i1, const Matrix& A1, Index i2, const Matrix& A2, - Index i3, const Matrix& A3, const Vector& b, const SharedDiagonal& model) : - GaussianFactor(i1,i2,i3), model_(model), Ab_(matrix_) { - - if(model->dim() != (size_t) b.size()) - throw InvalidNoiseModel(b.size(), model->dim()); - - size_t dims[] = { A1.cols(), A2.cols(), A3.cols(), 1}; - Ab_.copyStructureFrom(BlockAb(matrix_, dims, dims+4, b.size())); - Ab_(0) = A1; - Ab_(1) = A2; - Ab_(2) = A3; - getb() = b; - assertInvariants(); - } - - /* ************************************************************************* */ - JacobianFactor::JacobianFactor(const std::vector > &terms, - const Vector &b, const SharedDiagonal& model) : - GaussianFactor(GetKeys(terms.size(), terms.begin(), terms.end())), - model_(model), Ab_(matrix_) - { - // get number of measurements and variables involved in this factor - size_t m = b.size(), n = terms.size(); - - if(model->dim() != (size_t) m) - throw InvalidNoiseModel(b.size(), model->dim()); - - // Get the dimensions of each variable and copy to "dims" array, add 1 for RHS - size_t* dims = (size_t*)alloca(sizeof(size_t)*(n+1)); // FIXME: alloca is bad, just ask Google. - for(size_t j=0; j > &terms, - const Vector &b, const SharedDiagonal& model) : - GaussianFactor(GetKeys(terms.size(), terms.begin(), terms.end())), - model_(model), Ab_(matrix_) - { - - if(model->dim() != (size_t) b.size()) - throw InvalidNoiseModel(b.size(), model->dim()); - - size_t* dims=(size_t*)alloca(sizeof(size_t)*(terms.size()+1)); // FIXME: alloca is bad, just ask Google. - size_t j=0; - std::list >::const_iterator term=terms.begin(); - for(; term!=terms.end(); ++term,++j) - dims[j] = term->second.cols(); - dims[j] = 1; - Ab_.copyStructureFrom(BlockAb(matrix_, dims, dims+terms.size()+1, b.size())); - j = 0; - for(term=terms.begin(); term!=terms.end(); ++term,++j) - Ab_(j) = term->second; - getb() = b; - assertInvariants(); - } - - /* ************************************************************************* */ - JacobianFactor::JacobianFactor(const GaussianConditional& cg) : - GaussianFactor(cg), - model_(noiseModel::Diagonal::Sigmas(cg.get_sigmas(), true)), - Ab_(matrix_) { - Ab_.assignNoalias(cg.rsd_); - assertInvariants(); - } - - /* ************************************************************************* */ - JacobianFactor::JacobianFactor(const HessianFactor& factor) : Ab_(matrix_) { - keys_ = factor.keys_; - Ab_.assignNoalias(factor.info_); - - // Do Cholesky to get a Jacobian - size_t maxrank; - bool success; - boost::tie(maxrank, success) = choleskyCareful(matrix_); - - // Check for indefinite system - if(!success) - throw IndeterminantLinearSystemException(factor.keys().front()); - - // Zero out lower triangle - matrix_.topRows(maxrank).triangularView() = - Matrix::Zero(maxrank, matrix_.cols()); - // FIXME: replace with triangular system - Ab_.rowEnd() = maxrank; - model_ = noiseModel::Unit::Create(maxrank); - - assertInvariants(); - } - - /* ************************************************************************* */ - JacobianFactor::JacobianFactor(const GaussianFactorGraph& gfg) : Ab_(matrix_) { - // Cast or convert to Jacobians - FactorGraph jacobians; - BOOST_FOREACH(const GaussianFactorGraph::sharedFactor& factor, gfg) { - if(factor) { - if(JacobianFactor::shared_ptr jf = boost::dynamic_pointer_cast(factor)) - jacobians.push_back(jf); - else - jacobians.push_back(boost::make_shared(*factor)); - } - } - - *this = *CombineJacobians(jacobians, VariableSlots(jacobians)); - } - - /* ************************************************************************* */ - JacobianFactor& JacobianFactor::operator=(const JacobianFactor& rhs) { - this->Base::operator=(rhs); // Copy keys - model_ = rhs.model_; // Copy noise model - Ab_.assignNoalias(rhs.Ab_); // Copy matrix and block structure - assertInvariants(); - return *this; - } - - /* ************************************************************************* */ - void JacobianFactor::print(const string& s, const IndexFormatter& formatter) const { - cout << s << "\n"; - if (empty()) { - cout << " empty, keys: "; - BOOST_FOREACH(const Index& key, keys()) { cout << formatter(key) << " "; } - cout << endl; - } else { - for(const_iterator key=begin(); key!=end(); ++key) - cout << boost::format("A[%1%]=\n")%formatter(*key) << getA(key) << endl; - cout << "b=" << getb() << endl; - model_->print("model"); - } - } - - /* ************************************************************************* */ - // Check if two linear factors are equal - bool JacobianFactor::equals(const GaussianFactor& f_, double tol) const { - if(!dynamic_cast(&f_)) - return false; - else { - const JacobianFactor& f(static_cast(f_)); - if (empty()) return (f.empty()); - if(keys()!=f.keys() /*|| !model_->equals(lf->model_, tol)*/) - return false; - - if (!(Ab_.rows() == f.Ab_.rows() && Ab_.cols() == f.Ab_.cols())) - return false; - - constABlock Ab1(Ab_.range(0, Ab_.nBlocks())); - constABlock Ab2(f.Ab_.range(0, f.Ab_.nBlocks())); - for(size_t row=0; row< (size_t) Ab1.rows(); ++row) - if(!equal_with_abs_tol(Ab1.row(row), Ab2.row(row), tol) && - !equal_with_abs_tol(-Ab1.row(row), Ab2.row(row), tol)) - return false; - - return true; - } - } - - /* ************************************************************************* */ - Vector JacobianFactor::unweighted_error(const VectorValues& c) const { - Vector e = -getb(); - if (empty()) return e; - for(size_t pos=0; poswhiten(-getb()); - return model_->whiten(unweighted_error(c)); - } - - /* ************************************************************************* */ - double JacobianFactor::error(const VectorValues& c) const { - if (empty()) return 0; - Vector weighted = error_vector(c); - return 0.5 * weighted.dot(weighted); - } - - /* ************************************************************************* */ - Matrix JacobianFactor::augmentedInformation() const { - Matrix AbWhitened = Ab_.full(); - model_->WhitenInPlace(AbWhitened); - return AbWhitened.transpose() * AbWhitened; - } - - /* ************************************************************************* */ - Matrix JacobianFactor::information() const { - Matrix AWhitened = this->getA(); - model_->WhitenInPlace(AWhitened); - return AWhitened.transpose() * AWhitened; - } - - /* ************************************************************************* */ - Vector JacobianFactor::operator*(const VectorValues& x) const { - Vector Ax = zero(Ab_.rows()); - if (empty()) return Ax; - - // Just iterate over all A matrices and multiply in correct config part - for(size_t pos=0; poswhiten(Ax); - } - - /* ************************************************************************* */ - void JacobianFactor::transposeMultiplyAdd(double alpha, const Vector& e, - VectorValues& x) const { - Vector E = alpha * model_->whiten(e); - // Just iterate over all A matrices and insert Ai^e into VectorValues - for(size_t pos=0; pos JacobianFactor::matrix(bool weight) const { - Matrix A(Ab_.range(0, size())); - Vector b(getb()); - // divide in sigma so error is indeed 0.5*|Ax-b| - if (weight) model_->WhitenSystem(A,b); - return make_pair(A, b); - } - - /* ************************************************************************* */ - Matrix JacobianFactor::matrix_augmented(bool weight) const { - if (weight) { Matrix Ab(Ab_.range(0,Ab_.nBlocks())); model_->WhitenInPlace(Ab); return Ab; } - else return Ab_.range(0, Ab_.nBlocks()); - } - - /* ************************************************************************* */ - std::vector > - JacobianFactor::sparse(const std::vector& columnIndices) const { - - std::vector > entries; - - // iterate over all variables in the factor - for(const_iterator var=begin(); varWhiten(getA(var))); - // find first column index for this key - size_t column_start = columnIndices[*var]; - for (size_t i = 0; i < (size_t) whitenedA.rows(); i++) - for (size_t j = 0; j < (size_t) whitenedA.cols(); j++) { - double s = whitenedA(i,j); - if (std::abs(s) > 1e-12) entries.push_back( - boost::make_tuple(i, column_start + j, s)); - } - } - - Vector whitenedb(model_->whiten(getb())); - size_t bcolumn = columnIndices.back(); - for (size_t i = 0; i < (size_t) whitenedb.size(); i++) - entries.push_back(boost::make_tuple(i, bcolumn, whitenedb(i))); - - // return the result - return entries; - } - - /* ************************************************************************* */ - JacobianFactor JacobianFactor::whiten() const { - JacobianFactor result(*this); - result.model_->WhitenInPlace(result.matrix_); - result.model_ = noiseModel::Unit::Create(result.model_->dim()); - return result; - } - - /* ************************************************************************* */ - GaussianFactor::shared_ptr JacobianFactor::negate() const { - HessianFactor hessian(*this); - return hessian.negate(); - } - - /* ************************************************************************* */ - GaussianConditional::shared_ptr JacobianFactor::eliminateFirst() { - return this->eliminate(1); - } - - /* ************************************************************************* */ - GaussianConditional::shared_ptr JacobianFactor::splitConditional(size_t nrFrontals) { - assert(Ab_.rowStart() == 0 && Ab_.rowEnd() == (size_t) matrix_.rows() && Ab_.firstBlock() == 0); - assert(size() >= nrFrontals); - assertInvariants(); - - const bool debug = ISDEBUG("JacobianFactor::splitConditional"); - - if(debug) cout << "Eliminating " << nrFrontals << " frontal variables" << endl; - if(debug) this->print("Splitting JacobianFactor: "); - - size_t frontalDim = Ab_.range(0,nrFrontals).cols(); - - // Check for singular factor - if(model_->dim() < frontalDim) - throw IndeterminantLinearSystemException(this->keys().front()); - - // Extract conditional - gttic(cond_Rd); - - // Restrict the matrix to be in the first nrFrontals variables - Ab_.rowEnd() = Ab_.rowStart() + frontalDim; - const Eigen::VectorBlock sigmas = model_->sigmas().segment(Ab_.rowStart(), Ab_.rowEnd()-Ab_.rowStart()); - GaussianConditional::shared_ptr conditional(new GaussianConditional(begin(), end(), nrFrontals, Ab_, sigmas)); - if(debug) conditional->print("Extracted conditional: "); - Ab_.rowStart() += frontalDim; - Ab_.firstBlock() += nrFrontals; - gttoc(cond_Rd); - - if(debug) conditional->print("Extracted conditional: "); - - gttic(remaining_factor); - // Take lower-right block of Ab to get the new factor - Ab_.rowEnd() = model_->dim(); - keys_.erase(begin(), begin() + nrFrontals); - // Set sigmas with the right model - if (model_->isConstrained()) - model_ = noiseModel::Constrained::MixedSigmas(sub(model_->sigmas(), frontalDim, model_->dim())); - else - model_ = noiseModel::Diagonal::Sigmas(sub(model_->sigmas(), frontalDim, model_->dim())); - if(debug) this->print("Eliminated factor: "); - assert(Ab_.rows() <= Ab_.cols()-1); - gttoc(remaining_factor); - - if(debug) print("Eliminated factor: "); - - assertInvariants(); - - return conditional; - } - - /* ************************************************************************* */ - GaussianConditional::shared_ptr JacobianFactor::eliminate(size_t nrFrontals) { - - assert(Ab_.rowStart() == 0 && Ab_.rowEnd() == (size_t) matrix_.rows() && Ab_.firstBlock() == 0); - assert(size() >= nrFrontals); - assertInvariants(); - - const bool debug = ISDEBUG("JacobianFactor::eliminate"); - - if(debug) cout << "Eliminating " << nrFrontals << " frontal variables" << endl; - if(debug) this->print("Eliminating JacobianFactor: "); - if(debug) gtsam::print(matrix_, "Augmented Ab: "); - - size_t frontalDim = Ab_.range(0,nrFrontals).cols(); - - if(debug) cout << "frontalDim = " << frontalDim << endl; - - // Use in-place QR dense Ab appropriate to NoiseModel - gttic(QR); - SharedDiagonal noiseModel = model_->QR(matrix_); - gttoc(QR); - - // Zero the lower-left triangle. todo: not all of these entries actually - // need to be zeroed if we are careful to start copying rows after the last - // structural zero. - if(matrix_.rows() > 0) - for(size_t j=0; j<(size_t) matrix_.cols(); ++j) - for(size_t i=j+1; idim(); ++i) - matrix_(i,j) = 0.0; - - if(debug) gtsam::print(matrix_, "QR result: "); - if(debug) noiseModel->print("QR result noise model: "); - - // Start of next part - model_ = noiseModel; - return splitConditional(nrFrontals); - } - - /* ************************************************************************* */ - void JacobianFactor::allocate(const VariableSlots& variableSlots, vector< - size_t>& varDims, size_t m) { - keys_.resize(variableSlots.size()); - std::transform(variableSlots.begin(), variableSlots.end(), begin(), - boost::bind(&VariableSlots::const_iterator::value_type::first, _1)); - varDims.push_back(1); - Ab_.copyStructureFrom(BlockAb(matrix_, varDims.begin(), varDims.end(), m)); - } - - /* ************************************************************************* */ - void JacobianFactor::setModel(bool anyConstrained, const Vector& sigmas) { - if((size_t) sigmas.size() != this->rows()) - throw InvalidNoiseModel(this->rows(), sigmas.size()); - if (anyConstrained) - model_ = noiseModel::Constrained::MixedSigmas(sigmas); - else - model_ = noiseModel::Diagonal::Sigmas(sigmas); - } - - /* ************************************************************************* */ - const char* JacobianFactor::InvalidNoiseModel::what() const throw() { - if(description_.empty()) - description_ = (boost::format( - "A JacobianFactor was attempted to be constructed or modified to use a\n" - "noise model of incompatible dimension. The JacobianFactor has\n" - "dimensionality (i.e. length of error vector) %d but the provided noise\n" - "model has dimensionality %d.") % factorDims % noiseModelDims).str(); - return description_.c_str(); - } - -} +/* ---------------------------------------------------------------------------- + + * GTSAM Copyright 2010, Georgia Tech Research Corporation, + * Atlanta, Georgia 30332-0415 + * All Rights Reserved + * Authors: Frank Dellaert, et al. (see THANKS for the full author list) + + * See LICENSE for the license information + + * -------------------------------------------------------------------------- */ + +/** + * @file JacobianFactor.cpp + * @author Richard Roberts + * @author Christian Potthast + * @author Frank Dellaert + * @date Dec 8, 2010 + */ + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include +#include +#include +#include +#include +#ifdef __GNUC__ +#pragma GCC diagnostic push +#pragma GCC diagnostic ignored "-Wunused-variable" +#endif +#include +#ifdef __GNUC__ +#pragma GCC diagnostic pop +#endif +#include +#include +#include + +#include +#include +#include + +using namespace std; +using namespace boost::assign; + +namespace gtsam { + + /* ************************************************************************* */ + JacobianFactor::JacobianFactor() : + Ab_(cref_list_of<1>(1), 0) + { + getb().setZero(); + } + + /* ************************************************************************* */ + JacobianFactor::JacobianFactor(const GaussianFactor& gf) { + // Copy the matrix data depending on what type of factor we're copying from + if(const JacobianFactor* rhs = dynamic_cast(&gf)) + *this = JacobianFactor(*rhs); + else if(const HessianFactor* rhs = dynamic_cast(&gf)) + *this = JacobianFactor(*rhs); + else + throw std::invalid_argument("In JacobianFactor(const GaussianFactor& rhs), rhs is neither a JacobianFactor nor a HessianFactor"); + } + + /* ************************************************************************* */ + JacobianFactor::JacobianFactor(const Vector& b_in) : + Ab_(cref_list_of<1>(1), b_in.size()) + { + getb() = b_in; + } + + /* ************************************************************************* */ + JacobianFactor::JacobianFactor(Key i1, const Matrix& A1, + const Vector& b, const SharedDiagonal& model) + { + fillTerms(cref_list_of<1>(make_pair(i1, A1)), b, model); + } + + /* ************************************************************************* */ + JacobianFactor::JacobianFactor( + const Key i1, const Matrix& A1, Key i2, const Matrix& A2, + const Vector& b, const SharedDiagonal& model) + { + fillTerms(cref_list_of<2> + (make_pair(i1,A1)) + (make_pair(i2,A2)), b, model); + } + + /* ************************************************************************* */ + JacobianFactor::JacobianFactor( + const Key i1, const Matrix& A1, Key i2, const Matrix& A2, + Key i3, const Matrix& A3, const Vector& b, const SharedDiagonal& model) + { + fillTerms(cref_list_of<3> + (make_pair(i1,A1)) + (make_pair(i2,A2)) + (make_pair(i3,A3)), b, model); + } + + /* ************************************************************************* */ + JacobianFactor::JacobianFactor(const HessianFactor& factor) : + Base(factor), Ab_(VerticalBlockMatrix::LikeActiveViewOf(factor.matrixObject(), factor.rows())) + { + // Copy Hessian into our matrix and then do in-place Cholesky + Ab_.full() = factor.matrixObject().full(); + + // Do Cholesky to get a Jacobian + size_t maxrank; + bool success; + boost::tie(maxrank, success) = choleskyCareful(Ab_.matrix()); + + // Check for indefinite system + if(!success) + throw IndeterminantLinearSystemException(factor.keys().front()); + + // Zero out lower triangle + Ab_.matrix().topRows(maxrank).triangularView() = + Matrix::Zero(maxrank, Ab_.matrix().cols()); + // FIXME: replace with triangular system + Ab_.rowEnd() = maxrank; + model_ = noiseModel::Unit::Create(maxrank); + } + + /* ************************************************************************* */ + // Helper functions for combine constructor + namespace { + boost::tuple, DenseIndex, DenseIndex> _countDims( + const std::vector& factors, const vector& variableSlots) + { + gttic(countDims); +#ifdef GTSAM_EXTRA_CONSISTENCY_CHECKS + vector varDims(variableSlots.size(), numeric_limits::max()); +#else + vector varDims(variableSlots.size()); +#endif + DenseIndex m = 0; + DenseIndex n = 0; + { + size_t jointVarpos = 0; + BOOST_FOREACH(VariableSlots::const_iterator slots, variableSlots) + { + assert(slots->second.size() == factors.size()); + + size_t sourceFactorI = 0; + BOOST_FOREACH(const size_t sourceVarpos, slots->second) { + if(sourceVarpos < numeric_limits::max()) { + const JacobianFactor& sourceFactor = *factors[sourceFactorI]; + if(sourceFactor.rows() > 0) { + DenseIndex vardim = sourceFactor.getDim(sourceFactor.begin() + sourceVarpos); +#ifdef GTSAM_EXTRA_CONSISTENCY_CHECKS + if(varDims[jointVarpos] == numeric_limits::max()) { + varDims[jointVarpos] = vardim; + n += vardim; + } else + assert(varDims[jointVarpos] == vardim); +#else + varDims[jointVarpos] = vardim; + n += vardim; + break; +#endif + } + } + ++ sourceFactorI; + } + ++ jointVarpos; + } + BOOST_FOREACH(const JacobianFactor::shared_ptr& factor, factors) { + m += factor->rows(); + } + } + return boost::make_tuple(varDims, m, n); + } + + /* ************************************************************************* */ + std::vector + _convertOrCastToJacobians(const GaussianFactorGraph& factors) + { + gttic(Convert_to_Jacobians); + std::vector jacobians; + jacobians.reserve(factors.size()); + BOOST_FOREACH(const GaussianFactor::shared_ptr& factor, factors) { + if(factor) { + if(JacobianFactor::shared_ptr jf = boost::dynamic_pointer_cast(factor)) + jacobians.push_back(jf); + else + jacobians.push_back(boost::make_shared(*factor)); + } + } + return jacobians; + } + } + + /* ************************************************************************* */ + JacobianFactor::JacobianFactor( + const GaussianFactorGraph& graph, + boost::optional ordering, + boost::optional variableSlots) + { + gttic(JacobianFactor_combine_constructor); + + // Compute VariableSlots if one was not provided + boost::optional computedVariableSlots; + if(!variableSlots) { + computedVariableSlots = VariableSlots(graph); + variableSlots = computedVariableSlots; // Binds reference, does not copy VariableSlots + } + + // Cast or convert to Jacobians + std::vector jacobians = _convertOrCastToJacobians(graph); + + gttic(Order_slots); + // Order variable slots - we maintain the vector of ordered slots, as well as keep a list + // 'unorderedSlots' of any variables discovered that are not in the ordering. Those will then + // be added after all of the ordered variables. + vector orderedSlots; + orderedSlots.reserve(variableSlots->size()); + if(ordering) { + // If an ordering is provided, arrange the slots first that ordering + FastList unorderedSlots; + size_t nOrderingSlotsUsed = 0; + orderedSlots.resize(ordering->size()); + FastMap inverseOrdering = ordering->invert(); + for(VariableSlots::const_iterator item = variableSlots->begin(); item != variableSlots->end(); ++item) { + FastMap::const_iterator orderingPosition = inverseOrdering.find(item->first); + if(orderingPosition == inverseOrdering.end()) { + unorderedSlots.push_back(item); + } else { + orderedSlots[orderingPosition->second] = item; + ++ nOrderingSlotsUsed; + } + } + if(nOrderingSlotsUsed != ordering->size()) + throw std::invalid_argument( + "The ordering provided to the JacobianFactor combine constructor\n" + "contained extra variables that did not appear in the factors to combine."); + // Add the remaining slots + BOOST_FOREACH(VariableSlots::const_iterator item, unorderedSlots) { + orderedSlots.push_back(item); + } + } else { + // If no ordering is provided, arrange the slots as they were, which will be sorted + // numerically since VariableSlots uses a map sorting on Key. + for(VariableSlots::const_iterator item = variableSlots->begin(); item != variableSlots->end(); ++item) + orderedSlots.push_back(item); + } + gttoc(Order_slots); + + // Count dimensions + vector varDims; + DenseIndex m, n; + boost::tie(varDims, m, n) = _countDims(jacobians, orderedSlots); + + // Allocate matrix and copy keys in order + gttic(allocate); + Ab_ = VerticalBlockMatrix(boost::join(varDims, ListOfOne((DenseIndex)1)), m); // Allocate augmented matrix + Base::keys_.resize(orderedSlots.size()); + boost::range::copy( // Get variable keys + orderedSlots | boost::adaptors::indirected | boost::adaptors::map_keys, Base::keys_.begin()); + gttoc(allocate); + + // Loop over slots in combined factor and copy blocks from source factors + gttic(copy_blocks); + size_t combinedSlot = 0; + BOOST_FOREACH(VariableSlots::const_iterator varslot, orderedSlots) { + JacobianFactor::ABlock destSlot(this->getA(this->begin()+combinedSlot)); + // Loop over source jacobians + DenseIndex nextRow = 0; + for(size_t factorI = 0; factorI < jacobians.size(); ++factorI) { + // Slot in source factor + const size_t sourceSlot = varslot->second[factorI]; + const DenseIndex sourceRows = jacobians[factorI]->rows(); + if(sourceRows > 0) { + JacobianFactor::ABlock::RowsBlockXpr destBlock(destSlot.middleRows(nextRow, sourceRows)); + // Copy if exists in source factor, otherwise set zero + if(sourceSlot != numeric_limits::max()) + destBlock = jacobians[factorI]->getA(jacobians[factorI]->begin()+sourceSlot); + else + destBlock.setZero(); + nextRow += sourceRows; + } + } + ++combinedSlot; + } + gttoc(copy_blocks); + + // Copy the RHS vectors and sigmas + gttic(copy_vectors); + bool anyConstrained = false; + boost::optional sigmas; + // Loop over source jacobians + DenseIndex nextRow = 0; + for(size_t factorI = 0; factorI < jacobians.size(); ++factorI) { + const DenseIndex sourceRows = jacobians[factorI]->rows(); + if(sourceRows > 0) { + this->getb().segment(nextRow, sourceRows) = jacobians[factorI]->getb(); + if(jacobians[factorI]->get_model()) { + // If the factor has a noise model and we haven't yet allocated sigmas, allocate it. + if(!sigmas) + sigmas = Vector::Constant(m, 1.0); + sigmas->segment(nextRow, sourceRows) = jacobians[factorI]->get_model()->sigmas(); + if (jacobians[factorI]->isConstrained()) + anyConstrained = true; + } + nextRow += sourceRows; + } + } + gttoc(copy_vectors); + + if(sigmas) + this->setModel(anyConstrained, *sigmas); + } + + /* ************************************************************************* */ + void JacobianFactor::print(const string& s, const KeyFormatter& formatter) const + { + if(!s.empty()) + cout << s << "\n"; + for(const_iterator key = begin(); key != end(); ++key) { + cout << + formatMatrixIndented((boost::format(" A[%1%] = ") % formatter(*key)).str(), getA(key)) + << endl; + } + cout << formatMatrixIndented(" b = ", getb(), true) << "\n"; + if(model_) + model_->print(" Noise model: "); + else + cout << " No noise model" << endl; + } + + /* ************************************************************************* */ + // Check if two linear factors are equal + bool JacobianFactor::equals(const GaussianFactor& f_, double tol) const + { + if(!dynamic_cast(&f_)) + return false; + else { + const JacobianFactor& f(static_cast(f_)); + + // Check keys + if(keys() != f.keys()) + return false; + + // Check noise model + if((model_ && !f.model_) || (!model_ && f.model_)) + return false; + if(model_ && f.model_ && !model_->equals(*f.model_, tol)) + return false; + + // Check matrix sizes + if (!(Ab_.rows() == f.Ab_.rows() && Ab_.cols() == f.Ab_.cols())) + return false; + + // Check matrix contents + constABlock Ab1(Ab_.range(0, Ab_.nBlocks())); + constABlock Ab2(f.Ab_.range(0, f.Ab_.nBlocks())); + for(size_t row=0; row< (size_t) Ab1.rows(); ++row) + if(!equal_with_abs_tol(Ab1.row(row), Ab2.row(row), tol) && + !equal_with_abs_tol(-Ab1.row(row), Ab2.row(row), tol)) + return false; + + return true; + } + } + + /* ************************************************************************* */ + Vector JacobianFactor::unweighted_error(const VectorValues& c) const { + Vector e = -getb(); + for(size_t pos=0; poswhiten(unweighted_error(c)); + else + return unweighted_error(c); + } + + /* ************************************************************************* */ + double JacobianFactor::error(const VectorValues& c) const { + if (empty()) return 0; + Vector weighted = error_vector(c); + return 0.5 * weighted.dot(weighted); + } + + /* ************************************************************************* */ + Matrix JacobianFactor::augmentedInformation() const { + if(model_) { + Matrix AbWhitened = Ab_.full(); + model_->WhitenInPlace(AbWhitened); + return AbWhitened.transpose() * AbWhitened; + } else { + return Ab_.full().transpose() * Ab_.full(); + } + } + + /* ************************************************************************* */ + Matrix JacobianFactor::information() const { + if(model_) { + Matrix AWhitened = this->getA(); + model_->WhitenInPlace(AWhitened); + return AWhitened.transpose() * AWhitened; + } else { + return this->getA().transpose() * this->getA(); + } + } + + /* ************************************************************************* */ + Vector JacobianFactor::operator*(const VectorValues& x) const { + Vector Ax = zero(Ab_.rows()); + if (empty()) return Ax; + + // Just iterate over all A matrices and multiply in correct config part + for(size_t pos=0; poswhiten(Ax) : Ax; + } + + /* ************************************************************************* */ + void JacobianFactor::transposeMultiplyAdd(double alpha, const Vector& e, + VectorValues& x) const + { + Vector E = alpha * (model_ ? model_->whiten(e) : e); + // Just iterate over all A matrices and insert Ai^e into VectorValues + for(size_t pos=0; pos xi = x.tryInsert(keys_[pos], Vector()); + if(xi.second) + xi.first->second = Vector::Zero(getDim(begin() + pos)); + gtsam::transposeMultiplyAdd(1.0, Ab_(pos), E, xi.first->second); + } + } + + /* ************************************************************************* */ + pair JacobianFactor::jacobian() const { + pair result = jacobianUnweighted(); + // divide in sigma so error is indeed 0.5*|Ax-b| + if (model_) + model_->WhitenSystem(result.first, result.second); + return result; + } + + /* ************************************************************************* */ + pair JacobianFactor::jacobianUnweighted() const { + Matrix A(Ab_.range(0, size())); + Vector b(getb()); + return make_pair(A, b); + } + + /* ************************************************************************* */ + Matrix JacobianFactor::augmentedJacobian() const { + Matrix Ab = augmentedJacobianUnweighted(); + if (model_) + model_->WhitenInPlace(Ab); + return Ab; + } + + /* ************************************************************************* */ + Matrix JacobianFactor::augmentedJacobianUnweighted() const { + return Ab_.range(0, Ab_.nBlocks()); + } + + /* ************************************************************************* */ + JacobianFactor JacobianFactor::whiten() const { + JacobianFactor result(*this); + if(model_) { + result.model_->WhitenInPlace(result.Ab_.matrix()); + result.model_ = noiseModel::Unit::Create(result.model_->dim()); + } + return result; + } + + /* ************************************************************************* */ + GaussianFactor::shared_ptr JacobianFactor::negate() const { + HessianFactor hessian(*this); + return hessian.negate(); + } + + /* ************************************************************************* */ + std::pair, boost::shared_ptr > + JacobianFactor::eliminate(const Ordering& keys) + { + GaussianFactorGraph graph; + graph.add(*this); + return EliminateQR(graph, keys); + } + + /* ************************************************************************* */ + void JacobianFactor::setModel(bool anyConstrained, const Vector& sigmas) { + if((size_t) sigmas.size() != this->rows()) + throw InvalidNoiseModel(this->rows(), sigmas.size()); + if (anyConstrained) + model_ = noiseModel::Constrained::MixedSigmas(sigmas); + else + model_ = noiseModel::Diagonal::Sigmas(sigmas); + } + + /* ************************************************************************* */ + std::pair, boost::shared_ptr > + EliminateQR(const GaussianFactorGraph& factors, const Ordering& keys) + { + gttic(EliminateQR); + // Combine and sort variable blocks in elimination order + JacobianFactor::shared_ptr jointFactor; + try { + jointFactor = boost::make_shared(factors, keys); + } catch(std::invalid_argument&) { + throw InvalidDenseElimination( + "EliminateQR was called with a request to eliminate variables that are not\n" + "involved in the provided factors."); + } + + // Do dense elimination + SharedDiagonal noiseModel; + if(jointFactor->model_) + jointFactor->model_ = jointFactor->model_->QR(jointFactor->Ab_.matrix()); + else + inplace_QR(jointFactor->Ab_.matrix()); + + // Zero below the diagonal + jointFactor->Ab_.matrix().triangularView().setZero(); + + // Split elimination result into conditional and remaining factor + GaussianConditional::shared_ptr conditional = jointFactor->splitConditional(keys.size()); + + return make_pair(conditional, jointFactor); + } + + /* ************************************************************************* */ + GaussianConditional::shared_ptr JacobianFactor::splitConditional(size_t nrFrontals) + { + gttic(JacobianFactor_splitConditional); + + if(nrFrontals > size()) + throw std::invalid_argument("Requesting to split more variables than exist using JacobianFactor::splitConditional"); + + DenseIndex frontalDim = Ab_.range(0, nrFrontals).cols(); + + // Restrict the matrix to be in the first nrFrontals variables and create the conditional + gttic(cond_Rd); + const DenseIndex originalRowEnd = Ab_.rowEnd(); + Ab_.rowEnd() = Ab_.rowStart() + frontalDim; + SharedDiagonal conditionalNoiseModel; + if(model_) { + if((DenseIndex)model_->dim() < frontalDim) + throw IndeterminantLinearSystemException(this->keys().front()); + conditionalNoiseModel = + noiseModel::Diagonal::Sigmas(model_->sigmas().segment(Ab_.rowStart(), Ab_.rows())); + } + GaussianConditional::shared_ptr conditional = boost::make_shared( + Base::keys_, nrFrontals, Ab_, conditionalNoiseModel); + const DenseIndex maxRemainingRows = std::min(Ab_.cols() - 1, originalRowEnd) - Ab_.rowStart() - frontalDim; + const DenseIndex remainingRows = + model_ ? std::min(model_->sigmas().size() - frontalDim, maxRemainingRows) + : maxRemainingRows; + Ab_.rowStart() += frontalDim; + Ab_.rowEnd() = Ab_.rowStart() + remainingRows; + Ab_.firstBlock() += nrFrontals; + gttoc(cond_Rd); + + // Take lower-right block of Ab to get the new factor + gttic(remaining_factor); + keys_.erase(begin(), begin() + nrFrontals); + // Set sigmas with the right model + if(model_) { + if (model_->isConstrained()) + model_ = noiseModel::Constrained::MixedSigmas(model_->sigmas().tail(remainingRows)); + else + model_ = noiseModel::Diagonal::Sigmas(model_->sigmas().tail(remainingRows)); + assert(model_->dim() == Ab_.rows()); + } + gttoc(remaining_factor); + + return conditional; + } + +} diff --git a/gtsam/linear/JacobianFactor.h b/gtsam/linear/JacobianFactor.h index 01de4953a..e3412c79d 100644 --- a/gtsam/linear/JacobianFactor.h +++ b/gtsam/linear/JacobianFactor.h @@ -1,335 +1,324 @@ -/* ---------------------------------------------------------------------------- - - * GTSAM Copyright 2010, Georgia Tech Research Corporation, - * Atlanta, Georgia 30332-0415 - * All Rights Reserved - * Authors: Frank Dellaert, et al. (see THANKS for the full author list) - - * See LICENSE for the license information - - * -------------------------------------------------------------------------- */ - -/** - * @file JacobianFactor.h - * @author Richard Roberts - * @date Dec 8, 2010 - */ -#pragma once - -#include -#include -#include -#include -#include -#include -#include - -#include - -// Forward declarations of friend unit tests -class JacobianFactorCombine2Test; -class JacobianFactoreliminateFrontalsTest; -class JacobianFactorconstructor2Test; - -namespace gtsam { - - // Forward declarations - class HessianFactor; - class VariableSlots; - template class BayesNet; - class GaussianFactorGraph; - - /** - * A Gaussian factor in the squared-error form. - * - * JacobianFactor implements a - * Gaussian, which has quadratic negative log-likelihood - * \f[ E(x) = \frac{1}{2} (Ax-b)^T \Sigma^{-1} (Ax-b) \f] - * where \f$ \Sigma \f$ is a \a diagonal covariance matrix. The - * matrix \f$ A \f$, r.h.s. vector \f$ b \f$, and diagonal noise model - * \f$ \Sigma \f$ are stored in this class. - * - * This factor represents the sum-of-squares error of a \a linear - * measurement function, and is created upon linearization of a NoiseModelFactor, - * which in turn is a sum-of-squares factor with a nonlinear measurement function. - * - * Here is an example of how this factor represents a sum-of-squares error: - * - * Letting \f$ h(x) \f$ be a \a linear measurement prediction function, \f$ z \f$ be - * the actual observed measurement, the residual is - * \f[ f(x) = h(x) - z . \f] - * If we expect noise with diagonal covariance matrix \f$ \Sigma \f$ on this - * measurement, then the negative log-likelihood of the Gaussian induced by this - * measurement model is - * \f[ E(x) = \frac{1}{2} (h(x) - z)^T \Sigma^{-1} (h(x) - z) . \f] - * Because \f$ h(x) \f$ is linear, we can write it as - * \f[ h(x) = Ax + e \f] - * and thus we have - * \f[ E(x) = \frac{1}{2} (Ax-b)^T \Sigma^{-1} (Ax-b) \f] - * where \f$ b = z - e \f$. - * - * This factor can involve an arbitrary number of variables, and in the - * above example \f$ x \f$ would almost always be only be a subset of the variables - * in the entire factor graph. There are special constructors for 1-, 2-, and 3- - * way JacobianFactors, and additional constructors for creating n-way JacobianFactors. - * The Jacobian matrix \f$ A \f$ is passed to these constructors in blocks, - * for example, for a 2-way factor, the constructor would accept \f$ A1 \f$ and \f$ A2 \f$, - * as well as the variable indices \f$ j1 \f$ and \f$ j2 \f$ - * and the negative log-likelihood represented by this factor would be - * \f[ E(x) = \frac{1}{2} (A_1 x_{j1} + A_2 x_{j2} - b)^T \Sigma^{-1} (A_1 x_{j1} + A_2 x_{j2} - b) . \f] - */ - class GTSAM_EXPORT JacobianFactor : public GaussianFactor { - protected: - typedef Matrix AbMatrix; - typedef VerticalBlockView BlockAb; - - noiseModel::Diagonal::shared_ptr model_; // Gaussian noise model with diagonal covariance matrix - AbMatrix matrix_; // the full matrix corresponding to the factor - BlockAb Ab_; // the block view of the full matrix - typedef GaussianFactor Base; // typedef to base - - public: - typedef boost::shared_ptr shared_ptr; - typedef BlockAb::Block ABlock; - typedef BlockAb::constBlock constABlock; - typedef BlockAb::Column BVector; - typedef BlockAb::constColumn constBVector; - - /** Copy constructor */ - JacobianFactor(const JacobianFactor& gf); - - /** Convert from other GaussianFactor */ - JacobianFactor(const GaussianFactor& gf); - - /** default constructor for I/O */ - JacobianFactor(); - - /** Construct Null factor */ - JacobianFactor(const Vector& b_in); - - /** Construct unary factor */ - JacobianFactor(Index i1, const Matrix& A1, - const Vector& b, const SharedDiagonal& model); - - /** Construct binary factor */ - JacobianFactor(Index i1, const Matrix& A1, - Index i2, const Matrix& A2, - const Vector& b, const SharedDiagonal& model); - - /** Construct ternary factor */ - JacobianFactor(Index i1, const Matrix& A1, Index i2, - const Matrix& A2, Index i3, const Matrix& A3, - const Vector& b, const SharedDiagonal& model); - - /** Construct an n-ary factor */ - JacobianFactor(const std::vector > &terms, - const Vector &b, const SharedDiagonal& model); - - JacobianFactor(const std::list > &terms, - const Vector &b, const SharedDiagonal& model); - - /** Construct from Conditional Gaussian */ - JacobianFactor(const GaussianConditional& cg); - - /** Convert from a HessianFactor (does Cholesky) */ - JacobianFactor(const HessianFactor& factor); - - /** Build a dense joint factor from all the factors in a factor graph. */ - JacobianFactor(const GaussianFactorGraph& gfg); - - /** Virtual destructor */ - virtual ~JacobianFactor() {} - - /** Aassignment operator */ - JacobianFactor& operator=(const JacobianFactor& rhs); - - /** Clone this JacobianFactor */ - virtual GaussianFactor::shared_ptr clone() const { - return boost::static_pointer_cast( - shared_ptr(new JacobianFactor(*this))); - } - - // Implementing Testable interface - virtual void print(const std::string& s = "", - const IndexFormatter& formatter = DefaultIndexFormatter) const; - virtual bool equals(const GaussianFactor& lf, double tol = 1e-9) const; - - Vector unweighted_error(const VectorValues& c) const; /** (A*x-b) */ - Vector error_vector(const VectorValues& c) const; /** (A*x-b)/sigma */ - virtual double error(const VectorValues& c) const; /** 0.5*(A*x-b)'*D*(A*x-b) */ - - /** Return the augmented information matrix represented by this GaussianFactor. - * The augmented information matrix contains the information matrix with an - * additional column holding the information vector, and an additional row - * holding the transpose of the information vector. The lower-right entry - * contains the constant error term (when \f$ \delta x = 0 \f$). The - * augmented information matrix is described in more detail in HessianFactor, - * which in fact stores an augmented information matrix. - */ - virtual Matrix augmentedInformation() const; +/* ---------------------------------------------------------------------------- + + * GTSAM Copyright 2010, Georgia Tech Research Corporation, + * Atlanta, Georgia 30332-0415 + * All Rights Reserved + * Authors: Frank Dellaert, et al. (see THANKS for the full author list) + + * See LICENSE for the license information + + * -------------------------------------------------------------------------- */ + +/** + * @file JacobianFactor.h + * @author Richard Roberts + * @author Christian Potthast + * @author Frank Dellaert + * @date Dec 8, 2010 + */ +#pragma once + +#include +#include +#include +#include + +#include + +namespace gtsam { + + // Forward declarations + //class HessianFactor; + class VariableSlots; + class GaussianFactorGraph; + class GaussianConditional; + class HessianFactor; + class VectorValues; + class Ordering; + class JacobianFactor; + + GTSAM_EXPORT std::pair, boost::shared_ptr > + EliminateQR(const GaussianFactorGraph& factors, const Ordering& keys); + + /** + * A Gaussian factor in the squared-error form. + * + * JacobianFactor implements a + * Gaussian, which has quadratic negative log-likelihood + * \f[ E(x) = \frac{1}{2} (Ax-b)^T \Sigma^{-1} (Ax-b) \f] + * where \f$ \Sigma \f$ is a \a diagonal covariance matrix. The + * matrix \f$ A \f$, r.h.s. vector \f$ b \f$, and diagonal noise model + * \f$ \Sigma \f$ are stored in this class. + * + * This factor represents the sum-of-squares error of a \a linear + * measurement function, and is created upon linearization of a NoiseModelFactor, + * which in turn is a sum-of-squares factor with a nonlinear measurement function. + * + * Here is an example of how this factor represents a sum-of-squares error: + * + * Letting \f$ h(x) \f$ be a \a linear measurement prediction function, \f$ z \f$ be + * the actual observed measurement, the residual is + * \f[ f(x) = h(x) - z . \f] + * If we expect noise with diagonal covariance matrix \f$ \Sigma \f$ on this + * measurement, then the negative log-likelihood of the Gaussian induced by this + * measurement model is + * \f[ E(x) = \frac{1}{2} (h(x) - z)^T \Sigma^{-1} (h(x) - z) . \f] + * Because \f$ h(x) \f$ is linear, we can write it as + * \f[ h(x) = Ax + e \f] + * and thus we have + * \f[ E(x) = \frac{1}{2} (Ax-b)^T \Sigma^{-1} (Ax-b) \f] + * where \f$ b = z - e \f$. + * + * This factor can involve an arbitrary number of variables, and in the + * above example \f$ x \f$ would almost always be only be a subset of the variables + * in the entire factor graph. There are special constructors for 1-, 2-, and 3- + * way JacobianFactors, and additional constructors for creating n-way JacobianFactors. + * The Jacobian matrix \f$ A \f$ is passed to these constructors in blocks, + * for example, for a 2-way factor, the constructor would accept \f$ A1 \f$ and \f$ A2 \f$, + * as well as the variable indices \f$ j1 \f$ and \f$ j2 \f$ + * and the negative log-likelihood represented by this factor would be + * \f[ E(x) = \frac{1}{2} (A_1 x_{j1} + A_2 x_{j2} - b)^T \Sigma^{-1} (A_1 x_{j1} + A_2 x_{j2} - b) . \f] + */ + class GTSAM_EXPORT JacobianFactor : public GaussianFactor + { + public: + typedef JacobianFactor This; ///< Typedef to this class + typedef GaussianFactor Base; ///< Typedef to base class + typedef boost::shared_ptr shared_ptr; ///< shared_ptr to this class + + protected: + VerticalBlockMatrix Ab_; // the block view of the full matrix + noiseModel::Diagonal::shared_ptr model_; // Gaussian noise model with diagonal covariance matrix + + public: + typedef VerticalBlockMatrix::Block ABlock; + typedef VerticalBlockMatrix::constBlock constABlock; + typedef ABlock::ColXpr BVector; + typedef constABlock::ConstColXpr constBVector; + + /** Convert from other GaussianFactor */ + explicit JacobianFactor(const GaussianFactor& gf); + + /** Copy constructor */ + JacobianFactor(const JacobianFactor& jf) : Base(jf), Ab_(jf.Ab_), model_(jf.model_) {} + + /** Conversion from HessianFactor (does Cholesky to obtain Jacobian matrix) */ + explicit JacobianFactor(const HessianFactor& hf); + + /** default constructor for I/O */ + JacobianFactor(); + + /** Construct Null factor */ + explicit JacobianFactor(const Vector& b_in); + + /** Construct unary factor */ + JacobianFactor(Key i1, const Matrix& A1, + const Vector& b, const SharedDiagonal& model = SharedDiagonal()); + + /** Construct binary factor */ + JacobianFactor(Key i1, const Matrix& A1, + Key i2, const Matrix& A2, + const Vector& b, const SharedDiagonal& model = SharedDiagonal()); + + /** Construct ternary factor */ + JacobianFactor(Key i1, const Matrix& A1, Key i2, + const Matrix& A2, Key i3, const Matrix& A3, + const Vector& b, const SharedDiagonal& model = SharedDiagonal()); + + /** Construct an n-ary factor + * @tparam TERMS A container whose value type is std::pair, specifying the + * collection of keys and matrices making up the factor. */ + template + JacobianFactor(const TERMS& terms, const Vector& b, const SharedDiagonal& model = SharedDiagonal()); + + /** Constructor with arbitrary number keys, and where the augmented matrix is given all together + * instead of in block terms. Note that only the active view of the provided augmented matrix + * is used, and that the matrix data is copied into a newly-allocated matrix in the constructed + * factor. */ + template + JacobianFactor( + const KEYS& keys, const VerticalBlockMatrix& augmentedMatrix, const SharedDiagonal& sigmas = SharedDiagonal()); + + /** Convert from a HessianFactor (does Cholesky) */ + //JacobianFactor(const HessianFactor& factor); + + /** + * Build a dense joint factor from all the factors in a factor graph. If a VariableSlots + * structure computed for \c graph is already available, providing it will reduce the amount of + * computation performed. */ + explicit JacobianFactor( + const GaussianFactorGraph& graph, + boost::optional ordering = boost::none, + boost::optional variableSlots = boost::none); + + /** Virtual destructor */ + virtual ~JacobianFactor() {} + + /** Clone this JacobianFactor */ + virtual GaussianFactor::shared_ptr clone() const { + return boost::static_pointer_cast( + boost::make_shared(*this)); + } + + // Implementing Testable interface + virtual void print(const std::string& s = "", + const KeyFormatter& formatter = DefaultKeyFormatter) const; + virtual bool equals(const GaussianFactor& lf, double tol = 1e-9) const; + + Vector unweighted_error(const VectorValues& c) const; /** (A*x-b) */ + Vector error_vector(const VectorValues& c) const; /** (A*x-b)/sigma */ + virtual double error(const VectorValues& c) const; /** 0.5*(A*x-b)'*D*(A*x-b) */ + + /** Return the augmented information matrix represented by this GaussianFactor. + * The augmented information matrix contains the information matrix with an + * additional column holding the information vector, and an additional row + * holding the transpose of the information vector. The lower-right entry + * contains the constant error term (when \f$ \delta x = 0 \f$). The + * augmented information matrix is described in more detail in HessianFactor, + * which in fact stores an augmented information matrix. + */ + virtual Matrix augmentedInformation() const; /** Return the non-augmented information matrix represented by this * GaussianFactor. */ virtual Matrix information() const; - - /** - * Construct the corresponding anti-factor to negate information - * stored stored in this factor. - * @return a HessianFactor with negated Hessian matrices - */ - virtual GaussianFactor::shared_ptr negate() const; - - /** Check if the factor contains no information, i.e. zero rows. This does - * not necessarily mean that the factor involves no variables (to check for - * involving no variables use keys().empty()). - */ - bool empty() const { return Ab_.rows() == 0;} - - /** is noise model constrained ? */ - bool isConstrained() const { return model_->isConstrained();} - - /** Return the dimension of the variable pointed to by the given key iterator - * todo: Remove this in favor of keeping track of dimensions with variables? - */ - virtual size_t getDim(const_iterator variable) const { return Ab_(variable - begin()).cols(); } - - /** - * return the number of rows in the corresponding linear system - */ - size_t rows() const { return Ab_.rows(); } - - /** - * return the number of rows in the corresponding linear system - */ - size_t numberOfRows() const { return rows(); } - - /** - * return the number of columns in the corresponding linear system - */ - size_t cols() const { return Ab_.cols(); } - - /** get a copy of model */ - const SharedDiagonal& get_model() const { return model_; } - - /** get a copy of model (non-const version) */ - SharedDiagonal& get_model() { return model_; } - - /** Get a view of the r.h.s. vector b */ - const constBVector getb() const { return Ab_.column(size(), 0); } - - /** Get a view of the A matrix for the variable pointed to by the given key iterator */ - constABlock getA(const_iterator variable) const { return Ab_(variable - begin()); } - - /** Get a view of the A matrix */ - constABlock getA() const { return Ab_.range(0, size()); } - - /** Get a view of the r.h.s. vector b (non-const version) */ - BVector getb() { return Ab_.column(size(), 0); } - - /** Get a view of the A matrix for the variable pointed to by the given key iterator (non-const version) */ - ABlock getA(iterator variable) { return Ab_(variable - begin()); } - - /** Get a view of the A matrix */ - ABlock getA() { return Ab_.range(0, size()); } - - /** Return A*x */ - Vector operator*(const VectorValues& x) const; - - /** x += A'*e */ - void transposeMultiplyAdd(double alpha, const Vector& e, VectorValues& x) const; - - /** - * Return (dense) matrix associated with factor - * @param ordering of variables needed for matrix column order - * @param set weight to true to bake in the weights - */ - std::pair matrix(bool weight = true) const; - - /** - * Return (dense) matrix associated with factor - * The returned system is an augmented matrix: [A b] - * @param set weight to use whitening to bake in weights - */ - Matrix matrix_augmented(bool weight = true) const; - - /** - * Return vector of i, j, and s to generate an m-by-n sparse matrix - * such that S(i(k),j(k)) = s(k), which can be given to MATLAB's sparse. - * As above, the standard deviations are baked into A and b - * @param columnIndices First column index for each variable. - */ - std::vector > - sparse(const std::vector& columnIndices) const; - - /** - * Return a whitened version of the factor, i.e. with unit diagonal noise - * model. */ - JacobianFactor whiten() const; - - /** Eliminate the first variable, modifying the factor in place to contain the remaining marginal. */ - boost::shared_ptr eliminateFirst(); - - /** Eliminate the requested number of frontal variables, modifying the factor in place to contain the remaining marginal. */ - boost::shared_ptr eliminate(size_t nrFrontals = 1); - - /** - * splits a pre-factorized factor into a conditional, and changes the current - * factor to be the remaining component. Performs same operation as eliminate(), - * but without running QR. - */ - boost::shared_ptr splitConditional(size_t nrFrontals = 1); - - // following methods all used in CombineJacobians: - // Many imperative, perhaps all need to be combined in constructor - - /** allocate space */ - void allocate(const VariableSlots& variableSlots, - std::vector& varDims, size_t m); - - /** set noiseModel correctly */ - void setModel(bool anyConstrained, const Vector& sigmas); - - /** Assert invariants after elimination */ - void assertInvariants() const; - - /** An exception indicating that the noise model dimension passed into the - * JacobianFactor has a different dimensionality than the factor. */ - class InvalidNoiseModel : public std::exception { - public: - const size_t factorDims; ///< The dimensionality of the factor - const size_t noiseModelDims; ///< The dimensionality of the noise model - - InvalidNoiseModel(size_t factorDims, size_t noiseModelDims) : - factorDims(factorDims), noiseModelDims(noiseModelDims) {} - virtual ~InvalidNoiseModel() throw() {} - - virtual const char* what() const throw(); - - private: - mutable std::string description_; - }; - - private: - - // Friend HessianFactor to facilitate conversion constructors - friend class HessianFactor; - - // Friend unit tests (see also forward declarations above) - friend class ::JacobianFactorCombine2Test; - friend class ::JacobianFactoreliminateFrontalsTest; - friend class ::JacobianFactorconstructor2Test; - - /** Serialization function */ - friend class boost::serialization::access; - template - void serialize(ARCHIVE & ar, const unsigned int version) { - ar & BOOST_SERIALIZATION_BASE_OBJECT_NVP(GaussianFactor); - ar & BOOST_SERIALIZATION_NVP(Ab_); - ar & BOOST_SERIALIZATION_NVP(model_); - ar & BOOST_SERIALIZATION_NVP(matrix_); - } - }; // JacobianFactor - -} // gtsam - + + /** + * Return (dense) matrix associated with factor + * @param ordering of variables needed for matrix column order + * @param set weight to true to bake in the weights + */ + virtual std::pair jacobian() const; + + /** + * Return (dense) matrix associated with factor + * @param ordering of variables needed for matrix column order + * @param set weight to true to bake in the weights + */ + std::pair jacobianUnweighted() const; + + /** Return (dense) matrix associated with factor. The returned system is an augmented matrix: + * [A b] + * @param set weight to use whitening to bake in weights*/ + virtual Matrix augmentedJacobian() const; + + /** Return (dense) matrix associated with factor. The returned system is an augmented matrix: + * [A b] + * @param set weight to use whitening to bake in weights */ + Matrix augmentedJacobianUnweighted() const; + + /** Return the full augmented Jacobian matrix of this factor as a VerticalBlockMatrix object. */ + const VerticalBlockMatrix& matrixObject() const { return Ab_; } + + /** + * Construct the corresponding anti-factor to negate information + * stored stored in this factor. + * @return a HessianFactor with negated Hessian matrices + */ + virtual GaussianFactor::shared_ptr negate() const; + + /** Check if the factor is empty. TODO: How should this be defined? */ + virtual bool empty() const { return size() == 0 /*|| rows() == 0*/; } + + /** is noise model constrained ? */ + bool isConstrained() const { return model_->isConstrained(); } + + /** Return the dimension of the variable pointed to by the given key iterator + * todo: Remove this in favor of keeping track of dimensions with variables? + */ + virtual DenseIndex getDim(const_iterator variable) const { return Ab_(variable - begin()).cols(); } + + /** + * return the number of rows in the corresponding linear system + */ + size_t rows() const { return Ab_.rows(); } + + /** + * return the number of columns in the corresponding linear system + */ + size_t cols() const { return Ab_.cols(); } + + /** get a copy of model */ + const SharedDiagonal& get_model() const { return model_; } + + /** get a copy of model (non-const version) */ + SharedDiagonal& get_model() { return model_; } + + /** Get a view of the r.h.s. vector b */ + const constBVector getb() const { return Ab_(size()).col(0); } + + /** Get a view of the A matrix for the variable pointed to by the given key iterator */ + constABlock getA(const_iterator variable) const { return Ab_(variable - begin()); } + + /** Get a view of the A matrix */ + constABlock getA() const { return Ab_.range(0, size()); } + + /** Get a view of the r.h.s. vector b (non-const version) */ + BVector getb() { return Ab_(size()).col(0); } + + /** Get a view of the A matrix for the variable pointed to by the given key iterator (non-const version) */ + ABlock getA(iterator variable) { return Ab_(variable - begin()); } + + /** Get a view of the A matrix */ + ABlock getA() { return Ab_.range(0, size()); } + + /** Return A*x */ + Vector operator*(const VectorValues& x) const; + + /** x += A'*e. If x is initially missing any values, they are created and assumed to start as + * zero vectors. */ + void transposeMultiplyAdd(double alpha, const Vector& e, VectorValues& x) const; + + /** Return a whitened version of the factor, i.e. with unit diagonal noise model. */ + JacobianFactor whiten() const; + + /** Eliminate the requested variables. */ + std::pair, boost::shared_ptr > + eliminate(const Ordering& keys); + + /** set noiseModel correctly */ + void setModel(bool anyConstrained, const Vector& sigmas); + + /** + * Densely partially eliminate with QR factorization, this is usually provided as an argument to + * one of the factor graph elimination functions (see EliminateableFactorGraph). HessianFactors + * are first factored with Cholesky decomposition to produce JacobianFactors, by calling the + * conversion constructor JacobianFactor(const HessianFactor&). Variables are eliminated in the + * order specified in \c keys. + * @param factors Factors to combine and eliminate + * @param keys The variables to eliminate in the order as specified here in \c keys + * @return The conditional and remaining factor + * + * \addtogroup LinearSolving */ + friend GTSAM_EXPORT std::pair, boost::shared_ptr > + EliminateQR(const GaussianFactorGraph& factors, const Ordering& keys); + + private: + + /// Internal function to fill blocks and set dimensions + template + void fillTerms(const TERMS& terms, const Vector& b, const SharedDiagonal& noiseModel); + + /** + * splits a pre-factorized factor into a conditional, and changes the current + * factor to be the remaining component. Performs same operation as eliminate(), + * but without running QR. + */ + boost::shared_ptr splitConditional(size_t nrFrontals); + + /** Serialization function */ + friend class boost::serialization::access; + template + void serialize(ARCHIVE & ar, const unsigned int version) { + ar & BOOST_SERIALIZATION_BASE_OBJECT_NVP(Base); + ar & BOOST_SERIALIZATION_NVP(Ab_); + ar & BOOST_SERIALIZATION_NVP(model_); + } + }; // JacobianFactor + +} // gtsam + +#include + + diff --git a/gtsam/linear/KalmanFilter.cpp b/gtsam/linear/KalmanFilter.cpp index 4a4730e60..c37df5ad2 100644 --- a/gtsam/linear/KalmanFilter.cpp +++ b/gtsam/linear/KalmanFilter.cpp @@ -20,72 +20,68 @@ * @author Frank Dellaert */ -#include -#include #include +#include +#include #include -#include #include #include +#include + +using namespace boost::assign; +using namespace std; namespace gtsam { - using namespace std; - /// Auxiliary function to solve factor graph and return pointer to root conditional - KalmanFilter::State solve(const GaussianFactorGraph& factorGraph, bool useQR) + KalmanFilter::State solve(const GaussianFactorGraph& factorGraph, const GaussianFactorGraph::Eliminate& function) { - // Start indices at zero - Index nVars = 0; - internal::Reduction remapping; - BOOST_FOREACH(const GaussianFactorGraph::sharedFactor& factor, factorGraph) - BOOST_FOREACH(Index j, *factor) - if(remapping.insert(make_pair(j, nVars)).second) - ++ nVars; - Permutation inverseRemapping = remapping.inverse(); - GaussianFactorGraph factorGraphOrdered(factorGraph); // NOTE this shares the factors with the original!! - factorGraphOrdered.reduceWithInverse(remapping); + // Do a dense solve, e.g. don't use the multifrontal framework + // Eliminate the keys in increasing order so that the last key is the one we want to keep. + FastSet keysToEliminate = factorGraph.keys(); + FastSet::const_iterator lastKeyPos = keysToEliminate.end(); + -- lastKeyPos; + Key remainingKey = *lastKeyPos; + keysToEliminate.erase(lastKeyPos); + GaussianFactorGraph::EliminationResult result = function(factorGraph, Ordering(keysToEliminate)); - // Solve the factor graph - GaussianSequentialSolver solver(factorGraphOrdered, useQR); - GaussianBayesNet::shared_ptr bayesNet = solver.eliminate(); + // As this is a filter, all we need is the posterior P(x_t). Eliminate it to be + // upper-triangular. + GaussianFactorGraph graphOfRemainingFactor; + graphOfRemainingFactor += result.second; + GaussianDensity::shared_ptr state = boost::make_shared( + *function(graphOfRemainingFactor, Ordering(cref_list_of<1>(remainingKey))).first); - // As this is a filter, all we need is the posterior P(x_t), - // so we just keep the root of the Bayes net - GaussianConditional::shared_ptr conditional = bayesNet->back(); - - // Undo the remapping - factorGraphOrdered.permuteWithInverse(inverseRemapping); - conditional->permuteWithInverse(inverseRemapping); - - // TODO: awful ! A copy constructor followed by ANOTHER copy constructor in make_shared? - return boost::make_shared(*conditional); + return state; } /* ************************************************************************* */ - KalmanFilter::State fuse(const KalmanFilter::State& p, - GaussianFactor* newFactor, bool useQR) { - + KalmanFilter::State fuse(const KalmanFilter::State& p, GaussianFactor::shared_ptr newFactor, + const GaussianFactorGraph::Eliminate& function) + { // Create a factor graph GaussianFactorGraph factorGraph; - - // push back previous solution and new factor - factorGraph.push_back(p->toFactor()); - factorGraph.push_back(GaussianFactor::shared_ptr(newFactor)); + factorGraph += p, newFactor; // Eliminate graph in order x0, x1, to get Bayes net P(x0|x1)P(x1) - return solve(factorGraph, useQR); + return solve(factorGraph, function); } /* ************************************************************************* */ - KalmanFilter::State KalmanFilter::init(const Vector& x0, - const SharedDiagonal& P0) { + GaussianFactorGraph::Eliminate KalmanFilter::factorization() const { + return method_ == QR ? + GaussianFactorGraph::Eliminate(EliminateQR) : + GaussianFactorGraph::Eliminate(EliminateCholesky); + } + + /* ************************************************************************* */ + KalmanFilter::State KalmanFilter::init(const Vector& x0, const SharedDiagonal& P0) { // Create a factor graph f(x0), eliminate it into P(x0) GaussianFactorGraph factorGraph; - factorGraph.add(0, I_, x0, P0); // |x-x0|^2_diagSigma - return solve(factorGraph, useQR()); + factorGraph += JacobianFactor(0, I_, x0, P0); // |x-x0|^2_diagSigma + return solve(factorGraph, factorization()); } /* ************************************************************************* */ @@ -93,10 +89,8 @@ namespace gtsam { // Create a factor graph f(x0), eliminate it into P(x0) GaussianFactorGraph factorGraph; - // 0.5*(x-x0)'*inv(Sigma)*(x-x0) - HessianFactor::shared_ptr factor(new HessianFactor(0, x, P0)); - factorGraph.push_back(factor); - return solve(factorGraph, useQR()); + factorGraph += HessianFactor(0, x, P0); // 0.5*(x-x0)'*inv(Sigma)*(x-x0) + return solve(factorGraph, factorization()); } /* ************************************************************************* */ @@ -111,7 +105,7 @@ namespace gtsam { // The factor related to the motion model is defined as // f2(x_{t},x_{t+1}) = (F*x_{t} + B*u - x_{t+1}) * Q^-1 * (F*x_{t} + B*u - x_{t+1})^T Index k = step(p); - return fuse(p, new JacobianFactor(k, -F, k + 1, I_, B * u, model), useQR()); + return fuse(p, boost::make_shared(k, -F, k + 1, I_, B * u, model), factorization()); } /* ************************************************************************* */ @@ -119,7 +113,7 @@ namespace gtsam { const Matrix& B, const Vector& u, const Matrix& Q) { #ifndef NDEBUG - int n = F.cols(); + DenseIndex n = F.cols(); assert(F.rows() == n); assert(B.rows() == n); assert(B.cols() == u.size()); @@ -136,8 +130,7 @@ namespace gtsam { Vector b = B * u, g2 = M * b, g1 = -Ft * g2; double f = dot(b, g2); Index k = step(p); - return fuse(p, new HessianFactor(k, k + 1, G11, G12, g1, G22, g2, f), - useQR()); + return fuse(p, boost::make_shared(k, k + 1, G11, G12, g1, G22, g2, f), factorization()); } /* ************************************************************************* */ @@ -146,7 +139,7 @@ namespace gtsam { // Nhe factor related to the motion model is defined as // f2(x_{t},x_{t+1}) = |A0*x_{t} + A1*x_{t+1} - b|^2 Index k = step(p); - return fuse(p, new JacobianFactor(k, A0, k + 1, A1, b, model), useQR()); + return fuse(p, boost::make_shared(k, A0, k + 1, A1, b, model), factorization()); } /* ************************************************************************* */ @@ -156,7 +149,7 @@ namespace gtsam { // f2 = (h(x_{t}) - z_{t}) * R^-1 * (h(x_{t}) - z_{t})^T // = (x_{t} - z_{t}) * R^-1 * (x_{t} - z_{t})^T Index k = step(p); - return fuse(p, new JacobianFactor(k, H, z, model), useQR()); + return fuse(p, boost::make_shared(k, H, z, model), factorization()); } /* ************************************************************************* */ @@ -167,7 +160,7 @@ namespace gtsam { Matrix G = Ht * M * H; Vector g = Ht * M * z; double f = dot(z, M * z); - return fuse(p, new HessianFactor(k, G, g, f), useQR()); + return fuse(p, boost::make_shared(k, G, g, f), factorization()); } /* ************************************************************************* */ diff --git a/gtsam/linear/KalmanFilter.h b/gtsam/linear/KalmanFilter.h index 0089bd34d..bcbea4394 100644 --- a/gtsam/linear/KalmanFilter.h +++ b/gtsam/linear/KalmanFilter.h @@ -20,6 +20,7 @@ #pragma once #include +#include #include #ifndef KALMANFILTER_DEFAULT_FACTORIZATION @@ -60,9 +61,7 @@ namespace gtsam { const Matrix I_; /** identity matrix of size n*n */ const Factorization method_; /** algorithm */ - bool useQR() const { - return method_ == QR; - } + GaussianFactorGraph::Eliminate factorization() const; public: diff --git a/gtsam/linear/NoiseModel.cpp b/gtsam/linear/NoiseModel.cpp index 5c8d591b9..be2e8bb5b 100644 --- a/gtsam/linear/NoiseModel.cpp +++ b/gtsam/linear/NoiseModel.cpp @@ -110,12 +110,14 @@ void Gaussian::WhitenInPlace(Matrix& H) const { // General QR, see also special version in Constrained SharedDiagonal Gaussian::QR(Matrix& Ab) const { + gttic(Gaussian_noise_model_QR); + static const bool debug = false; // get size(A) and maxRank // TODO: really no rank problems ? size_t m = Ab.rows(), n = Ab.cols()-1; - size_t maxRank = min(m,n); +// size_t maxRank = min(m,n); // pre-whiten everything (cheaply if possible) WhitenInPlace(Ab); @@ -129,7 +131,7 @@ SharedDiagonal Gaussian::QR(Matrix& Ab) const { // TODO: necessary to isolate last column? // householder(Ab, maxRank); - return Unit::Create(maxRank); + return SharedDiagonal(); } void Gaussian::WhitenSystem(vector& A, Vector& b) const { @@ -273,9 +275,9 @@ double Constrained::distance(const Vector& v) const { Vector w = Diagonal::whiten(v); // get noisemodel for constrained elements // TODO Find a better way of doing these checks for (size_t i=0; i #include +#include #include using namespace std; @@ -39,47 +40,47 @@ namespace gtsam { /* ************************************************************************* */ SubgraphPreconditioner::SubgraphPreconditioner(const sharedFG& Ab2, const sharedBayesNet& Rc1, const sharedValues& xbar) : - Ab2_(convertToJacobianFactors(*Ab2)), Rc1_(Rc1), xbar_(xbar), b2bar_(new Errors(-gaussianErrors(*Ab2_,*xbar))) { + Ab2_(convertToJacobianFactors(*Ab2)), Rc1_(Rc1), xbar_(xbar), b2bar_(new Errors(-Ab2_->gaussianErrors(*xbar))) { } /* ************************************************************************* */ // x = xbar + inv(R1)*y VectorValues SubgraphPreconditioner::x(const VectorValues& y) const { - return *xbar_ + gtsam::backSubstitute(*Rc1_, y); + return *xbar_ + Rc1_->backSubstitute(y); } /* ************************************************************************* */ - double error(const SubgraphPreconditioner& sp, const VectorValues& y) { + double SubgraphPreconditioner::error(const VectorValues& y) const { Errors e(y); - VectorValues x = sp.x(y); - Errors e2 = gaussianErrors(*sp.Ab2(),x); + VectorValues x = this->x(y); + Errors e2 = Ab2()->gaussianErrors(x); return 0.5 * (dot(e, e) + dot(e2,e2)); } /* ************************************************************************* */ // gradient is y + inv(R1')*A2'*(A2*inv(R1)*y-b2bar), - VectorValues gradient(const SubgraphPreconditioner& sp, const VectorValues& y) { - VectorValues x = gtsam::backSubstitute(*sp.Rc1(), y); /* inv(R1)*y */ - Errors e = (*sp.Ab2()*x - *sp.b2bar()); /* (A2*inv(R1)*y-b2bar) */ + VectorValues SubgraphPreconditioner::gradient(const VectorValues& y) const { + VectorValues x = Rc1()->backSubstitute(y); /* inv(R1)*y */ + Errors e = (*Ab2()*x - *b2bar()); /* (A2*inv(R1)*y-b2bar) */ VectorValues v = VectorValues::Zero(x); - transposeMultiplyAdd(*sp.Ab2(), 1.0, e, v); /* A2'*(A2*inv(R1)*y-b2bar) */ - return y + gtsam::backSubstituteTranspose(*sp.Rc1(), v); + Ab2()->transposeMultiplyAdd(1.0, e, v); /* A2'*(A2*inv(R1)*y-b2bar) */ + return y + Rc1()->backSubstituteTranspose(v); } /* ************************************************************************* */ // Apply operator A, A*y = [I;A2*inv(R1)]*y = [y; A2*inv(R1)*y] - Errors operator*(const SubgraphPreconditioner& sp, const VectorValues& y) { + Errors SubgraphPreconditioner::operator*(const VectorValues& y) const { Errors e(y); - VectorValues x = gtsam::backSubstitute(*sp.Rc1(), y); /* x=inv(R1)*y */ - Errors e2 = *sp.Ab2() * x; /* A2*x */ + VectorValues x = Rc1()->backSubstitute(y); /* x=inv(R1)*y */ + Errors e2 = *Ab2() * x; /* A2*x */ e.splice(e.end(), e2); return e; } /* ************************************************************************* */ // In-place version that overwrites e - void multiplyInPlace(const SubgraphPreconditioner& sp, const VectorValues& y, Errors& e) { + void SubgraphPreconditioner::multiplyInPlace(const VectorValues& y, Errors& e) const { Errors::iterator ei = e.begin(); for ( Index i = 0 ; i < y.size() ; ++i, ++ei ) { @@ -87,33 +88,33 @@ namespace gtsam { } // Add A2 contribution - VectorValues x = gtsam::backSubstitute(*sp.Rc1(), y); // x=inv(R1)*y - gtsam::multiplyInPlace(*sp.Ab2(), x, ei); // use iterator version + VectorValues x = Rc1()->backSubstitute(y); // x=inv(R1)*y + Ab2()->multiplyInPlace(x, ei); // use iterator version } /* ************************************************************************* */ // Apply operator A', A'*e = [I inv(R1')*A2']*e = e1 + inv(R1')*A2'*e2 - VectorValues operator^(const SubgraphPreconditioner& sp, const Errors& e) { + VectorValues SubgraphPreconditioner::operator^(const Errors& e) const { Errors::const_iterator it = e.begin(); - VectorValues y = sp.zero(); + VectorValues y = zero(); for ( Index i = 0 ; i < y.size() ; ++i, ++it ) y[i] = *it ; - sp.transposeMultiplyAdd2(1.0,it,e.end(),y); + transposeMultiplyAdd2(1.0,it,e.end(),y); return y; } /* ************************************************************************* */ // y += alpha*A'*e - void transposeMultiplyAdd - (const SubgraphPreconditioner& sp, double alpha, const Errors& e, VectorValues& y) { + void SubgraphPreconditioner::transposeMultiplyAdd + (double alpha, const Errors& e, VectorValues& y) const { Errors::const_iterator it = e.begin(); for ( Index i = 0 ; i < y.size() ; ++i, ++it ) { const Vector& ei = *it; axpy(alpha, ei, y[i]); } - sp.transposeMultiplyAdd2(alpha, it, e.end(), y); + transposeMultiplyAdd2(alpha, it, e.end(), y); } /* ************************************************************************* */ @@ -127,8 +128,8 @@ namespace gtsam { while (it != end) e2.push_back(*(it++)); VectorValues x = VectorValues::Zero(y); // x = 0 - gtsam::transposeMultiplyAdd(*Ab2_,1.0,e2,x); // x += A2'*e2 - axpy(alpha, gtsam::backSubstituteTranspose(*Rc1_, x), y); // y += alpha*inv(R1')*x + Ab2_->transposeMultiplyAdd(1.0,e2,x); // x += A2'*e2 + axpy(alpha, Rc1_->backSubstituteTranspose(x), y); // y += alpha*inv(R1')*x } /* ************************************************************************* */ diff --git a/gtsam/linear/SubgraphPreconditioner.h b/gtsam/linear/SubgraphPreconditioner.h index 4396a59dc..d9d7524b6 100644 --- a/gtsam/linear/SubgraphPreconditioner.h +++ b/gtsam/linear/SubgraphPreconditioner.h @@ -17,12 +17,19 @@ #pragma once -#include -#include -#include +#include +#include +#include + +#include namespace gtsam { + // Forward declarations + class GaussianBayesNet; + class GaussianFactorGraph; + class VectorValues; + /** * Subgraph conditioner class, as explained in the RSS 2010 submission. * Starting with a graph A*x=b, we split it in two systems A1*x=b1 and A2*x=b2 @@ -57,6 +64,9 @@ namespace gtsam { */ SubgraphPreconditioner(const sharedFG& Ab2, const sharedBayesNet& Rc1, const sharedValues& xbar); + /** print the object */ + void print(const std::string& s = "SubgraphPreconditioner") const; + /** Access Ab2 */ const sharedFG& Ab2() const { return Ab2_; } @@ -89,29 +99,26 @@ namespace gtsam { void transposeMultiplyAdd2(double alpha, Errors::const_iterator begin, Errors::const_iterator end, VectorValues& y) const; - /** print the object */ - void print(const std::string& s = "SubgraphPreconditioner") const; - }; + /* error, given y */ + double error(const VectorValues& y) const; - /* error, given y */ - GTSAM_EXPORT double error(const SubgraphPreconditioner& sp, const VectorValues& y); + /** gradient = y + inv(R1')*A2'*(A2*inv(R1)*y-b2bar) */ + VectorValues gradient(const VectorValues& y) const; - /** gradient = y + inv(R1')*A2'*(A2*inv(R1)*y-b2bar) */ - GTSAM_EXPORT VectorValues gradient(const SubgraphPreconditioner& sp, const VectorValues& y); + /** Apply operator A */ + Errors operator*(const VectorValues& y) const; - /** Apply operator A */ - GTSAM_EXPORT Errors operator*(const SubgraphPreconditioner& sp, const VectorValues& y); - - /** Apply operator A in place: needs e allocated already */ - GTSAM_EXPORT void multiplyInPlace(const SubgraphPreconditioner& sp, const VectorValues& y, Errors& e); + /** Apply operator A in place: needs e allocated already */ + void multiplyInPlace(const VectorValues& y, Errors& e) const; /** Apply operator A' */ - GTSAM_EXPORT VectorValues operator^(const SubgraphPreconditioner& sp, const Errors& e); + VectorValues operator^(const Errors& e) const; - /** - * Add A'*e to y - * y += alpha*A'*[e1;e2] = [alpha*e1; alpha*inv(R1')*A2'*e2] - */ - GTSAM_EXPORT void transposeMultiplyAdd(const SubgraphPreconditioner& sp, double alpha, const Errors& e, VectorValues& y); + /** + * Add A'*e to y + * y += alpha*A'*[e1;e2] = [alpha*e1; alpha*inv(R1')*A2'*e2] + */ + void transposeMultiplyAdd(double alpha, const Errors& e, VectorValues& y) const; + }; } // namespace gtsam diff --git a/gtsam/linear/SubgraphSolver.cpp b/gtsam/linear/SubgraphSolver.cpp index 519192144..b6c2a6911 100644 --- a/gtsam/linear/SubgraphSolver.cpp +++ b/gtsam/linear/SubgraphSolver.cpp @@ -12,13 +12,11 @@ #include #include #include -#include #include #include #include #include #include -#include #include #include #include @@ -29,60 +27,64 @@ using namespace std; namespace gtsam { /**************************************************************************************************/ -SubgraphSolver::SubgraphSolver(const GaussianFactorGraph &gfg, const Parameters ¶meters) - : parameters_(parameters) +SubgraphSolver::SubgraphSolver(const GaussianFactorGraph &gfg, const Parameters ¶meters, const Ordering& ordering) + : parameters_(parameters), ordering_(ordering) { initialize(gfg); } /**************************************************************************************************/ -SubgraphSolver::SubgraphSolver(const GaussianFactorGraph::shared_ptr &jfg, const Parameters ¶meters) - : parameters_(parameters) +SubgraphSolver::SubgraphSolver(const GaussianFactorGraph::shared_ptr &jfg, const Parameters ¶meters, const Ordering& ordering) + : parameters_(parameters), ordering_(ordering) { initialize(*jfg); } /**************************************************************************************************/ -SubgraphSolver::SubgraphSolver(const GaussianFactorGraph &Ab1, const GaussianFactorGraph &Ab2, const Parameters ¶meters) - : parameters_(parameters) { +SubgraphSolver::SubgraphSolver(const GaussianFactorGraph &Ab1, const GaussianFactorGraph &Ab2, const Parameters ¶meters, const Ordering& ordering) + : parameters_(parameters), ordering_(ordering) { - GaussianBayesNet::shared_ptr Rc1 = EliminationTree::Create(Ab1)->eliminate(&EliminateQR); + GaussianBayesNet::shared_ptr Rc1 = Ab1.eliminateSequential(ordering_, EliminateQR); initialize(Rc1, boost::make_shared(Ab2)); } /**************************************************************************************************/ SubgraphSolver::SubgraphSolver(const GaussianFactorGraph::shared_ptr &Ab1, - const GaussianFactorGraph::shared_ptr &Ab2, const Parameters ¶meters) - : parameters_(parameters) { + const GaussianFactorGraph::shared_ptr &Ab2, const Parameters ¶meters, const Ordering& ordering) + : parameters_(parameters), ordering_(ordering) { - GaussianBayesNet::shared_ptr Rc1 = EliminationTree::Create(*Ab1)->eliminate(&EliminateQR); + GaussianBayesNet::shared_ptr Rc1 = Ab1->eliminateSequential(ordering_, EliminateQR); initialize(Rc1, Ab2); } /**************************************************************************************************/ SubgraphSolver::SubgraphSolver(const GaussianBayesNet::shared_ptr &Rc1, const GaussianFactorGraph &Ab2, - const Parameters ¶meters) : parameters_(parameters) + const Parameters ¶meters, const Ordering& ordering) : parameters_(parameters), ordering_(ordering) { initialize(Rc1, boost::make_shared(Ab2)); } /**************************************************************************************************/ SubgraphSolver::SubgraphSolver(const GaussianBayesNet::shared_ptr &Rc1, - const GaussianFactorGraph::shared_ptr &Ab2, const Parameters ¶meters) : parameters_(parameters) + const GaussianFactorGraph::shared_ptr &Ab2, const Parameters ¶meters, const Ordering& ordering) : +parameters_(parameters), ordering_(ordering) { initialize(Rc1, Ab2); } +/**************************************************************************************************/ VectorValues SubgraphSolver::optimize() { VectorValues ybar = conjugateGradients(*pc_, pc_->zero(), parameters_); return pc_->x(ybar); } +/**************************************************************************************************/ VectorValues SubgraphSolver::optimize(const VectorValues &initial) { // the initial is ignored in this case ... return optimize(); } +/**************************************************************************************************/ void SubgraphSolver::initialize(const GaussianFactorGraph &jfg) { GaussianFactorGraph::shared_ptr Ab1 = boost::make_shared(), @@ -92,17 +94,19 @@ void SubgraphSolver::initialize(const GaussianFactorGraph &jfg) if (parameters_.verbosity()) cout << "Split A into (A1) " << Ab1->size() << " and (A2) " << Ab2->size() << " factors" << endl; - GaussianBayesNet::shared_ptr Rc1 = EliminationTree::Create(*Ab1)->eliminate(&EliminateQR); - VectorValues::shared_ptr xbar(new VectorValues(gtsam::optimize(*Rc1))); + GaussianBayesNet::shared_ptr Rc1 = Ab1->eliminateSequential(ordering_, EliminateQR); + VectorValues::shared_ptr xbar = boost::make_shared(Rc1->optimize()); pc_ = boost::make_shared(Ab2, Rc1, xbar); } +/**************************************************************************************************/ void SubgraphSolver::initialize(const GaussianBayesNet::shared_ptr &Rc1, const GaussianFactorGraph::shared_ptr &Ab2) { - VectorValues::shared_ptr xbar(new VectorValues(gtsam::optimize(*Rc1))); + VectorValues::shared_ptr xbar = boost::make_shared(Rc1->optimize()); pc_ = boost::make_shared(Ab2, Rc1, xbar); } +/**************************************************************************************************/ boost::tuple SubgraphSolver::splitGraph(const GaussianFactorGraph &jfg) { diff --git a/gtsam/linear/SubgraphSolver.h b/gtsam/linear/SubgraphSolver.h index 1f071fcdd..acb654ba5 100644 --- a/gtsam/linear/SubgraphSolver.h +++ b/gtsam/linear/SubgraphSolver.h @@ -11,14 +11,19 @@ #pragma once + #include -#include -#include #include -#include +#include + +#include namespace gtsam { + // Forward declarations + class GaussianFactorGraph; + class GaussianBayesNet; + class GTSAM_EXPORT SubgraphSolverParameters : public ConjugateGradientParameters { public: typedef ConjugateGradientParameters Base; @@ -55,20 +60,21 @@ public: protected: Parameters parameters_; + Ordering ordering_; SubgraphPreconditioner::shared_ptr pc_; ///< preconditioner object public: /* Given a gaussian factor graph, split it into a spanning tree (A1) + others (A2) for SPCG */ - SubgraphSolver(const GaussianFactorGraph &A, const Parameters ¶meters); - SubgraphSolver(const GaussianFactorGraph::shared_ptr &A, const Parameters ¶meters); + SubgraphSolver(const GaussianFactorGraph &A, const Parameters ¶meters, const Ordering& ordering); + SubgraphSolver(const boost::shared_ptr &A, const Parameters ¶meters, const Ordering& ordering); /* The user specify the subgraph part and the constraint part, may throw exception if A1 is underdetermined */ - SubgraphSolver(const GaussianFactorGraph &Ab1, const GaussianFactorGraph &Ab2, const Parameters ¶meters); - SubgraphSolver(const GaussianFactorGraph::shared_ptr &Ab1, const GaussianFactorGraph::shared_ptr &Ab2, const Parameters ¶meters); + SubgraphSolver(const GaussianFactorGraph &Ab1, const GaussianFactorGraph &Ab2, const Parameters ¶meters, const Ordering& ordering); + SubgraphSolver(const boost::shared_ptr &Ab1, const boost::shared_ptr &Ab2, const Parameters ¶meters, const Ordering& ordering); /* The same as above, but the A1 is solved before */ - SubgraphSolver(const GaussianBayesNet::shared_ptr &Rc1, const GaussianFactorGraph &Ab2, const Parameters ¶meters); - SubgraphSolver(const GaussianBayesNet::shared_ptr &Rc1, const GaussianFactorGraph::shared_ptr &Ab2, const Parameters ¶meters); + SubgraphSolver(const boost::shared_ptr &Rc1, const GaussianFactorGraph &Ab2, const Parameters ¶meters, const Ordering& ordering); + SubgraphSolver(const boost::shared_ptr &Rc1, const boost::shared_ptr &Ab2, const Parameters ¶meters, const Ordering& ordering); virtual ~SubgraphSolver() {} virtual VectorValues optimize () ; @@ -77,12 +83,10 @@ public: protected: void initialize(const GaussianFactorGraph &jfg); - void initialize(const GaussianBayesNet::shared_ptr &Rc1, const GaussianFactorGraph::shared_ptr &Ab2); + void initialize(const boost::shared_ptr &Rc1, const boost::shared_ptr &Ab2); - boost::tuple + boost::tuple, boost::shared_ptr > splitGraph(const GaussianFactorGraph &gfg) ; }; } // namespace gtsam - - diff --git a/gtsam/linear/VectorValues.cpp b/gtsam/linear/VectorValues.cpp index 3c6c7133f..c7153e349 100644 --- a/gtsam/linear/VectorValues.cpp +++ b/gtsam/linear/VectorValues.cpp @@ -1,237 +1,305 @@ -/* ---------------------------------------------------------------------------- - - * GTSAM Copyright 2010, Georgia Tech Research Corporation, - * Atlanta, Georgia 30332-0415 - * All Rights Reserved - * Authors: Frank Dellaert, et al. (see THANKS for the full author list) - - * See LICENSE for the license information - - * -------------------------------------------------------------------------- */ - -/** - * @file VectorValues.cpp - * @brief Implementations for VectorValues - * @author Richard Roberts - * @author Alex Cunningham - */ - -#include -#include -#include - -#include - -using namespace std; - -namespace gtsam { - -/* ************************************************************************* */ -VectorValues VectorValues::Zero(const VectorValues& x) { - VectorValues result; - result.values_.resize(x.size()); - for(size_t j=0; j VectorValues::dims() const { - std::vector result(this->size()); - for(Index j = 0; j < this->size(); ++j) - result[j] = this->dim(j); - return result; -} - -/* ************************************************************************* */ -void VectorValues::insert(Index j, const Vector& value) { - // Make sure j does not already exist - if(exists(j)) - throw invalid_argument("VectorValues: requested variable index to insert already exists."); - - // If this adds variables at the end, insert zero-length entries up to j - if(j >= size()) - values_.resize(j+1); - - // Assign value - values_[j] = value; -} - -/* ************************************************************************* */ -void VectorValues::print(const std::string& str, const IndexFormatter& formatter) const { - std::cout << str << ": " << size() << " elements\n"; - for (Index var = 0; var < size(); ++var) - std::cout << " " << formatter(var) << ": \n" << (*this)[var] << "\n"; - std::cout.flush(); -} - -/* ************************************************************************* */ -bool VectorValues::equals(const VectorValues& x, double tol) const { - if(this->size() != x.size()) - return false; - for(Index j=0; j < size(); ++j) - if(!equal_with_abs_tol(values_[j], x.values_[j], tol)) - return false; - return true; -} - -/* ************************************************************************* */ -void VectorValues::resize(Index nVars, size_t varDim) { - values_.resize(nVars); - for(Index j = 0; j < nVars; ++j) - values_[j] = Vector(varDim); -} - -/* ************************************************************************* */ -void VectorValues::resizeLike(const VectorValues& other) { - values_.resize(other.size()); - for(Index j = 0; j < other.size(); ++j) - values_[j].resize(other.values_[j].size()); -} - -/* ************************************************************************* */ -VectorValues VectorValues::SameStructure(const VectorValues& other) { - VectorValues ret; - ret.resizeLike(other); - return ret; -} - -/* ************************************************************************* */ -VectorValues VectorValues::Zero(Index nVars, size_t varDim) { - VectorValues ret(nVars, varDim); - ret.setZero(); - return ret; -} - -/* ************************************************************************* */ -void VectorValues::setZero() { - BOOST_FOREACH(Vector& v, *this) { - v.setZero(); - } -} - -/* ************************************************************************* */ -const Vector VectorValues::asVector() const { - return internal::extractVectorValuesSlices(*this, - boost::make_counting_iterator(size_t(0)), boost::make_counting_iterator(this->size()), true); -} - -/* ************************************************************************* */ -const Vector VectorValues::vector(const std::vector& indices) const { - return internal::extractVectorValuesSlices(*this, indices.begin(), indices.end()); -} - -/* ************************************************************************* */ -bool VectorValues::hasSameStructure(const VectorValues& other) const { - if(this->size() != other.size()) - return false; - for(size_t j = 0; j < size(); ++j) - // Directly accessing maps instead of using VV::dim in case some values are empty - if(this->values_[j].rows() != other.values_[j].rows()) - return false; - return true; -} - -/* ************************************************************************* */ -void VectorValues::permuteInPlace(const Permutation& permutation) { - // Allocate new values - Values newValues(this->size()); - - // Swap values from this VectorValues to the permuted VectorValues - for(size_t i = 0; i < this->size(); ++i) - newValues[i].swap(this->at(permutation[i])); - - // Swap the values into this VectorValues - values_.swap(newValues); -} - -/* ************************************************************************* */ -void VectorValues::permuteInPlace(const Permutation& selector, const Permutation& permutation) { - if(selector.size() != permutation.size()) - throw invalid_argument("VariableIndex::permuteInPlace (partial permutation version) called with selector and permutation of different sizes."); - // Create new index the size of the permuted entries - Values reorderedEntries(selector.size()); - // Permute the affected entries into the new index - for(size_t dstSlot = 0; dstSlot < selector.size(); ++dstSlot) - reorderedEntries[dstSlot].swap(values_[selector[permutation[dstSlot]]]); - // Put the affected entries back in the new order - for(size_t slot = 0; slot < selector.size(); ++slot) - values_[selector[slot]].swap(reorderedEntries[slot]); -} - -/* ************************************************************************* */ -void VectorValues::swap(VectorValues& other) { - this->values_.swap(other.values_); -} - -/* ************************************************************************* */ -double VectorValues::dot(const VectorValues& v) const { - double result = 0.0; - if(this->size() != v.size()) - throw invalid_argument("VectorValues::dot called with different vector sizes"); - for(Index j = 0; j < this->size(); ++j) - // Directly accessing maps instead of using VV::dim in case some values are empty - if(this->values_[j].size() == v.values_[j].size()) - result += this->values_[j].dot(v.values_[j]); - else - throw invalid_argument("VectorValues::dot called with different vector sizes"); - return result; -} - -/* ************************************************************************* */ -double VectorValues::norm() const { - return std::sqrt(this->squaredNorm()); -} - -/* ************************************************************************* */ -double VectorValues::squaredNorm() const { - double sumSquares = 0.0; - for(Index j = 0; j < this->size(); ++j) - // Directly accessing maps instead of using VV::dim in case some values are empty - sumSquares += this->values_[j].squaredNorm(); - return sumSquares; -} - -/* ************************************************************************* */ -VectorValues VectorValues::operator+(const VectorValues& c) const { - VectorValues result = SameStructure(*this); - if(this->size() != c.size()) - throw invalid_argument("VectorValues::operator+ called with different vector sizes"); - for(Index j = 0; j < this->size(); ++j) - // Directly accessing maps instead of using VV::dim in case some values are empty - if(this->values_[j].size() == c.values_[j].size()) - result.values_[j] = this->values_[j] + c.values_[j]; - else - throw invalid_argument("VectorValues::operator- called with different vector sizes"); - return result; -} - -/* ************************************************************************* */ -VectorValues VectorValues::operator-(const VectorValues& c) const { - VectorValues result = SameStructure(*this); - if(this->size() != c.size()) - throw invalid_argument("VectorValues::operator- called with different vector sizes"); - for(Index j = 0; j < this->size(); ++j) - // Directly accessing maps instead of using VV::dim in case some values are empty - if(this->values_[j].size() == c.values_[j].size()) - result.values_[j] = this->values_[j] - c.values_[j]; - else - throw invalid_argument("VectorValues::operator- called with different vector sizes"); - return result; -} - -/* ************************************************************************* */ -void VectorValues::operator+=(const VectorValues& c) { - if(this->size() != c.size()) - throw invalid_argument("VectorValues::operator+= called with different vector sizes"); - for(Index j = 0; j < this->size(); ++j) - // Directly accessing maps instead of using VV::dim in case some values are empty - if(this->values_[j].size() == c.values_[j].size()) - this->values_[j] += c.values_[j]; - else - throw invalid_argument("VectorValues::operator+= called with different vector sizes"); -} - -/* ************************************************************************* */ - -} // \namespace gtsam +/* ---------------------------------------------------------------------------- + + * GTSAM Copyright 2010, Georgia Tech Research Corporation, + * Atlanta, Georgia 30332-0415 + * All Rights Reserved + * Authors: Frank Dellaert, et al. (see THANKS for the full author list) + + * See LICENSE for the license information + + * -------------------------------------------------------------------------- */ + +/** + * @file VectorValues.cpp + * @brief Implementations for VectorValues + * @author Richard Roberts + * @author Alex Cunningham + */ + +#include + +#include +#include +#include +#include +#include +#include + +using namespace std; + +namespace gtsam { + + using boost::combine; + using boost::adaptors::transformed; + using boost::adaptors::map_values; + using boost::accumulate; + + /* ************************************************************************* */ + VectorValues::VectorValues(const VectorValues& first, const VectorValues& second) + { + // Merge using predicate for comparing first of pair + std::merge(first.begin(), first.end(), second.begin(), second.end(), std::inserter(values_, values_.end()), + boost::bind(&std::less::operator(), std::less(), boost::bind(&KeyValuePair::first, _1), boost::bind(&KeyValuePair::first, _2))); + if(size() != first.size() + second.size()) + throw std::invalid_argument("Requested to merge two VectorValues that have one or more variables in common."); + } + + /* ************************************************************************* */ + VectorValues VectorValues::Zero(const VectorValues& other) + { + VectorValues result; + BOOST_FOREACH(const KeyValuePair& v, other) + result.values_.insert(make_pair(v.first, Vector::Zero(v.second.size()))); + return result; + } + + /* ************************************************************************* */ + void VectorValues::update(const VectorValues& values) + { + iterator hint = begin(); + BOOST_FOREACH(const KeyValuePair& key_value, values) + { + // Use this trick to find the value using a hint, since we are inserting from another sorted map + size_t oldSize = values_.size(); + hint = values_.insert(hint, key_value); + if(values_.size() > oldSize) { + values_.erase(hint); + throw std::out_of_range("Requested to update a VectorValues with another VectorValues that contains keys not present in the first."); + } else { + hint->second = key_value.second; + } + } + } + + /* ************************************************************************* */ + void VectorValues::insert(const VectorValues& values) + { + size_t originalSize = size(); + values_.insert(values.begin(), values.end()); + if(size() != originalSize + values.size()) + throw std::invalid_argument("Requested to insert a VectorValues into another VectorValues that already contains one or more of its keys."); + } + + /* ************************************************************************* */ + void VectorValues::setZero() + { + BOOST_FOREACH(Vector& v, values_ | map_values) + v.setZero(); + } + + /* ************************************************************************* */ + void VectorValues::print(const std::string& str, const KeyFormatter& formatter) const { + std::cout << str << ": " << size() << " elements\n"; + BOOST_FOREACH(const value_type& key_value, *this) + std::cout << " " << formatter(key_value.first) << ": " << key_value.second.transpose() << "\n"; + std::cout.flush(); + } + + /* ************************************************************************* */ + bool VectorValues::equals(const VectorValues& x, double tol) const { + if(this->size() != x.size()) + return false; + typedef boost::tuple ValuePair; + BOOST_FOREACH(const ValuePair& values, boost::combine(*this, x)) { + if(values.get<0>().first != values.get<1>().first || + !equal_with_abs_tol(values.get<0>().second, values.get<1>().second, tol)) + return false; + } + return true; + } + + /* ************************************************************************* */ + Vector VectorValues::vector() const + { + // Count dimensions + DenseIndex totalDim = 0; + BOOST_FOREACH(const value_type& v, *this) + totalDim += v.second.size(); + + // Copy vectors + Vector result(totalDim); + DenseIndex pos = 0; + BOOST_FOREACH(const Vector& v, *this | map_values) { + result.segment(pos, v.size()) = v; + pos += v.size(); + } + + return result; + } + + /* ************************************************************************* */ + Vector VectorValues::vector(const std::vector& keys) const + { + // Count dimensions and collect pointers to avoid double lookups + DenseIndex totalDim = 0; + std::vector items(keys.size()); + for(size_t i = 0; i < keys.size(); ++i) { + items[i] = &at(keys[i]); + totalDim += items[i]->size(); + } + + // Copy vectors + Vector result(totalDim); + DenseIndex pos = 0; + BOOST_FOREACH(const Vector *v, items) { + result.segment(pos, v->size()) = *v; + pos += v->size(); + } + + return result; + } + + /* ************************************************************************* */ + void VectorValues::swap(VectorValues& other) { + this->values_.swap(other.values_); + } + + /* ************************************************************************* */ + namespace internal + { + bool structureCompareOp(const boost::tuple& vv) + { + return vv.get<0>().first == vv.get<1>().first + && vv.get<0>().second.size() == vv.get<1>().second.size(); + } + } + + /* ************************************************************************* */ + bool VectorValues::hasSameStructure(const VectorValues other) const + { + return accumulate(combine(*this, other) + | transformed(internal::structureCompareOp), true, std::logical_and()); + } + + /* ************************************************************************* */ + double VectorValues::dot(const VectorValues& v) const + { + if(this->size() != v.size()) + throw invalid_argument("VectorValues::dot called with a VectorValues of different structure"); + double result = 0.0; + typedef boost::tuple ValuePair; + using boost::adaptors::map_values; + BOOST_FOREACH(const ValuePair& values, boost::combine(*this, v)) { + assert_throw(values.get<0>().first == values.get<1>().first, + std::invalid_argument("VectorValues::dot called with a VectorValues of different structure")); + assert_throw(values.get<0>().second.size() == values.get<1>().second.size(), + std::invalid_argument("VectorValues::dot called with a VectorValues of different structure")); + result += values.get<0>().second.dot(values.get<1>().second); + } + return result; + } + + /* ************************************************************************* */ + double VectorValues::norm() const { + return std::sqrt(this->squaredNorm()); + } + + /* ************************************************************************* */ + double VectorValues::squaredNorm() const { + double sumSquares = 0.0; + using boost::adaptors::map_values; + BOOST_FOREACH(const Vector& v, *this | map_values) + sumSquares += v.squaredNorm(); + return sumSquares; + } + + /* ************************************************************************* */ + VectorValues VectorValues::operator+(const VectorValues& c) const + { + if(this->size() != c.size()) + throw invalid_argument("VectorValues::operator+ called with different vector sizes"); + assert_throw(hasSameStructure(c), + invalid_argument("VectorValues::operator+ called with different vector sizes")); + + VectorValues result; + // The result.end() hint here should result in constant-time inserts + for(const_iterator j1 = begin(), j2 = c.begin(); j1 != end(); ++j1, ++j2) + result.values_.insert(result.end(), make_pair(j1->first, j1->second + j2->second)); + + return result; + } + + /* ************************************************************************* */ + VectorValues VectorValues::add(const VectorValues& c) const + { + return *this + c; + } + + /* ************************************************************************* */ + VectorValues& VectorValues::operator+=(const VectorValues& c) + { + if(this->size() != c.size()) + throw invalid_argument("VectorValues::operator+= called with different vector sizes"); + assert_throw(hasSameStructure(c), + invalid_argument("VectorValues::operator+= called with different vector sizes")); + + iterator j1 = begin(); + const_iterator j2 = c.begin(); + // The result.end() hint here should result in constant-time inserts + for(; j1 != end(); ++j1, ++j2) + j1->second += j2->second; + + return *this; + } + + /* ************************************************************************* */ + VectorValues& VectorValues::addInPlace(const VectorValues& c) + { + return *this += c; + } + + /* ************************************************************************* */ + VectorValues VectorValues::operator-(const VectorValues& c) const + { + if(this->size() != c.size()) + throw invalid_argument("VectorValues::operator- called with different vector sizes"); + assert_throw(hasSameStructure(c), + invalid_argument("VectorValues::operator- called with different vector sizes")); + + VectorValues result; + // The result.end() hint here should result in constant-time inserts + for(const_iterator j1 = begin(), j2 = c.begin(); j1 != end(); ++j1, ++j2) + result.values_.insert(result.end(), make_pair(j1->first, j1->second - j2->second)); + + return result; + } + + /* ************************************************************************* */ + VectorValues VectorValues::subtract(const VectorValues& c) const + { + return *this - c; + } + + /* ************************************************************************* */ + VectorValues operator*(const double a, const VectorValues &v) + { + VectorValues result; + BOOST_FOREACH(const VectorValues::KeyValuePair& key_v, v) + result.values_.insert(result.values_.end(), make_pair(key_v.first, a * key_v.second)); + return result; + } + + /* ************************************************************************* */ + VectorValues VectorValues::scale(const double a) const + { + return a * *this; + } + + /* ************************************************************************* */ + VectorValues& VectorValues::operator*=(double alpha) + { + BOOST_FOREACH(Vector& v, *this | map_values) + v *= alpha; + return *this; + } + + /* ************************************************************************* */ + VectorValues& VectorValues::scaleInPlace(double alpha) + { + return *this *= alpha; + } + + /* ************************************************************************* */ + +} // \namespace gtsam diff --git a/gtsam/linear/VectorValues.h b/gtsam/linear/VectorValues.h index af13231fb..c8de78dca 100644 --- a/gtsam/linear/VectorValues.h +++ b/gtsam/linear/VectorValues.h @@ -1,500 +1,370 @@ -/* ---------------------------------------------------------------------------- - - * GTSAM Copyright 2010, Georgia Tech Research Corporation, - * Atlanta, Georgia 30332-0415 - * All Rights Reserved - * Authors: Frank Dellaert, et al. (see THANKS for the full author list) - - * See LICENSE for the license information - - * -------------------------------------------------------------------------- */ - -/** - * @file VectorValues.h - * @brief Factor Graph Values - * @author Richard Roberts - */ - -#pragma once - -#include -#include - -#include -#include -#include -#include -#include -#include - -namespace gtsam { - - // Forward declarations - class Permutation; - - /** - * This class represents a collection of vector-valued variables associated - * each with a unique integer index. It is typically used to store the variables - * of a GaussianFactorGraph. Optimizing a GaussianFactorGraph or GaussianBayesNet - * returns this class. - * - * For basic usage, such as receiving a linear solution from gtsam solving functions, - * or creating this class in unit tests and examples where speed is not important, - * you can use a simple interface: - * - The default constructor VectorValues() to create this class - * - insert(Index, const Vector&) to add vector variables - * - operator[](Index) for read and write access to stored variables - * - \ref exists (Index) to check if a variable is present - * - Other facilities like iterators, size(), dim(), etc. - * - * Indices can be non-consecutive and inserted out-of-order, but you should not - * use indices that are larger than a reasonable array size because the indices - * correspond to positions in an internal array. - * - * Example: - * \code - VectorValues values; - values.insert(3, Vector_(3, 1.0, 2.0, 3.0)); - values.insert(4, Vector_(2, 4.0, 5.0)); - values.insert(0, Vector_(4, 6.0, 7.0, 8.0, 9.0)); - - // Prints [ 3.0 4.0 ] - gtsam::print(values[1]); - - // Prints [ 8.0 9.0 ] - values[1] = Vector_(2, 8.0, 9.0); - gtsam::print(values[1]); - \endcode - * - *

Advanced Interface and Performance Information

- * - * Internally, all vector values are stored as part of one large vector. In - * gtsam this vector is always pre-allocated for efficiency, using the - * advanced interface described below. Accessing and modifying already-allocated - * values is \f$ O(1) \f$. Using the insert() function of the standard interface - * is slow because it requires re-allocating the internal vector. - * - * For advanced usage, or where speed is important: - * - Allocate space ahead of time using a pre-allocating constructor - * (\ref AdvancedConstructors "Advanced Constructors"), Zero(), - * SameStructure(), resize(), or append(). Do not use - * insert(Index, const Vector&), which always has to re-allocate the - * internal vector. - * - The vector() function permits access to the underlying Vector, for - * doing mathematical or other operations that require all values. - * - operator[]() returns a SubVector view of the underlying Vector, - * without copying any data. - * - * Access is through the variable index j, and returns a SubVector, - * which is a view on the underlying data structure. - * - * This class is additionally used in gradient descent and dog leg to store the gradient. - * \nosubgrouping - */ - class GTSAM_EXPORT VectorValues { - protected: - typedef std::vector Values; ///< Typedef for the collection of Vectors making up a VectorValues - Values values_; ///< Collection of Vectors making up this VectorValues - - public: - typedef Values::iterator iterator; ///< Iterator over vector values - typedef Values::const_iterator const_iterator; ///< Const iterator over vector values - typedef Values::reverse_iterator reverse_iterator; ///< Reverse iterator over vector values - typedef Values::const_reverse_iterator const_reverse_iterator; ///< Const reverse iterator over vector values - typedef boost::shared_ptr shared_ptr; ///< shared_ptr to this class - - /// @name Standard Constructors - /// @{ - - /** - * Default constructor creates an empty VectorValues. - */ - VectorValues() {} - - /** Named constructor to create a VectorValues of the same structure of the - * specified one, but filled with zeros. - * @return - */ - static VectorValues Zero(const VectorValues& model); - - /// @} - /// @name Standard Interface - /// @{ - - /** Number of variables stored, always 1 more than the highest variable index, - * even if some variables with lower indices are not present. */ - Index size() const { return values_.size(); } - - /** Return the dimension of variable \c j. */ - size_t dim(Index j) const { checkExists(j); return (*this)[j].rows(); } - - /** Return the dimension of each vector in this container */ - std::vector dims() const; - - /** Check whether a variable with index \c j exists. */ - bool exists(Index j) const { return j < size() && values_[j].rows() > 0; } - - /** Read/write access to the vector value with index \c j, throws std::out_of_range if \c j does not exist, identical to operator[](Index). */ - Vector& at(Index j) { checkExists(j); return values_[j]; } - - /** Access the vector value with index \c j (const version), throws std::out_of_range if \c j does not exist, identical to operator[](Index). */ - const Vector& at(Index j) const { checkExists(j); return values_[j]; } - - /** Read/write access to the vector value with index \c j, throws std::out_of_range if \c j does not exist, identical to at(Index). */ - Vector& operator[](Index j) { return at(j); } - - /** Access the vector value with index \c j (const version), throws std::out_of_range if \c j does not exist, identical to at(Index). */ - const Vector& operator[](Index j) const { return at(j); } - - /** Insert a vector \c value with index \c j. - * Causes reallocation, but can insert values in any order. - * Throws an invalid_argument exception if the index \c j is already used. - * @param value The vector to be inserted. - * @param j The index with which the value will be associated. - */ - void insert(Index j, const Vector& value); - - iterator begin() { return values_.begin(); } ///< Iterator over variables - const_iterator begin() const { return values_.begin(); } ///< Iterator over variables - iterator end() { return values_.end(); } ///< Iterator over variables - const_iterator end() const { return values_.end(); } ///< Iterator over variables - reverse_iterator rbegin() { return values_.rbegin(); } ///< Reverse iterator over variables - const_reverse_iterator rbegin() const { return values_.rbegin(); } ///< Reverse iterator over variables - reverse_iterator rend() { return values_.rend(); } ///< Reverse iterator over variables - const_reverse_iterator rend() const { return values_.rend(); } ///< Reverse iterator over variables - - /** print required by Testable for unit testing */ - void print(const std::string& str = "VectorValues: ", - const IndexFormatter& formatter = DefaultIndexFormatter) const; - - /** equals required by Testable for unit testing */ - bool equals(const VectorValues& x, double tol = 1e-9) const; - - /// @{ - /// \anchor AdvancedConstructors - /// @name Advanced Constructors - /// @} - - /** Construct from a container of variable dimensions (in variable order), without initializing any values. */ - template - explicit VectorValues(const CONTAINER& dimensions) { this->append(dimensions); } - - /** Construct from a container of variable dimensions (in variable order), with initial values. */ - template - explicit VectorValues(const Vector& d, const CONTAINER& dimensions) { - this->append(d, dimensions); - } - - /** Construct to hold nVars vectors of varDim dimension each. */ - VectorValues(Index nVars, size_t varDim) { this->resize(nVars, varDim); } - - /** Named constructor to create a VectorValues that matches the structure of - * the specified VectorValues, but do not initialize the new values. */ - static VectorValues SameStructure(const VectorValues& other); - - /** Named constructor to create a VectorValues from a container of variable - * dimensions that is filled with zeros. - * @param dimensions A container of the dimension of each variable to create. - */ - template - static VectorValues Zero(const CONTAINER& dimensions); - - /** Named constructor to create a VectorValues filled with zeros that has - * \c nVars variables, each of dimension \c varDim - * @param nVars The number of variables to create - * @param varDim The dimension of each variable - * @return The new VectorValues - */ - static VectorValues Zero(Index nVars, size_t varDim); - - /// @} - /// @name Advanced Interface - /// @{ - - /** Resize this VectorValues to have identical structure to other, leaving - * this VectorValues with uninitialized values. - * @param other The VectorValues whose structure to copy - */ - void resizeLike(const VectorValues& other); - - /** Resize the VectorValues to hold \c nVars variables, each of dimension - * \c varDim. Any individual vectors that do not change size will keep - * their values, but any new or resized vectors will be uninitialized. - * @param nVars The number of variables to create - * @param varDim The dimension of each variable - */ - void resize(Index nVars, size_t varDim); - - /** Resize the VectorValues to contain variables of the dimensions stored - * in \c dimensions. Any individual vectors that do not change size will keep - * their values, but any new or resized vectors will be uninitialized. - * @param dimensions A container of the dimension of each variable to create. - */ - template - void resize(const CONTAINER& dimensions); - - /** Append to the VectorValues to additionally contain variables of the - * dimensions stored in \c dimensions. The new variables are uninitialized, - * but this function is used to pre-allocate space for performance. This - * function preserves the original data, so all previously-existing variables - * are left unchanged. - * @param dimensions A container of the dimension of each variable to create. - */ - template - void append(const CONTAINER& dimensions); - - /** Append to the VectorValues to additionally contain variables of the - * dimensions stored in \c dimensions. Initial values for the new variables - * are extracted from the input vector, holding values for all components - * in the same order specified in the dimensions container. - * This function preserves the original data, so all previously-existing - * variables are left unchanged. - * @param d A vector holding values for all variables, which order - * specified in the below container - * @param dimensions A container of the dimension of each variable to create. - */ - template - void append(const Vector& d, const CONTAINER& dimensions); - - /** Removes the last subvector from the VectorValues */ - void pop_back() { values_.pop_back(); }; - - /** Set all entries to zero, does not modify the size. */ - void setZero(); - - /** Retrieve the entire solution as a single vector */ - const Vector asVector() const; - - /** Access a vector that is a subset of relevant indices */ - const Vector vector(const std::vector& indices) const; - - /** Check whether this VectorValues has the same structure, meaning has the - * same number of variables and that all variables are of the same dimension, - * as another VectorValues - * @param other The other VectorValues with which to compare structure - * @return \c true if the structure is the same, \c false if not. - */ - bool hasSameStructure(const VectorValues& other) const; - - /** - * Permute the variables in the VariableIndex according to the given partial permutation - */ - void permuteInPlace(const Permutation& selector, const Permutation& permutation); - - /** - * Permute the entries of this VectorValues in place - */ - void permuteInPlace(const Permutation& permutation); - - /** - * Swap the data in this VectorValues with another. - */ - void swap(VectorValues& other); - - /// @} - /// @name Linear algebra operations - /// @{ - - /** Dot product with another VectorValues, interpreting both as vectors of - * their concatenated values. */ - double dot(const VectorValues& v) const; - - /** Vector L2 norm */ - double norm() const; - - /** Squared vector L2 norm */ - double squaredNorm() const; - - /** - * + operator does element-wise addition. Both VectorValues must have the - * same structure (checked when NDEBUG is not defined). - */ - VectorValues operator+(const VectorValues& c) const; - - /** - * + operator does element-wise subtraction. Both VectorValues must have the - * same structure (checked when NDEBUG is not defined). - */ - VectorValues operator-(const VectorValues& c) const; - - /** - * += operator does element-wise addition. Both VectorValues must have the - * same structure (checked when NDEBUG is not defined). - */ - void operator+=(const VectorValues& c); - - /// @} - - /// @} - /// @name Matlab syntactic sugar for linear algebra operations - /// @{ - - inline VectorValues add(const VectorValues& c) const { return *this + c; } - inline VectorValues scale(const double a, const VectorValues& c) const { return a * (*this); } - - /// @} - - private: - // Throw an exception if j does not exist - void checkExists(Index j) const { - if(!exists(j)) { - const std::string msg = - (boost::format("VectorValues: requested variable index j=%1% is not in this VectorValues.") % j).str(); - throw std::out_of_range(msg); - } - } - - public: - - /** - * scale a vector by a scalar - */ - friend VectorValues operator*(const double a, const VectorValues &v) { - VectorValues result = VectorValues::SameStructure(v); - for(Index j = 0; j < v.size(); ++j) - result.values_[j] = a * v.values_[j]; - return result; - } - - /// TODO: linear algebra interface seems to have been added for SPCG. - friend void scal(double alpha, VectorValues& x) { - for(Index j = 0; j < x.size(); ++j) - x.values_[j] *= alpha; - } - /// TODO: linear algebra interface seems to have been added for SPCG. - friend void axpy(double alpha, const VectorValues& x, VectorValues& y) { - if(x.size() != y.size()) - throw std::invalid_argument("axpy(VectorValues) called with different vector sizes"); - for(Index j = 0; j < x.size(); ++j) - if(x.values_[j].size() == y.values_[j].size()) - y.values_[j] += alpha * x.values_[j]; - else - throw std::invalid_argument("axpy(VectorValues) called with different vector sizes"); - } - /// TODO: linear algebra interface seems to have been added for SPCG. - friend void sqrt(VectorValues &x) { - for(Index j = 0; j < x.size(); ++j) - x.values_[j] = x.values_[j].cwiseSqrt(); - } - - /// TODO: linear algebra interface seems to have been added for SPCG. - friend void ediv(const VectorValues& numerator, const VectorValues& denominator, VectorValues &result) { - if(numerator.size() != denominator.size() || numerator.size() != result.size()) - throw std::invalid_argument("ediv(VectorValues) called with different vector sizes"); - for(Index j = 0; j < numerator.size(); ++j) - if(numerator.values_[j].size() == denominator.values_[j].size() && numerator.values_[j].size() == result.values_[j].size()) - result.values_[j] = numerator.values_[j].cwiseQuotient(denominator.values_[j]); - else - throw std::invalid_argument("ediv(VectorValues) called with different vector sizes"); - } - - /// TODO: linear algebra interface seems to have been added for SPCG. - friend void edivInPlace(VectorValues& x, const VectorValues& y) { - if(x.size() != y.size()) - throw std::invalid_argument("edivInPlace(VectorValues) called with different vector sizes"); - for(Index j = 0; j < x.size(); ++j) - if(x.values_[j].size() == y.values_[j].size()) - x.values_[j].array() /= y.values_[j].array(); - else - throw std::invalid_argument("edivInPlace(VectorValues) called with different vector sizes"); - } - - private: - /** Serialization function */ - friend class boost::serialization::access; - template - void serialize(ARCHIVE & ar, const unsigned int version) { - ar & BOOST_SERIALIZATION_NVP(values_); - } - }; // VectorValues definition - - // Implementations of template and inline functions - - /* ************************************************************************* */ - template - void VectorValues::resize(const CONTAINER& dimensions) { - values_.clear(); - append(dimensions); - } - - /* ************************************************************************* */ - template - void VectorValues::append(const CONTAINER& dimensions) { - size_t i = size(); - values_.resize(size() + dimensions.size()); - BOOST_FOREACH(size_t dim, dimensions) { - values_[i] = Vector(dim); - ++ i; - } - } - - /* ************************************************************************* */ - template - void VectorValues::append(const Vector& d, const CONTAINER& dimensions) { - size_t i = size(); - size_t idx = 0; - values_.resize(size() + dimensions.size()); - BOOST_FOREACH(size_t dim, dimensions) { - values_[i] = sub(d, idx, idx+dim); - ++ i; - idx += dim; - } - } - - /* ************************************************************************* */ - template - VectorValues VectorValues::Zero(const CONTAINER& dimensions) { - VectorValues ret; - ret.values_.resize(dimensions.size()); - size_t i = 0; - BOOST_FOREACH(size_t dim, dimensions) { - ret.values_[i] = Vector::Zero(dim); - ++ i; - } - return ret; - } - - namespace internal { - /* ************************************************************************* */ - // Helper function, extracts vectors with variable indices - // in the first and last iterators, and concatenates them in that order into the - // output. - template - const Vector extractVectorValuesSlices(const VectorValues& values, ITERATOR first, ITERATOR last, bool allowNonexistant = false) { - // Find total dimensionality - size_t dim = 0; - for(ITERATOR j = first; j != last; ++j) - // If allowNonexistant is true, skip nonexistent indices (otherwise dim will throw an error on nonexistent) - if(!allowNonexistant || values.exists(*j)) - dim += values.dim(*j); - - // Copy vectors - Vector ret(dim); - size_t varStart = 0; - for(ITERATOR j = first; j != last; ++j) { - // If allowNonexistant is true, skip nonexistent indices (otherwise dim will throw an error on nonexistent) - if(!allowNonexistant || values.exists(*j)) { - ret.segment(varStart, values.dim(*j)) = values[*j]; - varStart += values.dim(*j); - } - } - return ret; - } - - /* ************************************************************************* */ - // Helper function, writes to the variables in values - // with indices iterated over by first and last, interpreting vector as the - // concatenated vectors to write. - template - void writeVectorValuesSlices(const VECTOR& vector, VectorValues& values, ITERATOR first, ITERATOR last) { - // Copy vectors - size_t varStart = 0; - for(ITERATOR j = first; j != last; ++j) { - values[*j] = vector.segment(varStart, values[*j].rows()); - varStart += values[*j].rows(); - } - assert(varStart == vector.rows()); - } - } - -} // \namespace gtsam +/* ---------------------------------------------------------------------------- + + * GTSAM Copyright 2010, Georgia Tech Research Corporation, + * Atlanta, Georgia 30332-0415 + * All Rights Reserved + * Authors: Frank Dellaert, et al. (see THANKS for the full author list) + + * See LICENSE for the license information + + * -------------------------------------------------------------------------- */ + +/** + * @file VectorValues.h + * @brief Factor Graph Values + * @author Richard Roberts + */ + +#pragma once + +#include +#include +#include + +#include + +namespace gtsam { + + /** + * This class represents a collection of vector-valued variables associated + * each with a unique integer index. It is typically used to store the variables + * of a GaussianFactorGraph. Optimizing a GaussianFactorGraph or GaussianBayesNet + * returns this class. + * + * For basic usage, such as receiving a linear solution from gtsam solving functions, + * or creating this class in unit tests and examples where speed is not important, + * you can use a simple interface: + * - The default constructor VectorValues() to create this class + * - insert(Key, const Vector&) to add vector variables + * - operator[](Key) for read and write access to stored variables + * - \ref exists (Key) to check if a variable is present + * - Other facilities like iterators, size(), dim(), etc. + * + * Indices can be non-consecutive and inserted out-of-order, but you should not + * use indices that are larger than a reasonable array size because the indices + * correspond to positions in an internal array. + * + * Example: + * \code + VectorValues values; + values.insert(3, Vector_(3, 1.0, 2.0, 3.0)); + values.insert(4, Vector_(2, 4.0, 5.0)); + values.insert(0, Vector_(4, 6.0, 7.0, 8.0, 9.0)); + + // Prints [ 3.0 4.0 ] + gtsam::print(values[1]); + + // Prints [ 8.0 9.0 ] + values[1] = Vector_(2, 8.0, 9.0); + gtsam::print(values[1]); + \endcode + * + *

Advanced Interface and Performance Information

+ * + * Internally, all vector values are stored as part of one large vector. In + * gtsam this vector is always pre-allocated for efficiency, using the + * advanced interface described below. Accessing and modifying already-allocated + * values is \f$ O(1) \f$. Using the insert() function of the standard interface + * is slow because it requires re-allocating the internal vector. + * + * For advanced usage, or where speed is important: + * - Allocate space ahead of time using a pre-allocating constructor + * (\ref AdvancedConstructors "Advanced Constructors"), Zero(), + * SameStructure(), resize(), or append(). Do not use + * insert(Key, const Vector&), which always has to re-allocate the + * internal vector. + * - The vector() function permits access to the underlying Vector, for + * doing mathematical or other operations that require all values. + * - operator[]() returns a SubVector view of the underlying Vector, + * without copying any data. + * + * Access is through the variable index j, and returns a SubVector, + * which is a view on the underlying data structure. + * + * This class is additionally used in gradient descent and dog leg to store the gradient. + * \nosubgrouping + */ + class GTSAM_EXPORT VectorValues { + protected: + typedef VectorValues This; + typedef FastMap Values; ///< Typedef for the collection of Vectors making up a VectorValues + Values values_; ///< Collection of Vectors making up this VectorValues + + public: + typedef Values::iterator iterator; ///< Iterator over vector values + typedef Values::const_iterator const_iterator; ///< Const iterator over vector values + typedef Values::reverse_iterator reverse_iterator; ///< Reverse iterator over vector values + typedef Values::const_reverse_iterator const_reverse_iterator; ///< Const reverse iterator over vector values + typedef boost::shared_ptr shared_ptr; ///< shared_ptr to this class + typedef Values::value_type value_type; ///< Typedef to pair, a key-value pair + typedef value_type KeyValuePair; ///< Typedef to pair, a key-value pair + + /// @name Standard Constructors + /// @{ + + /** + * Default constructor creates an empty VectorValues. + */ + VectorValues() {} + + /** Merge two VectorValues into one, this is more efficient than inserting elements one by one. */ + VectorValues(const VectorValues& first, const VectorValues& second); + + /** Create from another container holding pair. */ + template + explicit VectorValues(const CONTAINER& c) : values_(c.begin(), c.end()) {} + + /** Implicit copy constructor to specialize the explicit constructor from any container. */ + VectorValues(const VectorValues& c) : values_(c.values_) {} + + /** Create from a pair of iterators over pair. */ + template + VectorValues(ITERATOR first, ITERATOR last) : values_(first, last) {} + + /** Create a VectorValues with the same structure as \c other, but filled with zeros. */ + static VectorValues Zero(const VectorValues& other); + + /// @} + /// @name Standard Interface + /// @{ + + /** Number of variables stored. */ + Key size() const { return values_.size(); } + + /** Return the dimension of variable \c j. */ + size_t dim(Key j) const { return at(j).rows(); } + + /** Check whether a variable with key \c j exists. */ + bool exists(Key j) const { return find(j) != end(); } + + /** Read/write access to the vector value with key \c j, throws std::out_of_range if \c j does not exist, identical to operator[](Key). */ + Vector& at(Key j) { + iterator item = find(j); + if(item == end()) + throw std::out_of_range( + "Requested variable '" + DefaultKeyFormatter(j) + "' is not in this VectorValues."); + else + return item->second; + } + + /** Access the vector value with key \c j (const version), throws std::out_of_range if \c j does not exist, identical to operator[](Key). */ + const Vector& at(Key j) const { + const_iterator item = find(j); + if(item == end()) + throw std::out_of_range( + "Requested variable '" + DefaultKeyFormatter(j) + "' is not in this VectorValues."); + else + return item->second; + } + + /** Read/write access to the vector value with key \c j, throws std::out_of_range if \c j does + * not exist, identical to at(Key). */ + Vector& operator[](Key j) { return at(j); } + + /** Access the vector value with key \c j (const version), throws std::out_of_range if \c j does + * not exist, identical to at(Key). */ + const Vector& operator[](Key j) const { return at(j); } + + /** For all key/value pairs in \c values, replace values with corresponding keys in this class + * with those in \c values. Throws std::out_of_range if any keys in \c values are not present + * in this class. */ + void update(const VectorValues& values); + + /** Insert a vector \c value with key \c j. Throws an invalid_argument exception if the key \c + * j is already used. + * @param value The vector to be inserted. + * @param j The index with which the value will be associated. */ + void insert(Key j, const Vector& value) { + insert(std::pair(j, value)); // Note only passing a reference to the Vector + } + + /** Insert a vector \c value with key \c j. Throws an invalid_argument exception if the key \c + * j is already used. + * @param value The vector to be inserted. + * @param j The index with which the value will be associated. */ + void insert(std::pair key_value) { + // Note that here we accept a pair with a reference to the Vector, but the Vector is copied as + // it is inserted into the values_ map. + if(!values_.insert(key_value).second) + throw std::invalid_argument( + "Requested to insert variable '" + DefaultKeyFormatter(key_value.first) + + "' already in this VectorValues."); + } + + /** Insert all values from \c values. Throws an invalid_argument exception if any keys to be + * inserted are already used. */ + void insert(const VectorValues& values); + + /** insert that mimics the STL map insert - if the value already exists, the map is not modified + * and an iterator to the existing value is returned, along with 'false'. If the value did not + * exist, it is inserted and an iterator pointing to the new element, along with 'true', is + * returned. */ + std::pair tryInsert(Key j, const Vector& value) { + return values_.insert(std::make_pair(j, value)); } + + /** Erase the vector with the given key, or throw std::out_of_range if it does not exist */ + void erase(Key var) { + if(values_.erase(var) == 0) + throw std::invalid_argument("Requested variable '" + DefaultKeyFormatter(var) + "', is not in this VectorValues."); + } + + /** Set all values to zero vectors. */ + void setZero(); + + iterator begin() { return values_.begin(); } ///< Iterator over variables + const_iterator begin() const { return values_.begin(); } ///< Iterator over variables + iterator end() { return values_.end(); } ///< Iterator over variables + const_iterator end() const { return values_.end(); } ///< Iterator over variables + reverse_iterator rbegin() { return values_.rbegin(); } ///< Reverse iterator over variables + const_reverse_iterator rbegin() const { return values_.rbegin(); } ///< Reverse iterator over variables + reverse_iterator rend() { return values_.rend(); } ///< Reverse iterator over variables + const_reverse_iterator rend() const { return values_.rend(); } ///< Reverse iterator over variables + + /** Return the iterator corresponding to the requested key, or end() if no variable is present with this key. */ + iterator find(Key j) { return values_.find(j); } + + /** Return the iterator corresponding to the requested key, or end() if no variable is present with this key. */ + const_iterator find(Key j) const { return values_.find(j); } + + /** print required by Testable for unit testing */ + void print(const std::string& str = "VectorValues: ", + const KeyFormatter& formatter = DefaultKeyFormatter) const; + + /** equals required by Testable for unit testing */ + bool equals(const VectorValues& x, double tol = 1e-9) const; + + /// @{ + /// @name Advanced Interface + /// @{ + + /** Retrieve the entire solution as a single vector */ + Vector vector() const; + + /** Access a vector that is a subset of relevant keys. */ + Vector vector(const std::vector& keys) const; + + /** Swap the data in this VectorValues with another. */ + void swap(VectorValues& other); + + /** Check if this VectorValues has the same structure (keys and dimensions) as another */ + bool hasSameStructure(const VectorValues other) const; + + /// @} + /// @name Linear algebra operations + /// @{ + + /** Dot product with another VectorValues, interpreting both as vectors of + * their concatenated values. Both VectorValues must have the + * same structure (checked when NDEBUG is not defined). */ + double dot(const VectorValues& v) const; + + /** Vector L2 norm */ + double norm() const; + + /** Squared vector L2 norm */ + double squaredNorm() const; + + /** Element-wise addition, synonym for add(). Both VectorValues must have the same structure + * (checked when NDEBUG is not defined). */ + VectorValues operator+(const VectorValues& c) const; + + /** Element-wise addition, synonym for operator+(). Both VectorValues must have the same + * structure (checked when NDEBUG is not defined). */ + VectorValues add(const VectorValues& c) const; + + /** Element-wise addition in-place, synonym for operator+=(). Both VectorValues must have the + * same structure (checked when NDEBUG is not defined). */ + VectorValues& operator+=(const VectorValues& c); + + /** Element-wise addition in-place, synonym for operator+=(). Both VectorValues must have the + * same structure (checked when NDEBUG is not defined). */ + VectorValues& addInPlace(const VectorValues& c); + + /** Element-wise subtraction, synonym for subtract(). Both VectorValues must have the same + * structure (checked when NDEBUG is not defined). */ + VectorValues operator-(const VectorValues& c) const; + + /** Element-wise subtraction, synonym for operator-(). Both VectorValues must have the same + * structure (checked when NDEBUG is not defined). */ + VectorValues subtract(const VectorValues& c) const; + + /** Element-wise scaling by a constant. */ + friend GTSAM_EXPORT VectorValues operator*(const double a, const VectorValues &v); + + /** Element-wise scaling by a constant. */ + VectorValues scale(const double a) const; + + /** Element-wise scaling by a constant in-place. */ + VectorValues& operator*=(double alpha); + + /** Element-wise scaling by a constant in-place. */ + VectorValues& scaleInPlace(double alpha); + + /// @} + + /// @} + /// @name Matlab syntactic sugar for linear algebra operations + /// @{ + + //inline VectorValues scale(const double a, const VectorValues& c) const { return a * (*this); } + + /// @} + + /** + * scale a vector by a scalar + */ + //friend VectorValues operator*(const double a, const VectorValues &v) { + // VectorValues result = VectorValues::SameStructure(v); + // for(Key j = 0; j < v.size(); ++j) + // result.values_[j] = a * v.values_[j]; + // return result; + //} + + //// TODO: linear algebra interface seems to have been added for SPCG. + //friend void axpy(double alpha, const VectorValues& x, VectorValues& y) { + // if(x.size() != y.size()) + // throw std::invalid_argument("axpy(VectorValues) called with different vector sizes"); + // for(Key j = 0; j < x.size(); ++j) + // if(x.values_[j].size() == y.values_[j].size()) + // y.values_[j] += alpha * x.values_[j]; + // else + // throw std::invalid_argument("axpy(VectorValues) called with different vector sizes"); + //} + //// TODO: linear algebra interface seems to have been added for SPCG. + //friend void sqrt(VectorValues &x) { + // for(Key j = 0; j < x.size(); ++j) + // x.values_[j] = x.values_[j].cwiseSqrt(); + //} + + //// TODO: linear algebra interface seems to have been added for SPCG. + //friend void ediv(const VectorValues& numerator, const VectorValues& denominator, VectorValues &result) { + // if(numerator.size() != denominator.size() || numerator.size() != result.size()) + // throw std::invalid_argument("ediv(VectorValues) called with different vector sizes"); + // for(Key j = 0; j < numerator.size(); ++j) + // if(numerator.values_[j].size() == denominator.values_[j].size() && numerator.values_[j].size() == result.values_[j].size()) + // result.values_[j] = numerator.values_[j].cwiseQuotient(denominator.values_[j]); + // else + // throw std::invalid_argument("ediv(VectorValues) called with different vector sizes"); + //} + + //// TODO: linear algebra interface seems to have been added for SPCG. + //friend void edivInPlace(VectorValues& x, const VectorValues& y) { + // if(x.size() != y.size()) + // throw std::invalid_argument("edivInPlace(VectorValues) called with different vector sizes"); + // for(Key j = 0; j < x.size(); ++j) + // if(x.values_[j].size() == y.values_[j].size()) + // x.values_[j].array() /= y.values_[j].array(); + // else + // throw std::invalid_argument("edivInPlace(VectorValues) called with different vector sizes"); + //} + + private: + /** Serialization function */ + friend class boost::serialization::access; + template + void serialize(ARCHIVE & ar, const unsigned int version) { + ar & BOOST_SERIALIZATION_NVP(values_); + } + }; // VectorValues definition + +} // \namespace gtsam diff --git a/gtsam/linear/iterative-inl.h b/gtsam/linear/iterative-inl.h index 47997b44c..e61b67f73 100644 --- a/gtsam/linear/iterative-inl.h +++ b/gtsam/linear/iterative-inl.h @@ -45,11 +45,11 @@ namespace gtsam { // Start with g0 = A'*(A*x0-b), d0 = - g0 // i.e., first step is in direction of negative gradient - g = gradient(Ab,x); + g = Ab.gradient(x); d = g; // instead of negating gradient, alpha will be negated // init gamma and calculate threshold - gamma = dot(g,g) ; + gamma = dot(g,g); threshold = std::max(parameters_.epsilon_abs(), parameters_.epsilon() * parameters_.epsilon() * gamma); // Allocate and calculate A*d for first iteration @@ -86,9 +86,9 @@ namespace gtsam { double alpha = takeOptimalStep(x); // update gradient (or re-calculate at reset time) - if (k % parameters_.reset() == 0) g = gradient(Ab,x); + if (k % parameters_.reset() == 0) g = Ab.gradient(x); // axpy(alpha, Ab ^ Ad, g); // g += alpha*(Ab^Ad) - else transposeMultiplyAdd(Ab, alpha, Ad, g); + else Ab.transposeMultiplyAdd(alpha, Ad, g); // check for convergence double new_gamma = dot(g, g); @@ -105,14 +105,14 @@ namespace gtsam { else { double beta = new_gamma / gamma; // d = g + d*beta; - scal(beta, d); + d *= beta; axpy(1.0, g, d); } gamma = new_gamma; // In-place recalculation Ad <- A*d to avoid re-allocating Ad - multiplyInPlace(Ab, d, Ad); + Ab.multiplyInPlace(d, Ad); return false; } diff --git a/gtsam/linear/iterative.h b/gtsam/linear/iterative.h index 7e0609f37..1ba0a7423 100644 --- a/gtsam/linear/iterative.h +++ b/gtsam/linear/iterative.h @@ -68,28 +68,28 @@ namespace gtsam { * Print with optional string */ void print (const std::string& s = "System") const; + + /** gradient of objective function 0.5*|Ax-b_|^2 at x = A_'*(Ax-b_) */ + Vector gradient(const Vector& x) const { + return A() ^ (A() * x - b()); + } + + /** Apply operator A */ + Vector operator*(const Vector& x) const { + return A() * x; + } + + /** Apply operator A in place */ + void multiplyInPlace(const Vector& x, Vector& e) const { + e = A() * x; + } + + /** x += alpha* A'*e */ + void transposeMultiplyAdd(double alpha, const Vector& e, Vector& x) const { + gtsam::transposeMultiplyAdd(alpha, A(), e, x); + } }; - /** gradient of objective function 0.5*|Ax-b_|^2 at x = A_'*(Ax-b_) */ - inline Vector gradient(const System& system, const Vector& x) { - return system.A() ^ (system.A() * x - system.b()); - } - - /** Apply operator A */ - inline Vector operator*(const System& system, const Vector& x) { - return system.A() * x; - } - - /** Apply operator A in place */ - inline void multiplyInPlace(const System& system, const Vector& x, Vector& e) { - e = system.A() * x; - } - - /** x += alpha* A'*e */ - inline void transposeMultiplyAdd(const System& system, double alpha, const Vector& e, Vector& x) { - transposeMultiplyAdd(alpha,system.A(),e,x); - } - /** * Method of steepest gradients, System version */ diff --git a/gtsam/linear/linearAlgorithms-inst.h b/gtsam/linear/linearAlgorithms-inst.h new file mode 100644 index 000000000..93276be59 --- /dev/null +++ b/gtsam/linear/linearAlgorithms-inst.h @@ -0,0 +1,103 @@ +/* ---------------------------------------------------------------------------- + + * GTSAM Copyright 2010, Georgia Tech Research Corporation, + * Atlanta, Georgia 30332-0415 + * All Rights Reserved + * Authors: Frank Dellaert, et al. (see THANKS for the full author list) + + * See LICENSE for the license information + + * -------------------------------------------------------------------------- */ + +/** + * @file linearAlgorithms-inst.h + * @brief Templated algorithms that are used in multiple places in linear + * @author Richard Roberts + */ + +#include +#include + +#include +#include + +namespace gtsam +{ + namespace internal + { + namespace linearAlgorithms + { + /* ************************************************************************* */ + struct OptimizeData { + boost::optional parentData; + //VectorValues ancestorResults; + //VectorValues results; + }; + + /* ************************************************************************* */ + /** Pre-order visitor for back-substitution in a Bayes tree. The visitor function operator()() + * optimizes the clique given the solution for the parents, and returns the solution for the + * clique's frontal variables. In addition, it adds the solution to a global collected + * solution that will finally be returned to the user. The reason we pass the individual + * clique solutions between nodes is to avoid log(n) lookups over all variables, they instead + * then are only over a node's parent variables. */ + template + struct OptimizeClique + { + VectorValues collectedResult; + + OptimizeData operator()( + const boost::shared_ptr& clique, + OptimizeData& parentData) + { + OptimizeData myData; + myData.parentData = parentData; + // Take any ancestor results we'll need + //BOOST_FOREACH(Key parent, clique->conditional_->parents()) + // myData.ancestorResults.insert(parent, myData.parentData->ancestorResults[parent]); + // Solve and store in our results + VectorValues result = clique->conditional()->solve(collectedResult/*myData.ancestorResults*/); + collectedResult.insert(result); + //myData.ancestorResults.insert(result); + return myData; + } + }; + + /* ************************************************************************* */ + //OptimizeData OptimizePreVisitor(const GaussianBayesTreeClique::shared_ptr& clique, OptimizeData& parentData) + //{ + // // Create data - holds a pointer to our parent, a copy of parent solution, and our results + // OptimizeData myData; + // myData.parentData = parentData; + // // Take any ancestor results we'll need + // BOOST_FOREACH(Key parent, clique->conditional_->parents()) + // myData.ancestorResults.insert(parent, myData.parentData->ancestorResults[parent]); + // // Solve and store in our results + // myData.results.insert(clique->conditional()->solve(myData.ancestorResults)); + // myData.ancestorResults.insert(myData.results); + // return myData; + //} + + /* ************************************************************************* */ + //void OptimizePostVisitor(const GaussianBayesTreeClique::shared_ptr& clique, OptimizeData& myData) + //{ + // // Conglomerate our results to the parent + // myData.parentData->results.insert(myData.results); + //} + + /* ************************************************************************* */ + template + VectorValues optimizeBayesTree(const BAYESTREE& bayesTree) + { + gttic(linear_optimizeBayesTree); + //internal::OptimizeData rootData; // Will hold final solution + //treeTraversal::DepthFirstForest(*this, rootData, internal::OptimizePreVisitor, internal::OptimizePostVisitor); + //return rootData.results; + OptimizeData rootData; + OptimizeClique preVisitor; + treeTraversal::DepthFirstForest(bayesTree, rootData, preVisitor); + return preVisitor.collectedResult; + } + } + } +} \ No newline at end of file diff --git a/gtsam/linear/linearExceptions.cpp b/gtsam/linear/linearExceptions.cpp new file mode 100644 index 000000000..f633df4ce --- /dev/null +++ b/gtsam/linear/linearExceptions.cpp @@ -0,0 +1,47 @@ +/* ---------------------------------------------------------------------------- + + * GTSAM Copyright 2010, Georgia Tech Research Corporation, + * Atlanta, Georgia 30332-0415 + * All Rights Reserved + * Authors: Frank Dellaert, et al. (see THANKS for the full author list) + + * See LICENSE for the license information + + * -------------------------------------------------------------------------- */ + +/** + * @file linearExceptions.cpp + * @brief Exceptions that may be thrown by linear solver components + * @author Richard Roberts + * @date Aug 17, 2012 + */ + +#include + +#include + +namespace gtsam { + + /* ************************************************************************* */ + const char* InvalidNoiseModel::what() const throw() { + if(description_.empty()) + description_ = (boost::format( + "A JacobianFactor was attempted to be constructed or modified to use a\n" + "noise model of incompatible dimension. The JacobianFactor has\n" + "dimensionality (i.e. length of error vector) %d but the provided noise\n" + "model has dimensionality %d.") % factorDims % noiseModelDims).str(); + return description_.c_str(); + } + + /* ************************************************************************* */ + const char* InvalidMatrixBlock::what() const throw() { + if(description_.empty()) + description_ = (boost::format( + "A JacobianFactor was attempted to be constructed with a matrix block of\n" + "inconsistent dimension. The JacobianFactor has %d rows (i.e. length of\n" + "error vector) but the provided matrix block has %d rows.") + % factorRows % blockRows).str(); + return description_.c_str(); + } + + } diff --git a/gtsam/linear/linearExceptions.h b/gtsam/linear/linearExceptions.h index 8b036df0a..68acf526b 100644 --- a/gtsam/linear/linearExceptions.h +++ b/gtsam/linear/linearExceptions.h @@ -114,4 +114,46 @@ on gtsam::IndeterminantLinearSystemException for more information.\n"; } }; -} + /* ************************************************************************* */ + /** An exception indicating that the noise model dimension passed into a + * JacobianFactor has a different dimensionality than the factor. */ + class GTSAM_EXPORT InvalidNoiseModel : public std::exception { + public: + const DenseIndex factorDims; ///< The dimensionality of the factor + const DenseIndex noiseModelDims; ///< The dimensionality of the noise model + + InvalidNoiseModel(DenseIndex factorDims, DenseIndex noiseModelDims) : + factorDims(factorDims), noiseModelDims(noiseModelDims) {} + virtual ~InvalidNoiseModel() throw() {} + + virtual const char* what() const throw(); + + private: + mutable std::string description_; + }; + + /* ************************************************************************* */ + /** An exception indicating that a matrix block passed into a + * JacobianFactor has a different dimensionality than the factor. */ + class GTSAM_EXPORT InvalidMatrixBlock : public std::exception { + public: + const DenseIndex factorRows; ///< The dimensionality of the factor + const DenseIndex blockRows; ///< The dimensionality of the noise model + + InvalidMatrixBlock(DenseIndex factorRows, DenseIndex blockRows) : + factorRows(factorRows), blockRows(blockRows) {} + virtual ~InvalidMatrixBlock() throw() {} + + virtual const char* what() const throw(); + + private: + mutable std::string description_; + }; + + /* ************************************************************************* */ + class InvalidDenseElimination : public std::invalid_argument { + public: + InvalidDenseElimination(const char *message) : std::invalid_argument(message) {} + }; + + } diff --git a/gtsam/linear/tests/testGaussianBayesNetUnordered.cpp b/gtsam/linear/tests/testGaussianBayesNetUnordered.cpp new file mode 100644 index 000000000..7d15f47e7 --- /dev/null +++ b/gtsam/linear/tests/testGaussianBayesNetUnordered.cpp @@ -0,0 +1,197 @@ +/* ---------------------------------------------------------------------------- + + * GTSAM Copyright 2010, Georgia Tech Research Corporation, + * Atlanta, Georgia 30332-0415 + * All Rights Reserved + * Authors: Frank Dellaert, et al. (see THANKS for the full author list) + + * See LICENSE for the license information + + * -------------------------------------------------------------------------- */ + +/** + * @file testGaussianBayesNet.cpp + * @brief Unit tests for GaussianBayesNet + * @author Frank Dellaert + */ + +// STL/C++ +#include +#include +#include +#include +#include + +#include // for operator += +using namespace boost::assign; + +#include +#include +#include +#include +#include +#include + +using namespace std; +using namespace gtsam; + +static const Key _x_=0, _y_=1, _z_=2; + +static GaussianBayesNet smallBayesNet = list_of + (GaussianConditional(_x_, Vector_(1, 9.0), Matrix_(1, 1, 1.0), _y_, Matrix_(1, 1, 1.0))) + (GaussianConditional(_y_, Vector_(1, 5.0), Matrix_(1, 1, 1.0))); + +/* ************************************************************************* */ +TEST( GaussianBayesNet, matrix ) +{ + Matrix R; Vector d; + boost::tie(R,d) = smallBayesNet.matrix(); // find matrix and RHS + + Matrix R1 = Matrix_(2,2, + 1.0, 1.0, + 0.0, 1.0 + ); + Vector d1 = Vector_(2, 9.0, 5.0); + + EXPECT(assert_equal(R,R1)); + EXPECT(assert_equal(d,d1)); +} + +/* ************************************************************************* */ +TEST( GaussianBayesNet, optimize ) +{ + VectorValues actual = smallBayesNet.optimize(); + + VectorValues expected = map_list_of + (_x_, Vector_(1, 4.0)) + (_y_, Vector_(1, 5.0)); + + EXPECT(assert_equal(expected,actual)); +} + +/* ************************************************************************* */ +TEST( GaussianBayesNet, optimize3 ) +{ + // y = R*x, x=inv(R)*y + // 4 = 1 1 -1 + // 5 1 5 + // NOTE: we are supplying a new RHS here + + VectorValues expected = map_list_of + (_x_, Vector_(1, -1.0)) + (_y_, Vector_(1, 5.0)); + + // Test different RHS version + VectorValues gx = map_list_of + (_x_, Vector_(1, 4.0)) + (_y_, Vector_(1, 5.0)); + VectorValues actual = smallBayesNet.backSubstitute(gx); + EXPECT(assert_equal(expected, actual)); +} + +/* ************************************************************************* */ +TEST( GaussianBayesNet, backSubstituteTranspose ) +{ + // x=R'*y, expected=inv(R')*x + // 2 = 1 2 + // 5 1 1 3 + VectorValues + x = map_list_of + (_x_, Vector_(1, 2.0)) + (_y_, Vector_(1, 5.0)), + expected = map_list_of + (_x_, Vector_(1, 2.0)) + (_y_, Vector_(1, 3.0)); + + VectorValues actual = smallBayesNet.backSubstituteTranspose(x); + EXPECT(assert_equal(expected, actual)); +} + +/* ************************************************************************* */ +// Tests computing Determinant +TEST( GaussianBayesNet, DeterminantTest ) +{ + GaussianBayesNet cbn; + cbn += GaussianConditional( + 0, Vector_( 2, 3.0, 4.0 ), Matrix_(2, 2, 1.0, 3.0, 0.0, 4.0 ), + 1, Matrix_(2, 2, 2.0, 1.0, 2.0, 3.0), noiseModel::Isotropic::Sigma(2, 2.0)); + + cbn += GaussianConditional( + 1, Vector_( 2, 5.0, 6.0 ), Matrix_(2, 2, 1.0, 1.0, 0.0, 3.0 ), + 2, Matrix_(2, 2, 1.0, 0.0, 5.0, 2.0), noiseModel::Isotropic::Sigma(2, 2.0)); + + cbn += GaussianConditional( + 3, Vector_( 2, 7.0, 8.0 ), Matrix_(2, 2, 1.0, 1.0, 0.0, 5.0 ), noiseModel::Isotropic::Sigma(2, 2.0)); + + double expectedDeterminant = 60.0 / 64.0; + double actualDeterminant = cbn.determinant(); + + EXPECT_DOUBLES_EQUAL( expectedDeterminant, actualDeterminant, 1e-9); +} + +/* ************************************************************************* */ +namespace { + double computeError(const GaussianBayesNet& gbn, const LieVector& values) + { + pair Rd = GaussianFactorGraph(gbn).jacobian(); + return 0.5 * (Rd.first * values - Rd.second).squaredNorm(); + } +} + +/* ************************************************************************* */ +TEST(GaussianBayesNet, ComputeSteepestDescentPoint) { + + // Create an arbitrary Bayes Net + GaussianBayesNet gbn; + gbn += GaussianConditional::shared_ptr(new GaussianConditional( + 0, Vector_(2, 1.0,2.0), Matrix_(2,2, 3.0,4.0,0.0,6.0), + 3, Matrix_(2,2, 7.0,8.0,9.0,10.0), + 4, Matrix_(2,2, 11.0,12.0,13.0,14.0))); + gbn += GaussianConditional::shared_ptr(new GaussianConditional( + 1, Vector_(2, 15.0,16.0), Matrix_(2,2, 17.0,18.0,0.0,20.0), + 2, Matrix_(2,2, 21.0,22.0,23.0,24.0), + 4, Matrix_(2,2, 25.0,26.0,27.0,28.0))); + gbn += GaussianConditional::shared_ptr(new GaussianConditional( + 2, Vector_(2, 29.0,30.0), Matrix_(2,2, 31.0,32.0,0.0,34.0), + 3, Matrix_(2,2, 35.0,36.0,37.0,38.0))); + gbn += GaussianConditional::shared_ptr(new GaussianConditional( + 3, Vector_(2, 39.0,40.0), Matrix_(2,2, 41.0,42.0,0.0,44.0), + 4, Matrix_(2,2, 45.0,46.0,47.0,48.0))); + gbn += GaussianConditional::shared_ptr(new GaussianConditional( + 4, Vector_(2, 49.0,50.0), Matrix_(2,2, 51.0,52.0,0.0,54.0))); + + // Compute the Hessian numerically + Matrix hessian = numericalHessian( + boost::function(boost::bind(&computeError, gbn, _1)), + LieVector(Vector::Zero(GaussianFactorGraph(gbn).jacobian().first.cols()))); + + // Compute the gradient numerically + Vector gradient = numericalGradient( + boost::function(boost::bind(&computeError, gbn, _1)), + LieVector(Vector::Zero(GaussianFactorGraph(gbn).jacobian().first.cols()))); + + // Compute the gradient using dense matrices + Matrix augmentedHessian = GaussianFactorGraph(gbn).augmentedHessian(); + LONGS_EQUAL(11, (long)augmentedHessian.cols()); + Vector denseMatrixGradient = -augmentedHessian.col(10).segment(0,10); + EXPECT(assert_equal(gradient, denseMatrixGradient, 1e-5)); + + // Compute the steepest descent point + double step = -gradient.squaredNorm() / (gradient.transpose() * hessian * gradient)(0); + Vector expected = gradient * step; + + // Compute the steepest descent point with the dogleg function + VectorValues actual = gbn.optimizeGradientSearch(); + + // Check that points agree + EXPECT(assert_equal(expected, actual.vector(), 1e-5)); + + // Check that point causes a decrease in error + double origError = GaussianFactorGraph(gbn).error(VectorValues::Zero(actual)); + double newError = GaussianFactorGraph(gbn).error(actual); + EXPECT(newError < origError); +} + +/* ************************************************************************* */ +int main() { TestResult tr; return TestRegistry::runAllTests(tr);} +/* ************************************************************************* */ diff --git a/gtsam/linear/tests/testGaussianBayesTreeUnordered.cpp b/gtsam/linear/tests/testGaussianBayesTreeUnordered.cpp new file mode 100644 index 000000000..69977f3a4 --- /dev/null +++ b/gtsam/linear/tests/testGaussianBayesTreeUnordered.cpp @@ -0,0 +1,289 @@ +/* ---------------------------------------------------------------------------- + + * GTSAM Copyright 2010, Georgia Tech Research Corporation, + * Atlanta, Georgia 30332-0415 + * All Rights Reserved + * Authors: Frank Dellaert, et al. (see THANKS for the full author list) + + * See LICENSE for the license information + + * -------------------------------------------------------------------------- */ + +/** + * @file testGaussianBayesTree.cpp + * @date Jul 8, 2010 + * @author Kai Ni + */ + +#include +#include + +#include +#include // for operator += +#include // for operator += +using namespace boost::assign; + +#include +#include +#include +#include +#include +#include +#include + +using namespace std; +using namespace gtsam; + +namespace { + const Key x1=1, x2=2, x3=3, x4=4; + const SharedDiagonal chainNoise = noiseModel::Isotropic::Sigma(1, 0.5); + const GaussianFactorGraph chain = list_of + (JacobianFactor(x2, Matrix_(1,1,1.), x1, Matrix_(1,1,1.), Vector_(1,1.), chainNoise)) + (JacobianFactor(x2, Matrix_(1,1,1.), x3, Matrix_(1,1,1.), Vector_(1,1.), chainNoise)) + (JacobianFactor(x3, Matrix_(1,1,1.), x4, Matrix_(1,1,1.), Vector_(1,1.), chainNoise)) + (JacobianFactor(x4, Matrix_(1,1,1.), Vector_(1,1.), chainNoise)); + const Ordering chainOrdering = Ordering(list_of(x2)(x1)(x3)(x4)); + + /* ************************************************************************* */ + // Helper functions for below + GaussianBayesTreeClique::shared_ptr MakeClique(const GaussianConditional& conditional) + { + return boost::make_shared( + boost::make_shared(conditional)); + } + + template + GaussianBayesTreeClique::shared_ptr MakeClique( + const GaussianConditional& conditional, const CHILDREN& children) + { + GaussianBayesTreeClique::shared_ptr clique = + boost::make_shared( + boost::make_shared(conditional)); + clique->children.assign(children.begin(), children.end()); + BOOST_FOREACH(const GaussianBayesTreeClique::shared_ptr& child, children) + child->parent_ = clique; + return clique; + } +} + +/* ************************************************************************* */ +/** + * x1 - x2 - x3 - x4 + * x3 x4 + * x2 x1 : x3 + * + * x2 x1 x3 x4 b + * 1 1 1 + * 1 1 1 + * 1 1 1 + * 1 1 + * + * 1 0 0 1 + */ +TEST( GaussianBayesTree, eliminate ) +{ + GaussianBayesTree bt = *chain.eliminateMultifrontal(chainOrdering); + + Matrix two = Matrix_(1,1,2.); + Matrix one = Matrix_(1,1,1.); + + GaussianBayesTree bayesTree_expected; + bayesTree_expected.insertRoot( + MakeClique(GaussianConditional(pair_list_of (x3, Matrix_(2,1, 2., 0.)) (x4, Matrix_(2,1, 2., 2.)), 2, Vector_(2, 2., 2.)), list_of + (MakeClique(GaussianConditional(pair_list_of (x2, Matrix_(2,1, -2.*sqrt(2.), 0.)) (x1, Matrix_(2,1, -sqrt(2.), -sqrt(2.))) (x3, Matrix_(2,1, -sqrt(2.), sqrt(2.))), 2, Vector_(2, -2.*sqrt(2.), 0.)))))); + + EXPECT(assert_equal(bayesTree_expected, bt)); +} + +/* ************************************************************************* */ +TEST( GaussianBayesTree, optimizeMultiFrontal ) +{ + VectorValues expected = pair_list_of + (x1, Vector_(1, 0.)) + (x2, Vector_(1, 1.)) + (x3, Vector_(1, 0.)) + (x4, Vector_(1, 1.)); + + VectorValues actual = chain.eliminateMultifrontal(chainOrdering)->optimize(); + EXPECT(assert_equal(expected,actual)); +} + +/* ************************************************************************* */ +TEST(GaussianBayesTree, complicatedMarginal) { + + // Create the conditionals to go in the BayesTree + GaussianBayesTree bt; + bt.insertRoot( + MakeClique(GaussianConditional(pair_list_of (11, (Matrix(3,1) << 0.0971, 0, 0).finished()) + (12, (Matrix(3,2) << 0.3171, 0.4387, 0.9502, 0.3816, 0, 0.7655).finished()), + 2, (Vector(3) << 0.2638, 0.1455, 0.1361).finished()), list_of + (MakeClique(GaussianConditional(pair_list_of (9, (Matrix(3,1) << 0.7952, 0, 0).finished()) + (10, (Matrix(3,2) << 0.4456, 0.7547, 0.6463, 0.2760, 0, 0.6797).finished()) + (11, (Matrix(3,1) << 0.6551, 0.1626, 0.1190).finished()) + (12, (Matrix(3,2) << 0.4984, 0.5853, 0.9597, 0.2238, 0.3404, 0.7513).finished()), + 2, (Vector(3) << 0.4314, 0.9106, 0.1818).finished()))) + (MakeClique(GaussianConditional(pair_list_of (7, (Matrix(3,1) << 0.2551, 0, 0).finished()) + (8, (Matrix(3,2) << 0.8909, 0.1386, 0.9593, 0.1493, 0, 0.2575).finished()) + (11, (Matrix(3,1) << 0.8407, 0.2543, 0.8143).finished()), + 2, (Vector(3) << 0.3998, 0.2599, 0.8001).finished()), list_of + (MakeClique(GaussianConditional(pair_list_of (5, (Matrix(3,1) << 0.2435, 0, 0).finished()) + (6, (Matrix(3,2) << 0.4733, 0.1966, 0.3517, 0.2511, 0.8308, 0.0).finished()) + // NOTE the non-upper-triangular form + // here since this test was written when we had column permutations + // from LDL. The code still works currently (does not enfore + // upper-triangularity in this case) but this test will need to be + // redone if this stops working in the future + (7, (Matrix(3,1) << 0.5853, 0.5497, 0.9172).finished()) + (8, (Matrix(3,2) << 0.2858, 0.3804, 0.7572, 0.5678, 0.7537, 0.0759).finished()), + 2, (Vector(3) << 0.8173, 0.8687, 0.0844).finished()), list_of + (MakeClique(GaussianConditional(pair_list_of (3, (Matrix(3,1) << 0.0540, 0, 0).finished()) + (4, (Matrix(3,2) << 0.9340, 0.4694, 0.1299, 0.0119, 0, 0.3371).finished()) + (6, (Matrix(3,2) << 0.1622, 0.5285, 0.7943, 0.1656, 0.3112, 0.6020).finished()), + 2, (Vector(3) << 0.9619, 0.0046, 0.7749).finished()))) + (MakeClique(GaussianConditional(pair_list_of (1, (Matrix(3,1) << 0.2630, 0, 0).finished()) + (2, (Matrix(3,2) << 0.7482, 0.2290, 0.4505, 0.9133, 0, 0.1524).finished()) + (5, (Matrix(3,1) << 0.8258, 0.5383, 0.9961).finished()), + 2, (Vector(3) << 0.0782, 0.4427, 0.1067).finished()))))))))); + + // Marginal on 5 + Matrix expectedCov = (Matrix(1,1) << 236.5166).finished(); + //GaussianConditional actualJacobianChol = *bt.marginalFactor(5, EliminateCholesky); + GaussianConditional actualJacobianQR = *bt.marginalFactor(5, EliminateQR); + //EXPECT(assert_equal(actualJacobianChol, actualJacobianQR)); // Check that Chol and QR obtained marginals are the same + LONGS_EQUAL(1, (long)actualJacobianQR.rows()); + LONGS_EQUAL(1, (long)actualJacobianQR.size()); + LONGS_EQUAL(5, (long)actualJacobianQR.keys()[0]); + Matrix actualA = actualJacobianQR.getA(actualJacobianQR.begin()); + Matrix actualCov = inverse(actualA.transpose() * actualA); + EXPECT(assert_equal(expectedCov, actualCov, 1e-1)); + + // Marginal on 6 +// expectedCov = (Matrix(2,2) << +// 8471.2, 2886.2, +// 2886.2, 1015.8).finished(); + expectedCov = (Matrix(2,2) << + 1015.8, 2886.2, + 2886.2, 8471.2).finished(); + //actualJacobianChol = bt.marginalFactor(6, EliminateCholesky); + actualJacobianQR = *bt.marginalFactor(6, EliminateQR); + //EXPECT(assert_equal(actualJacobianChol, actualJacobianQR)); // Check that Chol and QR obtained marginals are the same + LONGS_EQUAL(2, (long)actualJacobianQR.rows()); + LONGS_EQUAL(1, (long)actualJacobianQR.size()); + LONGS_EQUAL(6, (long)actualJacobianQR.keys()[0]); + actualA = actualJacobianQR.getA(actualJacobianQR.begin()); + actualCov = inverse(actualA.transpose() * actualA); + EXPECT(assert_equal(expectedCov, actualCov, 1e1)); +} + +namespace { + /* ************************************************************************* */ + double computeError(const GaussianBayesTree& gbt, const LieVector& values) + { + pair Rd = GaussianFactorGraph(gbt).jacobian(); + return 0.5 * (Rd.first * values - Rd.second).squaredNorm(); + } +} + +/* ************************************************************************* */ +TEST(GaussianBayesTree, ComputeSteepestDescentPointBT) { + + // Create an arbitrary Bayes Tree + GaussianBayesTree bt; + bt.insertRoot(MakeClique(GaussianConditional( + pair_list_of + (2, Matrix_(6,2, + 31.0,32.0, + 0.0,34.0, + 0.0,0.0, + 0.0,0.0, + 0.0,0.0, + 0.0,0.0)) + (3, Matrix_(6,2, + 35.0,36.0, + 37.0,38.0, + 41.0,42.0, + 0.0,44.0, + 0.0,0.0, + 0.0,0.0)) + (4, Matrix_(6,2, + 0.0,0.0, + 0.0,0.0, + 45.0,46.0, + 47.0,48.0, + 51.0,52.0, + 0.0,54.0)), + 3, Vector_(6, 29.0,30.0,39.0,40.0,49.0,50.0)), list_of + (MakeClique(GaussianConditional( + pair_list_of + (0, Matrix_(4,2, + 3.0,4.0, + 0.0,6.0, + 0.0,0.0, + 0.0,0.0)) + (1, Matrix_(4,2, + 0.0,0.0, + 0.0,0.0, + 17.0,18.0, + 0.0,20.0)) + (2, Matrix_(4,2, + 0.0,0.0, + 0.0,0.0, + 21.0,22.0, + 23.0,24.0)) + (3, Matrix_(4,2, + 7.0,8.0, + 9.0,10.0, + 0.0,0.0, + 0.0,0.0)) + (4, Matrix_(4,2, + 11.0,12.0, + 13.0,14.0, + 25.0,26.0, + 27.0,28.0)), + 2, Vector_(4, 1.0,2.0,15.0,16.0)))))); + + // Compute the Hessian numerically + Matrix hessian = numericalHessian( + boost::function(boost::bind(&computeError, bt, _1)), + LieVector(Vector::Zero(GaussianFactorGraph(bt).jacobian().first.cols()))); + + // Compute the gradient numerically + Vector gradient = numericalGradient( + boost::function(boost::bind(&computeError, bt, _1)), + LieVector(Vector::Zero(GaussianFactorGraph(bt).jacobian().first.cols()))); + + // Compute the gradient using dense matrices + Matrix augmentedHessian = GaussianFactorGraph(bt).augmentedHessian(); + LONGS_EQUAL(11, (long)augmentedHessian.cols()); + Vector denseMatrixGradient = -augmentedHessian.col(10).segment(0,10); + EXPECT(assert_equal(gradient, denseMatrixGradient, 1e-5)); + + // Compute the steepest descent point + double step = -gradient.squaredNorm() / (gradient.transpose() * hessian * gradient)(0); + Vector expected = gradient * step; + + // Known steepest descent point from Bayes' net version + VectorValues expectedFromBN = pair_list_of + (0, Vector_(2, 0.000129034, 0.000688183)) + (1, Vector_(2, 0.0109679, 0.0253767)) + (2, Vector_(2, 0.0680441, 0.114496)) + (3, Vector_(2, 0.16125, 0.241294)) + (4, Vector_(2, 0.300134, 0.423233)); + + // Compute the steepest descent point with the dogleg function + VectorValues actual = bt.optimizeGradientSearch(); + + // Check that points agree + EXPECT(assert_equal(expected, actual.vector(), 1e-5)); + EXPECT(assert_equal(expectedFromBN, actual, 1e-5)); + + // Check that point causes a decrease in error + double origError = GaussianFactorGraph(bt).error(VectorValues::Zero(actual)); + double newError = GaussianFactorGraph(bt).error(actual); + EXPECT(newError < origError); +} + + +/* ************************************************************************* */ +int main() { TestResult tr; return TestRegistry::runAllTests(tr);} +/* ************************************************************************* */ diff --git a/gtsam/linear/tests/testGaussianConditional.cpp b/gtsam/linear/tests/testGaussianConditionalUnordered.cpp similarity index 57% rename from gtsam/linear/tests/testGaussianConditional.cpp rename to gtsam/linear/tests/testGaussianConditionalUnordered.cpp index 86d4b28b4..45e9d29d2 100644 --- a/gtsam/linear/tests/testGaussianConditional.cpp +++ b/gtsam/linear/tests/testGaussianConditionalUnordered.cpp @@ -19,6 +19,8 @@ #include #include +#include +#include #include #include #include @@ -28,15 +30,16 @@ #include #include #include +#include +#include using namespace gtsam; using namespace std; using namespace boost::assign; static const double tol = 1e-5; -static const Index _x_=0, _x1_=1, _l1_=2; -Matrix R = Matrix_(2,2, +static Matrix R = Matrix_(2,2, -12.1244, -5.1962, 0., 4.6904); @@ -54,63 +57,43 @@ TEST(GaussianConditional, constructor) -3.0153, -3.5635); Vector d = Vector_(2, 1.0, 2.0); - Vector s = Vector_(2, 3.0, 4.0); + SharedDiagonal s = noiseModel::Diagonal::Sigmas(Vector_(2, 3.0, 4.0)); - list > terms; - terms += - make_pair(3, S1), - make_pair(5, S2), - make_pair(7, S3); + vector > terms = pair_list_of + (3, S1) + (5, S2) + (7, S3); GaussianConditional actual(1, d, R, terms, s); GaussianConditional::const_iterator it = actual.beginFrontals(); - EXPECT(assert_equal(Index(1), *it)); + EXPECT(assert_equal(Key(1), *it)); EXPECT(assert_equal(R, actual.get_R())); ++ it; EXPECT(it == actual.endFrontals()); it = actual.beginParents(); - EXPECT(assert_equal(Index(3), *it)); + EXPECT(assert_equal(Key(3), *it)); EXPECT(assert_equal(S1, actual.get_S(it))); ++ it; - EXPECT(assert_equal(Index(5), *it)); + EXPECT(assert_equal(Key(5), *it)); EXPECT(assert_equal(S2, actual.get_S(it))); ++ it; - EXPECT(assert_equal(Index(7), *it)); + EXPECT(assert_equal(Key(7), *it)); EXPECT(assert_equal(S3, actual.get_S(it))); ++it; EXPECT(it == actual.endParents()); EXPECT(assert_equal(d, actual.get_d())); - EXPECT(assert_equal(s, actual.get_sigmas())); + EXPECT(assert_equal(*s, *actual.get_model())); // test copy constructor GaussianConditional copied(actual); EXPECT(assert_equal(d, copied.get_d())); - EXPECT(assert_equal(s, copied.get_sigmas())); - EXPECT(assert_equal(R, copied.get_R())); -} - -/* ************************************************************************* */ - -GaussianConditional construct() { - Vector d = Vector_(2, 1.0, 2.0); - Vector s = Vector_(2, 3.0, 4.0); - GaussianConditional::shared_ptr shared(new GaussianConditional(1, d, R, s)); - return *shared; -} - -TEST(GaussianConditional, return_value) -{ - Vector d = Vector_(2, 1.0, 2.0); - Vector s = Vector_(2, 3.0, 4.0); - GaussianConditional copied = construct(); - EXPECT(assert_equal(d, copied.get_d())); - EXPECT(assert_equal(s, copied.get_sigmas())); + EXPECT(assert_equal(*s, *copied.get_model())); EXPECT(assert_equal(R, copied.get_R())); } @@ -130,16 +113,13 @@ TEST( GaussianConditional, equals ) R(0,0) = 0.1 ; R(1,0) = 0.3; R(0,1) = 0.0 ; R(1,1) = 0.34; - Vector tau(2); - tau(0) = 1.0; - tau(1) = 0.34; + SharedDiagonal model = noiseModel::Diagonal::Sigmas(Vector_(2, 1.0, 0.34)); - Vector d(2); - d(0) = 0.2; d(1) = 0.5; + Vector d = Vector_(2, 0.2, 0.5); GaussianConditional - expected(_x_,d, R, _x1_, A1, _l1_, A2, tau), - actual(_x_,d, R, _x1_, A1, _l1_, A2, tau); + expected(1, d, R, 2, A1, 10, A2, model), + actual(1, d, R, 2, A1, 10, A2, model); EXPECT( expected.equals(actual) ); } @@ -161,68 +141,57 @@ TEST( GaussianConditional, solve ) Matrix A2 = Matrix_(2,2, 5., 6., 7., 8.); - Vector d(2); - d(0) = 20.0; d(1) = 40.0; + Vector d(2); d << 20.0, 40.0; - Vector tau = ones(2); + GaussianConditional cg(1, d, R, 2, A1, 10, A2); - GaussianConditional cg(_x_, d, R, _x1_, A1, _l1_, A2, tau); + Vector sx1(2); sx1 << 1.0, 1.0; + Vector sl1(2); sl1 << 1.0, 1.0; - Vector sx1(2); - sx1(0) = 1.0; sx1(1) = 1.0; + VectorValues expected = map_list_of + (1, expectedX) + (2, sx1) + (10, sl1); - Vector sl1(2); - sl1(0) = 1.0; sl1(1) = 1.0; + VectorValues solution = map_list_of + (2, sx1) // parents + (10, sl1); + solution.insert(cg.solve(solution)); - VectorValues solution(vector(3, 2)); - solution[_x_] = d; - solution[_x1_] = sx1; // parents - solution[_l1_] = sl1; - - VectorValues expected(vector(3, 2)); - expected[_x_] = expectedX; - expected[_x1_] = sx1; - expected[_l1_] = sl1; - cg.solveInPlace(solution); - - EXPECT(assert_equal(expected, solution, 0.0001)); + EXPECT(assert_equal(expected, solution, tol)); } /* ************************************************************************* */ TEST( GaussianConditional, solve_simple ) { - Matrix full_matrix = Matrix_(4, 7, + // 2 variables, frontal has dim=4 + VerticalBlockMatrix blockMatrix(list_of(4)(2)(1), 4); + blockMatrix.matrix() << 1.0, 0.0, 2.0, 0.0, 3.0, 0.0, 0.1, 0.0, 1.0, 0.0, 2.0, 0.0, 3.0, 0.2, 0.0, 0.0, 3.0, 0.0, 4.0, 0.0, 0.3, - 0.0, 0.0, 0.0, 3.0, 0.0, 4.0, 0.4); + 0.0, 0.0, 0.0, 3.0, 0.0, 4.0, 0.4; // solve system as a non-multifrontal version first - // 2 variables, frontal has dim=4 - vector dims; dims += 4, 2, 1; - GaussianConditional::rsd_type matrices(full_matrix, dims.begin(), dims.end()); - Vector sigmas = ones(4); - vector cgdims; cgdims += _x_, _x1_; - GaussianConditional cg(cgdims.begin(), cgdims.end(), 1, matrices, sigmas); + GaussianConditional cg(list_of(1)(2), 1, blockMatrix); // partial solution Vector sx1 = Vector_(2, 9.0, 10.0); - // elimination order; _x_, _x1_ - vector vdim; vdim += 4, 2; - VectorValues actual(vdim); - actual[_x1_] = sx1; // parent + // elimination order: 1, 2 + VectorValues actual = map_list_of + (2, sx1); // parent - VectorValues expected(vdim); - expected[_x1_] = sx1; - expected[_x_] = Vector_(4, -3.1,-3.4,-11.9,-13.2); + VectorValues expected = map_list_of + (2, sx1) + (1, Vector_(4, -3.1,-3.4,-11.9,-13.2)); // verify indices/size - EXPECT_LONGS_EQUAL(2, cg.size()); - EXPECT_LONGS_EQUAL(4, cg.dim()); + EXPECT_LONGS_EQUAL(2, (long)cg.size()); + EXPECT_LONGS_EQUAL(4, (long)cg.rows()); // solve and verify - cg.solveInPlace(actual); + actual.insert(cg.solve(actual)); EXPECT(assert_equal(expected, actual, tol)); } @@ -230,46 +199,42 @@ TEST( GaussianConditional, solve_simple ) TEST( GaussianConditional, solve_multifrontal ) { // create full system, 3 variables, 2 frontals, all 2 dim - Matrix full_matrix = Matrix_(4, 7, + VerticalBlockMatrix blockMatrix(list_of(2)(2)(2)(1), 4); + blockMatrix.matrix() << 1.0, 0.0, 2.0, 0.0, 3.0, 0.0, 0.1, 0.0, 1.0, 0.0, 2.0, 0.0, 3.0, 0.2, 0.0, 0.0, 3.0, 0.0, 4.0, 0.0, 0.3, - 0.0, 0.0, 0.0, 3.0, 0.0, 4.0, 0.4); + 0.0, 0.0, 0.0, 3.0, 0.0, 4.0, 0.4; // 3 variables, all dim=2 - vector dims; dims += 2, 2, 2, 1; - GaussianConditional::rsd_type matrices(full_matrix, dims.begin(), dims.end()); - Vector sigmas = ones(4); - vector cgdims; cgdims += _x_, _x1_, _l1_; - GaussianConditional cg(cgdims.begin(), cgdims.end(), 2, matrices, sigmas); + GaussianConditional cg(list_of(1)(2)(10), 2, blockMatrix); - EXPECT(assert_equal(Vector_(4, 0.1, 0.2, 0.3, 0.4), cg.get_d())); + EXPECT(assert_equal(Vector(blockMatrix.full().rightCols(1)), cg.get_d())); // partial solution Vector sl1 = Vector_(2, 9.0, 10.0); // elimination order; _x_, _x1_, _l1_ - VectorValues actual(vector(3, 2)); - actual[_l1_] = sl1; // parent + VectorValues actual = map_list_of + (10, sl1); // parent - VectorValues expected(vector(3, 2)); - expected[_x_] = Vector_(2, -3.1,-3.4); - expected[_x1_] = Vector_(2, -11.9,-13.2); - expected[_l1_] = sl1; + VectorValues expected = map_list_of + (1, Vector_(2, -3.1,-3.4)) + (2, Vector_(2, -11.9,-13.2)) + (10, sl1); // verify indices/size - EXPECT_LONGS_EQUAL(3, cg.size()); - EXPECT_LONGS_EQUAL(4, cg.dim()); + EXPECT_LONGS_EQUAL(3, (long)cg.size()); + EXPECT_LONGS_EQUAL(4, (long)cg.rows()); // solve and verify - cg.solveInPlace(actual); + actual.insert(cg.solve(actual)); EXPECT(assert_equal(expected, actual, tol)); } /* ************************************************************************* */ TEST( GaussianConditional, solveTranspose ) { - static const Index _y_=1; /** create small Chordal Bayes Net x <- y * x y d * 1 1 9 @@ -280,43 +245,41 @@ TEST( GaussianConditional, solveTranspose ) { Vector d1(1), d2(1); d1(0) = 9; d2(0) = 5; - Vector tau(1); - tau(0) = 1.0; // define nodes and specify in reverse topological sort (i.e. parents last) - GaussianConditional::shared_ptr Px_y(new GaussianConditional(_x_, d1, R11, _y_, S12, tau)); - GaussianConditional::shared_ptr Py(new GaussianConditional(_y_, d2, R22, tau)); - GaussianBayesNet cbn; - cbn.push_back(Px_y); - cbn.push_back(Py); + GaussianBayesNet cbn = list_of + (GaussianConditional(1, d1, R11, 2, S12)) + (GaussianConditional(1, d2, R22)); // x=R'*y, y=inv(R')*x // 2 = 1 2 // 5 1 1 3 - VectorValues y(vector(2,1)), x(vector(2,1)); - x[_x_] = Vector_(1,2.); - x[_y_] = Vector_(1,5.); - y[_x_] = Vector_(1,2.); - y[_y_] = Vector_(1,3.); + VectorValues + x = map_list_of + (1, Vector_(1,2.)) + (2, Vector_(1,5.)), + y = map_list_of + (1, Vector_(1,2.)) + (2, Vector_(1,3.)); // test functional version - VectorValues actual = backSubstituteTranspose(cbn,x); - CHECK(assert_equal(y,actual)); + VectorValues actual = cbn.backSubstituteTranspose(x); + CHECK(assert_equal(y, actual)); } /* ************************************************************************* */ TEST( GaussianConditional, information ) { // Create R matrix - Matrix R = (Matrix(4,4) << + Matrix R(4,4); R << 1, 2, 3, 4, 0, 5, 6, 7, 0, 0, 8, 9, - 0, 0, 0, 10).finished(); + 0, 0, 0, 10; // Create conditional - GaussianConditional conditional(0, Vector::Zero(4), R, Vector::Constant(4, 1.0)); + GaussianConditional conditional(0, Vector::Zero(4), R); // Expected information matrix (using permuted R) Matrix IExpected = R.transpose() * R; @@ -330,21 +293,21 @@ TEST( GaussianConditional, information ) { TEST( GaussianConditional, isGaussianFactor ) { // Create R matrix - Matrix R = (Matrix(4,4) << + Matrix R(4,4); R << 1, 2, 3, 4, 0, 5, 6, 7, 0, 0, 8, 9, - 0, 0, 0, 10).finished(); + 0, 0, 0, 10; // Create a conditional - GaussianConditional conditional(0, Vector::Zero(4), R, Vector::Constant(4, 1.0)); + GaussianConditional conditional(0, Vector::Zero(4), R); // Expected information matrix computed by conditional Matrix IExpected = conditional.information(); // Expected information matrix computed by a factor - JacobianFactor jf = *conditional.toFactor(); - Matrix IActual = jf.getA(jf.begin()).transpose() * jf.getA(jf.begin()); + JacobianFactor jf = conditional; + Matrix IActual = jf.information(); EXPECT(assert_equal(IExpected, IActual)); } diff --git a/gtsam/linear/tests/testGaussianDensity.cpp b/gtsam/linear/tests/testGaussianDensity.cpp index 7e7ecf5f8..b54fc9fd0 100644 --- a/gtsam/linear/tests/testGaussianDensity.cpp +++ b/gtsam/linear/tests/testGaussianDensity.cpp @@ -30,11 +30,11 @@ TEST(GaussianDensity, constructor) 0., 4.6904); Vector d = Vector_(2, 1.0, 2.0), s = Vector_(2, 3.0, 4.0); - GaussianConditional conditional(1, d, R, s); + GaussianConditional conditional(1, d, R, noiseModel::Diagonal::Sigmas(s)); GaussianDensity copied(conditional); EXPECT(assert_equal(d, copied.get_d())); - EXPECT(assert_equal(s, copied.get_sigmas())); + EXPECT(assert_equal(s, copied.get_model()->sigmas())); } /* ************************************************************************* */ diff --git a/gtsam/linear/tests/testGaussianFactorGraph.cpp b/gtsam/linear/tests/testGaussianFactorGraph.cpp deleted file mode 100644 index bdc64d849..000000000 --- a/gtsam/linear/tests/testGaussianFactorGraph.cpp +++ /dev/null @@ -1,537 +0,0 @@ -/* ---------------------------------------------------------------------------- - - * GTSAM Copyright 2010, Georgia Tech Research Corporation, - * Atlanta, Georgia 30332-0415 - * All Rights Reserved - * Authors: Frank Dellaert, et al. (see THANKS for the full author list) - - * See LICENSE for the license information - - * -------------------------------------------------------------------------- */ - -/** - * @file testGaussianFactor.cpp - * @brief Unit tests for Linear Factor - * @author Christian Potthast - * @author Frank Dellaert - **/ - -#include // for operator += -using namespace boost::assign; - -#include -#include - -#include -#include -#include -#include -#include - -using namespace std; -using namespace gtsam; -using namespace boost; - -static SharedDiagonal - sigma0_1 = noiseModel::Isotropic::Sigma(2,0.1), sigma_02 = noiseModel::Isotropic::Sigma(2,0.2), - constraintModel = noiseModel::Constrained::All(2); - -static GaussianFactorGraph createSimpleGaussianFactorGraph() { - GaussianFactorGraph fg; - SharedDiagonal unit2 = noiseModel::Unit::Create(2); - // linearized prior on x1: c[_x1_]+x1=0 i.e. x1=-c[_x1_] - fg.add(2, 10*eye(2), -1.0*ones(2), unit2); - // odometry between x1 and x2: x2-x1=[0.2;-0.1] - fg.add(2, -10*eye(2), 0, 10*eye(2), Vector_(2, 2.0, -1.0), unit2); - // measurement between x1 and l1: l1-x1=[0.0;0.2] - fg.add(2, -5*eye(2), 1, 5*eye(2), Vector_(2, 0.0, 1.0), unit2); - // measurement between x2 and l1: l1-x2=[-0.2;0.3] - fg.add(0, -5*eye(2), 1, 5*eye(2), Vector_(2, -1.0, 1.5), unit2); - return fg; -} - - -/* ************************************************************************* */ -TEST(GaussianFactorGraph, initialization) { - // Create empty graph - GaussianFactorGraph fg; - SharedDiagonal unit2 = noiseModel::Unit::Create(2); - - fg.add(0, 10*eye(2), -1.0*ones(2), unit2); - fg.add(0, -10*eye(2),1, 10*eye(2), Vector_(2, 2.0, -1.0), unit2); - fg.add(0, -5*eye(2), 2, 5*eye(2), Vector_(2, 0.0, 1.0), unit2); - fg.add(1, -5*eye(2), 2, 5*eye(2), Vector_(2, -1.0, 1.5), unit2); - - EXPECT_LONGS_EQUAL(4, fg.size()); - JacobianFactor factor = *boost::dynamic_pointer_cast(fg[0]); - - // Test sparse, which takes a vector and returns a matrix, used in MATLAB - // Note that this the augmented vector and the RHS is in column 7 - Matrix expectedIJS = Matrix_(3,22, - 1., 2., 1., 2., 3., 4., 3., 4., 3., 4., 5., 6., 5., 6., 5., 6., 7., 8., 7., 8., 7., 8., - 1., 2., 7., 7., 1., 2., 3., 4., 7., 7., 1., 2., 5., 6., 7., 7., 3., 4., 5., 6., 7., 7., - 10., 10., -1., -1., -10., -10., 10., 10., 2., -1., -5., -5., 5., 5., 0., 1., -5., -5., 5., 5., -1., 1.5 - ); - Matrix actualIJS = fg.sparseJacobian_(); - EQUALITY(expectedIJS, actualIJS); -} - -/* ************************************************************************* */ -TEST(GaussianFactorGraph, CombineJacobians) -{ - Matrix A01 = Matrix_(3,3, - 1.0, 0.0, 0.0, - 0.0, 1.0, 0.0, - 0.0, 0.0, 1.0); - Vector b0 = Vector_(3, 1.5, 1.5, 1.5); - Vector s0 = Vector_(3, 1.6, 1.6, 1.6); - - Matrix A10 = Matrix_(3,3, - 2.0, 0.0, 0.0, - 0.0, 2.0, 0.0, - 0.0, 0.0, 2.0); - Matrix A11 = Matrix_(3,3, - -2.0, 0.0, 0.0, - 0.0, -2.0, 0.0, - 0.0, 0.0, -2.0); - Vector b1 = Vector_(3, 2.5, 2.5, 2.5); - Vector s1 = Vector_(3, 2.6, 2.6, 2.6); - - Matrix A21 = Matrix_(3,3, - 3.0, 0.0, 0.0, - 0.0, 3.0, 0.0, - 0.0, 0.0, 3.0); - Vector b2 = Vector_(3, 3.5, 3.5, 3.5); - Vector s2 = Vector_(3, 3.6, 3.6, 3.6); - - GaussianFactorGraph gfg; - gfg.add(1, A01, b0, noiseModel::Diagonal::Sigmas(s0, true)); - gfg.add(0, A10, 1, A11, b1, noiseModel::Diagonal::Sigmas(s1, true)); - gfg.add(1, A21, b2, noiseModel::Diagonal::Sigmas(s2, true)); - - // Convert to Jacobians (inefficient copy of all factors instead of selectively converting only Hessians) - FactorGraph jacobians; - BOOST_FOREACH(const GaussianFactorGraph::sharedFactor& factor, gfg) { - jacobians.push_back(boost::make_shared(*factor)); - } - - // Combine Jacobians into a single dense factor - JacobianFactor actual = *CombineJacobians(jacobians, VariableSlots(gfg)); - - Matrix zero3x3 = zeros(3,3); - Matrix A0 = gtsam::stack(3, &zero3x3, &A10, &zero3x3); - Matrix A1 = gtsam::stack(3, &A01, &A11, &A21); - Vector b = gtsam::concatVectors(3, &b0, &b1, &b2); - Vector sigmas = gtsam::concatVectors(3, &s0, &s1, &s2); - - JacobianFactor expected(0, A0, 1, A1, b, noiseModel::Diagonal::Sigmas(sigmas, true)); - - EXPECT(assert_equal(expected, actual)); -} - -/* ************************************************************************* */ -TEST(GaussianFactor, CombineAndEliminate) -{ - Matrix A01 = Matrix_(3,3, - 1.0, 0.0, 0.0, - 0.0, 1.0, 0.0, - 0.0, 0.0, 1.0); - Vector b0 = Vector_(3, 1.5, 1.5, 1.5); - Vector s0 = Vector_(3, 1.6, 1.6, 1.6); - - Matrix A10 = Matrix_(3,3, - 2.0, 0.0, 0.0, - 0.0, 2.0, 0.0, - 0.0, 0.0, 2.0); - Matrix A11 = Matrix_(3,3, - -2.0, 0.0, 0.0, - 0.0, -2.0, 0.0, - 0.0, 0.0, -2.0); - Vector b1 = Vector_(3, 2.5, 2.5, 2.5); - Vector s1 = Vector_(3, 2.6, 2.6, 2.6); - - Matrix A21 = Matrix_(3,3, - 3.0, 0.0, 0.0, - 0.0, 3.0, 0.0, - 0.0, 0.0, 3.0); - Vector b2 = Vector_(3, 3.5, 3.5, 3.5); - Vector s2 = Vector_(3, 3.6, 3.6, 3.6); - - GaussianFactorGraph gfg; - gfg.add(1, A01, b0, noiseModel::Diagonal::Sigmas(s0, true)); - gfg.add(0, A10, 1, A11, b1, noiseModel::Diagonal::Sigmas(s1, true)); - gfg.add(1, A21, b2, noiseModel::Diagonal::Sigmas(s2, true)); - - Matrix zero3x3 = zeros(3,3); - Matrix A0 = gtsam::stack(3, &A10, &zero3x3, &zero3x3); - Matrix A1 = gtsam::stack(3, &A11, &A01, &A21); - Vector b = gtsam::concatVectors(3, &b1, &b0, &b2); - Vector sigmas = gtsam::concatVectors(3, &s1, &s0, &s2); - - JacobianFactor expectedFactor(0, A0, 1, A1, b, noiseModel::Diagonal::Sigmas(sigmas, true)); - GaussianConditional::shared_ptr expectedBN = expectedFactor.eliminate(1); - - GaussianConditional::shared_ptr actualBN; - GaussianFactor::shared_ptr actualFactor; - boost::tie(actualBN, actualFactor) = EliminateQR(gfg, 1); - JacobianFactor::shared_ptr actualJacobian = boost::dynamic_pointer_cast< - JacobianFactor>(actualFactor); - - EXPECT(assert_equal(*expectedBN, *actualBN)); - EXPECT(assert_equal(expectedFactor, *actualJacobian)); -} - -/* ************************************************************************* */ -TEST(GaussianFactor, eliminateFrontals) -{ - // Augmented Ab test case for whole factor graph - Matrix Ab = Matrix_(14,11, - 4., 0., 1., 4., 1., 0., 3., 6., 8., 8., 1., - 9., 2., 0., 1., 6., 3., 9., 6., 6., 9., 4., - 5., 3., 7., 9., 5., 5., 9., 1., 3., 7., 0., - 5., 6., 5., 7., 9., 4., 0., 1., 1., 3., 5., - 0., 0., 4., 5., 6., 6., 7., 9., 4., 5., 4., - 0., 0., 9., 4., 8., 6., 2., 1., 4., 1., 6., - 0., 0., 6., 0., 4., 2., 4., 0., 1., 9., 6., - 0., 0., 6., 6., 4., 4., 5., 5., 5., 8., 6., - 0., 0., 0., 0., 8., 0., 9., 8., 2., 8., 0., - 0., 0., 0., 0., 0., 9., 4., 6., 3., 2., 0., - 0., 0., 0., 0., 1., 1., 9., 1., 5., 5., 3., - 0., 0., 0., 0., 1., 1., 3., 3., 2., 0., 5., - 0., 0., 0., 0., 0., 0., 0., 0., 2., 4., 6., - 0., 0., 0., 0., 0., 0., 0., 0., 6., 3., 4.); - - // Create first factor (from pieces of Ab) - list > terms1; - - terms1 += - make_pair( 3, Matrix(Ab.block(0, 0, 4, 2))), - make_pair( 5, Matrix(Ab.block(0, 2, 4, 2))), - make_pair( 7, Matrix(Ab.block(0, 4, 4, 2))), - make_pair( 9, Matrix(Ab.block(0, 6, 4, 2))), - make_pair(11, Matrix(Ab.block(0, 8, 4, 2))); - Vector b1 = Ab.col(10).segment(0, 4); - JacobianFactor::shared_ptr factor1(new JacobianFactor(terms1, b1, noiseModel::Isotropic::Sigma(4, 0.5))); - - // Create second factor - list > terms2; - terms2 += - make_pair(5, Matrix(Ab.block(4, 2, 4, 2))), - make_pair(7, Matrix(Ab.block(4, 4, 4, 2))), - make_pair(9, Matrix(Ab.block(4, 6, 4, 2))), - make_pair(11,Matrix(Ab.block(4, 8, 4, 2))); - Vector b2 = Ab.col(10).segment(4, 4); - JacobianFactor::shared_ptr factor2(new JacobianFactor(terms2, b2, noiseModel::Isotropic::Sigma(4, 0.5))); - - // Create third factor - list > terms3; - terms3 += - make_pair(7, Matrix(Ab.block(8, 4, 4, 2))), - make_pair(9, Matrix(Ab.block(8, 6, 4, 2))), - make_pair(11,Matrix(Ab.block(8, 8, 4, 2))); - Vector b3 = Ab.col(10).segment(8, 4); - JacobianFactor::shared_ptr factor3(new JacobianFactor(terms3, b3, noiseModel::Isotropic::Sigma(4, 0.5))); - - // Create fourth factor - list > terms4; - terms4 += - make_pair(11, Matrix(Ab.block(12, 8, 2, 2))); - Vector b4 = Ab.col(10).segment(12, 2); - JacobianFactor::shared_ptr factor4(new JacobianFactor(terms4, b4, noiseModel::Isotropic::Sigma(2, 0.5))); - - // Create factor graph - GaussianFactorGraph factors; - factors.push_back(factor1); - factors.push_back(factor2); - factors.push_back(factor3); - factors.push_back(factor4); - - // extract the dense matrix for the graph - Matrix actualDense = factors.augmentedJacobian(); - EXPECT(assert_equal(2.0 * Ab, actualDense)); - - // Convert to Jacobians, inefficient copy of all factors instead of selectively converting only Hessians - FactorGraph jacobians; - BOOST_FOREACH(const GaussianFactorGraph::sharedFactor& factor, factors) { - jacobians.push_back(boost::make_shared(*factor)); - } - - // Create combined factor - JacobianFactor combined(*CombineJacobians(jacobians, VariableSlots(factors))); - - // Copies factors as they will be eliminated in place - JacobianFactor actualFactor_QR = combined; - JacobianFactor actualFactor_Chol = combined; - - // Expected augmented matrix, both GaussianConditional (first 6 rows) and remaining factor (next 4 rows) - Matrix R = 2.0*Matrix_(11,11, - -12.1244, -5.1962, -5.2786, -8.6603, -10.5573, -5.9385, -11.3820, -7.2581, -8.7427, -13.4440, -5.3611, - 0., 4.6904, 5.0254, 5.5432, 5.5737, 3.0153, -3.0153, -3.5635, -3.9290, -2.7412, 2.1625, - 0., 0., -13.8160, -8.7166, -10.2245, -8.8666, -8.7632, -5.2544, -6.9192, -10.5537, -9.3250, - 0., 0., 0., 6.5033, -1.1453, 1.3179, 2.5768, 5.5503, 3.6524, 1.3491, -2.5676, - 0., 0., 0., 0., -9.6242, -2.1148, -9.3509, -10.5846, -3.5366, -6.8561, -3.2277, - 0., 0., 0., 0., 0., 9.7887, 4.3551, 5.7572, 2.7876, 0.1611, 1.1769, - 0., 0., 0., 0., 0., 0., -11.1139, -0.6521, -2.1943, -7.5529, -0.9081, - 0., 0., 0., 0., 0., 0., 0., -4.6479, -1.9367, -6.5170, -3.7685, - 0., 0., 0., 0., 0., 0., 0., 0., 8.2503, 3.3757, 6.8476, - 0., 0., 0., 0., 0., 0., 0., 0., 0., -5.7095, -0.0090, - 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., -7.1635); - - // Expected conditional on first variable from first 2 rows of R - Matrix R1 = sub(R, 0,2, 0,2); - list > cterms1; - cterms1 += - make_pair(5, sub(R, 0,2, 2,4 )), - make_pair(7, sub(R, 0,2, 4,6 )), - make_pair(9, sub(R, 0,2, 6,8 )), - make_pair(11,sub(R, 0,2, 8,10)); - Vector d1 = R.col(10).segment(0,2); - GaussianConditional::shared_ptr cond1(new GaussianConditional(3, d1, R1, cterms1, ones(2))); - - // Expected conditional on second variable from next 2 rows of R - Matrix R2 = sub(R, 2,4, 2,4); - list > cterms2; - cterms2 += - make_pair(7, sub(R, 2,4, 4,6)), - make_pair(9, sub(R, 2,4, 6,8)), - make_pair(11,sub(R, 2,4, 8,10)); - Vector d2 = R.col(10).segment(2,2); - GaussianConditional::shared_ptr cond2(new GaussianConditional(5, d2, R2, cterms2, ones(2))); - - // Expected conditional on third variable from next 2 rows of R - Matrix R3 = sub(R, 4,6, 4,6); - list > cterms3; - cterms3 += - make_pair(9, sub(R, 4,6, 6,8)), - make_pair(11, sub(R, 4,6, 8,10)); - Vector d3 = R.col(10).segment(4,2); - GaussianConditional::shared_ptr cond3(new GaussianConditional(7, d3, R3, cterms3, ones(2))); - - // Create expected Bayes net fragment from three conditionals above -// GaussianBayesNet expectedFragment; -// expectedFragment.push_back(cond1); -// expectedFragment.push_back(cond2); -// expectedFragment.push_back(cond3); - Index ikeys[] = {3,5,7,9,11}; - std::vector keys(ikeys, ikeys + sizeof(ikeys)/sizeof(Index)); - size_t dims[] = { 2,2,2,2,2,1 }; - size_t height = 11; - VerticalBlockView Rblock(R, dims, dims+6, height); - GaussianConditional::shared_ptr expectedFragment( new GaussianConditional(keys.begin(), keys.end(), 3, - Rblock, ones(6)) ); - - // Get expected matrices for remaining factor - Matrix Ae1 = sub(R, 6,10, 6,8); - Matrix Ae2 = sub(R, 6,10, 8,10); - Vector be = R.col(10).segment(6, 4); - - // Eliminate (3 frontal variables, 6 scalar columns) using QR !!!! - GaussianConditional::shared_ptr actualFragment_QR = actualFactor_QR.eliminate(3); - - EXPECT(assert_equal(*expectedFragment, *actualFragment_QR, 0.001)); - EXPECT(assert_equal(size_t(2), actualFactor_QR.keys().size())); - EXPECT(assert_equal(Index(9), actualFactor_QR.keys()[0])); - EXPECT(assert_equal(Index(11), actualFactor_QR.keys()[1])); - EXPECT(assert_equal(Ae1, actualFactor_QR.getA(actualFactor_QR.begin()), 0.001)); - EXPECT(assert_equal(Ae2, actualFactor_QR.getA(actualFactor_QR.begin()+1), 0.001)); - EXPECT(assert_equal(be, actualFactor_QR.getb(), 0.001)); - EXPECT(assert_equal(ones(4), actualFactor_QR.get_model()->sigmas(), 0.001)); - - // Eliminate (3 frontal variables, 6 scalar columns) using Cholesky !!!! -#ifdef BROKEN - GaussianBayesNet actualFragment_Chol = *actualFactor_Chol.eliminate(3, JacobianFactor::SOLVE_CHOLESKY); - EXPECT(assert_equal(expectedFragment, actualFragment_Chol, 0.001)); - EXPECT(assert_equal(size_t(2), actualFactor_Chol.keys().size())); - EXPECT(assert_equal(Index(9), actualFactor_Chol.keys()[0])); - EXPECT(assert_equal(Index(11), actualFactor_Chol.keys()[1])); - EXPECT(assert_equal(Ae1, actualFactor_Chol.getA(actualFactor_Chol.begin()), 0.001)); //// - EXPECT(linear_dependent(Ae2, actualFactor_Chol.getA(actualFactor_Chol.begin()+1), 0.001)); - EXPECT(assert_equal(be, actualFactor_Chol.getb(), 0.001)); //// - EXPECT(assert_equal(ones(4), actualFactor_Chol.get_sigmas(), 0.001)); -#endif -} - -/* ************************************************************************* */ -TEST(GaussianFactor, permuteWithInverse) -{ - Matrix A1 = Matrix_(2,2, - 1.0, 2.0, - 3.0, 4.0); - Matrix A2 = Matrix_(2,1, - 5.0, - 6.0); - Matrix A3 = Matrix_(2,3, - 7.0, 8.0, 9.0, - 10.0, 11.0, 12.0); - Vector b = Vector_(2, 13.0, 14.0); - - Permutation inversePermutation(6); - inversePermutation[0] = 5; - inversePermutation[1] = 4; - inversePermutation[2] = 3; - inversePermutation[3] = 2; - inversePermutation[4] = 1; - inversePermutation[5] = 0; - - JacobianFactor actual(1, A1, 3, A2, 5, A3, b, noiseModel::Isotropic::Sigma(2, 1.0)); - GaussianFactorGraph actualFG; actualFG.push_back(JacobianFactor::shared_ptr(new JacobianFactor(actual))); - VariableIndex actualIndex(actualFG); - actual.permuteWithInverse(inversePermutation); -// actualIndex.permute(*inversePermutation.inverse()); - - JacobianFactor expected(4, A1, 2, A2, 0, A3, b, noiseModel::Isotropic::Sigma(2, 1.0)); - GaussianFactorGraph expectedFG; expectedFG.push_back(JacobianFactor::shared_ptr(new JacobianFactor(expected))); -// GaussianVariableIndex expectedIndex(expectedFG); - - EXPECT(assert_equal(expected, actual)); -} - -/* ************************************************************************* */ -TEST(GaussianFactorGraph, sparseJacobian) { - // Create factor graph: - // x1 x2 x3 x4 x5 b - // 1 2 3 0 0 4 - // 5 6 7 0 0 8 - // 9 10 0 11 12 13 - // 0 0 0 14 15 16 - - // Expected - NOTE that we transpose this! - Matrix expected = Matrix_(16,3, - 1., 1., 2., - 1., 2., 4., - 1., 3., 6., - 2., 1.,10., - 2., 2.,12., - 2., 3.,14., - 1., 6., 8., - 2., 6.,16., - 3., 1.,18., - 3., 2.,20., - 3., 4.,22., - 3., 5.,24., - 4., 4.,28., - 4., 5.,30., - 3., 6.,26., - 4., 6.,32.).transpose(); - - GaussianFactorGraph gfg; - SharedDiagonal model = noiseModel::Isotropic::Sigma(2, 0.5); - gfg.add(0, Matrix_(2,3, 1., 2., 3., 5., 6., 7.), Vector_(2, 4., 8.), model); - gfg.add(0, Matrix_(2,3, 9.,10., 0., 0., 0., 0.), 1, Matrix_(2,2, 11., 12., 14., 15.), Vector_(2, 13.,16.), model); - - Matrix actual = gfg.sparseJacobian_(); - - EXPECT(assert_equal(expected, actual)); -} - -/* ************************************************************************* */ -TEST(GaussianFactorGraph, matrices) { - // Create factor graph: - // x1 x2 x3 x4 x5 b - // 1 2 3 0 0 4 - // 5 6 7 0 0 8 - // 9 10 0 11 12 13 - // 0 0 0 14 15 16 - - GaussianFactorGraph gfg; - SharedDiagonal model = noiseModel::Unit::Create(2); - gfg.add(0, Matrix_(2,3, 1., 2., 3., 5., 6., 7.), Vector_(2, 4., 8.), model); - gfg.add(0, Matrix_(2,3, 9.,10., 0., 0., 0., 0.), 1, Matrix_(2,2, 11., 12., 14., 15.), Vector_(2, 13.,16.), model); - - Matrix jacobian(4,6); - jacobian << - 1, 2, 3, 0, 0, 4, - 5, 6, 7, 0, 0, 8, - 9,10, 0,11,12,13, - 0, 0, 0,14,15,16; - - Matrix expectedJacobian = jacobian; - Matrix expectedHessian = jacobian.transpose() * jacobian; - Matrix expectedA = jacobian.leftCols(jacobian.cols()-1); - Vector expectedb = jacobian.col(jacobian.cols()-1); - Matrix expectedL = expectedA.transpose() * expectedA; - Vector expectedeta = expectedA.transpose() * expectedb; - - Matrix actualJacobian = gfg.augmentedJacobian(); - Matrix actualHessian = gfg.augmentedHessian(); - Matrix actualA; Vector actualb; boost::tie(actualA,actualb) = gfg.jacobian(); - Matrix actualL; Vector actualeta; boost::tie(actualL,actualeta) = gfg.hessian(); - - EXPECT(assert_equal(expectedJacobian, actualJacobian)); - EXPECT(assert_equal(expectedHessian, actualHessian)); - EXPECT(assert_equal(expectedA, actualA)); - EXPECT(assert_equal(expectedb, actualb)); - EXPECT(assert_equal(expectedL, actualL)); - EXPECT(assert_equal(expectedeta, actualeta)); -} - -/* ************************************************************************* */ -TEST( GaussianFactorGraph, gradient ) -{ - GaussianFactorGraph fg = createSimpleGaussianFactorGraph(); - - // Construct expected gradient - VectorValues expected; - - // 2*f(x) = 100*(x1+c[X(1)])^2 + 100*(x2-x1-[0.2;-0.1])^2 + 25*(l1-x1-[0.0;0.2])^2 + 25*(l1-x2-[-0.2;0.3])^2 - // worked out: df/dx1 = 100*[0.1;0.1] + 100*[0.2;-0.1]) + 25*[0.0;0.2] = [10+20;10-10+5] = [30;5] - expected.insert(1,Vector_(2, 5.0,-12.5)); - expected.insert(2,Vector_(2, 30.0, 5.0)); - expected.insert(0,Vector_(2,-25.0, 17.5)); - - // Check the gradient at delta=0 - VectorValues zero = VectorValues::Zero(expected); - VectorValues actual = gradient(fg, zero); - EXPECT(assert_equal(expected,actual)); - - // Check the gradient at the solution (should be zero) - VectorValues solution = *GaussianSequentialSolver(fg).optimize(); - VectorValues actual2 = gradient(fg, solution); - EXPECT(assert_equal(VectorValues::Zero(solution), actual2)); -} - -/* ************************************************************************* */ -TEST( GaussianFactorGraph, transposeMultiplication ) -{ - GaussianFactorGraph A = createSimpleGaussianFactorGraph(); - - VectorValues e; - e.insert(0, Vector_(2, 0.0, 0.0)); - e.insert(1, Vector_(2,15.0, 0.0)); - e.insert(2, Vector_(2, 0.0,-5.0)); - e.insert(3, Vector_(2,-7.5,-5.0)); - - VectorValues expected; - expected.insert(1, Vector_(2, -37.5,-50.0)); - expected.insert(2, Vector_(2,-150.0, 25.0)); - expected.insert(0, Vector_(2, 187.5, 25.0)); - - VectorValues actual = VectorValues::SameStructure(expected); - transposeMultiply(A, e, actual); - EXPECT(assert_equal(expected,actual)); -} - -/* ************************************************************************* */ -TEST(GaussianFactorGraph, eliminate_empty ) -{ - // eliminate an empty factor - GaussianFactorGraph gfg; - gfg.push_back(boost::make_shared()); - GaussianConditional::shared_ptr actualCG; - GaussianFactorGraph remainingGFG; - boost::tie(actualCG, remainingGFG) = gfg.eliminateOne(0); - - // expected Conditional Gaussian is just a parent-less node with P(x)=1 - GaussianConditional expectedCG(0, Vector(), Matrix(), Vector()); - - // expected remaining graph should be the same as the original, still empty :-) - GaussianFactorGraph expectedLF = gfg; - - // check if the result matches - EXPECT(actualCG->equals(expectedCG)); - EXPECT(remainingGFG.equals(expectedLF)); -} - -/* ************************************************************************* */ -int main() { TestResult tr; return TestRegistry::runAllTests(tr);} -/* ************************************************************************* */ diff --git a/gtsam/linear/tests/testGaussianFactorGraphUnordered.cpp b/gtsam/linear/tests/testGaussianFactorGraphUnordered.cpp new file mode 100644 index 000000000..bc0a0d1ba --- /dev/null +++ b/gtsam/linear/tests/testGaussianFactorGraphUnordered.cpp @@ -0,0 +1,225 @@ +/* ---------------------------------------------------------------------------- + + * GTSAM Copyright 2010, Georgia Tech Research Corporation, + * Atlanta, Georgia 30332-0415 + * All Rights Reserved + * Authors: Frank Dellaert, et al. (see THANKS for the full author list) + + * See LICENSE for the license information + + * -------------------------------------------------------------------------- */ + +/** + * @file testGaussianFactor.cpp + * @brief Unit tests for Linear Factor + * @author Christian Potthast + * @author Frank Dellaert + **/ + +#include // for operator += +using namespace boost::assign; + +#include +#include + +#include +#include +#include +#include +#include +#include +#include + +using namespace std; +using namespace gtsam; + +static SharedDiagonal + sigma0_1 = noiseModel::Isotropic::Sigma(2,0.1), sigma_02 = noiseModel::Isotropic::Sigma(2,0.2), + constraintModel = noiseModel::Constrained::All(2); + +static GaussianFactorGraph createSimpleGaussianFactorGraph() { + GaussianFactorGraph fg; + SharedDiagonal unit2 = noiseModel::Unit::Create(2); + // linearized prior on x1: c[_x1_]+x1=0 i.e. x1=-c[_x1_] + fg += JacobianFactor(2, 10*eye(2), -1.0*ones(2), unit2); + // odometry between x1 and x2: x2-x1=[0.2;-0.1] + fg += JacobianFactor(2, -10*eye(2), 0, 10*eye(2), Vector_(2, 2.0, -1.0), unit2); + // measurement between x1 and l1: l1-x1=[0.0;0.2] + fg += JacobianFactor(2, -5*eye(2), 1, 5*eye(2), Vector_(2, 0.0, 1.0), unit2); + // measurement between x2 and l1: l1-x2=[-0.2;0.3] + fg += JacobianFactor(0, -5*eye(2), 1, 5*eye(2), Vector_(2, -1.0, 1.5), unit2); + return fg; +} + +/* ************************************************************************* */ +TEST(GaussianFactorGraph, initialization) { + // Create empty graph + GaussianFactorGraph fg; + SharedDiagonal unit2 = noiseModel::Unit::Create(2); + + fg += + JacobianFactor(0, 10*eye(2), -1.0*ones(2), unit2), + JacobianFactor(0, -10*eye(2),1, 10*eye(2), Vector_(2, 2.0, -1.0), unit2), + JacobianFactor(0, -5*eye(2), 2, 5*eye(2), Vector_(2, 0.0, 1.0), unit2), + JacobianFactor(1, -5*eye(2), 2, 5*eye(2), Vector_(2, -1.0, 1.5), unit2); + + EXPECT_LONGS_EQUAL(4, (long)fg.size()); + + // Test sparse, which takes a vector and returns a matrix, used in MATLAB + // Note that this the augmented vector and the RHS is in column 7 + Matrix expectedIJS = Matrix_(3,22, + 1., 2., 1., 2., 3., 4., 3., 4., 3., 4., 5., 6., 5., 6., 5., 6., 7., 8., 7., 8., 7., 8., + 1., 2., 7., 7., 1., 2., 3., 4., 7., 7., 1., 2., 5., 6., 7., 7., 3., 4., 5., 6., 7., 7., + 10., 10., -1., -1., -10., -10., 10., 10., 2., -1., -5., -5., 5., 5., 0., 1., -5., -5., 5., 5., -1., 1.5 + ); + Matrix actualIJS = fg.sparseJacobian_(); + EQUALITY(expectedIJS, actualIJS); +} + +/* ************************************************************************* */ +TEST(GaussianFactorGraph, sparseJacobian) { + // Create factor graph: + // x1 x2 x3 x4 x5 b + // 1 2 3 0 0 4 + // 5 6 7 0 0 8 + // 9 10 0 11 12 13 + // 0 0 0 14 15 16 + + // Expected - NOTE that we transpose this! + Matrix expected = Matrix_(16,3, + 1., 1., 2., + 1., 2., 4., + 1., 3., 6., + 2., 1.,10., + 2., 2.,12., + 2., 3.,14., + 1., 6., 8., + 2., 6.,16., + 3., 1.,18., + 3., 2.,20., + 3., 4.,22., + 3., 5.,24., + 4., 4.,28., + 4., 5.,30., + 3., 6.,26., + 4., 6.,32.).transpose(); + + GaussianFactorGraph gfg; + SharedDiagonal model = noiseModel::Isotropic::Sigma(2, 0.5); + gfg.add(0, Matrix_(2,3, 1., 2., 3., 5., 6., 7.), Vector_(2, 4., 8.), model); + gfg.add(0, Matrix_(2,3, 9.,10., 0., 0., 0., 0.), 1, Matrix_(2,2, 11., 12., 14., 15.), Vector_(2, 13.,16.), model); + + Matrix actual = gfg.sparseJacobian_(); + + EXPECT(assert_equal(expected, actual)); +} + +/* ************************************************************************* */ +TEST(GaussianFactorGraph, matrices) { + // Create factor graph: + // x1 x2 x3 x4 x5 b + // 1 2 3 0 0 4 + // 5 6 7 0 0 8 + // 9 10 0 11 12 13 + // 0 0 0 14 15 16 + + GaussianFactorGraph gfg; + SharedDiagonal model = noiseModel::Unit::Create(2); + gfg.add(0, Matrix_(2,3, 1., 2., 3., 5., 6., 7.), Vector_(2, 4., 8.), model); + gfg.add(0, Matrix_(2,3, 9.,10., 0., 0., 0., 0.), 1, Matrix_(2,2, 11., 12., 14., 15.), Vector_(2, 13.,16.), model); + + Matrix jacobian(4,6); + jacobian << + 1, 2, 3, 0, 0, 4, + 5, 6, 7, 0, 0, 8, + 9,10, 0,11,12,13, + 0, 0, 0,14,15,16; + + Matrix expectedJacobian = jacobian; + Matrix expectedHessian = jacobian.transpose() * jacobian; + Matrix expectedA = jacobian.leftCols(jacobian.cols()-1); + Vector expectedb = jacobian.col(jacobian.cols()-1); + Matrix expectedL = expectedA.transpose() * expectedA; + Vector expectedeta = expectedA.transpose() * expectedb; + + Matrix actualJacobian = gfg.augmentedJacobian(); + //Matrix actualHessian = gfg.augmentedHessian(); + Matrix actualA; Vector actualb; boost::tie(actualA,actualb) = gfg.jacobian(); + //Matrix actualL; Vector actualeta; boost::tie(actualL,actualeta) = gfg.hessian(); + + EXPECT(assert_equal(expectedJacobian, actualJacobian)); + //EXPECT(assert_equal(expectedHessian, actualHessian)); + EXPECT(assert_equal(expectedA, actualA)); + EXPECT(assert_equal(expectedb, actualb)); + //EXPECT(assert_equal(expectedL, actualL)); + //EXPECT(assert_equal(expectedeta, actualeta)); +} + +/* ************************************************************************* */ +TEST( GaussianFactorGraph, gradient ) +{ + GaussianFactorGraph fg = createSimpleGaussianFactorGraph(); + + // Construct expected gradient + // 2*f(x) = 100*(x1+c[X(1)])^2 + 100*(x2-x1-[0.2;-0.1])^2 + 25*(l1-x1-[0.0;0.2])^2 + 25*(l1-x2-[-0.2;0.3])^2 + // worked out: df/dx1 = 100*[0.1;0.1] + 100*[0.2;-0.1]) + 25*[0.0;0.2] = [10+20;10-10+5] = [30;5] + VectorValues expected = map_list_of + (1, Vector_(2, 5.0,-12.5)) + (2, Vector_(2, 30.0, 5.0)) + (0, Vector_(2,-25.0, 17.5)); + + // Check the gradient at delta=0 + VectorValues zero = VectorValues::Zero(expected); + VectorValues actual = fg.gradient(zero); + EXPECT(assert_equal(expected, actual)); + + // Check the gradient at the solution (should be zero) + VectorValues solution = fg.optimize(); + VectorValues actual2 = fg.gradient(solution); + EXPECT(assert_equal(VectorValues::Zero(solution), actual2)); +} + +/* ************************************************************************* */ +TEST( GaussianFactorGraph, transposeMultiplication ) +{ + GaussianFactorGraph A = createSimpleGaussianFactorGraph(); + + Errors e; e += + Vector_(2, 0.0, 0.0), + Vector_(2,15.0, 0.0), + Vector_(2, 0.0,-5.0), + Vector_(2,-7.5,-5.0); + + VectorValues expected; + expected.insert(1, Vector_(2, -37.5,-50.0)); + expected.insert(2, Vector_(2,-150.0, 25.0)); + expected.insert(0, Vector_(2, 187.5, 25.0)); + + VectorValues actual = A.transposeMultiply(e); + EXPECT(assert_equal(expected, actual)); +} + +/* ************************************************************************* */ +TEST(GaussianFactorGraph, eliminate_empty ) +{ + // eliminate an empty factor + GaussianFactorGraph gfg; + gfg.add(JacobianFactor()); + GaussianBayesNet::shared_ptr actualBN; + GaussianFactorGraph::shared_ptr remainingGFG; + boost::tie(actualBN, remainingGFG) = gfg.eliminatePartialSequential(Ordering()); + + // expected Bayes net is empty + GaussianBayesNet expectedBN; + + // expected remaining graph should be the same as the original, still containing the empty factor + GaussianFactorGraph expectedLF = gfg; + + // check if the result matches + EXPECT(assert_equal(*actualBN, expectedBN)); + EXPECT(assert_equal(*remainingGFG, expectedLF)); +} + +/* ************************************************************************* */ +int main() { TestResult tr; return TestRegistry::runAllTests(tr);} +/* ************************************************************************* */ diff --git a/gtsam/linear/tests/testGaussianJunctionTree.cpp b/gtsam/linear/tests/testGaussianJunctionTree.cpp deleted file mode 100644 index 71d32cb1e..000000000 --- a/gtsam/linear/tests/testGaussianJunctionTree.cpp +++ /dev/null @@ -1,309 +0,0 @@ -/* ---------------------------------------------------------------------------- - - * GTSAM Copyright 2010, Georgia Tech Research Corporation, - * Atlanta, Georgia 30332-0415 - * All Rights Reserved - * Authors: Frank Dellaert, et al. (see THANKS for the full author list) - - * See LICENSE for the license information - - * -------------------------------------------------------------------------- */ - -/** - * @file testGaussianJunctionTree.cpp - * @date Jul 8, 2010 - * @author Kai Ni - */ - -#include -#include - -#include -#include // for operator += -#include // for operator += -using namespace boost::assign; - -#include -#include -#include -#include -#include -#include - -using namespace std; -using namespace gtsam; - -static const Index x2=0, x1=1, x3=2, x4=3; - -static GaussianFactorGraph createChain() { - - typedef GaussianFactorGraph::sharedFactor Factor; - SharedDiagonal model = noiseModel::Isotropic::Sigma(1, 0.5); - Factor factor1(new JacobianFactor(x2, Matrix_(1,1,1.), x1, Matrix_(1,1,1.), Vector_(1,1.), model)); - Factor factor2(new JacobianFactor(x2, Matrix_(1,1,1.), x3, Matrix_(1,1,1.), Vector_(1,1.), model)); - Factor factor3(new JacobianFactor(x3, Matrix_(1,1,1.), x4, Matrix_(1,1,1.), Vector_(1,1.), model)); - Factor factor4(new JacobianFactor(x4, Matrix_(1,1,1.), Vector_(1,1.), model)); - - GaussianFactorGraph fg; - fg.push_back(factor1); - fg.push_back(factor2); - fg.push_back(factor3); - fg.push_back(factor4); - - return fg; -} - -/* ************************************************************************* */ -/** - * x1 - x2 - x3 - x4 - * x3 x4 - * x2 x1 : x3 - * - * x2 x1 x3 x4 b - * 1 1 1 - * 1 1 1 - * 1 1 1 - * 1 1 - * - * 1 0 0 1 - */ -TEST( GaussianJunctionTree, eliminate ) -{ - GaussianFactorGraph fg = createChain(); - GaussianJunctionTree junctionTree(fg); - BayesTree::sharedClique rootClique = junctionTree.eliminate(&EliminateQR); - - typedef BayesTree::sharedConditional sharedConditional; - Matrix two = Matrix_(1,1,2.); - Matrix one = Matrix_(1,1,1.); - - BayesTree bayesTree_expected; - Index keys_root[] = {x3,x4}; - Matrix rsd_root = Matrix_(2,3, 2., 2., 2., 0., 2., 2.); - size_t dim_root[] = {1, 1, 1}; - sharedConditional root_expected(new GaussianConditional(keys_root, keys_root+2, 2, - VerticalBlockView(rsd_root, dim_root, dim_root+3, 2), ones(2))); - BayesTree::sharedClique rootClique_expected(new BayesTree::Clique(root_expected)); - - Index keys_child[] = {x2,x1,x3}; - Matrix rsd_child = Matrix_(2,4, 2., 1., 1., 2., 0., -1., 1., 0.); - size_t dim_child[] = {1, 1, 1, 1}; - sharedConditional child_expected(new GaussianConditional(keys_child, keys_child+3, 2, - VerticalBlockView(rsd_child, dim_child, dim_child+4, 2), ones(2))); - BayesTree::sharedClique childClique_expected(new BayesTree::Clique(child_expected)); - - bayesTree_expected.insert(rootClique_expected); - bayesTree_expected.insert(childClique_expected); - -// bayesTree_expected.insert(sharedConditional(new GaussianConditional(x4, Vector_(1,2.), two, Vector_(1,1.)))); -// bayesTree_expected.insert(sharedConditional(new GaussianConditional(x3, Vector_(1,2.), two, x4, two, Vector_(1,1.)))); -// bayesTree_expected.insert(sharedConditional(new GaussianConditional(x1, Vector_(1,0.), one*(-1), x3, one, Vector_(1,1.)))); -// bayesTree_expected.insert(sharedConditional(new GaussianConditional(x2, Vector_(1,2.), two, x1, one, x3, one, Vector_(1,1.)))); - CHECK(assert_equal(*bayesTree_expected.root(), *rootClique)); - EXPECT(assert_equal(*(bayesTree_expected.root()->children().front()), *(rootClique->children().front()))); -} - -/* ************************************************************************* */ -TEST( GaussianJunctionTree, GBNConstructor ) -{ - GaussianFactorGraph fg = createChain(); - GaussianJunctionTree jt(fg); - BayesTree::sharedClique root = jt.eliminate(&EliminateQR); - BayesTree expected; - expected.insert(root); - - GaussianBayesNet bn(*GaussianSequentialSolver(fg).eliminate()); - BayesTree actual(bn); - - EXPECT(assert_equal(expected, actual)); -} - -/* ************************************************************************* */ -TEST( GaussianJunctionTree, optimizeMultiFrontal ) -{ - GaussianFactorGraph fg = createChain(); - GaussianJunctionTree tree(fg); - - VectorValues actual = tree.optimize(&EliminateQR); - VectorValues expected(vector(4,1)); - expected[x1] = Vector_(1, 0.); - expected[x2] = Vector_(1, 1.); - expected[x3] = Vector_(1, 0.); - expected[x4] = Vector_(1, 1.); - EXPECT(assert_equal(expected,actual)); -} - -/* ************************************************************************* */ -TEST(GaussianJunctionTree, complicatedMarginal) { - - // Create the conditionals to go in the BayesTree - GaussianConditional::shared_ptr R_1_2(new GaussianConditional( - pair_list_of - (1, (Matrix(3,1) << - 0.2630, - 0, - 0).finished()) - (2, (Matrix(3,2) << - 0.7482, 0.2290, - 0.4505, 0.9133, - 0, 0.1524).finished()) - (5, (Matrix(3,1) << - 0.8258, - 0.5383, - 0.9961).finished()), - 2, (Vector(3) << 0.0782, 0.4427, 0.1067).finished(), ones(3))); - GaussianConditional::shared_ptr R_3_4(new GaussianConditional( - pair_list_of - (3, (Matrix(3,1) << - 0.0540, - 0, - 0).finished()) - (4, (Matrix(3,2) << - 0.9340, 0.4694, - 0.1299, 0.0119, - 0, 0.3371).finished()) - (6, (Matrix(3,2) << - 0.1622, 0.5285, - 0.7943, 0.1656, - 0.3112, 0.6020).finished()), - 2, (Vector(3) << 0.9619, 0.0046, 0.7749).finished(), ones(3))); -// GaussianConditional::shared_ptr R_5_6(new GaussianConditional( -// pair_list_of -// (5, (Matrix(3,1) << -// 0.2435, -// 0, -// 0).finished()) -// (6, (Matrix(3,2) << -// 0.4733, 0.1966, -// 0.9022, 0.0979, -// 0.0, 0.2312).finished()) // Attempted to recreate without permutation -// (7, (Matrix(3,1) << -// 0.5853, -// 1.0589, -// 0.1487).finished()) -// (8, (Matrix(3,2) << -// 0.2858, 0.3804, -// 0.9893, 0.2912, -// 0.4035, 0.4933).finished()), -// 2, (Vector(3) << 0.8173, 0.4164, 0.7671).finished(), ones(3))); - GaussianConditional::shared_ptr R_5_6(new GaussianConditional( - pair_list_of - (5, (Matrix(3,1) << - 0.2435, - 0, - 0).finished()) - (6, (Matrix(3,2) << - 0.4733, 0.1966, - 0.3517, 0.2511, - 0.8308, 0.0).finished()) // NOTE the non-upper-triangular form - // here since this test was written when we had column permutations - // from LDL. The code still works currently (does not enfore - // upper-triangularity in this case) but this test will need to be - // redone if this stops working in the future - (7, (Matrix(3,1) << - 0.5853, - 0.5497, - 0.9172).finished()) - (8, (Matrix(3,2) << - 0.2858, 0.3804, - 0.7572, 0.5678, - 0.7537, 0.0759).finished()), - 2, (Vector(3) << 0.8173, 0.8687, 0.0844).finished(), ones(3))); - GaussianConditional::shared_ptr R_7_8(new GaussianConditional( - pair_list_of - (7, (Matrix(3,1) << - 0.2551, - 0, - 0).finished()) - (8, (Matrix(3,2) << - 0.8909, 0.1386, - 0.9593, 0.1493, - 0, 0.2575).finished()) - (11, (Matrix(3,1) << - 0.8407, - 0.2543, - 0.8143).finished()), - 2, (Vector(3) << 0.3998, 0.2599, 0.8001).finished(), ones(3))); - GaussianConditional::shared_ptr R_9_10(new GaussianConditional( - pair_list_of - (9, (Matrix(3,1) << - 0.7952, - 0, - 0).finished()) - (10, (Matrix(3,2) << - 0.4456, 0.7547, - 0.6463, 0.2760, - 0, 0.6797).finished()) - (11, (Matrix(3,1) << - 0.6551, - 0.1626, - 0.1190).finished()) - (12, (Matrix(3,2) << - 0.4984, 0.5853, - 0.9597, 0.2238, - 0.3404, 0.7513).finished()), - 2, (Vector(3) << 0.4314, 0.9106, 0.1818).finished(), ones(3))); - GaussianConditional::shared_ptr R_11_12(new GaussianConditional( - pair_list_of - (11, (Matrix(3,1) << - 0.0971, - 0, - 0).finished()) - (12, (Matrix(3,2) << - 0.3171, 0.4387, - 0.9502, 0.3816, - 0, 0.7655).finished()), - 2, (Vector(3) << 0.2638, 0.1455, 0.1361).finished(), ones(3))); - - // Gaussian Bayes Tree - typedef BayesTree GaussianBayesTree; - typedef GaussianBayesTree::Clique Clique; - typedef GaussianBayesTree::sharedClique sharedClique; - - // Create Bayes Tree - GaussianBayesTree bt; - bt.insert(sharedClique(new Clique(R_11_12))); - bt.insert(sharedClique(new Clique(R_9_10))); - bt.insert(sharedClique(new Clique(R_7_8))); - bt.insert(sharedClique(new Clique(R_5_6))); - bt.insert(sharedClique(new Clique(R_3_4))); - bt.insert(sharedClique(new Clique(R_1_2))); - - // Marginal on 5 - Matrix expectedCov = (Matrix(1,1) << 236.5166).finished(); - JacobianFactor::shared_ptr actualJacobianChol= boost::dynamic_pointer_cast( - bt.marginalFactor(5, EliminateCholesky)); - JacobianFactor::shared_ptr actualJacobianQR = boost::dynamic_pointer_cast( - bt.marginalFactor(5, EliminateQR)); - CHECK(assert_equal(*actualJacobianChol, *actualJacobianQR)); // Check that Chol and QR obtained marginals are the same - LONGS_EQUAL(1, actualJacobianChol->rows()); - LONGS_EQUAL(1, actualJacobianChol->size()); - LONGS_EQUAL(5, actualJacobianChol->keys()[0]); - Matrix actualA = actualJacobianChol->getA(actualJacobianChol->begin()); - Matrix actualCov = inverse(actualA.transpose() * actualA); - EXPECT(assert_equal(expectedCov, actualCov, 1e-1)); - - // Marginal on 6 -// expectedCov = (Matrix(2,2) << -// 8471.2, 2886.2, -// 2886.2, 1015.8).finished(); - expectedCov = (Matrix(2,2) << - 1015.8, 2886.2, - 2886.2, 8471.2).finished(); - actualJacobianChol = boost::dynamic_pointer_cast( - bt.marginalFactor(6, EliminateCholesky)); - actualJacobianQR = boost::dynamic_pointer_cast( - bt.marginalFactor(6, EliminateQR)); - CHECK(assert_equal(*actualJacobianChol, *actualJacobianQR)); // Check that Chol and QR obtained marginals are the same - LONGS_EQUAL(2, actualJacobianChol->rows()); - LONGS_EQUAL(1, actualJacobianChol->size()); - LONGS_EQUAL(6, actualJacobianChol->keys()[0]); - actualA = actualJacobianChol->getA(actualJacobianChol->begin()); - actualCov = inverse(actualA.transpose() * actualA); - EXPECT(assert_equal(expectedCov, actualCov, 1e1)); - -} - -/* ************************************************************************* */ -int main() { TestResult tr; return TestRegistry::runAllTests(tr);} -/* ************************************************************************* */ diff --git a/gtsam/linear/tests/testHessianFactor.cpp b/gtsam/linear/tests/testHessianFactorUnordered.cpp similarity index 51% rename from gtsam/linear/tests/testHessianFactor.cpp rename to gtsam/linear/tests/testHessianFactorUnordered.cpp index dccbd17da..c2d99026a 100644 --- a/gtsam/linear/tests/testHessianFactor.cpp +++ b/gtsam/linear/tests/testHessianFactorUnordered.cpp @@ -25,6 +25,8 @@ #include #include #include +#include +#include #include #include @@ -36,7 +38,8 @@ using namespace gtsam; const double tol = 1e-5; /* ************************************************************************* */ -TEST(HessianFactor, emptyConstructor) { +TEST(HessianFactor, emptyConstructor) +{ HessianFactor f; DOUBLES_EQUAL(0.0, f.constantTerm(), 1e-9); // Constant term should be zero EXPECT(assert_equal(Vector(), f.linearTerm())); // Linear term should be empty @@ -45,68 +48,39 @@ TEST(HessianFactor, emptyConstructor) { } /* ************************************************************************* */ -TEST(HessianFactor, ConversionConstructor) { +TEST(HessianFactor, ConversionConstructor) +{ + HessianFactor expected(list_of(0)(1), + SymmetricBlockMatrix(list_of(2)(4)(1), Matrix_(7,7, + 125.0000, 0.0, -25.0000, 0.0, -100.0000, 0.0, 25.0000, + 0.0, 125.0000, 0.0, -25.0000, 0.0, -100.0000, -17.5000, + -25.0000, 0.0, 25.0000, 0.0, 0.0, 0.0, -5.0000, + 0.0, -25.0000, 0.0, 25.0000, 0.0, 0.0, 7.5000, + -100.0000, 0.0, 0.0, 0.0, 100.0000, 0.0, -20.0000, + 0.0, -100.0000, 0.0, 0.0, 0.0, 100.0000, 10.0000, + 25.0000, -17.5000, -5.0000, 7.5000, -20.0000, 10.0000, 8.2500))); - HessianFactor expected; - expected.keys_.push_back(0); - expected.keys_.push_back(1); - size_t dims[] = { 2, 4, 1 }; - expected.info_.resize(dims, dims+3, false); - expected.matrix_ = Matrix_(7,7, - 125.0000, 0.0, -25.0000, 0.0, -100.0000, 0.0, 25.0000, - 0.0, 125.0000, 0.0, -25.0000, 0.0, -100.0000, -17.5000, - -25.0000, 0.0, 25.0000, 0.0, 0.0, 0.0, -5.0000, - 0.0, -25.0000, 0.0, 25.0000, 0.0, 0.0, 7.5000, - -100.0000, 0.0, 0.0, 0.0, 100.0000, 0.0, -20.0000, - 0.0, -100.0000, 0.0, 0.0, 0.0, 100.0000, 10.0000, - 25.0000, -17.5000, -5.0000, 7.5000, -20.0000, 10.0000, 8.2500); + JacobianFactor jacobian( + 0, Matrix_(4,2, -1., 0., + +0.,-1., + 1., 0., + +0.,1.), + 1, Matrix_(4,4, 1., 0., 0.00, 0., // f4 + 0., 1., 0.00, 0., // f4 + 0., 0., -1., 0., // f2 + 0., 0., 0.00, -1.), // f2 + Vector_(4, -0.2, 0.3, 0.2, -0.1), + noiseModel::Diagonal::Sigmas(Vector_(4, 0.2, 0.2, 0.1, 0.1))); - // sigmas - double sigma1 = 0.2; - double sigma2 = 0.1; - Vector sigmas = Vector_(4, sigma1, sigma1, sigma2, sigma2); + HessianFactor actual(jacobian); - // the combined linear factor - Matrix Ax2 = Matrix_(4,2, - // x2 - -1., 0., - +0.,-1., - 1., 0., - +0.,1. - ); - - Matrix Al1x1 = Matrix_(4,4, - // l1 x1 - 1., 0., 0.00, 0., // f4 - 0., 1., 0.00, 0., // f4 - 0., 0., -1., 0., // f2 - 0., 0., 0.00,-1. // f2 - ); - - // the RHS - Vector b2(4); - b2(0) = -0.2; - b2(1) = 0.3; - b2(2) = 0.2; - b2(3) = -0.1; - - vector > meas; - meas.push_back(make_pair(0, Ax2)); - meas.push_back(make_pair(1, Al1x1)); - JacobianFactor combined(meas, b2, noiseModel::Diagonal::Sigmas(sigmas)); - - HessianFactor actual(combined); - - VectorValues values(std::vector(dims, dims+2)); - values[0] = Vector_(2, 1.0, 2.0); - values[1] = Vector_(4, 3.0, 4.0, 5.0, 6.0); - - EXPECT_LONGS_EQUAL(2, actual.size()); + VectorValues values = pair_list_of + (0, Vector_(2, 1.0, 2.0)) + (1, Vector_(4, 3.0, 4.0, 5.0, 6.0)); + EXPECT_LONGS_EQUAL(2, (long)actual.size()); EXPECT(assert_equal(expected, actual, 1e-9)); - - // error terms - EXPECT_DOUBLES_EQUAL(combined.error(values), actual.error(values), 1e-9); + EXPECT_DOUBLES_EQUAL(jacobian.error(values), actual.error(values), 1e-9); } /* ************************************************************************* */ @@ -116,29 +90,23 @@ TEST(HessianFactor, Constructor1) Vector g = Vector_(2, -8.0, -9.0); double f = 10.0; - Vector dxv = Vector_(2, 1.5, 2.5); - - vector dims; - dims.push_back(2); - VectorValues dx(dims); - - dx[0] = dxv; + VectorValues dx = pair_list_of + (0, Vector_(2, 1.5, 2.5)); HessianFactor factor(0, G, g, f); // extract underlying parts - Matrix info_matrix = factor.info_.range(0, 1, 0, 1); - EXPECT(assert_equal(Matrix(G), info_matrix)); + EXPECT(assert_equal(G, Matrix(factor.info(factor.begin(), factor.begin())))); EXPECT_DOUBLES_EQUAL(f, factor.constantTerm(), 1e-10); - EXPECT(assert_equal(g, Vector(factor.linearTerm()), 1e-10)); - EXPECT_LONGS_EQUAL(1, factor.size()); + EXPECT(assert_equal(g, Vector(factor.linearTerm()))); + EXPECT_LONGS_EQUAL(1, (long)factor.size()); // error 0.5*(f - 2*x'*g + x'*G*x) double expected = 80.375; double actual = factor.error(dx); - double expected_manual = 0.5 * (f - 2.0 * dxv.dot(g) + dxv.transpose() * G.selfadjointView() * dxv); + double expected_manual = 0.5 * (f - 2.0 * dx[0].dot(g) + dx[0].transpose() * G.selfadjointView() * dx[0]); EXPECT_DOUBLES_EQUAL(expected, expected_manual, 1e-10); - DOUBLES_EQUAL(expected, actual, 1e-10); + EXPECT_DOUBLES_EQUAL(expected, actual, 1e-10); } /* ************************************************************************* */ @@ -154,11 +122,10 @@ TEST(HessianFactor, Constructor1b) double f = dot(g,mu); // Check - Matrix info_matrix = factor.info_.range(0, 1, 0, 1); - EXPECT(assert_equal(Matrix(G), info_matrix)); + EXPECT(assert_equal(G, Matrix(factor.info(factor.begin(), factor.begin())))); EXPECT_DOUBLES_EQUAL(f, factor.constantTerm(), 1e-10); - EXPECT(assert_equal(g, Vector(factor.linearTerm()), 1e-10)); - EXPECT_LONGS_EQUAL(1, factor.size()); + EXPECT(assert_equal(g, Vector(factor.linearTerm()))); + EXPECT_LONGS_EQUAL(1, (long)factor.size()); } /* ************************************************************************* */ @@ -174,13 +141,9 @@ TEST(HessianFactor, Constructor2) Vector dx0 = Vector_(1, 0.5); Vector dx1 = Vector_(2, 1.5, 2.5); - vector dims; - dims.push_back(1); - dims.push_back(2); - VectorValues dx(dims); - - dx[0] = dx0; - dx[1] = dx1; + VectorValues dx = pair_list_of + (0, dx0) + (1, dx1); HessianFactor factor(0, 1, G11, G12, g1, G22, g2, f); @@ -188,7 +151,7 @@ TEST(HessianFactor, Constructor2) double actual = factor.error(dx); DOUBLES_EQUAL(expected, actual, 1e-10); - LONGS_EQUAL(4, factor.rows()); + LONGS_EQUAL(4, (long)factor.rows()); DOUBLES_EQUAL(10.0, factor.constantTerm(), 1e-10); Vector linearExpected(3); linearExpected << g1, g2; @@ -199,11 +162,10 @@ TEST(HessianFactor, Constructor2) EXPECT(assert_equal(G22, factor.info(factor.begin()+1, factor.begin()+1))); // Check case when vector values is larger than factor - dims.push_back(2); - VectorValues dxLarge(dims); - dxLarge[0] = dx0; - dxLarge[1] = dx1; - dxLarge[2] = Vector_(2, 0.1, 0.2); + VectorValues dxLarge = pair_list_of + (0, dx0) + (1, dx1) + (2, Vector_(2, 0.1, 0.2)); EXPECT_DOUBLES_EQUAL(expected, factor.error(dxLarge), 1e-10); } @@ -229,15 +191,10 @@ TEST(HessianFactor, Constructor3) Vector dx1 = Vector_(2, 1.5, 2.5); Vector dx2 = Vector_(3, 1.5, 2.5, 3.5); - vector dims; - dims.push_back(1); - dims.push_back(2); - dims.push_back(3); - VectorValues dx(dims); - - dx[0] = dx0; - dx[1] = dx1; - dx[2] = dx2; + VectorValues dx = pair_list_of + (0, dx0) + (1, dx1) + (2, dx2); HessianFactor factor(0, 1, 2, G11, G12, G13, g1, G22, G23, g2, G33, g3, f); @@ -245,7 +202,7 @@ TEST(HessianFactor, Constructor3) double actual = factor.error(dx); DOUBLES_EQUAL(expected, actual, 1e-10); - LONGS_EQUAL(7, factor.rows()); + LONGS_EQUAL(7, (long)factor.rows()); DOUBLES_EQUAL(10.0, factor.constantTerm(), 1e-10); Vector linearExpected(6); linearExpected << g1, g2, g3; @@ -281,16 +238,10 @@ TEST(HessianFactor, ConstructorNWay) Vector dx1 = Vector_(2, 1.5, 2.5); Vector dx2 = Vector_(3, 1.5, 2.5, 3.5); - vector dims; - dims.push_back(1); - dims.push_back(2); - dims.push_back(3); - VectorValues dx(dims); - - dx[0] = dx0; - dx[1] = dx1; - dx[2] = dx2; - + VectorValues dx = pair_list_of + (0, dx0) + (1, dx1) + (2, dx2); std::vector js; js.push_back(0); js.push_back(1); js.push_back(2); @@ -304,7 +255,7 @@ TEST(HessianFactor, ConstructorNWay) double actual = factor.error(dx); DOUBLES_EQUAL(expected, actual, 1e-10); - LONGS_EQUAL(7, factor.rows()); + LONGS_EQUAL(7, (long)factor.rows()); DOUBLES_EQUAL(10.0, factor.constantTerm(), 1e-10); Vector linearExpected(6); linearExpected << g1, g2, g3; @@ -319,63 +270,7 @@ TEST(HessianFactor, ConstructorNWay) } /* ************************************************************************* */ -TEST_UNSAFE(HessianFactor, CopyConstructor_and_assignment) -{ - Matrix G11 = Matrix_(1,1, 1.0); - Matrix G12 = Matrix_(1,2, 2.0, 4.0); - Matrix G22 = Matrix_(2,2, 3.0, 5.0, 0.0, 6.0); - Vector g1 = Vector_(1, -7.0); - Vector g2 = Vector_(2, -8.0, -9.0); - double f = 10.0; - - Vector dx0 = Vector_(1, 0.5); - Vector dx1 = Vector_(2, 1.5, 2.5); - - vector dims; - dims.push_back(1); - dims.push_back(2); - VectorValues dx(dims); - - dx[0] = dx0; - dx[1] = dx1; - - HessianFactor originalFactor(0, 1, G11, G12, g1, G22, g2, f); - - // Make a copy - HessianFactor copy1(originalFactor); - - double expected = 90.5; - double actual = copy1.error(dx); - - DOUBLES_EQUAL(expected, actual, 1e-10); - LONGS_EQUAL(4, copy1.rows()); - DOUBLES_EQUAL(10.0, copy1.constantTerm(), 1e-10); - - Vector linearExpected(3); linearExpected << g1, g2; - EXPECT(assert_equal(linearExpected, copy1.linearTerm())); - - EXPECT(assert_equal(G11, copy1.info(copy1.begin(), copy1.begin()))); - EXPECT(assert_equal(G12, copy1.info(copy1.begin(), copy1.begin()+1))); - EXPECT(assert_equal(G22, copy1.info(copy1.begin()+1, copy1.begin()+1))); - - // Make a copy using the assignment operator - HessianFactor copy2; - copy2 = HessianFactor(originalFactor); // Make a temporary to make sure copying does not shared references - - actual = copy2.error(dx); - DOUBLES_EQUAL(expected, actual, 1e-10); - LONGS_EQUAL(4, copy2.rows()); - DOUBLES_EQUAL(10.0, copy2.constantTerm(), 1e-10); - - EXPECT(assert_equal(linearExpected, copy2.linearTerm())); - - EXPECT(assert_equal(G11, copy2.info(copy2.begin(), copy2.begin()))); - EXPECT(assert_equal(G12, copy2.info(copy2.begin(), copy2.begin()+1))); - EXPECT(assert_equal(G22, copy2.info(copy2.begin()+1, copy2.begin()+1))); -} - -/* ************************************************************************* */ -TEST_UNSAFE(HessianFactor, CombineAndEliminate) +TEST(HessianFactor, CombineAndEliminate) { Matrix A01 = Matrix_(3,3, 1.0, 0.0, 0.0, @@ -416,29 +311,18 @@ TEST_UNSAFE(HessianFactor, CombineAndEliminate) // create a full, uneliminated version of the factor JacobianFactor expectedFactor(0, A0, 1, A1, b, noiseModel::Diagonal::Sigmas(sigmas, true)); - // perform elimination - GaussianConditional::shared_ptr expectedBN = expectedFactor.eliminate(1); - - // create expected Hessian after elimination - HessianFactor expectedCholeskyFactor(expectedFactor); - - // Convert all factors to hessians - FactorGraph hessians; - BOOST_FOREACH(const GaussianFactorGraph::sharedFactor& factor, gfg) { - if(boost::shared_ptr hf = boost::dynamic_pointer_cast(factor)) - hessians.push_back(hf); - else if(boost::shared_ptr jf = boost::dynamic_pointer_cast(factor)) - hessians.push_back(boost::make_shared(*jf)); - else - CHECK(false); - } + // perform elimination on jacobian + GaussianConditional::shared_ptr expectedConditional; + JacobianFactor::shared_ptr expectedRemainingFactor; + boost::tie(expectedConditional, expectedRemainingFactor) = expectedFactor.eliminate(Ordering(list_of(0))); // Eliminate - GaussianFactorGraph::EliminationResult actualCholesky = EliminateCholesky(gfg, 1); - HessianFactor::shared_ptr actualFactor = boost::dynamic_pointer_cast(actualCholesky.second); + GaussianConditional::shared_ptr actualConditional; + HessianFactor::shared_ptr actualCholeskyFactor; + boost::tie(actualConditional, actualCholeskyFactor) = EliminateCholesky(gfg, Ordering(list_of(0))); - EXPECT(assert_equal(*expectedBN, *actualCholesky.first, 1e-6)); - EXPECT(assert_equal(expectedCholeskyFactor, *actualFactor, 1e-6)); + EXPECT(assert_equal(*expectedConditional, *actualConditional, 1e-6)); + EXPECT(assert_equal(HessianFactor(*expectedRemainingFactor), *actualCholeskyFactor, 1e-6)); } /* ************************************************************************* */ @@ -480,13 +364,10 @@ TEST(HessianFactor, eliminate2 ) // eliminate the combined factor HessianFactor::shared_ptr combinedLF_Chol(new HessianFactor(combined)); - FactorGraph combinedLFG_Chol; - combinedLFG_Chol.push_back(combinedLF_Chol); + GaussianFactorGraph combinedLFG_Chol = list_of(combinedLF_Chol); - GaussianFactorGraph::EliminationResult actual_Chol = EliminateCholesky( - combinedLFG_Chol, 1); - HessianFactor::shared_ptr actualFactor = boost::dynamic_pointer_cast< - HessianFactor>(actual_Chol.second); + std::pair actual_Chol = + EliminateCholesky(combinedLFG_Chol, Ordering(list_of(0))); // create expected Conditional Gaussian double oldSigma = 0.0894427; // from when R was made unit @@ -499,8 +380,8 @@ TEST(HessianFactor, eliminate2 ) +0.00,-0.20,+0.00,-0.80 )/oldSigma; Vector d = Vector_(2,0.2,-0.14)/oldSigma; - GaussianConditional expectedCG(0,d,R11,1,S12,ones(2)); - EXPECT(assert_equal(expectedCG,*actual_Chol.first,1e-4)); + GaussianConditional expectedCG(0, d, R11, 1, S12); + EXPECT(assert_equal(expectedCG, *actual_Chol.first, 1e-4)); // the expected linear factor double sigma = 0.2236; @@ -511,89 +392,7 @@ TEST(HessianFactor, eliminate2 ) )/sigma; Vector b1 = Vector_(2,0.0,0.894427); JacobianFactor expectedLF(1, Bl1x1, b1, noiseModel::Isotropic::Sigma(2,1.0)); - EXPECT(assert_equal(HessianFactor(expectedLF), *actualFactor, 1.5e-3)); -} - -/* ************************************************************************* */ -TEST(HessianFactor, eliminateUnsorted) { - - JacobianFactor::shared_ptr factor1( - new JacobianFactor(0, - Matrix_(3,3, - 44.7214, 0.0, 0.0, - 0.0, 44.7214, 0.0, - 0.0, 0.0, 44.7214), - 1, - Matrix_(3,3, - -0.179168, -44.721, 0.717294, - 44.721, -0.179168, -44.9138, - 0.0, 0.0, -44.7214), - Vector_(3, 1.98916e-17, -4.96503e-15, -7.75792e-17), - noiseModel::Unit::Create(3))); - HessianFactor::shared_ptr unsorted_factor2( - new HessianFactor(JacobianFactor(0, - Matrix_(6,3, - 25.8367, 0.1211, 0.0593, - 0.0, 23.4099, 30.8733, - 0.0, 0.0, 25.8729, - 0.0, 0.0, 0.0, - 0.0, 0.0, 0.0, - 0.0, 0.0, 0.0), - 1, - Matrix_(6,3, - 25.7429, -1.6897, 0.4587, - 1.6400, 23.3095, -8.4816, - 0.0034, 0.0509, -25.7855, - 0.9997, -0.0002, 0.0824, - 0.0, 0.9973, 0.9517, - 0.0, 0.0, 0.9973), - Vector_(6, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0), - noiseModel::Unit::Create(6)))); - Permutation permutation(2); - permutation[0] = 1; - permutation[1] = 0; - unsorted_factor2->permuteWithInverse(permutation); - - HessianFactor::shared_ptr sorted_factor2( - new HessianFactor(JacobianFactor(0, - Matrix_(6,3, - 25.7429, -1.6897, 0.4587, - 1.6400, 23.3095, -8.4816, - 0.0034, 0.0509, -25.7855, - 0.9997, -0.0002, 0.0824, - 0.0, 0.9973, 0.9517, - 0.0, 0.0, 0.9973), - 1, - Matrix_(6,3, - 25.8367, 0.1211, 0.0593, - 0.0, 23.4099, 30.8733, - 0.0, 0.0, 25.8729, - 0.0, 0.0, 0.0, - 0.0, 0.0, 0.0, - 0.0, 0.0, 0.0), - Vector_(6, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0), - noiseModel::Unit::Create(6)))); - - GaussianFactorGraph sortedGraph; -// sortedGraph.push_back(factor1); - sortedGraph.push_back(sorted_factor2); - - GaussianFactorGraph unsortedGraph; -// unsortedGraph.push_back(factor1); - unsortedGraph.push_back(unsorted_factor2); - - GaussianConditional::shared_ptr expected_bn; - GaussianFactor::shared_ptr expected_factor; - boost::tie(expected_bn, expected_factor) = - EliminatePreferCholesky(sortedGraph, 1); - - GaussianConditional::shared_ptr actual_bn; - GaussianFactor::shared_ptr actual_factor; - boost::tie(actual_bn, actual_factor) = - EliminatePreferCholesky(unsortedGraph, 1); - - EXPECT(assert_equal(*expected_bn, *actual_bn, 1e-10)); - EXPECT(assert_equal(*expected_factor, *actual_factor, 1e-10)); + EXPECT(assert_equal(HessianFactor(expectedLF), *actual_Chol.second, 1.5e-3)); } /* ************************************************************************* */ @@ -612,8 +411,7 @@ TEST(HessianFactor, combine) { Vector b = Vector_(2, 2.23606798,-1.56524758); SharedDiagonal model = noiseModel::Diagonal::Sigmas(ones(2)); GaussianFactor::shared_ptr f(new JacobianFactor(0, A0, 1, A1, 2, A2, b, model)); - FactorGraph factors; - factors.push_back(f); + GaussianFactorGraph factors = list_of(f); // Form Ab' * Ab HessianFactor actual(factors); @@ -627,9 +425,10 @@ TEST(HessianFactor, combine) { 0.0, -100.0000, 0.0, 20.0000, 0.0, 80.0000, 14.0000, 25.0000, -17.5000, -5.0000, 3.5000, -20.0000, 14.0000, 7.4500); EXPECT(assert_equal(Matrix(expected.triangularView()), - Matrix(actual.matrix_.triangularView()), tol)); + Matrix(actual.matrixObject().full().triangularView()), tol)); } + /* ************************************************************************* */ int main() { TestResult tr; return TestRegistry::runAllTests(tr);} /* ************************************************************************* */ diff --git a/gtsam/linear/tests/testJacobianFactor.cpp b/gtsam/linear/tests/testJacobianFactor.cpp deleted file mode 100644 index fe13fdeac..000000000 --- a/gtsam/linear/tests/testJacobianFactor.cpp +++ /dev/null @@ -1,468 +0,0 @@ -/* ---------------------------------------------------------------------------- - - * GTSAM Copyright 2010, Georgia Tech Research Corporation, - * Atlanta, Georgia 30332-0415 - * All Rights Reserved - * Authors: Frank Dellaert, et al. (see THANKS for the full author list) - - * See LICENSE for the license information - - * -------------------------------------------------------------------------- */ - -/** - * @file testJacobianFactor.cpp - * @brief Unit tests for Linear Factor - * @author Christian Potthast - * @author Frank Dellaert - **/ - -#include -#include - -#include -#include -#include -#include - -using namespace std; -using namespace gtsam; - -static const Index _x0_=0, _x1_=1, _x2_=2, _x_=5, _y_=6, _l11_=8; - -static SharedDiagonal constraintModel = noiseModel::Constrained::All(2); - -/* ************************************************************************* */ -TEST(JacobianFactor, constructor) -{ - Vector b = Vector_(3, 1., 2., 3.); - SharedDiagonal noise = noiseModel::Diagonal::Sigmas(Vector_(3,1.,1.,1.)); - std::list > terms; - terms.push_back(make_pair(_x0_, eye(3))); - terms.push_back(make_pair(_x1_, 2.*eye(3))); - JacobianFactor actual(terms, b, noise); - JacobianFactor expected(_x0_, eye(3), _x1_, 2.*eye(3), b, noise); - EXPECT(assert_equal(expected, actual)); -} - -/* ************************************************************************* */ -TEST(JacobianFactor, constructor2) -{ - Vector b = Vector_(3, 1., 2., 3.); - SharedDiagonal noise = noiseModel::Diagonal::Sigmas(Vector_(3,1.,1.,1.)); - std::list > terms; - terms.push_back(make_pair(_x0_, eye(3))); - terms.push_back(make_pair(_x1_, 2.*eye(3))); - const JacobianFactor actual(terms, b, noise); - - JacobianFactor::const_iterator key0 = actual.begin(); - JacobianFactor::const_iterator key1 = key0 + 1; - EXPECT(assert_equal(*key0, _x0_)); - EXPECT(assert_equal(*key1, _x1_)); - EXPECT(!actual.empty()); - EXPECT_LONGS_EQUAL(3, actual.Ab_.nBlocks()); - - Matrix actualA0 = actual.getA(key0); - Matrix actualA1 = actual.getA(key1); - Vector actualb = actual.getb(); - - EXPECT(assert_equal(eye(3), actualA0)); - EXPECT(assert_equal(2.*eye(3), actualA1)); - EXPECT(assert_equal(b, actualb)); -} - -/* ************************************************************************* */ - -JacobianFactor construct() { - Matrix A = Matrix_(2,2, 1.,2.,3.,4.); - Vector b = Vector_(2, 1.0, 2.0); - SharedDiagonal s = noiseModel::Diagonal::Sigmas(Vector_(2, 3.0, 4.0)); - JacobianFactor::shared_ptr shared( - new JacobianFactor(0, A, b, s)); - return *shared; -} - -TEST(JacobianFactor, return_value) -{ - Matrix A = Matrix_(2,2, 1.,2.,3.,4.); - Vector b = Vector_(2, 1.0, 2.0); - SharedDiagonal s = noiseModel::Diagonal::Sigmas(Vector_(2, 3.0, 4.0)); - JacobianFactor copied = construct(); - EXPECT(assert_equal(b, copied.getb())); - EXPECT(assert_equal(*s, *copied.get_model())); - EXPECT(assert_equal(A, copied.getA(copied.begin()))); -} - -/* ************************************************************************* */ -TEST(JabobianFactor, Hessian_conversion) { - HessianFactor hessian(0, (Matrix(4,4) << - 1.57, 2.695, -1.1, -2.35, - 2.695, 11.3125, -0.65, -10.225, - -1.1, -0.65, 1, 0.5, - -2.35, -10.225, 0.5, 9.25).finished(), - (Vector(4) << -7.885, -28.5175, 2.75, 25.675).finished(), - 73.1725); - - JacobianFactor expected(0, (Matrix(2,4) << - 1.2530, 2.1508, -0.8779, -1.8755, - 0, 2.5858, 0.4789, -2.3943).finished(), - (Vector(2) << -6.2929, -5.7941).finished(), - noiseModel::Unit::Create(2)); - - JacobianFactor actual(hessian); - - EXPECT(assert_equal(expected, actual, 1e-3)); -} - -/* ************************************************************************* */ -TEST( JacobianFactor, constructor_combined){ - double sigma1 = 0.0957; - Matrix A11(2,2); - A11(0,0) = 1; A11(0,1) = 0; - A11(1,0) = 0; A11(1,1) = 1; - Vector b(2); - b(0) = 2; b(1) = -1; - JacobianFactor::shared_ptr f1(new JacobianFactor(0, A11, b*sigma1, noiseModel::Isotropic::Sigma(2,sigma1))); - - double sigma2 = 0.5; - A11(0,0) = 1; A11(0,1) = 0; - A11(1,0) = 0; A11(1,1) = -1; - b(0) = 4 ; b(1) = -5; - JacobianFactor::shared_ptr f2(new JacobianFactor(0, A11, b*sigma2, noiseModel::Isotropic::Sigma(2,sigma2))); - - double sigma3 = 0.25; - A11(0,0) = 1; A11(0,1) = 0; - A11(1,0) = 0; A11(1,1) = -1; - b(0) = 3 ; b(1) = -88; - JacobianFactor::shared_ptr f3(new JacobianFactor(0, A11, b*sigma3, noiseModel::Isotropic::Sigma(2,sigma3))); - - // TODO: find a real sigma value for this example - double sigma4 = 0.1; - A11(0,0) = 6; A11(0,1) = 0; - A11(1,0) = 0; A11(1,1) = 7; - b(0) = 5 ; b(1) = -6; - JacobianFactor::shared_ptr f4(new JacobianFactor(0, A11*sigma4, b*sigma4, noiseModel::Isotropic::Sigma(2,sigma4))); - - GaussianFactorGraph lfg; - lfg.push_back(f1); - lfg.push_back(f2); - lfg.push_back(f3); - lfg.push_back(f4); - JacobianFactor combined(lfg); - - Vector sigmas = Vector_(8, sigma1, sigma1, sigma2, sigma2, sigma3, sigma3, sigma4, sigma4); - Matrix A22(8,2); - A22(0,0) = 1; A22(0,1) = 0; - A22(1,0) = 0; A22(1,1) = 1; - A22(2,0) = 1; A22(2,1) = 0; - A22(3,0) = 0; A22(3,1) = -1; - A22(4,0) = 1; A22(4,1) = 0; - A22(5,0) = 0; A22(5,1) = -1; - A22(6,0) = 0.6; A22(6,1) = 0; - A22(7,0) = 0; A22(7,1) = 0.7; - Vector exb(8); - exb(0) = 2*sigma1 ; exb(1) = -1*sigma1; exb(2) = 4*sigma2 ; exb(3) = -5*sigma2; - exb(4) = 3*sigma3 ; exb(5) = -88*sigma3; exb(6) = 5*sigma4 ; exb(7) = -6*sigma4; - - vector > meas; - meas.push_back(make_pair(0, A22)); - JacobianFactor expected(meas, exb, noiseModel::Diagonal::Sigmas(sigmas)); - EXPECT(assert_equal(expected,combined)); -} - -/* ************************************************************************* */ -TEST(JacobianFactor, linearFactorN){ - Matrix I = eye(2); - GaussianFactorGraph f; - SharedDiagonal model = noiseModel::Isotropic::Sigma(2,1.0); - f.push_back(JacobianFactor::shared_ptr(new JacobianFactor(0, I, Vector_(2, 10.0, 5.0), model))); - f.push_back(JacobianFactor::shared_ptr(new JacobianFactor(0, -10 * I, 1, 10 * I, Vector_(2, 1.0, -2.0), model))); - f.push_back(JacobianFactor::shared_ptr(new JacobianFactor(1, -10 * I, 2, 10 * I, Vector_(2, 1.5, -1.5), model))); - f.push_back(JacobianFactor::shared_ptr(new JacobianFactor(2, -10 * I, 3, 10 * I, Vector_(2, 2.0, -1.0), model))); - - JacobianFactor combinedFactor(f); - - vector > combinedMeasurement; - combinedMeasurement.push_back(make_pair(0, Matrix_(8,2, - 1.0, 0.0, - 0.0, 1.0, - -10.0, 0.0, - 0.0,-10.0, - 0.0, 0.0, - 0.0, 0.0, - 0.0, 0.0, - 0.0, 0.0))); - combinedMeasurement.push_back(make_pair(1, Matrix_(8,2, - 0.0, 0.0, - 0.0, 0.0, - 10.0, 0.0, - 0.0, 10.0, - -10.0, 0.0, - 0.0,-10.0, - 0.0, 0.0, - 0.0, 0.0))); - combinedMeasurement.push_back(make_pair(2, Matrix_(8,2, - 0.0, 0.0, - 0.0, 0.0, - 0.0, 0.0, - 0.0, 0.0, - 10.0, 0.0, - 0.0, 10.0, - -10.0, 0.0, - 0.0,-10.0))); - combinedMeasurement.push_back(make_pair(3, Matrix_(8,2, - 0.0, 0.0, - 0.0, 0.0, - 0.0, 0.0, - 0.0, 0.0, - 0.0, 0.0, - 0.0, 0.0, - 10.0, 0.0, - 0.0,10.0))); - Vector b = Vector_(8, - 10.0, 5.0, 1.0, -2.0, 1.5, -1.5, 2.0, -1.0); - - Vector sigmas = repeat(8,1.0); - JacobianFactor expected(combinedMeasurement, b, noiseModel::Diagonal::Sigmas(sigmas)); - EXPECT(assert_equal(expected,combinedFactor)); -} - -/* ************************************************************************* */ -TEST(JacobianFactor, error) { - Vector b = Vector_(3, 1., 2., 3.); - SharedDiagonal noise = noiseModel::Diagonal::Sigmas(Vector_(3,2.,2.,2.)); - std::list > terms; - terms.push_back(make_pair(_x0_, eye(3))); - terms.push_back(make_pair(_x1_, 2.*eye(3))); - const JacobianFactor jf(terms, b, noise); - - VectorValues values(2, 3); - values[0] = Vector_(3, 1.,2.,3.); - values[1] = Vector_(3, 4.,5.,6.); - - Vector expected_unwhitened = Vector_(3, 8., 10., 12.); - Vector actual_unwhitened = jf.unweighted_error(values); - EXPECT(assert_equal(expected_unwhitened, actual_unwhitened)); - - Vector expected_whitened = Vector_(3, 4., 5., 6.); - Vector actual_whitened = jf.error_vector(values); - EXPECT(assert_equal(expected_whitened, actual_whitened)); - - // check behavior when there are more values than in this factor - VectorValues largeValues(3, 3); - largeValues[0] = Vector_(3, 1.,2.,3.); - largeValues[1] = Vector_(3, 4.,5.,6.); - largeValues[2] = Vector_(3, 7.,8.,9.); - - EXPECT(assert_equal(expected_unwhitened, jf.unweighted_error(largeValues))); - EXPECT(assert_equal(expected_whitened, jf.error_vector(largeValues))); -} - -/* ************************************************************************* */ -TEST(JacobianFactor, operators ) -{ - SharedDiagonal sigma0_1 = noiseModel::Isotropic::Sigma(2,0.1); - - Matrix I = eye(2); - Vector b = Vector_(2,0.2,-0.1); - JacobianFactor lf(_x1_, -I, _x2_, I, b, sigma0_1); - - VectorValues c; - c.insert(_x1_, Vector_(2,10.,20.)); - c.insert(_x2_, Vector_(2,30.,60.)); - - // test A*x - Vector expectedE = Vector_(2,200.,400.), e = lf*c; - EXPECT(assert_equal(expectedE,e)); - - // test A^e - VectorValues expectedX; - expectedX.insert(_x1_, Vector_(2,-2000.,-4000.)); - expectedX.insert(_x2_, Vector_(2, 2000., 4000.)); - VectorValues actualX = VectorValues::Zero(expectedX); - lf.transposeMultiplyAdd(1.0, e, actualX); - EXPECT(assert_equal(expectedX, actualX)); -} - -/* ************************************************************************* */ -TEST(JacobianFactor, eliminate2 ) -{ - // sigmas - double sigma1 = 0.2; - double sigma2 = 0.1; - Vector sigmas = Vector_(4, sigma1, sigma1, sigma2, sigma2); - - // the combined linear factor - Matrix Ax2 = Matrix_(4,2, - // x2 - -1., 0., - +0.,-1., - 1., 0., - +0.,1. - ); - - Matrix Al1x1 = Matrix_(4,4, - // l1 x1 - 1., 0., 0.00, 0., // f4 - 0., 1., 0.00, 0., // f4 - 0., 0., -1., 0., // f2 - 0., 0., 0.00,-1. // f2 - ); - - // the RHS - Vector b2(4); - b2(0) = -0.2; - b2(1) = 0.3; - b2(2) = 0.2; - b2(3) = -0.1; - - vector > meas; - meas.push_back(make_pair(_x2_, Ax2)); - meas.push_back(make_pair(_l11_, Al1x1)); - JacobianFactor combined(meas, b2, noiseModel::Diagonal::Sigmas(sigmas)); - - // eliminate the combined factor - GaussianConditional::shared_ptr actualCG_QR; - JacobianFactor::shared_ptr actualLF_QR(new JacobianFactor(combined)); - actualCG_QR = actualLF_QR->eliminateFirst(); - - // create expected Conditional Gaussian - double oldSigma = 0.0894427; // from when R was made unit - Matrix R11 = Matrix_(2,2, - 1.00, 0.00, - 0.00, 1.00 - )/oldSigma; - Matrix S12 = Matrix_(2,4, - -0.20, 0.00,-0.80, 0.00, - +0.00,-0.20,+0.00,-0.80 - )/oldSigma; - Vector d = Vector_(2,0.2,-0.14)/oldSigma; - GaussianConditional expectedCG(_x2_,d,R11,_l11_,S12,ones(2)); - - EXPECT_LONGS_EQUAL(0, actualCG_QR->rsd().firstBlock()); - EXPECT_LONGS_EQUAL(0, actualCG_QR->rsd().rowStart()); - EXPECT_LONGS_EQUAL(2, actualCG_QR->rsd().rowEnd()); - EXPECT_LONGS_EQUAL(3, actualCG_QR->rsd().nBlocks()); - EXPECT(assert_equal(expectedCG,*actualCG_QR,1e-4)); - - // the expected linear factor - double sigma = 0.2236; - Matrix Bl1x1 = Matrix_(2,4, - // l1 x1 - 1.00, 0.00, -1.00, 0.00, - 0.00, 1.00, +0.00, -1.00 - )/sigma; - Vector b1 = Vector_(2,0.0,0.894427); - JacobianFactor expectedLF(_l11_, Bl1x1, b1, noiseModel::Isotropic::Sigma(2,1.0)); - EXPECT(assert_equal(expectedLF,*actualLF_QR,1e-3)); -} - -/* ************************************************************************* */ -TEST(JacobianFactor, default_error ) -{ - JacobianFactor f; - vector dims; - VectorValues c(dims); - double actual = f.error(c); - EXPECT(actual==0.0); -} - -//* ************************************************************************* */ -TEST(JacobianFactor, empty ) -{ - // create an empty factor - JacobianFactor f; - EXPECT(f.empty()==true); -} - -/* ************************************************************************* */ -// small aux. function to print out lists of anything -template -void print(const list& i) { - copy(i.begin(), i.end(), ostream_iterator (cout, ",")); - cout << endl; -} - -/* ************************************************************************* */ -TEST(JacobianFactor, CONSTRUCTOR_GaussianConditional ) -{ - Matrix R11 = eye(2); - Matrix S12 = Matrix_(2,2, - -0.200001, 0.00, - +0.00,-0.200001 - ); - Vector d(2); d(0) = 2.23607; d(1) = -1.56525; - Vector sigmas =repeat(2,0.29907); - GaussianConditional::shared_ptr CG(new GaussianConditional(_x2_,d,R11,_l11_,S12,sigmas)); - - // Call the constructor we are testing ! - JacobianFactor actualLF(*CG); - - JacobianFactor expectedLF(_x2_,R11,_l11_,S12,d, noiseModel::Diagonal::Sigmas(sigmas)); - EXPECT(assert_equal(expectedLF,actualLF,1e-5)); -} - -/* ************************************************************************* */ -TEST ( JacobianFactor, constraint_eliminate1 ) -{ - // construct a linear constraint - Vector v(2); v(0)=1.2; v(1)=3.4; - Index key = _x0_; - JacobianFactor lc(key, eye(2), v, constraintModel); - - // eliminate it - GaussianConditional::shared_ptr actualCG; - JacobianFactor::shared_ptr actualLF(new JacobianFactor(lc)); - actualCG = actualLF->eliminateFirst(); - - // verify linear factor - EXPECT(actualLF->size() == 0); - - // verify conditional Gaussian - Vector sigmas = Vector_(2, 0.0, 0.0); - GaussianConditional expCG(_x0_, v, eye(2), sigmas); - EXPECT(assert_equal(expCG, *actualCG)); -} - -/* ************************************************************************* */ -TEST ( JacobianFactor, constraint_eliminate2 ) -{ - // Construct a linear constraint - // RHS - Vector b(2); b(0)=3.0; b(1)=4.0; - - // A1 - invertible - Matrix A1(2,2); - A1(0,0) = 1.0 ; A1(0,1) = 2.0; - A1(1,0) = 2.0 ; A1(1,1) = 1.0; - - // A2 - not invertible - Matrix A2(2,2); - A2(0,0) = 1.0 ; A2(0,1) = 2.0; - A2(1,0) = 2.0 ; A2(1,1) = 4.0; - - JacobianFactor lc(_x_, A1, _y_, A2, b, constraintModel); - - // eliminate x and verify results - GaussianConditional::shared_ptr actualCG; - JacobianFactor::shared_ptr actualLF(new JacobianFactor(lc)); - actualCG = actualLF->eliminateFirst(); - - // LF should be null - JacobianFactor expectedLF; - EXPECT(assert_equal(*actualLF, expectedLF)); - - // verify CG - Matrix R = Matrix_(2, 2, - 1.0, 2.0, - 0.0, 1.0); - Matrix S = Matrix_(2,2, - 1.0, 2.0, - 0.0, 0.0); - Vector d = Vector_(2, 3.0, 0.6666); - GaussianConditional expectedCG(_x_, d, R, _y_, S, zero(2)); - EXPECT(assert_equal(expectedCG, *actualCG, 1e-4)); -} - -/* ************************************************************************* */ -int main() { TestResult tr; return TestRegistry::runAllTests(tr);} -/* ************************************************************************* */ diff --git a/gtsam/linear/tests/testJacobianFactorUnordered.cpp b/gtsam/linear/tests/testJacobianFactorUnordered.cpp new file mode 100644 index 000000000..b4d51794e --- /dev/null +++ b/gtsam/linear/tests/testJacobianFactorUnordered.cpp @@ -0,0 +1,541 @@ +/* ---------------------------------------------------------------------------- + + * GTSAM Copyright 2010, Georgia Tech Research Corporation, + * Atlanta, Georgia 30332-0415 + * All Rights Reserved + * Authors: Frank Dellaert, et al. (see THANKS for the full author list) + + * See LICENSE for the license information + + * -------------------------------------------------------------------------- */ + +/** + * @file testJacobianFactor.cpp + * @brief Unit tests for Linear Factor + * @author Christian Potthast + * @author Frank Dellaert + **/ + +#include +#include + +#include +#include +#include +#include + +#include +#include +#include + +using namespace std; +using namespace gtsam; +using namespace boost::assign; + +namespace { + namespace simple { + // Terms we'll use + const vector > terms = list_of > + (make_pair(5, Matrix3::Identity())) + (make_pair(10, 2*Matrix3::Identity())) + (make_pair(15, 3*Matrix3::Identity())); + + // RHS and sigmas + const Vector b = Vector_(3, 1., 2., 3.); + const SharedDiagonal noise = noiseModel::Diagonal::Sigmas(Vector_(3, 0.5, 0.5, 0.5)); + } +} + +/* ************************************************************************* */ +TEST(JacobianFactor, constructors_and_accessors) +{ + using namespace simple; + + // Test for using different numbers of terms + { + // b vector only constructor + JacobianFactor expected( + boost::make_iterator_range(terms.begin(), terms.begin()), b); + JacobianFactor actual(b); + EXPECT(assert_equal(expected, actual)); + EXPECT(assert_equal(b, expected.getb())); + EXPECT(assert_equal(b, actual.getb())); + EXPECT(!expected.get_model()); + EXPECT(!actual.get_model()); + } + { + // One term constructor + JacobianFactor expected( + boost::make_iterator_range(terms.begin(), terms.begin() + 1), b, noise); + JacobianFactor actual(terms[0].first, terms[0].second, b, noise); + EXPECT(assert_equal(expected, actual)); + LONGS_EQUAL((long)terms[0].first, (long)actual.keys().back()); + EXPECT(assert_equal(terms[0].second, actual.getA(actual.end() - 1))); + EXPECT(assert_equal(b, expected.getb())); + EXPECT(assert_equal(b, actual.getb())); + EXPECT(noise == expected.get_model()); + EXPECT(noise == actual.get_model()); + } + { + // Two term constructor + JacobianFactor expected( + boost::make_iterator_range(terms.begin(), terms.begin() + 2), b, noise); + JacobianFactor actual(terms[0].first, terms[0].second, + terms[1].first, terms[1].second, b, noise); + EXPECT(assert_equal(expected, actual)); + LONGS_EQUAL((long)terms[1].first, (long)actual.keys().back()); + EXPECT(assert_equal(terms[1].second, actual.getA(actual.end() - 1))); + EXPECT(assert_equal(b, expected.getb())); + EXPECT(assert_equal(b, actual.getb())); + EXPECT(noise == expected.get_model()); + EXPECT(noise == actual.get_model()); + } + { + // Three term constructor + JacobianFactor expected( + boost::make_iterator_range(terms.begin(), terms.begin() + 3), b, noise); + JacobianFactor actual(terms[0].first, terms[0].second, + terms[1].first, terms[1].second, terms[2].first, terms[2].second, b, noise); + EXPECT(assert_equal(expected, actual)); + LONGS_EQUAL((long)terms[2].first, (long)actual.keys().back()); + EXPECT(assert_equal(terms[2].second, actual.getA(actual.end() - 1))); + EXPECT(assert_equal(b, expected.getb())); + EXPECT(assert_equal(b, actual.getb())); + EXPECT(noise == expected.get_model()); + EXPECT(noise == actual.get_model()); + } + { + // VerticalBlockMatrix constructor + JacobianFactor expected( + boost::make_iterator_range(terms.begin(), terms.begin() + 3), b, noise); + VerticalBlockMatrix blockMatrix(list_of(3)(3)(3)(1), 3); + blockMatrix(0) = terms[0].second; + blockMatrix(1) = terms[1].second; + blockMatrix(2) = terms[2].second; + blockMatrix(3) = b; + JacobianFactor actual(terms | boost::adaptors::map_keys, blockMatrix, noise); + EXPECT(assert_equal(expected, actual)); + LONGS_EQUAL((long)terms[2].first, (long)actual.keys().back()); + EXPECT(assert_equal(terms[2].second, actual.getA(actual.end() - 1))); + EXPECT(assert_equal(b, expected.getb())); + EXPECT(assert_equal(b, actual.getb())); + EXPECT(noise == expected.get_model()); + EXPECT(noise == actual.get_model()); + } +} + +/* ************************************************************************* */ +//TEST(JabobianFactor, Hessian_conversion) { +// HessianFactor hessian(0, (Matrix(4,4) << +// 1.57, 2.695, -1.1, -2.35, +// 2.695, 11.3125, -0.65, -10.225, +// -1.1, -0.65, 1, 0.5, +// -2.35, -10.225, 0.5, 9.25).finished(), +// (Vector(4) << -7.885, -28.5175, 2.75, 25.675).finished(), +// 73.1725); +// +// JacobianFactor expected(0, (Matrix(2,4) << +// 1.2530, 2.1508, -0.8779, -1.8755, +// 0, 2.5858, 0.4789, -2.3943).finished(), +// (Vector(2) << -6.2929, -5.7941).finished(), +// noiseModel::Unit::Create(2)); +// +// JacobianFactor actual(hessian); +// +// EXPECT(assert_equal(expected, actual, 1e-3)); +//} + +/* ************************************************************************* */ +TEST( JacobianFactor, construct_from_graph) +{ + GaussianFactorGraph factors; + + double sigma1 = 0.1; + Matrix A11 = Matrix::Identity(2,2); + Vector b1(2); b1 << 2, -1; + factors.add(JacobianFactor(10, A11, b1, noiseModel::Isotropic::Sigma(2, sigma1))); + + double sigma2 = 0.5; + Matrix A21 = -2 * Matrix::Identity(2,2); + Matrix A22 = 3 * Matrix::Identity(2,2); + Vector b2(2); b2 << 4, -5; + factors.add(JacobianFactor(10, A21, 8, A22, b2, noiseModel::Isotropic::Sigma(2, sigma2))); + + double sigma3 = 1.0; + Matrix A32 = -4 * Matrix::Identity(2,2); + Matrix A33 = 5 * Matrix::Identity(2,2); + Vector b3(2); b3 << 3, -6; + factors.add(JacobianFactor(8, A32, 12, A33, b3, noiseModel::Isotropic::Sigma(2, sigma3))); + + Matrix A1(6,2); A1 << A11, A21, Matrix::Zero(2,2); + Matrix A2(6,2); A2 << Matrix::Zero(2,2), A22, A32; + Matrix A3(6,2); A3 << Matrix::Zero(4,2), A33; + Vector b(6); b << b1, b2, b3; + Vector sigmas(6); sigmas << sigma1, sigma1, sigma2, sigma2, sigma3, sigma3; + JacobianFactor expected(10, A1, 8, A2, 12, A3, b, noiseModel::Diagonal::Sigmas(sigmas)); + + // The ordering here specifies the order in which the variables will appear in the combined factor + JacobianFactor actual(factors, Ordering(list_of(10)(8)(12))); + + EXPECT(assert_equal(expected, actual)); +} + +/* ************************************************************************* */ +TEST(JacobianFactor, error) +{ + JacobianFactor factor(simple::terms, simple::b, simple::noise); + + VectorValues values; + values.insert(5, Vector::Constant(3, 1.0)); + values.insert(10, Vector::Constant(3, 0.5)); + values.insert(15, Vector::Constant(3, 1.0/3.0)); + + Vector expected_unwhitened(3); expected_unwhitened << 2.0, 1.0, 0.0; + Vector actual_unwhitened = factor.unweighted_error(values); + EXPECT(assert_equal(expected_unwhitened, actual_unwhitened)); + + Vector expected_whitened(3); expected_whitened << 4.0, 2.0, 0.0; + Vector actual_whitened = factor.error_vector(values); + EXPECT(assert_equal(expected_whitened, actual_whitened)); + + double expected_error = 0.5 * expected_whitened.squaredNorm(); + double actual_error = factor.error(values); + DOUBLES_EQUAL(expected_error, actual_error, 1e-10); +} + +/* ************************************************************************* */ +TEST(JacobianFactor, matrices) +{ + JacobianFactor factor(simple::terms, simple::b, simple::noise); + + Matrix jacobianExpected(3, 9); + jacobianExpected << simple::terms[0].second, simple::terms[1].second, simple::terms[2].second; + Vector rhsExpected = simple::b; + Matrix augmentedJacobianExpected(3, 10); + augmentedJacobianExpected << jacobianExpected, rhsExpected; + + Matrix augmentedHessianExpected = + augmentedJacobianExpected.transpose() * simple::noise->R().transpose() + * simple::noise->R() * augmentedJacobianExpected; + + // Hessian + EXPECT(assert_equal(Matrix(augmentedHessianExpected.topLeftCorner(9,9)), factor.information())); + EXPECT(assert_equal(augmentedHessianExpected, factor.augmentedInformation())); + + // Whitened Jacobian + EXPECT(assert_equal(simple::noise->R() * jacobianExpected, factor.jacobian().first)); + EXPECT(assert_equal(simple::noise->R() * rhsExpected, factor.jacobian().second)); + EXPECT(assert_equal(simple::noise->R() * augmentedJacobianExpected, factor.augmentedJacobian())); + + // Unwhitened Jacobian + EXPECT(assert_equal(jacobianExpected, factor.jacobianUnweighted().first)); + EXPECT(assert_equal(rhsExpected, factor.jacobianUnweighted().second)); + EXPECT(assert_equal(augmentedJacobianExpected, factor.augmentedJacobianUnweighted())); +} + +/* ************************************************************************* */ +TEST(JacobianFactor, operators ) +{ + SharedDiagonal sigma0_1 = noiseModel::Isotropic::Sigma(2,0.1); + + Matrix I = eye(2); + Vector b = Vector_(2,0.2,-0.1); + JacobianFactor lf(1, -I, 2, I, b, sigma0_1); + + VectorValues c; + c.insert(1, Vector_(2,10.,20.)); + c.insert(2, Vector_(2,30.,60.)); + + // test A*x + Vector expectedE = Vector_(2,200.,400.); + Vector actualE = lf * c; + EXPECT(assert_equal(expectedE, actualE)); + + // test A^e + VectorValues expectedX; + expectedX.insert(1, Vector_(2,-2000.,-4000.)); + expectedX.insert(2, Vector_(2, 2000., 4000.)); + VectorValues actualX = VectorValues::Zero(expectedX); + lf.transposeMultiplyAdd(1.0, actualE, actualX); + EXPECT(assert_equal(expectedX, actualX)); +} + +/* ************************************************************************* */ +TEST(JacobianFactor, default_error ) +{ + JacobianFactor f; + double actual = f.error(VectorValues()); + DOUBLES_EQUAL(0.0, actual, 1e-15); +} + +//* ************************************************************************* */ +TEST(JacobianFactor, empty ) +{ + // create an empty factor + JacobianFactor f; + EXPECT(f.empty()); +} + +/* ************************************************************************* */ +TEST(JacobianFactor, eliminate) +{ + Matrix A01 = Matrix_(3,3, + 1.0, 0.0, 0.0, + 0.0, 1.0, 0.0, + 0.0, 0.0, 1.0); + Vector b0 = Vector_(3, 1.5, 1.5, 1.5); + Vector s0 = Vector_(3, 1.6, 1.6, 1.6); + + Matrix A10 = Matrix_(3,3, + 2.0, 0.0, 0.0, + 0.0, 2.0, 0.0, + 0.0, 0.0, 2.0); + Matrix A11 = Matrix_(3,3, + -2.0, 0.0, 0.0, + 0.0, -2.0, 0.0, + 0.0, 0.0, -2.0); + Vector b1 = Vector_(3, 2.5, 2.5, 2.5); + Vector s1 = Vector_(3, 2.6, 2.6, 2.6); + + Matrix A21 = Matrix_(3,3, + 3.0, 0.0, 0.0, + 0.0, 3.0, 0.0, + 0.0, 0.0, 3.0); + Vector b2 = Vector_(3, 3.5, 3.5, 3.5); + Vector s2 = Vector_(3, 3.6, 3.6, 3.6); + + GaussianFactorGraph gfg; + gfg.add(1, A01, b0, noiseModel::Diagonal::Sigmas(s0, true)); + gfg.add(0, A10, 1, A11, b1, noiseModel::Diagonal::Sigmas(s1, true)); + gfg.add(1, A21, b2, noiseModel::Diagonal::Sigmas(s2, true)); + + Matrix zero3x3 = zeros(3,3); + Matrix A0 = gtsam::stack(3, &A10, &zero3x3, &zero3x3); + Matrix A1 = gtsam::stack(3, &A11, &A01, &A21); + Vector b = gtsam::concatVectors(3, &b1, &b0, &b2); + Vector sigmas = gtsam::concatVectors(3, &s1, &s0, &s2); + + JacobianFactor combinedFactor(0, A0, 1, A1, b, noiseModel::Diagonal::Sigmas(sigmas, true)); + GaussianFactorGraph::EliminationResult expected = combinedFactor.eliminate(list_of(0)); + JacobianFactor::shared_ptr expectedJacobian = boost::dynamic_pointer_cast< + JacobianFactor>(expected.second); + + GaussianFactorGraph::EliminationResult actual = EliminateQR(gfg, list_of(0)); + JacobianFactor::shared_ptr actualJacobian = boost::dynamic_pointer_cast< + JacobianFactor>(actual.second); + + EXPECT(assert_equal(*expected.first, *actual.first)); + EXPECT(assert_equal(*expectedJacobian, *actualJacobian)); +} + +/* ************************************************************************* */ +TEST(JacobianFactor, eliminate2 ) +{ + // sigmas + double sigma1 = 0.2; + double sigma2 = 0.1; + Vector sigmas = Vector_(4, sigma1, sigma1, sigma2, sigma2); + + // the combined linear factor + Matrix Ax2 = Matrix_(4,2, + // x2 + -1., 0., + +0.,-1., + 1., 0., + +0.,1. + ); + + Matrix Al1x1 = Matrix_(4,4, + // l1 x1 + 1., 0., 0.00, 0., // f4 + 0., 1., 0.00, 0., // f4 + 0., 0., -1., 0., // f2 + 0., 0., 0.00,-1. // f2 + ); + + // the RHS + Vector b2(4); + b2(0) = -0.2; + b2(1) = 0.3; + b2(2) = 0.2; + b2(3) = -0.1; + + vector > meas; + meas.push_back(make_pair(2, Ax2)); + meas.push_back(make_pair(11, Al1x1)); + JacobianFactor combined(meas, b2, noiseModel::Diagonal::Sigmas(sigmas)); + + // eliminate the combined factor + pair + actual = combined.eliminate(Ordering(list_of(2))); + + // create expected Conditional Gaussian + double oldSigma = 0.0894427; // from when R was made unit + Matrix R11 = Matrix_(2,2, + 1.00, 0.00, + 0.00, 1.00 + )/oldSigma; + Matrix S12 = Matrix_(2,4, + -0.20, 0.00,-0.80, 0.00, + +0.00,-0.20,+0.00,-0.80 + )/oldSigma; + Vector d = Vector_(2,0.2,-0.14)/oldSigma; + GaussianConditional expectedCG(2, d, R11, 11, S12); + + EXPECT(assert_equal(expectedCG, *actual.first, 1e-4)); + + // the expected linear factor + double sigma = 0.2236; + Matrix Bl1x1 = Matrix_(2,4, + // l1 x1 + 1.00, 0.00, -1.00, 0.00, + 0.00, 1.00, +0.00, -1.00 + )/sigma; + Vector b1 = Vector_(2, 0.0, 0.894427); + JacobianFactor expectedLF(11, Bl1x1, b1); + EXPECT(assert_equal(expectedLF, *actual.second,1e-3)); +} + +/* ************************************************************************* */ +TEST(JacobianFactor, EliminateQR) +{ + // Augmented Ab test case for whole factor graph + Matrix Ab = Matrix_(14,11, + 4., 0., 1., 4., 1., 0., 3., 6., 8., 8., 1., + 9., 2., 0., 1., 6., 3., 9., 6., 6., 9., 4., + 5., 3., 7., 9., 5., 5., 9., 1., 3., 7., 0., + 5., 6., 5., 7., 9., 4., 0., 1., 1., 3., 5., + 0., 0., 4., 5., 6., 6., 7., 9., 4., 5., 4., + 0., 0., 9., 4., 8., 6., 2., 1., 4., 1., 6., + 0., 0., 6., 0., 4., 2., 4., 0., 1., 9., 6., + 0., 0., 6., 6., 4., 4., 5., 5., 5., 8., 6., + 0., 0., 0., 0., 8., 0., 9., 8., 2., 8., 0., + 0., 0., 0., 0., 0., 9., 4., 6., 3., 2., 0., + 0., 0., 0., 0., 1., 1., 9., 1., 5., 5., 3., + 0., 0., 0., 0., 1., 1., 3., 3., 2., 0., 5., + 0., 0., 0., 0., 0., 0., 0., 0., 2., 4., 6., + 0., 0., 0., 0., 0., 0., 0., 0., 6., 3., 4.); + + // Create factor graph + const SharedDiagonal sig_4D = noiseModel::Isotropic::Sigma(4, 0.5); + const SharedDiagonal sig_2D = noiseModel::Isotropic::Sigma(2, 0.5); + GaussianFactorGraph factors = list_of + (JacobianFactor(list_of(3)(5)(7)(9)(11), VerticalBlockMatrix(list_of(2)(2)(2)(2)(2)(1), Ab.block(0, 0, 4, 11)), sig_4D)) + (JacobianFactor(list_of(5)(7)(9)(11), VerticalBlockMatrix(list_of(2)(2)(2)(2)(1), Ab.block(4, 2, 4, 9)), sig_4D)) + (JacobianFactor(list_of(7)(9)(11), VerticalBlockMatrix(list_of(2)(2)(2)(1), Ab.block(8, 4, 4, 7)), sig_4D)) + (JacobianFactor(list_of(11), VerticalBlockMatrix(list_of(2)(1), Ab.block(12, 8, 2, 3)), sig_2D)); + + // extract the dense matrix for the graph + Matrix actualDense = factors.augmentedJacobian(); + EXPECT(assert_equal(2.0 * Ab, actualDense)); + + // Expected augmented matrix, both GaussianConditional (first 6 rows) and remaining factor (next 4 rows) + Matrix R = 2.0*Matrix_(11,11, + -12.1244, -5.1962, -5.2786, -8.6603, -10.5573, -5.9385, -11.3820, -7.2581, -8.7427, -13.4440, -5.3611, + 0., 4.6904, 5.0254, 5.5432, 5.5737, 3.0153, -3.0153, -3.5635, -3.9290, -2.7412, 2.1625, + 0., 0., -13.8160, -8.7166, -10.2245, -8.8666, -8.7632, -5.2544, -6.9192, -10.5537, -9.3250, + 0., 0., 0., 6.5033, -1.1453, 1.3179, 2.5768, 5.5503, 3.6524, 1.3491, -2.5676, + 0., 0., 0., 0., -9.6242, -2.1148, -9.3509, -10.5846, -3.5366, -6.8561, -3.2277, + 0., 0., 0., 0., 0., 9.7887, 4.3551, 5.7572, 2.7876, 0.1611, 1.1769, + 0., 0., 0., 0., 0., 0., -11.1139, -0.6521, -2.1943, -7.5529, -0.9081, + 0., 0., 0., 0., 0., 0., 0., -4.6479, -1.9367, -6.5170, -3.7685, + 0., 0., 0., 0., 0., 0., 0., 0., 8.2503, 3.3757, 6.8476, + 0., 0., 0., 0., 0., 0., 0., 0., 0., -5.7095, -0.0090, + 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., -7.1635); + + GaussianConditional expectedFragment( + list_of(3)(5)(7)(9)(11), 3, VerticalBlockMatrix(list_of(2)(2)(2)(2)(2)(1), R)); + + // Eliminate (3 frontal variables, 6 scalar columns) using QR !!!! + GaussianFactorGraph::EliminationResult actual = EliminateQR(factors, list_of(3)(5)(7)); + const JacobianFactor &actualJF = dynamic_cast(*actual.second); + + EXPECT(assert_equal(expectedFragment, *actual.first, 0.001)); + EXPECT(assert_equal(size_t(2), actualJF.keys().size())); + EXPECT(assert_equal(Key(9), actualJF.keys()[0])); + EXPECT(assert_equal(Key(11), actualJF.keys()[1])); + EXPECT(assert_equal(Matrix(R.block(6, 6, 4, 2)), actualJF.getA(actualJF.begin()), 0.001)); + EXPECT(assert_equal(Matrix(R.block(6, 8, 4, 2)), actualJF.getA(actualJF.begin()+1), 0.001)); + EXPECT(assert_equal(Vector(R.col(10).segment(6, 4)), actualJF.getb(), 0.001)); + EXPECT(!actualJF.get_model()); + + // Eliminate (3 frontal variables, 6 scalar columns) using Cholesky !!!! + // TODO: HessianFactor + //GaussianBayesNet actualFragment_Chol = *actualFactor_Chol.eliminate(3, JacobianFactor::SOLVE_CHOLESKY); + //EXPECT(assert_equal(expectedFragment, actualFragment_Chol, 0.001)); + //EXPECT(assert_equal(size_t(2), actualFactor_Chol.keys().size())); + //EXPECT(assert_equal(Index(9), actualFactor_Chol.keys()[0])); + //EXPECT(assert_equal(Index(11), actualFactor_Chol.keys()[1])); + //EXPECT(assert_equal(Ae1, actualFactor_Chol.getA(actualFactor_Chol.begin()), 0.001)); //// + //EXPECT(linear_dependent(Ae2, actualFactor_Chol.getA(actualFactor_Chol.begin()+1), 0.001)); + //EXPECT(assert_equal(be, actualFactor_Chol.getb(), 0.001)); //// + //EXPECT(assert_equal(ones(4), actualFactor_Chol.get_sigmas(), 0.001)); +} + +/* ************************************************************************* */ +TEST ( JacobianFactor, constraint_eliminate1 ) +{ + // construct a linear constraint + Vector v(2); v(0)=1.2; v(1)=3.4; + JacobianFactor lc(1, eye(2), v, noiseModel::Constrained::All(2)); + + // eliminate it + pair + actual = lc.eliminate(list_of(1)); + + // verify linear factor + EXPECT(actual.second->size() == 0); + + // verify conditional Gaussian + Vector sigmas = Vector_(2, 0.0, 0.0); + GaussianConditional expCG(1, v, eye(2), noiseModel::Diagonal::Sigmas(sigmas)); + EXPECT(assert_equal(expCG, *actual.first)); +} + +/* ************************************************************************* */ +TEST ( JacobianFactor, constraint_eliminate2 ) +{ + // Construct a linear constraint + // RHS + Vector b(2); b(0)=3.0; b(1)=4.0; + + // A1 - invertible + Matrix A1(2,2); + A1(0,0) = 1.0 ; A1(0,1) = 2.0; + A1(1,0) = 2.0 ; A1(1,1) = 1.0; + + // A2 - not invertible + Matrix A2(2,2); + A2(0,0) = 1.0 ; A2(0,1) = 2.0; + A2(1,0) = 2.0 ; A2(1,1) = 4.0; + + JacobianFactor lc(1, A1, 2, A2, b, noiseModel::Constrained::All(2)); + + // eliminate x and verify results + pair + actual = lc.eliminate(list_of(1)); + + // LF should be empty + // It's tricky to create Eigen matrices that are only zero along one dimension + Matrix m(1,2); + Matrix Aempty = m.topRows(0); + Vector bempty = m.block(0,0,0,1); + JacobianFactor expectedLF(2, Aempty, bempty, noiseModel::Constrained::All(0)); + EXPECT(assert_equal(expectedLF, *actual.second)); + + // verify CG + Matrix R = Matrix_(2, 2, + 1.0, 2.0, + 0.0, 1.0); + Matrix S = Matrix_(2,2, + 1.0, 2.0, + 0.0, 0.0); + Vector d = Vector_(2, 3.0, 0.6666); + Vector sigmas = Vector_(2, 0.0, 0.0); + GaussianConditional expectedCG(1, d, R, 2, S, noiseModel::Diagonal::Sigmas(sigmas)); + EXPECT(assert_equal(expectedCG, *actual.first, 1e-4)); +} + +/* ************************************************************************* */ +int main() { TestResult tr; return TestRegistry::runAllTests(tr);} +/* ************************************************************************* */ diff --git a/gtsam/linear/tests/testKalmanFilter.cpp b/gtsam/linear/tests/testKalmanFilter.cpp index 083e942c3..5a33e97f3 100644 --- a/gtsam/linear/tests/testKalmanFilter.cpp +++ b/gtsam/linear/tests/testKalmanFilter.cpp @@ -124,11 +124,11 @@ TEST( KalmanFilter, linear1 ) { // Run iteration 3 KalmanFilter::State p3p = kf.predict(p2, F, B, u, modelQ); EXPECT(assert_equal(expected3, p3p->mean())); - LONGS_EQUAL(3, KalmanFilter::step(p3p)); + LONGS_EQUAL(3, (long)KalmanFilter::step(p3p)); KalmanFilter::State p3 = kf.update(p3p, H, z3, modelR); EXPECT(assert_equal(expected3, p3->mean())); - LONGS_EQUAL(3, KalmanFilter::step(p3)); + LONGS_EQUAL(3, (long)KalmanFilter::step(p3)); } /* ************************************************************************* */ diff --git a/gtsam/linear/tests/testNoiseModel.cpp b/gtsam/linear/tests/testNoiseModel.cpp index 7a0e02722..9d3f19fb1 100644 --- a/gtsam/linear/tests/testNoiseModel.cpp +++ b/gtsam/linear/tests/testNoiseModel.cpp @@ -230,8 +230,7 @@ TEST( NoiseModel, QR ) // Call Gaussian version SharedDiagonal actual1 = exampleQR::diagonal->QR(Ab1); - SharedDiagonal expected = noiseModel::Unit::Create(4); - EXPECT(assert_equal(*expected,*actual1)); + EXPECT(!actual1); EXPECT(linear_dependent(exampleQR::Rd,Ab1,1e-4)); // Ab was modified in place !!! // Call Constrained version diff --git a/gtsam/linear/tests/testSerializationLinear.cpp b/gtsam/linear/tests/testSerializationLinear.cpp index b258fb52e..192d370d2 100644 --- a/gtsam/linear/tests/testSerializationLinear.cpp +++ b/gtsam/linear/tests/testSerializationLinear.cpp @@ -19,7 +19,6 @@ #include #include #include -#include #include #include @@ -164,7 +163,7 @@ TEST (Serialization, gaussian_conditional) { Matrix A2 = Matrix_(2,2, 6., 0.2, 8., 0.4); Matrix R = Matrix_(2,2, 0.1, 0.3, 0.0, 0.34); Vector d(2); d << 0.2, 0.5; - GaussianConditional cg(0, d, R, 1, A1, 2, A2, ones(2)); + GaussianConditional cg(0, d, R, 1, A1, 2, A2); EXPECT(equalsObj(cg)); EXPECT(equalsXML(cg)); diff --git a/gtsam/linear/tests/testVectorValues.cpp b/gtsam/linear/tests/testVectorValues.cpp deleted file mode 100644 index c416937ed..000000000 --- a/gtsam/linear/tests/testVectorValues.cpp +++ /dev/null @@ -1,486 +0,0 @@ -/* ---------------------------------------------------------------------------- - - * GTSAM Copyright 2010, Georgia Tech Research Corporation, - * Atlanta, Georgia 30332-0415 - * All Rights Reserved - * Authors: Frank Dellaert, et al. (see THANKS for the full author list) - - * See LICENSE for the license information - - * -------------------------------------------------------------------------- */ - -/** - * @file testVectorValues.cpp - * @author Richard Roberts - * @date Sep 16, 2010 - */ - -#include - -#include -#include -#include - -#include - -using namespace std; -using namespace boost::assign; -using namespace gtsam; - -/* ************************************************************************* */ -TEST(VectorValues, insert) { - - // insert, with out-of-order indices - VectorValues actual; - actual.insert(0, Vector_(1, 1.0)); - actual.insert(1, Vector_(2, 2.0, 3.0)); - actual.insert(5, Vector_(2, 6.0, 7.0)); - actual.insert(2, Vector_(2, 4.0, 5.0)); - - // Check dimensions - LONGS_EQUAL(6, actual.size()); - LONGS_EQUAL(1, actual.dim(0)); - LONGS_EQUAL(2, actual.dim(1)); - LONGS_EQUAL(2, actual.dim(2)); - LONGS_EQUAL(2, actual.dim(5)); - - // Logic - EXPECT(actual.exists(0)); - EXPECT(actual.exists(1)); - EXPECT(actual.exists(2)); - EXPECT(!actual.exists(3)); - EXPECT(!actual.exists(4)); - EXPECT(actual.exists(5)); - EXPECT(!actual.exists(6)); - - // Check values - EXPECT(assert_equal(Vector_(1, 1.0), actual[0])); - EXPECT(assert_equal(Vector_(2, 2.0, 3.0), actual[1])); - EXPECT(assert_equal(Vector_(2, 4.0, 5.0), actual[2])); - EXPECT(assert_equal(Vector_(2, 6.0, 7.0), actual[5])); - EXPECT(assert_equal(Vector_(7, 1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0), actual.asVector())); - - // Check exceptions - CHECK_EXCEPTION(actual.insert(1, Vector()), invalid_argument); - CHECK_EXCEPTION(actual.dim(3), out_of_range); -} - -/* ************************************************************************* */ -TEST(VectorValues, dimsConstructor) { - vector dims; - dims.push_back(1); - dims.push_back(2); - dims.push_back(2); - - VectorValues actual(dims); - actual[0] = Vector_(1, 1.0); - actual[1] = Vector_(2, 2.0, 3.0); - actual[2] = Vector_(2, 4.0, 5.0); - - // Check dimensions - LONGS_EQUAL(3, actual.size()); - LONGS_EQUAL(1, actual.dim(0)); - LONGS_EQUAL(2, actual.dim(1)); - LONGS_EQUAL(2, actual.dim(2)); - - // Check values - EXPECT(assert_equal(Vector_(1, 1.0), actual[0])); - EXPECT(assert_equal(Vector_(2, 2.0, 3.0), actual[1])); - EXPECT(assert_equal(Vector_(2, 4.0, 5.0), actual[2])); - EXPECT(assert_equal(Vector_(5, 1.0, 2.0, 3.0, 4.0, 5.0), actual.asVector())); -} - -/* ************************************************************************* */ -TEST(VectorValues, copyConstructor) { - - // insert, with out-of-order indices - VectorValues original; - original.insert(0, Vector_(1, 1.0)); - original.insert(1, Vector_(2, 2.0, 3.0)); - original.insert(5, Vector_(2, 6.0, 7.0)); - original.insert(2, Vector_(2, 4.0, 5.0)); - - VectorValues actual(original); - - // Check dimensions - LONGS_EQUAL(6, actual.size()); - LONGS_EQUAL(1, actual.dim(0)); - LONGS_EQUAL(2, actual.dim(1)); - LONGS_EQUAL(2, actual.dim(2)); - LONGS_EQUAL(2, actual.dim(5)); - - // Logic - EXPECT(actual.exists(0)); - EXPECT(actual.exists(1)); - EXPECT(actual.exists(2)); - EXPECT(!actual.exists(3)); - EXPECT(!actual.exists(4)); - EXPECT(actual.exists(5)); - EXPECT(!actual.exists(6)); - - // Check values - EXPECT(assert_equal(Vector_(1, 1.0), actual[0])); - EXPECT(assert_equal(Vector_(2, 2.0, 3.0), actual[1])); - EXPECT(assert_equal(Vector_(2, 4.0, 5.0), actual[2])); - EXPECT(assert_equal(Vector_(2, 6.0, 7.0), actual[5])); - EXPECT(assert_equal(Vector_(7, 1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0), actual.asVector())); - - // Check exceptions - CHECK_EXCEPTION(actual.insert(1, Vector()), invalid_argument); -} - -/* ************************************************************************* */ -TEST(VectorValues, assignment) { - - VectorValues actual; - - { - // insert, with out-of-order indices - VectorValues original; - original.insert(0, Vector_(1, 1.0)); - original.insert(1, Vector_(2, 2.0, 3.0)); - original.insert(5, Vector_(2, 6.0, 7.0)); - original.insert(2, Vector_(2, 4.0, 5.0)); - actual = original; - } - - // Check dimensions - LONGS_EQUAL(6, actual.size()); - LONGS_EQUAL(1, actual.dim(0)); - LONGS_EQUAL(2, actual.dim(1)); - LONGS_EQUAL(2, actual.dim(2)); - LONGS_EQUAL(2, actual.dim(5)); - - // Logic - EXPECT(actual.exists(0)); - EXPECT(actual.exists(1)); - EXPECT(actual.exists(2)); - EXPECT(!actual.exists(3)); - EXPECT(!actual.exists(4)); - EXPECT(actual.exists(5)); - EXPECT(!actual.exists(6)); - - // Check values - EXPECT(assert_equal(Vector_(1, 1.0), actual[0])); - EXPECT(assert_equal(Vector_(2, 2.0, 3.0), actual[1])); - EXPECT(assert_equal(Vector_(2, 4.0, 5.0), actual[2])); - EXPECT(assert_equal(Vector_(2, 6.0, 7.0), actual[5])); - EXPECT(assert_equal(Vector_(7, 1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0), actual.asVector())); - - // Check exceptions - CHECK_EXCEPTION(actual.insert(1, Vector()), invalid_argument); -} - -/* ************************************************************************* */ -TEST(VectorValues, SameStructure) { - // insert, with out-of-order indices - VectorValues original; - original.insert(0, Vector_(1, 1.0)); - original.insert(1, Vector_(2, 2.0, 3.0)); - original.insert(5, Vector_(2, 6.0, 7.0)); - original.insert(2, Vector_(2, 4.0, 5.0)); - - VectorValues actual(VectorValues::SameStructure(original)); - - // Check dimensions - LONGS_EQUAL(6, actual.size()); - LONGS_EQUAL(1, actual.dim(0)); - LONGS_EQUAL(2, actual.dim(1)); - LONGS_EQUAL(2, actual.dim(2)); - LONGS_EQUAL(2, actual.dim(5)); - - // Logic - EXPECT(actual.exists(0)); - EXPECT(actual.exists(1)); - EXPECT(actual.exists(2)); - EXPECT(!actual.exists(3)); - EXPECT(!actual.exists(4)); - EXPECT(actual.exists(5)); - EXPECT(!actual.exists(6)); - - // Check exceptions - CHECK_EXCEPTION(actual.insert(1, Vector()), invalid_argument); -} - -/* ************************************************************************* */ -TEST(VectorValues, Zero_fromModel) { - // insert, with out-of-order indices - VectorValues original; - original.insert(0, Vector_(1, 1.0)); - original.insert(1, Vector_(2, 2.0, 3.0)); - original.insert(5, Vector_(2, 6.0, 7.0)); - original.insert(2, Vector_(2, 4.0, 5.0)); - - VectorValues actual(VectorValues::Zero(original)); - - // Check dimensions - LONGS_EQUAL(6, actual.size()); - LONGS_EQUAL(1, actual.dim(0)); - LONGS_EQUAL(2, actual.dim(1)); - LONGS_EQUAL(2, actual.dim(2)); - LONGS_EQUAL(2, actual.dim(5)); - - // Values - EXPECT(assert_equal(Vector::Zero(1), actual[0])); - EXPECT(assert_equal(Vector::Zero(2), actual[1])); - EXPECT(assert_equal(Vector::Zero(2), actual[5])); - EXPECT(assert_equal(Vector::Zero(2), actual[2])); - - // Logic - EXPECT(actual.exists(0)); - EXPECT(actual.exists(1)); - EXPECT(actual.exists(2)); - EXPECT(!actual.exists(3)); - EXPECT(!actual.exists(4)); - EXPECT(actual.exists(5)); - EXPECT(!actual.exists(6)); - - // Check exceptions - CHECK_EXCEPTION(actual.insert(1, Vector()), invalid_argument); -} - -/* ************************************************************************* */ -TEST(VectorValues, Zero_fromDims) { - vector dims; - dims.push_back(1); - dims.push_back(2); - dims.push_back(2); - - VectorValues actual(VectorValues::Zero(dims)); - - // Check dimensions - LONGS_EQUAL(3, actual.size()); - LONGS_EQUAL(1, actual.dim(0)); - LONGS_EQUAL(2, actual.dim(1)); - LONGS_EQUAL(2, actual.dim(2)); - - // Values - EXPECT(assert_equal(Vector::Zero(1), actual[0])); - EXPECT(assert_equal(Vector::Zero(2), actual[1])); - EXPECT(assert_equal(Vector::Zero(2), actual[2])); -} - -/* ************************************************************************* */ -TEST(VectorValues, Zero_fromUniform) { - VectorValues actual(VectorValues::Zero(3, 2)); - - // Check dimensions - LONGS_EQUAL(3, actual.size()); - LONGS_EQUAL(2, actual.dim(0)); - LONGS_EQUAL(2, actual.dim(1)); - LONGS_EQUAL(2, actual.dim(2)); - - // Values - EXPECT(assert_equal(Vector::Zero(2), actual[0])); - EXPECT(assert_equal(Vector::Zero(2), actual[1])); - EXPECT(assert_equal(Vector::Zero(2), actual[2])); -} - -/* ************************************************************************* */ -TEST(VectorValues, resizeLike) { - // insert, with out-of-order indices - VectorValues original; - original.insert(0, Vector_(1, 1.0)); - original.insert(1, Vector_(2, 2.0, 3.0)); - original.insert(5, Vector_(2, 6.0, 7.0)); - original.insert(2, Vector_(2, 4.0, 5.0)); - - VectorValues actual(10, 3); - actual.resizeLike(original); - - // Check dimensions - LONGS_EQUAL(6, actual.size()); - LONGS_EQUAL(1, actual.dim(0)); - LONGS_EQUAL(2, actual.dim(1)); - LONGS_EQUAL(2, actual.dim(2)); - LONGS_EQUAL(2, actual.dim(5)); - - // Logic - EXPECT(actual.exists(0)); - EXPECT(actual.exists(1)); - EXPECT(actual.exists(2)); - EXPECT(!actual.exists(3)); - EXPECT(!actual.exists(4)); - EXPECT(actual.exists(5)); - EXPECT(!actual.exists(6)); - - // Check exceptions - CHECK_EXCEPTION(actual.insert(1, Vector()), invalid_argument); -} - -/* ************************************************************************* */ -TEST(VectorValues, resize_fromUniform) { - VectorValues actual(4, 10); - actual.resize(3, 2); - - actual[0] = Vector_(2, 1.0, 2.0); - actual[1] = Vector_(2, 2.0, 3.0); - actual[2] = Vector_(2, 4.0, 5.0); - - // Check dimensions - LONGS_EQUAL(3, actual.size()); - LONGS_EQUAL(2, actual.dim(0)); - LONGS_EQUAL(2, actual.dim(1)); - LONGS_EQUAL(2, actual.dim(2)); - - // Check values - EXPECT(assert_equal(Vector_(2, 1.0, 2.0), actual[0])); - EXPECT(assert_equal(Vector_(2, 2.0, 3.0), actual[1])); - EXPECT(assert_equal(Vector_(2, 4.0, 5.0), actual[2])); - EXPECT(assert_equal(Vector_(6, 1.0, 2.0, 2.0, 3.0, 4.0, 5.0), actual.asVector())); -} - -/* ************************************************************************* */ -TEST(VectorValues, resize_fromDims) { - vector dims; - dims.push_back(1); - dims.push_back(2); - dims.push_back(2); - - VectorValues actual(4, 10); - actual.resize(dims); - actual[0] = Vector_(1, 1.0); - actual[1] = Vector_(2, 2.0, 3.0); - actual[2] = Vector_(2, 4.0, 5.0); - - // Check dimensions - LONGS_EQUAL(3, actual.size()); - LONGS_EQUAL(1, actual.dim(0)); - LONGS_EQUAL(2, actual.dim(1)); - LONGS_EQUAL(2, actual.dim(2)); - - // Check values - EXPECT(assert_equal(Vector_(1, 1.0), actual[0])); - EXPECT(assert_equal(Vector_(2, 2.0, 3.0), actual[1])); - EXPECT(assert_equal(Vector_(2, 4.0, 5.0), actual[2])); - EXPECT(assert_equal(Vector_(5, 1.0, 2.0, 3.0, 4.0, 5.0), actual.asVector())); -} - -/* ************************************************************************* */ -TEST(VectorValues, append) { - // insert - VectorValues actual; - actual.insert(0, Vector_(1, 1.0)); - actual.insert(1, Vector_(2, 2.0, 3.0)); - actual.insert(2, Vector_(2, 4.0, 5.0)); - - // append - vector dims(2); - dims[0] = 3; - dims[1] = 5; - actual.append(dims); - - // Check dimensions - LONGS_EQUAL(5, actual.size()); - LONGS_EQUAL(1, actual.dim(0)); - LONGS_EQUAL(2, actual.dim(1)); - LONGS_EQUAL(2, actual.dim(2)); - LONGS_EQUAL(3, actual.dim(3)); - LONGS_EQUAL(5, actual.dim(4)); - - // Logic - EXPECT(actual.exists(0)); - EXPECT(actual.exists(1)); - EXPECT(actual.exists(2)); - EXPECT(actual.exists(3)); - EXPECT(actual.exists(4)); - EXPECT(!actual.exists(5)); - - // Check values - EXPECT(assert_equal(Vector_(1, 1.0), actual[0])); - EXPECT(assert_equal(Vector_(2, 2.0, 3.0), actual[1])); - EXPECT(assert_equal(Vector_(2, 4.0, 5.0), actual[2])); - - // Check exceptions - CHECK_EXCEPTION(actual.insert(3, Vector()), invalid_argument); -} - -/* ************************************************************************* */ -TEST(VectorValues, append_withValuesFromVector) { - // Constructor with initial values - vector dims(3); - dims[0] = 1; - dims[1] = 2; - dims[2] = 2; - Vector d = Vector_(5, 1.0, 2.0, 3.0, 4.0, 5.0); - VectorValues actual(d, dims); - - VectorValues expected; - expected.insert(0, Vector_(1, 1.0)); - expected.insert(1, Vector_(2, 2.0, 3.0)); - expected.insert(2, Vector_(2, 4.0, 5.0)); - - EXPECT(assert_equal(expected, actual)); - - // Append with initial values - expected.insert(3, Vector_(1, 6.0)); - expected.insert(4, Vector_(3, 7.0, 8.0, 9.0)); - - vector dims2(2); - dims2[0] = 1; - dims2[1] = 3; - Vector d2 = Vector_(4, 6.0, 7.0, 8.0, 9.0); - actual.append(d2, dims2); - - EXPECT(assert_equal(expected, actual)); -} - -/* ************************************************************************* */ -TEST(VectorValues, hasSameStructure) { - VectorValues v1(2, 3); - VectorValues v2(3, 2); - VectorValues v3(4, 2); - VectorValues v4(4, 2); - - EXPECT(!v1.hasSameStructure(v2)); - EXPECT(!v2.hasSameStructure(v3)); - EXPECT(v3.hasSameStructure(v4)); - EXPECT(VectorValues().hasSameStructure(VectorValues())); - EXPECT(!v1.hasSameStructure(VectorValues())); -} - - -/* ************************************************************************* */ -TEST(VectorValues, permute) { - - VectorValues original; - original.insert(0, Vector_(1, 1.0)); - original.insert(1, Vector_(2, 2.0, 3.0)); - original.insert(2, Vector_(2, 4.0, 5.0)); - original.insert(3, Vector_(2, 6.0, 7.0)); - - VectorValues expected; - expected.insert(0, Vector_(2, 4.0, 5.0)); // from 2 - expected.insert(1, Vector_(1, 1.0)); // from 0 - expected.insert(2, Vector_(2, 6.0, 7.0)); // from 3 - expected.insert(3, Vector_(2, 2.0, 3.0)); // from 1 - - Permutation permutation(4); - permutation[0] = 2; - permutation[1] = 0; - permutation[2] = 3; - permutation[3] = 1; - - VectorValues actual = original; - actual.permuteInPlace(permutation); - - EXPECT(assert_equal(expected, actual)); -} - -/* ************************************************************************* */ -TEST(VectorValues, subvector) { - VectorValues init; - init.insert(0, Vector_(1, 1.0)); - init.insert(1, Vector_(2, 2.0, 3.0)); - init.insert(2, Vector_(2, 4.0, 5.0)); - init.insert(3, Vector_(2, 6.0, 7.0)); - - std::vector indices; - indices += 0, 2, 3; - Vector expSubVector = Vector_(5, 1.0, 4.0, 5.0, 6.0, 7.0); - EXPECT(assert_equal(expSubVector, init.vector(indices))); -} - -/* ************************************************************************* */ -int main() { TestResult tr; return TestRegistry::runAllTests(tr); } -/* ************************************************************************* */ diff --git a/gtsam/linear/tests/testVectorValuesUnordered.cpp b/gtsam/linear/tests/testVectorValuesUnordered.cpp new file mode 100644 index 000000000..a651cdf72 --- /dev/null +++ b/gtsam/linear/tests/testVectorValuesUnordered.cpp @@ -0,0 +1,163 @@ +/* ---------------------------------------------------------------------------- + + * GTSAM Copyright 2010, Georgia Tech Research Corporation, + * Atlanta, Georgia 30332-0415 + * All Rights Reserved + * Authors: Frank Dellaert, et al. (see THANKS for the full author list) + + * See LICENSE for the license information + + * -------------------------------------------------------------------------- */ + +/** + * @file testVectorValues.cpp + * @author Richard Roberts + * @date Sep 16, 2010 + */ + +#include + +#include +#include + +#include + +using namespace std; +using namespace boost::assign; +using namespace gtsam; + +/* ************************************************************************* */ +TEST(VectorValues, basics) +{ + // Tests insert(), size(), dim(), exists(), vector() + + // insert + VectorValues actual; + actual.insert(0, Vector_(1, 1.0)); + actual.insert(1, Vector_(2, 2.0, 3.0)); + actual.insert(5, Vector_(2, 6.0, 7.0)); + actual.insert(2, Vector_(2, 4.0, 5.0)); + + // Check dimensions + LONGS_EQUAL(4, actual.size()); + LONGS_EQUAL(1, actual.dim(0)); + LONGS_EQUAL(2, actual.dim(1)); + LONGS_EQUAL(2, actual.dim(2)); + LONGS_EQUAL(2, actual.dim(5)); + + // Logic + EXPECT(actual.exists(0)); + EXPECT(actual.exists(1)); + EXPECT(actual.exists(2)); + EXPECT(!actual.exists(3)); + EXPECT(!actual.exists(4)); + EXPECT(actual.exists(5)); + EXPECT(!actual.exists(6)); + + // Check values + EXPECT(assert_equal(Vector_(1, 1.0), actual[0])); + EXPECT(assert_equal(Vector_(2, 2.0, 3.0), actual[1])); + EXPECT(assert_equal(Vector_(2, 4.0, 5.0), actual[2])); + EXPECT(assert_equal(Vector_(2, 6.0, 7.0), actual[5])); + EXPECT(assert_equal(Vector_(7, 1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0), actual.vector())); + + // Check exceptions + CHECK_EXCEPTION(actual.insert(1, Vector()), invalid_argument); + CHECK_EXCEPTION(actual.dim(3), out_of_range); +} + +/* ************************************************************************* */ +TEST(VectorValues, combine) +{ + VectorValues expected; + expected.insert(0, Vector_(1, 1.0)); + expected.insert(1, Vector_(2, 2.0, 3.0)); + expected.insert(5, Vector_(2, 6.0, 7.0)); + expected.insert(2, Vector_(2, 4.0, 5.0)); + + VectorValues first; + first.insert(0, Vector_(1, 1.0)); + first.insert(1, Vector_(2, 2.0, 3.0)); + + VectorValues second; + second.insert(5, Vector_(2, 6.0, 7.0)); + second.insert(2, Vector_(2, 4.0, 5.0)); + + VectorValues actual(first, second); + + EXPECT(assert_equal(expected, actual)); +} + +/* ************************************************************************* */ +TEST(VectorValues, subvector) +{ + VectorValues init; + init.insert(10, Vector_(1, 1.0)); + init.insert(11, Vector_(2, 2.0, 3.0)); + init.insert(12, Vector_(2, 4.0, 5.0)); + init.insert(13, Vector_(2, 6.0, 7.0)); + + std::vector keys; + keys += 10, 12, 13; + Vector expSubVector = Vector_(5, 1.0, 4.0, 5.0, 6.0, 7.0); + EXPECT(assert_equal(expSubVector, init.vector(keys))); +} + +/* ************************************************************************* */ +TEST(VectorValues, LinearAlgebra) +{ + VectorValues test1; + test1.insert(0, Vector_(1, 1.0)); + test1.insert(1, Vector_(2, 2.0, 3.0)); + test1.insert(5, Vector_(2, 6.0, 7.0)); + test1.insert(2, Vector_(2, 4.0, 5.0)); + + VectorValues test2; + test2.insert(0, Vector_(1, 6.0)); + test2.insert(1, Vector_(2, 1.0, 6.0)); + test2.insert(5, Vector_(2, 4.0, 3.0)); + test2.insert(2, Vector_(2, 1.0, 8.0)); + + // Dot product + double dotExpected = test1.vector().dot(test2.vector()); + double dotActual = test1.dot(test2); + DOUBLES_EQUAL(dotExpected, dotActual, 1e-10); + + // Norm + double normExpected = test1.vector().norm(); + double normActual = test1.norm(); + DOUBLES_EQUAL(normExpected, normActual, 1e-10); + + // Squared norm + double sqNormExpected = test1.vector().norm(); + double sqNormActual = test1.norm(); + DOUBLES_EQUAL(sqNormExpected, sqNormActual, 1e-10); + + // Addition + Vector sumExpected = test1.vector() + test2.vector(); + VectorValues sumActual = test1 + test2; + EXPECT(sumActual.hasSameStructure(test1)); + EXPECT(assert_equal(sumExpected, sumActual.vector())); + Vector sum2Expected = sumActual.vector() + test1.vector(); + VectorValues sum2Actual = sumActual; + sum2Actual += test1; + EXPECT(assert_equal(sum2Expected, sum2Actual.vector())); + + // Subtraction + Vector difExpected = test1.vector() - test2.vector(); + VectorValues difActual = test1 - test2; + EXPECT(difActual.hasSameStructure(test1)); + EXPECT(assert_equal(difExpected, difActual.vector())); + + // Scaling + Vector scalExpected = test1.vector() * 5.0; + VectorValues scalActual = test1; + scalActual *= 5.0; + EXPECT(assert_equal(scalExpected, scalActual.vector())); + VectorValues scal2Actual = 5.0 * test1; + EXPECT(assert_equal(scalExpected, scal2Actual.vector())); +} + +/* ************************************************************************* */ +int main() { TestResult tr; return TestRegistry::runAllTests(tr); } +/* ************************************************************************* */ diff --git a/gtsam/linear/tests/timeSLAMlike.cpp b/gtsam/linear/tests/timeSLAMlike.cpp index 340e16a79..479c03940 100644 --- a/gtsam/linear/tests/timeSLAMlike.cpp +++ b/gtsam/linear/tests/timeSLAMlike.cpp @@ -18,8 +18,6 @@ #include #include -#include -#include #include #include diff --git a/gtsam/navigation/tests/testEquivInertialNavFactor_GlobalVel.cpp b/gtsam/navigation/tests/testEquivInertialNavFactor_GlobalVel.cpp index 6b5eb29f0..752f1887a 100644 --- a/gtsam/navigation/tests/testEquivInertialNavFactor_GlobalVel.cpp +++ b/gtsam/navigation/tests/testEquivInertialNavFactor_GlobalVel.cpp @@ -19,7 +19,7 @@ #include #include #include -#include +#include #include #include #include diff --git a/gtsam/navigation/tests/testInertialNavFactor_GlobalVelocity.cpp b/gtsam/navigation/tests/testInertialNavFactor_GlobalVelocity.cpp index 6622e8bf9..60988980c 100644 --- a/gtsam/navigation/tests/testInertialNavFactor_GlobalVelocity.cpp +++ b/gtsam/navigation/tests/testInertialNavFactor_GlobalVelocity.cpp @@ -21,9 +21,10 @@ #include #include #include -#include +#include #include #include +#include using namespace std; using namespace gtsam; diff --git a/gtsam/nonlinear/DoglegOptimizer.cpp b/gtsam/nonlinear/DoglegOptimizer.cpp index 6356a4c27..03da76eb6 100644 --- a/gtsam/nonlinear/DoglegOptimizer.cpp +++ b/gtsam/nonlinear/DoglegOptimizer.cpp @@ -17,10 +17,11 @@ */ #include - -#include -#include #include +#include +#include +#include +#include #include @@ -53,8 +54,7 @@ std::string DoglegParams::verbosityDLTranslator(VerbosityDL verbosityDL) const { void DoglegOptimizer::iterate(void) { // Linearize graph - const Ordering& ordering = *params_.ordering; - GaussianFactorGraph::shared_ptr linear = graph_.linearize(state_.values, ordering); + GaussianFactorGraph::shared_ptr linear = graph_.linearize(state_.values); // Pull out parameters we'll use const bool dlVerbose = (params_.verbosityDL > DoglegParams::SILENT); @@ -63,16 +63,21 @@ void DoglegOptimizer::iterate(void) { DoglegOptimizerImpl::IterationResult result; if ( params_.isMultifrontal() ) { - GaussianBayesTree bt; - bt.insert(GaussianJunctionTree(*linear).eliminate(params_.getEliminationFunction())); - result = DoglegOptimizerImpl::Iterate(state_.Delta, DoglegOptimizerImpl::ONE_STEP_PER_ITERATION, bt, graph_, state_.values, ordering, state_.error, dlVerbose); + GaussianBayesTree bt = *linear->eliminateMultifrontal(*params_.ordering, params_.getEliminationFunction()); + VectorValues dx_u = bt.optimizeGradientSearch(); + VectorValues dx_n = bt.optimize(); + result = DoglegOptimizerImpl::Iterate(state_.Delta, DoglegOptimizerImpl::ONE_STEP_PER_ITERATION, + dx_u, dx_n, bt, graph_, state_.values, state_.error, dlVerbose); } else if ( params_.isSequential() ) { - GaussianBayesNet::shared_ptr bn = EliminationTree::Create(*linear)->eliminate(params_.getEliminationFunction()); - result = DoglegOptimizerImpl::Iterate(state_.Delta, DoglegOptimizerImpl::ONE_STEP_PER_ITERATION, *bn, graph_, state_.values, ordering, state_.error, dlVerbose); + GaussianBayesNet bn = *linear->eliminateSequential(*params_.ordering, params_.getEliminationFunction()); + VectorValues dx_u = bn.optimizeGradientSearch(); + VectorValues dx_n = bn.optimize(); + result = DoglegOptimizerImpl::Iterate(state_.Delta, DoglegOptimizerImpl::ONE_STEP_PER_ITERATION, + dx_u, dx_n, bn, graph_, state_.values, state_.error, dlVerbose); } else if ( params_.isCG() ) { - throw runtime_error("todo: "); + throw runtime_error("Dogleg is not currently compatible with the linear conjugate gradient solver"); } else { throw runtime_error("Optimization parameter is invalid: DoglegParams::elimination"); @@ -82,10 +87,17 @@ void DoglegOptimizer::iterate(void) { if(params_.verbosity >= NonlinearOptimizerParams::DELTA) result.dx_d.print("delta"); // Create new state with new values and new error - state_.values = state_.values.retract(result.dx_d, ordering); + state_.values = state_.values.retract(result.dx_d); state_.error = result.f_error; state_.Delta = result.Delta; ++state_.iterations; } +/* ************************************************************************* */ +DoglegParams DoglegOptimizer::ensureHasOrdering(DoglegParams params, const NonlinearFactorGraph& graph) const { + if(!params.ordering) + params.ordering = Ordering::COLAMD(graph); + return params; +} + } /* namespace gtsam */ diff --git a/gtsam/nonlinear/DoglegOptimizer.h b/gtsam/nonlinear/DoglegOptimizer.h index 86a8b5570..41028d018 100644 --- a/gtsam/nonlinear/DoglegOptimizer.h +++ b/gtsam/nonlinear/DoglegOptimizer.h @@ -105,7 +105,7 @@ public: */ DoglegOptimizer(const NonlinearFactorGraph& graph, const Values& initialValues, const DoglegParams& params = DoglegParams()) : - NonlinearOptimizer(graph), params_(ensureHasOrdering(params, graph, initialValues)), state_(graph, initialValues, params_) {} + NonlinearOptimizer(graph), params_(ensureHasOrdering(params, graph)), state_(graph, initialValues, params_) {} /** Standard constructor, requires a nonlinear factor graph, initial * variable assignments, and optimization parameters. For convenience this @@ -158,11 +158,7 @@ protected: virtual const NonlinearOptimizerState& _state() const { return state_; } /** Internal function for computing a COLAMD ordering if no ordering is specified */ - DoglegParams ensureHasOrdering(DoglegParams params, const NonlinearFactorGraph& graph, const Values& values) const { - if(!params.ordering) - params.ordering = *graph.orderingCOLAMD(values); - return params; - } + DoglegParams ensureHasOrdering(DoglegParams params, const NonlinearFactorGraph& graph) const; }; } diff --git a/gtsam/nonlinear/DoglegOptimizerImpl.h b/gtsam/nonlinear/DoglegOptimizerImpl.h index ae659f092..c0b52f88b 100644 --- a/gtsam/nonlinear/DoglegOptimizerImpl.h +++ b/gtsam/nonlinear/DoglegOptimizerImpl.h @@ -18,10 +18,8 @@ #include -#include -#include // To get optimize(BayesTree) -//#include -#include +#include +#include namespace gtsam { @@ -102,8 +100,8 @@ struct GTSAM_EXPORT DoglegOptimizerImpl { */ template static IterationResult Iterate( - double Delta, TrustRegionAdaptationMode mode, const M& Rd, - const F& f, const VALUES& x0, const Ordering& ordering, const double f_error, const bool verbose=false); + double Delta, TrustRegionAdaptationMode mode, const VectorValues& dx_u, const VectorValues& dx_n, + const M& Rd, const F& f, const VALUES& x0, const double f_error, const bool verbose=false); /** * Compute the dogleg point given a trust region radius \f$ \Delta \f$. The @@ -145,26 +143,12 @@ struct GTSAM_EXPORT DoglegOptimizerImpl { /* ************************************************************************* */ template typename DoglegOptimizerImpl::IterationResult DoglegOptimizerImpl::Iterate( - double Delta, TrustRegionAdaptationMode mode, const M& Rd, - const F& f, const VALUES& x0, const Ordering& ordering, const double f_error, const bool verbose) { - - // Compute steepest descent and Newton's method points - gttic(optimizeGradientSearch); - gttic(allocateVectorValues); - VectorValues dx_u = *allocateVectorValues(Rd); - gttoc(allocateVectorValues); - gttic(optimizeGradientSearchInPlace); - optimizeGradientSearchInPlace(Rd, dx_u); - gttoc(optimizeGradientSearchInPlace); - gttoc(optimizeGradientSearch); - gttic(optimizeInPlace); - VectorValues dx_n(VectorValues::SameStructure(dx_u)); - optimizeInPlace(Rd, dx_n); - gttoc(optimizeInPlace); - gttic(jfg_error); - const GaussianFactorGraph jfg(Rd); - const double M_error = jfg.error(VectorValues::Zero(dx_u)); - gttoc(jfg_error); + double Delta, TrustRegionAdaptationMode mode, const VectorValues& dx_u, const VectorValues& dx_n, + const M& Rd, const F& f, const VALUES& x0, const double f_error, const bool verbose) +{ + gttic(M_error); + const double M_error = Rd.error(VectorValues::Zero(dx_u)); + gttoc(M_error); // Result to return IterationResult result; @@ -181,7 +165,7 @@ typename DoglegOptimizerImpl::IterationResult DoglegOptimizerImpl::Iterate( gttic(retract); // Compute expmapped solution - const VALUES x_d(x0.retract(result.dx_d, ordering)); + const VALUES x_d(x0.retract(result.dx_d)); gttoc(retract); gttic(decrease_in_f); @@ -189,10 +173,10 @@ typename DoglegOptimizerImpl::IterationResult DoglegOptimizerImpl::Iterate( result.f_error = f.error(x_d); gttoc(decrease_in_f); - gttic(decrease_in_M); + gttic(new_M_error); // Compute decrease in M - const double new_M_error = jfg.error(result.dx_d); - gttoc(decrease_in_M); + const double new_M_error = Rd.error(result.dx_d); + gttoc(new_M_error); if(verbose) std::cout << std::setprecision(15) << "f error: " << f_error << " -> " << result.f_error << std::endl; if(verbose) std::cout << std::setprecision(15) << "M error: " << M_error << " -> " << new_M_error << std::endl; diff --git a/gtsam/nonlinear/ExtendedKalmanFilter-inl.h b/gtsam/nonlinear/ExtendedKalmanFilter-inl.h index e56bc3cda..ef9ad234c 100644 --- a/gtsam/nonlinear/ExtendedKalmanFilter-inl.h +++ b/gtsam/nonlinear/ExtendedKalmanFilter-inl.h @@ -20,7 +20,6 @@ #include #include -#include #include #include @@ -29,45 +28,40 @@ namespace gtsam { /* ************************************************************************* */ template typename ExtendedKalmanFilter::T ExtendedKalmanFilter::solve_( - const GaussianFactorGraph& linearFactorGraph, const Ordering& ordering, + const GaussianFactorGraph& linearFactorGraph, const Values& linearizationPoints, Key lastKey, - JacobianFactor::shared_ptr& newPrior) const { - - // Extract the Index of the provided last key - gtsam::Index lastIndex = ordering.at(lastKey); - + JacobianFactor::shared_ptr& newPrior) const + { + // Compute the marginal on the last key // Solve the linear factor graph, converting it into a linear Bayes Network // P(x0,x1) = P(x0|x1)*P(x1) - GaussianSequentialSolver solver(linearFactorGraph); - GaussianBayesNet::shared_ptr linearBayesNet = solver.eliminate(); + Ordering lastKeyAsOrdering; + lastKeyAsOrdering += lastKey; + const GaussianConditional::shared_ptr marginal = + linearFactorGraph.marginalMultifrontalBayesNet(lastKeyAsOrdering)->front(); - // Extract the current estimate of x1,P1 from the Bayes Network - VectorValues result = optimize(*linearBayesNet); - T x = linearizationPoints.at(lastKey).retract(result[lastIndex]); + // Extract the current estimate of x1,P1 + VectorValues result = marginal->solve(VectorValues()); + T x = linearizationPoints.at(lastKey).retract(result[lastKey]); // Create a Jacobian Factor from the root node of the produced Bayes Net. // This will act as a prior for the next iteration. // The linearization point of this prior must be moved to the new estimate of x, // and the key/index needs to be reset to 0, the first key in the next iteration. - const GaussianConditional::shared_ptr& cg = linearBayesNet->back(); - assert(cg->nrFrontals() == 1); - assert(cg->nrParents() == 0); - // TODO: Find a way to create the correct Jacobian Factor in a single pass - JacobianFactor tmpPrior = JacobianFactor(*cg); - newPrior = JacobianFactor::shared_ptr( - new JacobianFactor( - 0, - tmpPrior.getA(tmpPrior.begin()), - tmpPrior.getb() - - tmpPrior.getA(tmpPrior.begin()) * result[lastIndex], - tmpPrior.get_model())); + assert(marginal->nrFrontals() == 1); + assert(marginal->nrParents() == 0); + newPrior = boost::make_shared( + marginal->keys().front(), + marginal->getA(marginal->begin()), + marginal->getb() - marginal->getA(marginal->begin()) * result[lastKey], + marginal->get_model()); return x; } /* ************************************************************************* */ template - ExtendedKalmanFilter::ExtendedKalmanFilter(T x_initial, + ExtendedKalmanFilter::ExtendedKalmanFilter(Key key_initial, T x_initial, noiseModel::Gaussian::shared_ptr P_initial) { // Set the initial linearization point to the provided mean @@ -77,7 +71,7 @@ namespace gtsam { // Since x0 is set to the provided mean, the b vector in the prior will be zero // TODO Frank asks: is there a reason why noiseModel is not simply P_initial ? priorFactor_ = JacobianFactor::shared_ptr( - new JacobianFactor(0, P_initial->R(), Vector::Zero(x_initial.dim()), + new JacobianFactor(key_initial, P_initial->R(), Vector::Zero(x_initial.dim()), noiseModel::Unit::Create(P_initial->dim()))); } @@ -94,11 +88,6 @@ namespace gtsam { Key x0 = motionFactor.key1(); Key x1 = motionFactor.key2(); - // Create an elimination ordering - Ordering ordering; - ordering.insert(x0, 0); - ordering.insert(x1, 1); - // Create a set of linearization points Values linearizationPoints; linearizationPoints.insert(x0, x_); @@ -112,11 +101,11 @@ namespace gtsam { // Linearize motion model and add it to the Kalman Filter graph linearFactorGraph.push_back( - motionFactor.linearize(linearizationPoints, ordering)); + motionFactor.linearize(linearizationPoints)); // Solve the factor graph and update the current state estimate // and the posterior for the next iteration. - x_ = solve_(linearFactorGraph, ordering, linearizationPoints, x1, priorFactor_); + x_ = solve_(linearFactorGraph, linearizationPoints, x1, priorFactor_); return x_; } @@ -133,10 +122,6 @@ namespace gtsam { // Create Keys Key x0 = measurementFactor.key(); - // Create an elimination ordering - Ordering ordering; - ordering.insert(x0, 0); - // Create a set of linearization points Values linearizationPoints; linearizationPoints.insert(x0, x_); @@ -149,11 +134,11 @@ namespace gtsam { // Linearize measurement factor and add it to the Kalman Filter graph linearFactorGraph.push_back( - measurementFactor.linearize(linearizationPoints, ordering)); + measurementFactor.linearize(linearizationPoints)); // Solve the factor graph and update the current state estimate // and the prior factor for the next iteration - x_ = solve_(linearFactorGraph, ordering, linearizationPoints, x0, priorFactor_); + x_ = solve_(linearFactorGraph, linearizationPoints, x0, priorFactor_); return x_; } diff --git a/gtsam/nonlinear/ExtendedKalmanFilter.h b/gtsam/nonlinear/ExtendedKalmanFilter.h index a71a5332a..7bbd14980 100644 --- a/gtsam/nonlinear/ExtendedKalmanFilter.h +++ b/gtsam/nonlinear/ExtendedKalmanFilter.h @@ -54,7 +54,7 @@ namespace gtsam { JacobianFactor::shared_ptr priorFactor_; // density T solve_(const GaussianFactorGraph& linearFactorGraph, - const Ordering& ordering, const Values& linearizationPoints, + const Values& linearizationPoints, Key x, JacobianFactor::shared_ptr& newPrior) const; public: @@ -62,7 +62,7 @@ namespace gtsam { /// @name Standard Constructors /// @{ - ExtendedKalmanFilter(T x_initial, + ExtendedKalmanFilter(Key key_initial, T x_initial, noiseModel::Gaussian::shared_ptr P_initial); /// @} diff --git a/gtsam/nonlinear/GaussNewtonOptimizer.cpp b/gtsam/nonlinear/GaussNewtonOptimizer.cpp index 06e9ab148..e0e66adab 100644 --- a/gtsam/nonlinear/GaussNewtonOptimizer.cpp +++ b/gtsam/nonlinear/GaussNewtonOptimizer.cpp @@ -17,6 +17,8 @@ */ #include +#include +#include using namespace std; @@ -28,7 +30,7 @@ void GaussNewtonOptimizer::iterate() { const NonlinearOptimizerState& current = state_; // Linearize graph - GaussianFactorGraph::shared_ptr linear = graph_.linearize(current.values, *params_.ordering); + GaussianFactorGraph::shared_ptr linear = graph_.linearize(current.values); // Solve Factor Graph const VectorValues delta = solveGaussianFactorGraph(*linear, params_); @@ -37,9 +39,18 @@ void GaussNewtonOptimizer::iterate() { if(params_.verbosity >= NonlinearOptimizerParams::DELTA) delta.print("delta"); // Create new state with new values and new error - state_.values = current.values.retract(delta, *params_.ordering); + state_.values = current.values.retract(delta); state_.error = graph_.error(state_.values); ++ state_.iterations; } +/* ************************************************************************* */ +GaussNewtonParams GaussNewtonOptimizer::ensureHasOrdering( + GaussNewtonParams params, const NonlinearFactorGraph& graph) const +{ + if(!params.ordering) + params.ordering = Ordering::COLAMD(graph); + return params; +} + } /* namespace gtsam */ diff --git a/gtsam/nonlinear/GaussNewtonOptimizer.h b/gtsam/nonlinear/GaussNewtonOptimizer.h index 67daf7305..1d267071c 100644 --- a/gtsam/nonlinear/GaussNewtonOptimizer.h +++ b/gtsam/nonlinear/GaussNewtonOptimizer.h @@ -61,7 +61,7 @@ public: */ GaussNewtonOptimizer(const NonlinearFactorGraph& graph, const Values& initialValues, const GaussNewtonParams& params = GaussNewtonParams()) : - NonlinearOptimizer(graph), params_(ensureHasOrdering(params, graph, initialValues)), state_(graph, initialValues) {} + NonlinearOptimizer(graph), params_(ensureHasOrdering(params, graph)), state_(graph, initialValues) {} /** Standard constructor, requires a nonlinear factor graph, initial * variable assignments, and optimization parameters. For convenience this @@ -110,11 +110,7 @@ protected: virtual const NonlinearOptimizerState& _state() const { return state_; } /** Internal function for computing a COLAMD ordering if no ordering is specified */ - GaussNewtonParams ensureHasOrdering(GaussNewtonParams params, const NonlinearFactorGraph& graph, const Values& values) const { - if(!params.ordering) - params.ordering = *graph.orderingCOLAMD(values); - return params; - } + GaussNewtonParams ensureHasOrdering(GaussNewtonParams params, const NonlinearFactorGraph& graph) const; }; diff --git a/gtsam/nonlinear/ISAM2-impl.cpp b/gtsam/nonlinear/ISAM2-impl.cpp index 9abd62016..92f6425dc 100644 --- a/gtsam/nonlinear/ISAM2-impl.cpp +++ b/gtsam/nonlinear/ISAM2-impl.cpp @@ -30,137 +30,60 @@ namespace gtsam { /* ************************************************************************* */ void ISAM2::Impl::AddVariables( const Values& newTheta, Values& theta, VectorValues& delta, - VectorValues& deltaNewton, VectorValues& RgProd, vector& replacedKeys, - Ordering& ordering, const KeyFormatter& keyFormatter) { + VectorValues& deltaNewton, VectorValues& RgProd, + const KeyFormatter& keyFormatter) +{ const bool debug = ISDEBUG("ISAM2 AddVariables"); theta.insert(newTheta); if(debug) newTheta.print("The new variables are: "); - // Add the new keys onto the ordering, add zeros to the delta for the new variables - std::vector dims(newTheta.dims(*newTheta.orderingArbitrary())); - if(debug) cout << "New variables have total dimensionality " << accumulate(dims.begin(), dims.end(), 0) << endl; - const size_t originalnVars = delta.size(); - delta.append(dims); - deltaNewton.append(dims); - RgProd.append(dims); - for(Index j = originalnVars; j < delta.size(); ++j) { - delta[j].setZero(); - deltaNewton[j].setZero(); - RgProd[j].setZero(); - } - { - Index nextVar = originalnVars; - BOOST_FOREACH(const Values::ConstKeyValuePair& key_value, newTheta) { - ordering.insert(key_value.key, nextVar); - if(debug) cout << "Adding variable " << keyFormatter(key_value.key) << " with order " << nextVar << endl; - ++ nextVar; - } - assert(ordering.size() == delta.size()); - } - replacedKeys.resize(ordering.size(), false); + // Add zeros into the VectorValues + delta.insert(newTheta.zeroVectors()); + deltaNewton.insert(newTheta.zeroVectors()); + RgProd.insert(newTheta.zeroVectors()); } /* ************************************************************************* */ -void ISAM2::Impl::RemoveVariables(const FastSet& unusedKeys, const ISAM2Clique::shared_ptr& root, +void ISAM2::Impl::RemoveVariables(const FastSet& unusedKeys, const std::vector& roots, Values& theta, VariableIndex& variableIndex, VectorValues& delta, VectorValues& deltaNewton, VectorValues& RgProd, - std::vector& replacedKeys, Ordering& ordering, Base::Nodes& nodes, - GaussianFactorGraph& linearFactors, FastSet& fixedVariables) { - - // Get indices of unused keys - vector unusedIndices; unusedIndices.reserve(unusedKeys.size()); - BOOST_FOREACH(Key key, unusedKeys) { unusedIndices.push_back(ordering[key]); } - - // Create a permutation that shifts the unused variables to the end - Permutation unusedToEnd = Permutation::PushToBack(unusedIndices, delta.size()); - Permutation unusedToEndInverse = *unusedToEnd.inverse(); - - // Use the permutation to remove unused variables while shifting all the others to take up the space - variableIndex.permuteInPlace(unusedToEnd); - variableIndex.removeUnusedAtEnd(unusedIndices.size()); - { - // Create a vector of variable dimensions with the unused ones removed - // by applying the unusedToEnd permutation to the original vector of - // variable dimensions. We only allocate space in the shifted dims - // vector for the used variables, so that the unused ones are dropped - // when the permutation is applied. - vector originalDims = delta.dims(); - vector dims(delta.size() - unusedIndices.size()); - unusedToEnd.applyToCollection(dims, originalDims); - - // Copy from the old data structures to new ones, only iterating up to - // the number of used variables, and applying the unusedToEnd permutation - // in order to remove the unused variables. - VectorValues newDelta(dims); - VectorValues newDeltaNewton(dims); - VectorValues newDeltaGradSearch(dims); - std::vector newReplacedKeys(replacedKeys.size() - unusedIndices.size()); - Base::Nodes newNodes(replacedKeys.size() - unusedIndices.size()); - - for(size_t j = 0; j < dims.size(); ++j) { - newDelta[j] = delta[unusedToEnd[j]]; - newDeltaNewton[j] = deltaNewton[unusedToEnd[j]]; - newDeltaGradSearch[j] = RgProd[unusedToEnd[j]]; - newReplacedKeys[j] = replacedKeys[unusedToEnd[j]]; - newNodes[j] = nodes[unusedToEnd[j]]; - } - - // Swap the new data structures with the outputs of this function - delta.swap(newDelta); - deltaNewton.swap(newDeltaNewton); - RgProd.swap(newDeltaGradSearch); - replacedKeys.swap(newReplacedKeys); - nodes.swap(newNodes); - } - - // Reorder and remove from ordering, solution, and fixed keys - ordering.permuteInPlace(unusedToEnd); - BOOST_REVERSE_FOREACH(Key key, unusedKeys) { - Ordering::value_type removed = ordering.pop_back(); - assert(removed.first == key); - theta.erase(key); - fixedVariables.erase(key); - } - - // Finally, permute references to variables - if(root) - root->permuteWithInverse(unusedToEndInverse); - linearFactors.permuteWithInverse(unusedToEndInverse); -} - -/* ************************************************************************* */ -FastSet ISAM2::Impl::IndicesFromFactors(const Ordering& ordering, const NonlinearFactorGraph& factors) { - FastSet indices; - BOOST_FOREACH(const NonlinearFactor::shared_ptr& factor, factors) { - BOOST_FOREACH(Key key, factor->keys()) { - indices.insert(ordering[key]); - } + FastSet& replacedKeys, Base::Nodes& nodes, + FastSet& fixedVariables) +{ + variableIndex.removeUnusedVariables(unusedKeys.begin(), unusedKeys.end()); + BOOST_FOREACH(Key key, unusedKeys) { + delta.erase(key); + deltaNewton.erase(key); + RgProd.erase(key); + replacedKeys.erase(key); + nodes.erase(key); + theta.erase(key); + fixedVariables.erase(key); } - return indices; } /* ************************************************************************* */ -FastSet ISAM2::Impl::CheckRelinearizationFull(const VectorValues& delta, const Ordering& ordering, - const ISAM2Params::RelinearizationThreshold& relinearizeThreshold, const KeyFormatter& keyFormatter) { +FastSet ISAM2::Impl::CheckRelinearizationFull(const VectorValues& delta, + const ISAM2Params::RelinearizationThreshold& relinearizeThreshold) +{ FastSet relinKeys; - if(relinearizeThreshold.type() == typeid(double)) { - double threshold = boost::get(relinearizeThreshold); - for(Index var=0; var(); - if(maxDelta >= threshold) { - relinKeys.insert(var); - } + if(const double* threshold = boost::get(&relinearizeThreshold)) + { + BOOST_FOREACH(const VectorValues::KeyValuePair& key_delta, delta) { + double maxDelta = key_delta.second.lpNorm(); + if(maxDelta >= *threshold) + relinKeys.insert(key_delta.first); } - } else if(relinearizeThreshold.type() == typeid(FastMap)) { - const FastMap& thresholds = boost::get >(relinearizeThreshold); - BOOST_FOREACH(const Ordering::value_type& key_index, ordering) { - const Vector& threshold = thresholds.find(Symbol(key_index.first).chr())->second; - Index j = key_index.second; - if(threshold.rows() != delta[j].rows()) - throw std::invalid_argument("Relinearization threshold vector dimensionality passed into iSAM2 parameters does not match actual variable dimensionality"); - if((delta[j].array().abs() > threshold.array()).any()) - relinKeys.insert(j); + } + else if(const FastMap* thresholds = boost::get >(&relinearizeThreshold)) + { + BOOST_FOREACH(const VectorValues::KeyValuePair& key_delta, delta) { + const Vector& threshold = thresholds->find(Symbol(key_delta.first).chr())->second; + if(threshold.rows() != key_delta.second.rows()) + throw std::invalid_argument("Relinearization threshold vector dimensionality for '" + std::string(1, Symbol(key_delta.first).chr()) + "' passed into iSAM2 parameters does not match actual variable dimensionality."); + if((key_delta.second.array().abs() > threshold.array()).any()) + relinKeys.insert(key_delta.first); } } @@ -168,11 +91,12 @@ FastSet ISAM2::Impl::CheckRelinearizationFull(const VectorValues& delta, } /* ************************************************************************* */ -void CheckRelinearizationRecursiveDouble(FastSet& relinKeys, double threshold, const VectorValues& delta, const ISAM2Clique::shared_ptr& clique) { - +void CheckRelinearizationRecursiveDouble(FastSet& relinKeys, double threshold, + const VectorValues& delta, const ISAM2Clique::shared_ptr& clique) +{ // Check the current clique for relinearization bool relinearize = false; - BOOST_FOREACH(Index var, clique->conditional()->keys()) { + BOOST_FOREACH(Key var, *clique->conditional()) { double maxDelta = delta[var].lpNorm(); if(maxDelta >= threshold) { relinKeys.insert(var); @@ -182,31 +106,31 @@ void CheckRelinearizationRecursiveDouble(FastSet& relinKeys, double thres // If this node was relinearized, also check its children if(relinearize) { - BOOST_FOREACH(const ISAM2Clique::shared_ptr& child, clique->children()) { + BOOST_FOREACH(const ISAM2Clique::shared_ptr& child, clique->children) { CheckRelinearizationRecursiveDouble(relinKeys, threshold, delta, child); } } } /* ************************************************************************* */ -void CheckRelinearizationRecursiveMap(FastSet& relinKeys, const FastMap& thresholds, const VectorValues& delta, const Ordering& ordering, const ISAM2Clique::shared_ptr& clique) { - +void CheckRelinearizationRecursiveMap(FastSet& relinKeys, const FastMap& thresholds, + const VectorValues& delta, + const ISAM2Clique::shared_ptr& clique) +{ // Check the current clique for relinearization bool relinearize = false; - BOOST_FOREACH(Index var, clique->conditional()->keys()) { - - // Lookup the key associated with this index - Key key = ordering.key(var); - + BOOST_FOREACH(Key var, *clique->conditional()) { // Find the threshold for this variable type - const Vector& threshold = thresholds.find(Symbol(key).chr())->second; + const Vector& threshold = thresholds.find(Symbol(var).chr())->second; + + const Vector& deltaVar = delta[var]; // Verify the threshold vector matches the actual variable size - if(threshold.rows() != delta[var].rows()) - throw std::invalid_argument("Relinearization threshold vector dimensionality passed into iSAM2 parameters does not match actual variable dimensionality"); + if(threshold.rows() != deltaVar.rows()) + throw std::invalid_argument("Relinearization threshold vector dimensionality for '" + std::string(1, Symbol(var).chr()) + "' passed into iSAM2 parameters does not match actual variable dimensionality."); // Check for relinearization - if((delta[var].array().abs() > threshold.array()).any()) { + if((deltaVar.array().abs() > threshold.array()).any()) { relinKeys.insert(var); relinearize = true; } @@ -214,52 +138,54 @@ void CheckRelinearizationRecursiveMap(FastSet& relinKeys, const FastMapchildren()) { - CheckRelinearizationRecursiveMap(relinKeys, thresholds, delta, ordering, child); + BOOST_FOREACH(const ISAM2Clique::shared_ptr& child, clique->children) { + CheckRelinearizationRecursiveMap(relinKeys, thresholds, delta, child); } } } /* ************************************************************************* */ -FastSet ISAM2::Impl::CheckRelinearizationPartial(const ISAM2Clique::shared_ptr& root, const VectorValues& delta, const Ordering& ordering, - const ISAM2Params::RelinearizationThreshold& relinearizeThreshold, const KeyFormatter& keyFormatter) { - +FastSet ISAM2::Impl::CheckRelinearizationPartial(const std::vector& roots, + const VectorValues& delta, + const ISAM2Params::RelinearizationThreshold& relinearizeThreshold) +{ FastSet relinKeys; - - if(root) { - if(relinearizeThreshold.type() == typeid(double)) { + BOOST_FOREACH(const ISAM2::sharedClique& root, roots) { + if(relinearizeThreshold.type() == typeid(double)) CheckRelinearizationRecursiveDouble(relinKeys, boost::get(relinearizeThreshold), delta, root); - } else if(relinearizeThreshold.type() == typeid(FastMap)) { - CheckRelinearizationRecursiveMap(relinKeys, boost::get >(relinearizeThreshold), delta, ordering, root); - } + else if(relinearizeThreshold.type() == typeid(FastMap)) + CheckRelinearizationRecursiveMap(relinKeys, boost::get >(relinearizeThreshold), delta, root); } - return relinKeys; } /* ************************************************************************* */ -void ISAM2::Impl::FindAll(ISAM2Clique::shared_ptr clique, FastSet& keys, const vector& markedMask) { +void ISAM2::Impl::FindAll(ISAM2Clique::shared_ptr clique, FastSet& keys, const FastSet& markedMask) +{ static const bool debug = false; // does the separator contain any of the variables? bool found = false; - BOOST_FOREACH(const Index& key, (*clique)->parents()) { - if (markedMask[key]) + BOOST_FOREACH(Key key, clique->conditional()->parents()) { + if (markedMask.exists(key)) { found = true; + break; + } } if (found) { // then add this clique - keys.insert((*clique)->beginFrontals(), (*clique)->endFrontals()); + keys.insert(clique->conditional()->beginFrontals(), clique->conditional()->endFrontals()); if(debug) clique->print("Key(s) marked in clique "); - if(debug) cout << "so marking key " << (*clique)->keys().front() << endl; + if(debug) cout << "so marking key " << clique->conditional()->front() << endl; } - BOOST_FOREACH(const ISAM2Clique::shared_ptr& child, clique->children_) { + BOOST_FOREACH(const ISAM2Clique::shared_ptr& child, clique->children) { FindAll(child, keys, markedMask); } } /* ************************************************************************* */ -void ISAM2::Impl::ExpmapMasked(Values& values, const VectorValues& delta, const Ordering& ordering, - const vector& mask, boost::optional invalidateIfDebug, const KeyFormatter& keyFormatter) { +void ISAM2::Impl::ExpmapMasked(Values& values, const VectorValues& delta, + const FastSet& mask, boost::optional invalidateIfDebug, const KeyFormatter& keyFormatter) +{ // If debugging, invalidate if requested, otherwise do not invalidate. // Invalidating means setting expmapped entries to Inf, to trigger assertions // if we try to re-use them. @@ -267,23 +193,16 @@ void ISAM2::Impl::ExpmapMasked(Values& values, const VectorValues& delta, const invalidateIfDebug = boost::none; #endif - assert(values.size() == ordering.size()); - assert(delta.size() == ordering.size()); + assert(values.size() == delta.size()); Values::iterator key_value; - Ordering::const_iterator key_index; - for(key_value = values.begin(), key_index = ordering.begin(); - key_value != values.end() && key_index != ordering.end(); ++key_value, ++key_index) { - assert(key_value->key == key_index->first); - const Index var = key_index->second; - if(ISDEBUG("ISAM2 update verbose")) { - if(mask[var]) - cout << "expmap " << keyFormatter(key_value->key) << " (j = " << var << "), delta = " << delta[var].transpose() << endl; - else - cout << " " << keyFormatter(key_value->key) << " (j = " << var << "), delta = " << delta[var].transpose() << endl; - } + VectorValues::const_iterator key_delta; + for(key_value = values.begin(), key_delta = delta.begin(); key_value != values.end(); ++key_value, ++key_delta) + { + assert(key_value->key == key_delta->first); + Key var = key_value->key; assert(delta[var].size() == (int)key_value->value.dim()); - assert(delta[var].unaryExpr(ptr_fun(isfinite)).all()); - if(mask[var]) { + assert(delta[var].allFinite()); + if(mask.exists(var)) { Value* retracted = key_value->value.retract_(delta[var]); key_value->value = *retracted; retracted->deallocate_(); @@ -293,134 +212,35 @@ void ISAM2::Impl::ExpmapMasked(Values& values, const VectorValues& delta, const } } -/* ************************************************************************* */ -ISAM2::Impl::PartialSolveResult -ISAM2::Impl::PartialSolve(GaussianFactorGraph& factors, - const FastSet& keys, const ReorderingMode& reorderingMode, bool useQR) { - - const bool debug = ISDEBUG("ISAM2 recalculate"); - - PartialSolveResult result; - - gttic(select_affected_variables); -#ifdef GTSAM_EXTRA_CONSISTENCY_CHECKS - // Debug check that all variables involved in the factors to be re-eliminated - // are in affectedKeys, since we will use it to select a subset of variables. - BOOST_FOREACH(const GaussianFactor::shared_ptr& factor, factors) { - BOOST_FOREACH(Index key, factor->keys()) { - assert(find(keys.begin(), keys.end(), key) != keys.end()); - } - } -#endif - Permutation affectedKeysSelector(keys.size()); // Create a permutation that pulls the affected keys to the front - Permutation affectedKeysSelectorInverse(keys.size() > 0 ? *keys.rbegin()+1 : 0 /*ordering_.nVars()*/); // And its inverse -#ifndef NDEBUG - // If debugging, fill with invalid values that will trip asserts if dereferenced - std::fill(affectedKeysSelectorInverse.begin(), affectedKeysSelectorInverse.end(), numeric_limits::max()); -#endif - { Index position=0; BOOST_FOREACH(Index var, keys) { - affectedKeysSelector[position] = var; - affectedKeysSelectorInverse[var] = position; - ++ position; } } - if(debug) affectedKeysSelector.print("affectedKeysSelector: "); - if(debug) affectedKeysSelectorInverse.print("affectedKeysSelectorInverse: "); - factors.permuteWithInverse(affectedKeysSelectorInverse); - if(debug) factors.print("Factors to reorder/re-eliminate: "); - gttoc(select_affected_variables); - gttic(variable_index); - VariableIndex affectedFactorsIndex(factors); // Create a variable index for the factors to be re-eliminated - if(debug) affectedFactorsIndex.print("affectedFactorsIndex: "); - gttoc(variable_index); - gttic(ccolamd); - vector cmember(affectedKeysSelector.size(), 0); - if(reorderingMode.constrain == ReorderingMode::CONSTRAIN_LAST) { - assert(reorderingMode.constrainedKeys); - if(!reorderingMode.constrainedKeys->empty()) { - typedef std::pair Index_Group; - if(keys.size() > reorderingMode.constrainedKeys->size()) { // Only if some variables are unconstrained - BOOST_FOREACH(const Index_Group& index_group, *reorderingMode.constrainedKeys) { - cmember[affectedKeysSelectorInverse[index_group.first]] = index_group.second; } - } else { - int minGroup = *boost::range::min_element(boost::adaptors::values(*reorderingMode.constrainedKeys)); - BOOST_FOREACH(const Index_Group& index_group, *reorderingMode.constrainedKeys) { - cmember[affectedKeysSelectorInverse[index_group.first]] = index_group.second - minGroup; } - } - } - } - Permutation::shared_ptr affectedColamd(inference::PermutationCOLAMD_(affectedFactorsIndex, cmember)); - gttoc(ccolamd); - gttic(ccolamd_permutations); - Permutation::shared_ptr affectedColamdInverse(affectedColamd->inverse()); - if(debug) affectedColamd->print("affectedColamd: "); - if(debug) affectedColamdInverse->print("affectedColamdInverse: "); - result.reorderingSelector = affectedKeysSelector; - result.reorderingPermutation = *affectedColamd; - result.reorderingInverse = internal::Reduction::CreateFromPartialPermutation(affectedKeysSelector, *affectedColamdInverse); - gttoc(ccolamd_permutations); - - gttic(permute_affected_variable_index); - affectedFactorsIndex.permuteInPlace(*affectedColamd); - gttoc(permute_affected_variable_index); - - gttic(permute_affected_factors); - factors.permuteWithInverse(*affectedColamdInverse); - gttoc(permute_affected_factors); - - if(debug) factors.print("Colamd-ordered affected factors: "); - -#ifdef GTSAM_EXTRA_CONSISTENCY_CHECKS - VariableIndex fromScratchIndex(factors); - assert(assert_equal(fromScratchIndex, affectedFactorsIndex)); -#endif - - // eliminate into a Bayes net - gttic(eliminate); - JunctionTree jt(factors, affectedFactorsIndex); - if(!useQR) - result.bayesTree = jt.eliminate(EliminatePreferCholesky); - else - result.bayesTree = jt.eliminate(EliminateQR); - gttoc(eliminate); - - gttic(permute_eliminated); - if(result.bayesTree) result.bayesTree->permuteWithInverse(affectedKeysSelector); - if(debug && result.bayesTree) { - cout << "Full var-ordered eliminated BT:\n"; - result.bayesTree->printTree(""); - } - // Undo permutation on our subset of cached factors, we must later permute *all* of the cached factors - factors.permuteWithInverse(*affectedColamd); - factors.permuteWithInverse(affectedKeysSelector); - gttoc(permute_eliminated); - - return result; -} - /* ************************************************************************* */ namespace internal { inline static void optimizeInPlace(const boost::shared_ptr& clique, VectorValues& result) { // parents are assumed to already be solved and available in result - clique->conditional()->solveInPlace(result); + result.update(clique->conditional()->solve(result)); // starting from the root, call optimize on each conditional - BOOST_FOREACH(const boost::shared_ptr& child, clique->children_) + BOOST_FOREACH(const boost::shared_ptr& child, clique->children) optimizeInPlace(child, result); } } /* ************************************************************************* */ -size_t ISAM2::Impl::UpdateDelta(const boost::shared_ptr& root, std::vector& replacedKeys, VectorValues& delta, double wildfireThreshold) { +size_t ISAM2::Impl::UpdateDelta(const std::vector& roots, FastSet& replacedKeys, VectorValues& delta, double wildfireThreshold) { size_t lastBacksubVariableCount; if (wildfireThreshold <= 0.0) { // Threshold is zero or less, so do a full recalculation - internal::optimizeInPlace(root, delta); + BOOST_FOREACH(const ISAM2::sharedClique& root, roots) + internal::optimizeInPlace(root, delta); lastBacksubVariableCount = delta.size(); } else { // Optimize with wildfire - lastBacksubVariableCount = optimizeWildfireNonRecursive(root, wildfireThreshold, replacedKeys, delta); // modifies delta_ + lastBacksubVariableCount = 0; + BOOST_FOREACH(const ISAM2::sharedClique& root, roots) + lastBacksubVariableCount += optimizeWildfireNonRecursive( + root, wildfireThreshold, replacedKeys, delta); // modifies delta #ifdef GTSAM_EXTRA_CONSISTENCY_CHECKS for(size_t j=0; j& root, std: } // Clear replacedKeys - replacedKeys.assign(replacedKeys.size(), false); + replacedKeys.clear(); return lastBacksubVariableCount; } /* ************************************************************************* */ namespace internal { -void updateDoglegDeltas(const boost::shared_ptr& clique, const std::vector& replacedKeys, - const VectorValues& grad, VectorValues& deltaNewton, VectorValues& RgProd, size_t& varsUpdated) { +void updateDoglegDeltas(const boost::shared_ptr& clique, const FastSet& replacedKeys, + const VectorValues& grad, VectorValues& RgProd, size_t& varsUpdated) { // Check if any frontal or separator keys were recalculated, if so, we need // update deltas and recurse to children, but if not, we do not need to // recurse further because of the running separator property. bool anyReplaced = false; - BOOST_FOREACH(Index j, *clique->conditional()) { - if(replacedKeys[j]) { + BOOST_FOREACH(Key j, *clique->conditional()) { + if(replacedKeys.exists(j)) { anyReplaced = true; break; } @@ -453,41 +273,49 @@ void updateDoglegDeltas(const boost::shared_ptr& clique, const std: if(anyReplaced) { // Update the current variable // Get VectorValues slice corresponding to current variables - Vector gR = internal::extractVectorValuesSlices(grad, (*clique)->beginFrontals(), (*clique)->endFrontals()); - Vector gS = internal::extractVectorValuesSlices(grad, (*clique)->beginParents(), (*clique)->endParents()); + Vector gR = grad.vector(std::vector(clique->conditional()->beginFrontals(), clique->conditional()->endFrontals())); + Vector gS = grad.vector(std::vector(clique->conditional()->beginParents(), clique->conditional()->endParents())); // Compute R*g and S*g for this clique - Vector RSgProd = (*clique)->get_R() * gR + (*clique)->get_S() * gS; + Vector RSgProd = clique->conditional()->get_R() * gR + clique->conditional()->get_S() * gS; // Write into RgProd vector - internal::writeVectorValuesSlices(RSgProd, RgProd, (*clique)->beginFrontals(), (*clique)->endFrontals()); + DenseIndex vectorPosition = 0; + BOOST_FOREACH(Key frontal, clique->conditional()->frontals()) { + Vector& RgProdValue = RgProd[frontal]; + RgProdValue = RSgProd.segment(vectorPosition, RgProdValue.size()); + vectorPosition += RgProdValue.size(); + } // Now solve the part of the Newton's method point for this clique (back-substitution) //(*clique)->solveInPlace(deltaNewton); - varsUpdated += (*clique)->nrFrontals(); + varsUpdated += clique->conditional()->nrFrontals(); // Recurse to children - BOOST_FOREACH(const ISAM2Clique::shared_ptr& child, clique->children()) { - updateDoglegDeltas(child, replacedKeys, grad, deltaNewton, RgProd, varsUpdated); } + BOOST_FOREACH(const ISAM2Clique::shared_ptr& child, clique->children) { + updateDoglegDeltas(child, replacedKeys, grad, RgProd, varsUpdated); } } } } /* ************************************************************************* */ -size_t ISAM2::Impl::UpdateDoglegDeltas(const ISAM2& isam, double wildfireThreshold, std::vector& replacedKeys, +size_t ISAM2::Impl::UpdateDoglegDeltas(const ISAM2& isam, double wildfireThreshold, FastSet& replacedKeys, VectorValues& deltaNewton, VectorValues& RgProd) { // Get gradient - VectorValues grad = *allocateVectorValues(isam); + VectorValues grad; gradientAtZero(isam, grad); // Update variables size_t varsUpdated = 0; - internal::updateDoglegDeltas(isam.root(), replacedKeys, grad, deltaNewton, RgProd, varsUpdated); - optimizeWildfireNonRecursive(isam.root(), wildfireThreshold, replacedKeys, deltaNewton); + BOOST_FOREACH(const ISAM2::sharedClique& root, isam.roots()) + { + internal::updateDoglegDeltas(root, replacedKeys, grad, RgProd, varsUpdated); + optimizeWildfireNonRecursive(root, wildfireThreshold, replacedKeys, deltaNewton); + } - replacedKeys.assign(replacedKeys.size(), false); + replacedKeys.clear(); return varsUpdated; } diff --git a/gtsam/nonlinear/ISAM2-impl.h b/gtsam/nonlinear/ISAM2-impl.h index 07c27c896..2463f55e7 100644 --- a/gtsam/nonlinear/ISAM2-impl.h +++ b/gtsam/nonlinear/ISAM2-impl.h @@ -26,9 +26,6 @@ struct GTSAM_EXPORT ISAM2::Impl { struct GTSAM_EXPORT PartialSolveResult { ISAM2::sharedClique bayesTree; - Permutation reorderingSelector; - Permutation reorderingPermutation; - internal::Reduction reorderingInverse; }; struct GTSAM_EXPORT ReorderingMode { @@ -48,25 +45,16 @@ struct GTSAM_EXPORT ISAM2::Impl { * @param keyFormatter Formatter for printing nonlinear keys during debugging */ static void AddVariables(const Values& newTheta, Values& theta, VectorValues& delta, - VectorValues& deltaNewton, VectorValues& RgProd, std::vector& replacedKeys, - Ordering& ordering, const KeyFormatter& keyFormatter = DefaultKeyFormatter); + VectorValues& deltaNewton, VectorValues& RgProd, + const KeyFormatter& keyFormatter = DefaultKeyFormatter); /** * Remove variables from the ISAM2 system. */ - static void RemoveVariables(const FastSet& unusedKeys, const ISAM2Clique::shared_ptr& root, + static void RemoveVariables(const FastSet& unusedKeys, const std::vector& roots, Values& theta, VariableIndex& variableIndex, VectorValues& delta, VectorValues& deltaNewton, - VectorValues& RgProd, std::vector& replacedKeys, Ordering& ordering, Base::Nodes& nodes, - GaussianFactorGraph& linearFactors, FastSet& fixedVariables); - - /** - * Extract the set of variable indices from a NonlinearFactorGraph. For each Symbol - * in each NonlinearFactor, obtains the index by calling ordering[symbol]. - * @param ordering The current ordering from which to obtain the variable indices - * @param factors The factors from which to extract the variables - * @return The set of variables indices from the factors - */ - static FastSet IndicesFromFactors(const Ordering& ordering, const NonlinearFactorGraph& factors); + VectorValues& RgProd, FastSet& replacedKeys, Base::Nodes& nodes, + FastSet& fixedVariables); /** * Find the set of variables to be relinearized according to relinearizeThreshold. @@ -77,8 +65,8 @@ struct GTSAM_EXPORT ISAM2::Impl { * @return The set of variable indices in delta whose magnitude is greater than or * equal to relinearizeThreshold */ - static FastSet CheckRelinearizationFull(const VectorValues& delta, const Ordering& ordering, - const ISAM2Params::RelinearizationThreshold& relinearizeThreshold, const KeyFormatter& keyFormatter = DefaultKeyFormatter); + static FastSet CheckRelinearizationFull(const VectorValues& delta, + const ISAM2Params::RelinearizationThreshold& relinearizeThreshold); /** * Find the set of variables to be relinearized according to relinearizeThreshold. @@ -91,8 +79,8 @@ struct GTSAM_EXPORT ISAM2::Impl { * @return The set of variable indices in delta whose magnitude is greater than or * equal to relinearizeThreshold */ - static FastSet CheckRelinearizationPartial(const ISAM2Clique::shared_ptr& root, const VectorValues& delta, const Ordering& ordering, - const ISAM2Params::RelinearizationThreshold& relinearizeThreshold, const KeyFormatter& keyFormatter = DefaultKeyFormatter); + static FastSet CheckRelinearizationPartial(const std::vector& roots, + const VectorValues& delta, const ISAM2Params::RelinearizationThreshold& relinearizeThreshold); /** * Recursively search this clique and its children for marked keys appearing @@ -109,7 +97,7 @@ struct GTSAM_EXPORT ISAM2::Impl { * * Alternatively could we trace up towards the root for each variable here? */ - static void FindAll(ISAM2Clique::shared_ptr clique, FastSet& keys, const std::vector& markedMask); + static void FindAll(ISAM2Clique::shared_ptr clique, FastSet& keys, const FastSet& markedMask); /** * Apply expmap to the given values, but only for indices appearing in @@ -125,30 +113,14 @@ struct GTSAM_EXPORT ISAM2::Impl { * @param keyFormatter Formatter for printing nonlinear keys during debugging */ static void ExpmapMasked(Values& values, const VectorValues& delta, - const Ordering& ordering, const std::vector& mask, + const FastSet& mask, boost::optional invalidateIfDebug = boost::none, const KeyFormatter& keyFormatter = DefaultKeyFormatter); - /** - * Reorder and eliminate factors. These factors form a subset of the full - * problem, so along with the BayesTree we get a partial reordering of the - * problem that needs to be applied to the other data in ISAM2, which is the - * VariableIndex, the delta, the ordering, and the orphans (including cached - * factors). - * \param factors The factors to eliminate, representing part of the full - * problem. This is permuted during use and so is cleared when this function - * returns in order to invalidate it. - * \param keys The set of indices used in \c factors. - * \param useQR Whether to use QR (if true), or Cholesky (if false). - * \return The eliminated BayesTree and the permutation to be applied to the - * rest of the ISAM2 data. - */ - static PartialSolveResult PartialSolve(GaussianFactorGraph& factors, const FastSet& keys, - const ReorderingMode& reorderingMode, bool useQR); + static size_t UpdateDelta(const std::vector& roots, + FastSet& replacedKeys, VectorValues& delta, double wildfireThreshold); - static size_t UpdateDelta(const boost::shared_ptr& root, std::vector& replacedKeys, VectorValues& delta, double wildfireThreshold); - - static size_t UpdateDoglegDeltas(const ISAM2& isam, double wildfireThreshold, std::vector& replacedKeys, + static size_t UpdateDoglegDeltas(const ISAM2& isam, double wildfireThreshold, FastSet& replacedKeys, VectorValues& deltaNewton, VectorValues& RgProd); }; diff --git a/gtsam/nonlinear/ISAM2-inl.h b/gtsam/nonlinear/ISAM2-inl.h index 4a9cf105a..71904a56a 100644 --- a/gtsam/nonlinear/ISAM2-inl.h +++ b/gtsam/nonlinear/ISAM2-inl.h @@ -28,8 +28,7 @@ namespace gtsam { /* ************************************************************************* */ template VALUE ISAM2::calculateEstimate(Key key) const { - const Index index = getOrdering()[key]; - const Vector& delta = getDelta()[index]; + const Vector& delta = getDelta()[key]; return theta_.at(key).retract(delta); } @@ -37,24 +36,25 @@ VALUE ISAM2::calculateEstimate(Key key) const { namespace internal { template void optimizeWildfire(const boost::shared_ptr& clique, double threshold, - std::vector& changed, const std::vector& replaced, VectorValues& delta, int& count) { + FastSet& changed, const FastSet& replaced, VectorValues& delta, size_t& count) +{ // if none of the variables in this clique (frontal and separator!) changed // significantly, then by the running intersection property, none of the // cliques in the children need to be processed // Are any clique variables part of the tree that has been redone? - bool cliqueReplaced = replaced[(*clique)->frontals().front()]; + bool cliqueReplaced = replaced.exists((*clique)->frontals().front()); #ifdef GTSAM_EXTRA_CONSISTENCY_CHECKS - BOOST_FOREACH(Index frontal, (*clique)->frontals()) { - assert(cliqueReplaced == replaced[frontal]); + BOOST_FOREACH(Key frontal, clique->conditional()->frontals()) { + assert(cliqueReplaced == replaced.exists(frontal)); } #endif // If not redone, then has one of the separator variables changed significantly? bool recalculate = cliqueReplaced; if(!recalculate) { - BOOST_FOREACH(Index parent, (*clique)->parents()) { - if(changed[parent]) { + BOOST_FOREACH(Key parent, clique->conditional()->parents()) { + if(changed.exists(parent)) { recalculate = true; break; } @@ -66,22 +66,22 @@ void optimizeWildfire(const boost::shared_ptr& clique, double threshold, if(recalculate) { // Temporary copy of the original values, to check how much they change - std::vector originalValues((*clique)->nrFrontals()); + std::vector originalValues(clique->conditional()->nrFrontals()); GaussianConditional::const_iterator it; - for(it = (*clique)->beginFrontals(); it!=(*clique)->endFrontals(); it++) { - originalValues[it - (*clique)->beginFrontals()] = delta[*it]; + for(it = clique->conditional()->beginFrontals(); it!=clique->conditional()->endFrontals(); it++) { + originalValues[it - clique->conditional()->beginFrontals()] = delta[*it]; } // Back-substitute - (*clique)->solveInPlace(delta); - count += (*clique)->nrFrontals(); + delta.update(clique->conditional()->solve(delta)); + count += clique->conditional()->nrFrontals(); // Whether the values changed above a threshold, or always true if the // clique was replaced. bool valuesChanged = cliqueReplaced; - for(it = (*clique)->beginFrontals(); it!=(*clique)->endFrontals(); it++) { + for(it = clique->conditional()->beginFrontals(); it != clique->conditional()->endFrontals(); it++) { if(!valuesChanged) { - const Vector& oldValue(originalValues[it - (*clique)->beginFrontals()]); + const Vector& oldValue(originalValues[it - clique->conditional()->beginFrontals()]); const Vector& newValue(delta[*it]); if((oldValue - newValue).lpNorm() >= threshold) { valuesChanged = true; @@ -94,18 +94,18 @@ void optimizeWildfire(const boost::shared_ptr& clique, double threshold, // If the values were above the threshold or this clique was replaced if(valuesChanged) { // Set changed flag for each frontal variable and leave the new values - BOOST_FOREACH(Index frontal, (*clique)->frontals()) { - changed[frontal] = true; + BOOST_FOREACH(Key frontal, clique->conditional()->frontals()) { + changed.insert(frontal); } } else { // Replace with the old values - for(it = (*clique)->beginFrontals(); it!=(*clique)->endFrontals(); it++) { - delta[*it] = originalValues[it - (*clique)->beginFrontals()]; + for(it = clique->conditional()->beginFrontals(); it != clique->conditional()->endFrontals(); it++) { + delta[*it] = originalValues[it - clique->conditional()->beginFrontals()]; } } // Recurse to children - BOOST_FOREACH(const typename CLIQUE::shared_ptr& child, clique->children_) { + BOOST_FOREACH(const typename CLIQUE::shared_ptr& child, clique->children) { optimizeWildfire(child, threshold, changed, replaced, delta, count); } } @@ -113,24 +113,25 @@ void optimizeWildfire(const boost::shared_ptr& clique, double threshold, template bool optimizeWildfireNode(const boost::shared_ptr& clique, double threshold, - std::vector& changed, const std::vector& replaced, VectorValues& delta, int& count) { + FastSet& changed, const FastSet& replaced, VectorValues& delta, size_t& count) +{ // if none of the variables in this clique (frontal and separator!) changed // significantly, then by the running intersection property, none of the // cliques in the children need to be processed // Are any clique variables part of the tree that has been redone? - bool cliqueReplaced = replaced[(*clique)->frontals().front()]; + bool cliqueReplaced = replaced.exists(clique->conditional()->frontals().front()); #ifdef GTSAM_EXTRA_CONSISTENCY_CHECKS - BOOST_FOREACH(Index frontal, (*clique)->frontals()) { - assert(cliqueReplaced == replaced[frontal]); + BOOST_FOREACH(Key frontal, clique->conditional()->frontals()) { + assert(cliqueReplaced == replaced.exists(frontal)); } #endif // If not redone, then has one of the separator variables changed significantly? bool recalculate = cliqueReplaced; if(!recalculate) { - BOOST_FOREACH(Index parent, (*clique)->parents()) { - if(changed[parent]) { + BOOST_FOREACH(Key parent, clique->conditional()->parents()) { + if(changed.exists(parent)) { recalculate = true; break; } @@ -139,25 +140,25 @@ bool optimizeWildfireNode(const boost::shared_ptr& clique, double thresh // Solve clique if it was replaced, or if any parents were changed above the // threshold or themselves replaced. - if(recalculate) { - + if(recalculate) + { // Temporary copy of the original values, to check how much they change - std::vector originalValues((*clique)->nrFrontals()); + std::vector originalValues(clique->conditional()->nrFrontals()); GaussianConditional::const_iterator it; - for(it = (*clique)->beginFrontals(); it!=(*clique)->endFrontals(); it++) { - originalValues[it - (*clique)->beginFrontals()] = delta[*it]; + for(it = clique->conditional()->beginFrontals(); it != clique->conditional()->endFrontals(); it++) { + originalValues[it - clique->conditional()->beginFrontals()] = delta[*it]; } // Back-substitute - (*clique)->solveInPlace(delta); - count += (*clique)->nrFrontals(); + delta.update(clique->conditional()->solve(delta)); + count += clique->conditional()->nrFrontals(); // Whether the values changed above a threshold, or always true if the // clique was replaced. bool valuesChanged = cliqueReplaced; - for(it = (*clique)->beginFrontals(); it!=(*clique)->endFrontals(); it++) { + for(it = clique->conditional()->beginFrontals(); it != clique->conditional()->endFrontals(); it++) { if(!valuesChanged) { - const Vector& oldValue(originalValues[it - (*clique)->beginFrontals()]); + const Vector& oldValue(originalValues[it - clique->conditional()->beginFrontals()]); const Vector& newValue(delta[*it]); if((oldValue - newValue).lpNorm() >= threshold) { valuesChanged = true; @@ -170,16 +171,15 @@ bool optimizeWildfireNode(const boost::shared_ptr& clique, double thresh // If the values were above the threshold or this clique was replaced if(valuesChanged) { // Set changed flag for each frontal variable and leave the new values - BOOST_FOREACH(Index frontal, (*clique)->frontals()) { - changed[frontal] = true; + BOOST_FOREACH(Key frontal, clique->conditional()->frontals()) { + changed.insert(frontal); } } else { // Replace with the old values - for(it = (*clique)->beginFrontals(); it!=(*clique)->endFrontals(); it++) { - delta[*it] = originalValues[it - (*clique)->beginFrontals()]; + for(it = clique->conditional()->beginFrontals(); it != clique->conditional()->endFrontals(); it++) { + delta[*it] = originalValues[it - clique->conditional()->beginFrontals()]; } } - } return recalculate; @@ -189,8 +189,8 @@ bool optimizeWildfireNode(const boost::shared_ptr& clique, double thresh /* ************************************************************************* */ template -int optimizeWildfire(const boost::shared_ptr& root, double threshold, const std::vector& keys, VectorValues& delta) { - std::vector changed(keys.size(), false); +size_t optimizeWildfire(const boost::shared_ptr& root, double threshold, const FastSet& keys, VectorValues& delta) { + FastSet changed; int count = 0; // starting from the root, call optimize on each conditional if(root) @@ -200,9 +200,10 @@ int optimizeWildfire(const boost::shared_ptr& root, double threshold, co /* ************************************************************************* */ template -int optimizeWildfireNonRecursive(const boost::shared_ptr& root, double threshold, const std::vector& keys, VectorValues& delta) { - std::vector changed(keys.size(), false); - int count = 0; +size_t optimizeWildfireNonRecursive(const boost::shared_ptr& root, double threshold, const FastSet& keys, VectorValues& delta) +{ + FastSet changed; + size_t count = 0; if (root) { std::stack > travStack; @@ -213,7 +214,7 @@ int optimizeWildfireNonRecursive(const boost::shared_ptr& root, double t travStack.pop(); bool recalculate = internal::optimizeWildfireNode(currentNode, threshold, changed, keys, delta, count); if (recalculate) { - BOOST_FOREACH(const typename CLIQUE::shared_ptr& child, currentNode->children_) { + BOOST_FOREACH(const typename CLIQUE::shared_ptr& child, currentNode->children) { travStack.push(child); } } diff --git a/gtsam/nonlinear/ISAM2.cpp b/gtsam/nonlinear/ISAM2.cpp index 2275ec2be..380b6c87f 100644 --- a/gtsam/nonlinear/ISAM2.cpp +++ b/gtsam/nonlinear/ISAM2.cpp @@ -21,27 +21,61 @@ using namespace boost::assign; #include #include #include +namespace br { using namespace boost::range; using namespace boost::adaptors; } #include #include -#include -#include -#include +#include +#include +#include // We need the inst file because we'll make a special JT templated on ISAM2 +#include #include #include +#include #include #include #include #include +using namespace std; + namespace gtsam { -using namespace std; +// Instantiate base classes +template class BayesTreeCliqueBase; +template class BayesTree; static const bool disableReordering = false; static const double batchThreshold = 0.65; +/* ************************************************************************* */ +// Special BayesTree class that uses ISAM2 cliques - this is the result of reeliminating ISAM2 +// subtrees. +class ISAM2BayesTree : public ISAM2::Base +{ +public: + typedef ISAM2::Base Base; + typedef ISAM2BayesTree This; + typedef boost::shared_ptr shared_ptr; + + ISAM2BayesTree() {} +}; + +/* ************************************************************************* */ +// Special JunctionTree class that produces ISAM2 BayesTree cliques, used for reeliminating ISAM2 +// subtrees. +class ISAM2JunctionTree : public JunctionTree +{ +public: + typedef JunctionTree Base; + typedef ISAM2JunctionTree This; + typedef boost::shared_ptr shared_ptr; + + ISAM2JunctionTree(const GaussianEliminationTree& eliminationTree) : + Base(Base::FromEliminationTree(eliminationTree)) {} +}; + /* ************************************************************************* */ std::string ISAM2DoglegParams::adaptationModeTranslator(const DoglegOptimizerImpl::TrustRegionAdaptationMode& adaptationMode) const { std::string s; @@ -84,6 +118,38 @@ std::string ISAM2Params::factorizationTranslator(const ISAM2Params::Factorizatio return s; } +/* ************************************************************************* */ +void ISAM2Clique::setEliminationResult(const FactorGraphType::EliminationResult& eliminationResult) +{ + conditional_ = eliminationResult.first; + cachedFactor_ = eliminationResult.second; + // Compute gradient contribution + gradientContribution_.resize(conditional_->cols() - 1); + // Rewrite -(R * P')'*d as -(d' * R * P')' for computational speed reasons + gradientContribution_ << -conditional_->get_R().transpose() * conditional_->get_d(), + -conditional_->get_S().transpose() * conditional_->get_d(); +} + +/* ************************************************************************* */ +bool ISAM2Clique::equals(const This& other, double tol) const { + return Base::equals(other) && + ((!cachedFactor_ && !other.cachedFactor_) + || (cachedFactor_ && other.cachedFactor_ + && cachedFactor_->equals(*other.cachedFactor_, tol))); +} + +/* ************************************************************************* */ +void ISAM2Clique::print(const std::string& s, const KeyFormatter& formatter) const +{ + Base::print(s,formatter); + if(cachedFactor_) + cachedFactor_->print(s + "Cached: ", formatter); + else + std::cout << s << "Cached empty" << std::endl; + if(gradientContribution_.rows() != 0) + gtsam::print(gradientContribution_, "Gradient contribution: "); +} + /* ************************************************************************* */ ISAM2::ISAM2(const ISAM2Params& params): deltaDoglegUptodate_(true), deltaUptodate_(true), params_(params) { @@ -99,58 +165,23 @@ ISAM2::ISAM2(): } /* ************************************************************************* */ -ISAM2::ISAM2(const ISAM2& other) { - *this = other; +bool ISAM2::equals(const ISAM2& other, double tol) const { + return Base::equals(other, tol) + && theta_.equals(other.theta_, tol) && variableIndex_.equals(other.variableIndex_, tol) + && nonlinearFactors_.equals(other.nonlinearFactors_, tol) + && fixedVariables_ == other.fixedVariables_; } /* ************************************************************************* */ -ISAM2& ISAM2::operator=(const ISAM2& rhs) { - // Copy BayesTree - this->Base::operator=(rhs); - - // Copy our variables - // When we have Permuted<...>, it is only necessary to copy this permuted - // view and not the original, because copying the permuted view automatically - // copies the original. - theta_ = rhs.theta_; - variableIndex_ = rhs.variableIndex_; - delta_ = rhs.delta_; - deltaNewton_ = rhs.deltaNewton_; - RgProd_ = rhs.RgProd_; - deltaDoglegUptodate_ = rhs.deltaDoglegUptodate_; - deltaUptodate_ = rhs.deltaUptodate_; - deltaReplacedMask_ = rhs.deltaReplacedMask_; - nonlinearFactors_ = rhs.nonlinearFactors_; - - linearFactors_ = GaussianFactorGraph(); - linearFactors_.reserve(rhs.linearFactors_.size()); - BOOST_FOREACH(const GaussianFactor::shared_ptr& linearFactor, rhs.linearFactors_) { - linearFactors_.push_back(linearFactor ? linearFactor->clone() : GaussianFactor::shared_ptr()); } - - ordering_ = rhs.ordering_; - params_ = rhs.params_; - doglegDelta_ = rhs.doglegDelta_; - - lastAffectedVariableCount = rhs.lastAffectedVariableCount; - lastAffectedFactorCount = rhs.lastAffectedFactorCount; - lastAffectedCliqueCount = rhs.lastAffectedCliqueCount; - lastAffectedMarkedCount = rhs.lastAffectedMarkedCount; - lastBacksubVariableCount = rhs.lastBacksubVariableCount; - lastNnzTop = rhs.lastNnzTop; - - return *this; -} - -/* ************************************************************************* */ -FastList ISAM2::getAffectedFactors(const FastList& keys) const { +FastList ISAM2::getAffectedFactors(const FastList& keys) const { static const bool debug = false; if(debug) cout << "Getting affected factors for "; - if(debug) { BOOST_FOREACH(const Index key, keys) { cout << key << " "; } } + if(debug) { BOOST_FOREACH(const Key key, keys) { cout << key << " "; } } if(debug) cout << endl; - FactorGraph allAffected; + NonlinearFactorGraph allAffected; FastList indices; - BOOST_FOREACH(const Index key, keys) { + BOOST_FOREACH(const Key key, keys) { // const list l = nonlinearFactors_.factors(key); // indices.insert(indices.begin(), l.begin(), l.end()); const VariableIndex::Factors& factors(variableIndex_[key]); @@ -170,9 +201,10 @@ FastList ISAM2::getAffectedFactors(const FastList& keys) const { /* ************************************************************************* */ // retrieve all factors that ONLY contain the affected variables // (note that the remaining stuff is summarized in the cached factors) -FactorGraph::shared_ptr -ISAM2::relinearizeAffectedFactors(const FastList& affectedKeys, const FastSet& relinKeys) const { +GaussianFactorGraph::shared_ptr +ISAM2::relinearizeAffectedFactors(const FastList& affectedKeys, const FastSet& relinKeys) const +{ gttic(getAffectedFactors); FastList candidates = getAffectedFactors(affectedKeys); gttoc(getAffectedFactors); @@ -181,33 +213,32 @@ ISAM2::relinearizeAffectedFactors(const FastList& affectedKeys, const Fas gttic(affectedKeysSet); // for fast lookup below - FastSet affectedKeysSet; + FastSet affectedKeysSet; affectedKeysSet.insert(affectedKeys.begin(), affectedKeys.end()); gttoc(affectedKeysSet); gttic(check_candidates_and_linearize); - FactorGraph::shared_ptr linearized = boost::make_shared >(); + GaussianFactorGraph::shared_ptr linearized = boost::make_shared(); BOOST_FOREACH(size_t idx, candidates) { bool inside = true; bool useCachedLinear = params_.cacheLinearizedFactors; BOOST_FOREACH(Key key, nonlinearFactors_[idx]->keys()) { - Index var = ordering_[key]; - if(affectedKeysSet.find(var) == affectedKeysSet.end()) { + if(affectedKeysSet.find(key) == affectedKeysSet.end()) { inside = false; break; } - if(useCachedLinear && relinKeys.find(var) != relinKeys.end()) + if(useCachedLinear && relinKeys.find(key) != relinKeys.end()) useCachedLinear = false; } if(inside) { if(useCachedLinear) { #ifdef GTSAM_EXTRA_CONSISTENCY_CHECKS assert(linearFactors_[idx]); - assert(linearFactors_[idx]->keys() == nonlinearFactors_[idx]->symbolic(ordering_)->keys()); + assert(linearFactors_[idx]->keys() == nonlinearFactors_[idx]->keys()); #endif linearized->push_back(linearFactors_[idx]); } else { - GaussianFactor::shared_ptr linearFactor = nonlinearFactors_[idx]->linearize(theta_, ordering_); + GaussianFactor::shared_ptr linearFactor = nonlinearFactors_[idx]->linearize(theta_); linearized->push_back(linearFactor); if(params_.cacheLinearizedFactors) { #ifdef GTSAM_EXTRA_CONSISTENCY_CHECKS @@ -225,27 +256,25 @@ ISAM2::relinearizeAffectedFactors(const FastList& affectedKeys, const Fas /* ************************************************************************* */ // find intermediate (linearized) factors from cache that are passed into the affected area + GaussianFactorGraph ISAM2::getCachedBoundaryFactors(Cliques& orphans) { - - static const bool debug = false; - GaussianFactorGraph cachedBoundary; BOOST_FOREACH(sharedClique orphan, orphans) { - // find the last variable that was eliminated - Index key = (*orphan)->frontals().back(); // retrieve the cached factor and add to boundary cachedBoundary.push_back(orphan->cachedFactor()); - if(debug) { cout << "Cached factor for variable " << key; orphan->cachedFactor()->print(""); } } return cachedBoundary; } -boost::shared_ptr > ISAM2::recalculate(const FastSet& markedKeys, - const FastSet& relinKeys, const FastVector& observedKeys, const FastSet& unusedIndices, - const boost::optional >& constrainKeys, ISAM2Result& result) { - +/* ************************************************************************* */ +boost::shared_ptr > ISAM2::recalculate(const FastSet& markedKeys, const FastSet& relinKeys, + const vector& observedKeys, + const FastSet& unusedIndices, + const boost::optional >& constrainKeys, + ISAM2Result& result) +{ // TODO: new factors are linearized twice, the newFactors passed in are not used. const bool debug = ISDEBUG("ISAM2 recalculate"); @@ -272,10 +301,10 @@ boost::shared_ptr > ISAM2::recalculate(const FastSet& mark if(debug) { cout << "markedKeys: "; - BOOST_FOREACH(const Index key, markedKeys) { cout << key << " "; } + BOOST_FOREACH(const Key key, markedKeys) { cout << key << " "; } cout << endl; cout << "observedKeys: "; - BOOST_FOREACH(const Index key, observedKeys) { cout << key << " "; } + BOOST_FOREACH(const Key key, observedKeys) { cout << key << " "; } cout << endl; } @@ -284,13 +313,10 @@ boost::shared_ptr > ISAM2::recalculate(const FastSet& mark // (b) Store orphaned sub-trees \BayesTree_{O} of removed cliques. gttic(removetop); Cliques orphans; - BayesNet affectedBayesNet; - this->removeTop(markedKeys, affectedBayesNet, orphans); + GaussianBayesNet affectedBayesNet; + this->removeTop(vector(markedKeys.begin(), markedKeys.end()), affectedBayesNet, orphans); gttoc(removetop); - if(debug) affectedBayesNet.print("Removed top: "); - if(debug) orphans.print("Orphans: "); - // FactorGraph factors(affectedBayesNet); // bug was here: we cannot reuse the original factors, because then the cached factors get messed up // [all the necessary data is actually contained in the affectedBayesNet, including what was passed in from the boundaries, @@ -304,78 +330,59 @@ boost::shared_ptr > ISAM2::recalculate(const FastSet& mark // ordering provides all keys in conditionals, there cannot be others because path to root included gttic(affectedKeys); - FastList affectedKeys = affectedBayesNet.ordering(); + FastList affectedKeys; + BOOST_FOREACH(const ConditionalType::shared_ptr& conditional, affectedBayesNet) + affectedKeys.insert(affectedKeys.end(), conditional->beginFrontals(), conditional->endFrontals()); gttoc(affectedKeys); - boost::shared_ptr > affectedKeysSet(new FastSet()); // Will return this result - - if(affectedKeys.size() >= theta_.size() * batchThreshold) { + boost::shared_ptr > affectedKeysSet(new FastSet()); // Will return this result + if(affectedKeys.size() >= theta_.size() * batchThreshold) + { + // Do a batch step - reorder and relinearize all variables gttic(batch); gttic(add_keys); - BOOST_FOREACH(const Ordering::value_type& key_index, ordering_) { affectedKeysSet->insert(key_index.second); } + br::copy(variableIndex_ | br::map_keys, std::inserter(*affectedKeysSet, affectedKeysSet->end())); gttoc(add_keys); - gttic(reorder); - gttic(CCOLAMD); - // Do a batch step - reorder and relinearize all variables - vector cmember(theta_.size(), 0); - if(constrainKeys) { - if(!constrainKeys->empty()) { - typedef std::pair Index_Group; - if(theta_.size() > constrainKeys->size()) { // Only if some variables are unconstrained - BOOST_FOREACH(const Index_Group& index_group, *constrainKeys) { - cmember[index_group.first] = index_group.second; } - } else { - int minGroup = *boost::range::min_element(boost::adaptors::values(*constrainKeys)); - BOOST_FOREACH(const Index_Group& index_group, *constrainKeys) { - cmember[index_group.first] = index_group.second - minGroup; } - } + gttic(ordering); + Ordering order; + if(constrainKeys) + { + order = Ordering::COLAMDConstrained(variableIndex_, *constrainKeys); + } + else + { + if(theta_.size() > observedKeys.size()) + { + // Only if some variables are unconstrained + FastMap constraintGroups; + BOOST_FOREACH(Key var, observedKeys) + constraintGroups[var] = 1; + order = Ordering::COLAMDConstrained(variableIndex_, constraintGroups); } - } else { - if(theta_.size() > observedKeys.size()) { // Only if some variables are unconstrained - BOOST_FOREACH(Index var, observedKeys) { cmember[var] = 1; } + else + { + order = Ordering::COLAMD(variableIndex_); } } - Permutation::shared_ptr colamd(inference::PermutationCOLAMD_(variableIndex_, cmember)); - Permutation::shared_ptr colamdInverse(colamd->inverse()); - gttoc(CCOLAMD); - - // Reorder - gttic(permute_global_variable_index); - variableIndex_.permuteInPlace(*colamd); - gttoc(permute_global_variable_index); - gttic(permute_delta); - delta_.permuteInPlace(*colamd); - deltaNewton_.permuteInPlace(*colamd); - RgProd_.permuteInPlace(*colamd); - gttoc(permute_delta); - gttic(permute_ordering); - ordering_.permuteInPlace(*colamd); - gttoc(permute_ordering); - gttoc(reorder); gttic(linearize); - GaussianFactorGraph linearized = *nonlinearFactors_.linearize(theta_, ordering_); + GaussianFactorGraph linearized = *nonlinearFactors_.linearize(theta_); if(params_.cacheLinearizedFactors) linearFactors_ = linearized; gttoc(linearize); gttic(eliminate); - JunctionTree jt(linearized, variableIndex_); - sharedClique newRoot; - if(params_.factorization == ISAM2Params::CHOLESKY) - newRoot = jt.eliminate(EliminatePreferCholesky); - else if(params_.factorization == ISAM2Params::QR) - newRoot = jt.eliminate(EliminateQR); - else assert(false); - if(debug) newRoot->print("Eliminated: "); + ISAM2BayesTree::shared_ptr bayesTree = ISAM2JunctionTree(GaussianEliminationTree(linearized, variableIndex_, order)) + .eliminate(params_.getEliminationFunction()).first; gttoc(eliminate); gttic(insert); this->clear(); - this->insert(newRoot); + this->roots_.insert(this->roots_.end(), bayesTree->roots().begin(), bayesTree->roots().end()); + this->nodes_.insert(bayesTree->nodes().begin(), bayesTree->nodes().end()); gttoc(insert); result.variablesReeliminated = affectedKeysSet->size(); @@ -399,7 +406,7 @@ boost::shared_ptr > ISAM2::recalculate(const FastSet& mark gttic(incremental); // 2. Add the new factors \Factors' into the resulting factor graph - FastList affectedAndNewKeys; + FastList affectedAndNewKeys; affectedAndNewKeys.insert(affectedAndNewKeys.end(), affectedKeys.begin(), affectedKeys.end()); affectedAndNewKeys.insert(affectedAndNewKeys.end(), observedKeys.begin(), observedKeys.end()); gttic(relinearizeAffected); @@ -411,8 +418,8 @@ boost::shared_ptr > ISAM2::recalculate(const FastSet& mark // Reeliminated keys for detailed results if(params_.enableDetailedResults) { - BOOST_FOREACH(Index index, affectedAndNewKeys) { - result.detail->variableStatus[ordering_.key(index)].isReeliminated = true; + BOOST_FOREACH(Index key, affectedAndNewKeys) { + result.detail->variableStatus[key].isReeliminated = true; } } @@ -436,6 +443,13 @@ boost::shared_ptr > ISAM2::recalculate(const FastSet& mark factors.push_back(cachedBoundary); gttoc(cached); + gttic(orphans); + // Add the orphaned subtrees + BOOST_FOREACH(const sharedClique& orphan, orphans) + factors += boost::make_shared >(orphan); + gttoc(orphans); + + // END OF COPIED CODE // 3. Re-order and eliminate the factor graph into a Bayes net (Algorithm [alg:eliminate]), and re-assemble into a new Bayes tree (Algorithm [alg:BayesTree]) @@ -449,106 +463,60 @@ boost::shared_ptr > ISAM2::recalculate(const FastSet& mark affectedKeysSet->insert(affectedKeys.begin(), affectedKeys.end()); gttoc(list_to_set); - gttic(PartialSolve); - Impl::ReorderingMode reorderingMode; - reorderingMode.nFullSystemVars = ordering_.size(); - reorderingMode.algorithm = Impl::ReorderingMode::COLAMD; - reorderingMode.constrain = Impl::ReorderingMode::CONSTRAIN_LAST; - if(constrainKeys) { - reorderingMode.constrainedKeys = *constrainKeys; - } else { - reorderingMode.constrainedKeys = FastMap(); - BOOST_FOREACH(Index var, observedKeys) { reorderingMode.constrainedKeys->insert(make_pair(var, 1)); } - } - FastSet affectedUsedKeys = *affectedKeysSet; // Remove unused keys from the set we pass to PartialSolve - BOOST_FOREACH(Index unused, unusedIndices) { - affectedUsedKeys.erase(unused); - } - // Remove unaffected keys from the constraints - FastMap::iterator iter = reorderingMode.constrainedKeys->begin(); - while(iter != reorderingMode.constrainedKeys->end()) { - if(affectedUsedKeys.find(iter->first) == affectedUsedKeys.end()) { - reorderingMode.constrainedKeys->erase(iter++); - } else { - ++iter; - } - } - Impl::PartialSolveResult partialSolveResult = - Impl::PartialSolve(factors, affectedUsedKeys, reorderingMode, (params_.factorization == ISAM2Params::QR)); - gttoc(PartialSolve); + VariableIndex affectedFactorsVarIndex(factors); - // We now need to permute everything according this partial reordering: the - // delta vector, the global ordering, and the factors we're about to - // re-eliminate. The reordered variables are also mentioned in the - // orphans and the leftover cached factors. - gttic(permute_global_variable_index); - variableIndex_.permuteInPlace(partialSolveResult.reorderingSelector, partialSolveResult.reorderingPermutation); - gttoc(permute_global_variable_index); - gttic(permute_delta); - delta_.permuteInPlace(partialSolveResult.reorderingSelector, partialSolveResult.reorderingPermutation); - deltaNewton_.permuteInPlace(partialSolveResult.reorderingSelector, partialSolveResult.reorderingPermutation); - RgProd_.permuteInPlace(partialSolveResult.reorderingSelector, partialSolveResult.reorderingPermutation); - gttoc(permute_delta); - gttic(permute_ordering); - ordering_.permuteInPlace(partialSolveResult.reorderingSelector, partialSolveResult.reorderingPermutation); - gttoc(permute_ordering); - if(params_.cacheLinearizedFactors) { - gttic(permute_cached_linear); - //linearFactors_.permuteWithInverse(partialSolveResult.fullReorderingInverse); - FastList permuteLinearIndices = getAffectedFactors(affectedAndNewKeys); - BOOST_FOREACH(size_t idx, permuteLinearIndices) { - linearFactors_[idx]->reduceWithInverse(partialSolveResult.reorderingInverse); - } - gttoc(permute_cached_linear); + gttic(ordering_constraints); + // Create ordering constraints + FastMap constraintGroups; + if(constrainKeys) { + constraintGroups = *constrainKeys; + } else { + constraintGroups = FastMap(); + const int group = observedKeys.size() < affectedFactorsVarIndex.size() + ? 1 : 0; + BOOST_FOREACH(Key var, observedKeys) + constraintGroups.insert(make_pair(var, group)); } + // Remove unaffected keys from the constraints + for(FastMap::iterator iter = constraintGroups.begin(); iter != constraintGroups.end(); ++iter) { + if(unusedIndices.exists(iter->first) || !affectedKeysSet->exists(iter->first)) + constraintGroups.erase(iter); + } + gttoc(ordering_constraints); + + // Generate ordering + gttic(Ordering); + Ordering ordering = Ordering::COLAMDConstrained(affectedFactorsVarIndex, constraintGroups); + + ISAM2BayesTree::shared_ptr bayesTree = ISAM2JunctionTree(GaussianEliminationTree( + factors, affectedFactorsVarIndex, ordering)).eliminate(params_.getEliminationFunction()).first; + gttoc(PartialSolve); + gttoc(reorder_and_eliminate); gttic(reassemble); - if(partialSolveResult.bayesTree) { - assert(!this->root_); - this->insert(partialSolveResult.bayesTree); - } + this->roots_.insert(this->roots_.end(), bayesTree->roots().begin(), bayesTree->roots().end()); + this->nodes_.insert(bayesTree->nodes().begin(), bayesTree->nodes().end()); gttoc(reassemble); - // 4. Insert the orphans back into the new Bayes tree. - gttic(orphans); - gttic(permute); - BOOST_FOREACH(sharedClique orphan, orphans) { - (void)orphan->reduceSeparatorWithInverse(partialSolveResult.reorderingInverse); - } - gttoc(permute); - gttic(insert); - // add orphans to the bottom of the new tree - BOOST_FOREACH(sharedClique orphan, orphans) { - // Because the affectedKeysSelector is sorted, the orphan separator keys - // will be sorted correctly according to the new elimination order after - // applying the permutation, so findParentClique, which looks for the - // lowest-ordered parent, will still work. - Index parentRepresentative = Base::findParentClique((*orphan)->parents()); - sharedClique parent = (*this)[parentRepresentative]; - parent->children_ += orphan; - orphan->parent_ = parent; // set new parent! - } - gttoc(insert); - gttoc(orphans); + // 4. The orphans have already been inserted during elimination gttoc(incremental); } // Root clique variables for detailed results - if(params_.enableDetailedResults) { - BOOST_FOREACH(Index index, this->root()->conditional()->frontals()) { - result.detail->variableStatus[ordering_.key(index)].inRootClique = true; - } - } + if(params_.enableDetailedResults) + BOOST_FOREACH(const sharedNode& root, this->roots()) + BOOST_FOREACH(Key var, *root->conditional()) + result.detail->variableStatus[var].inRootClique = true; return affectedKeysSet; } /* ************************************************************************* */ ISAM2Result ISAM2::update( - const NonlinearFactorGraph& newFactors, const Values& newTheta, const FastVector& removeFactorIndices, + const NonlinearFactorGraph& newFactors, const Values& newTheta, const vector& removeFactorIndices, const boost::optional >& constrainedKeys, const boost::optional >& noRelinKeys, const boost::optional >& extraReelimKeys, bool force_relinearize) { @@ -601,7 +569,7 @@ ISAM2Result ISAM2::update( } // Remove removed factors from the variable index so we do not attempt to relinearize them - variableIndex_.remove(removeFactorIndices, *removeFactors.symbolic(ordering_)); + variableIndex_.remove(removeFactorIndices.begin(), removeFactorIndices.end(), removeFactors); // Compute unused keys and indices FastSet unusedKeys; @@ -611,7 +579,7 @@ ISAM2Result ISAM2::update( // i.e., keys that are empty now and do not appear in the new factors. FastSet removedAndEmpty; BOOST_FOREACH(Key key, removeFactors.keys()) { - if(variableIndex_[ordering_[key]].empty()) + if(variableIndex_[key].empty()) removedAndEmpty.insert(removedAndEmpty.end(), key); } FastSet newFactorSymbKeys = newFactors.keys(); @@ -620,14 +588,14 @@ ISAM2Result ISAM2::update( // Get indices for unused keys BOOST_FOREACH(Key key, unusedKeys) { - unusedIndices.insert(unusedIndices.end(), ordering_[key]); + unusedIndices.insert(unusedIndices.end(), key); } } gttoc(push_back_factors); gttic(add_new_variables); // 2. Initialize any new variables \Theta_{new} and add \Theta:=\Theta\cup\Theta_{new}. - Impl::AddVariables(newTheta, theta_, delta_, deltaNewton_, RgProd_, deltaReplacedMask_, ordering_); + Impl::AddVariables(newTheta, theta_, delta_, deltaNewton_, RgProd_); // New keys for detailed results if(params_.enableDetailedResults) { BOOST_FOREACH(Key key, newTheta.keys()) { result.detail->variableStatus[key].isNew = true; } } @@ -640,29 +608,29 @@ ISAM2Result ISAM2::update( gttic(gather_involved_keys); // 3. Mark linear update - FastSet markedKeys = Impl::IndicesFromFactors(ordering_, newFactors); // Get keys from new factors + FastSet markedKeys = newFactors.keys(); // Get keys from new factors // Also mark keys involved in removed factors { - FastSet markedRemoveKeys = Impl::IndicesFromFactors(ordering_, removeFactors); // Get keys involved in removed factors + FastSet markedRemoveKeys = removeFactors.keys(); // Get keys involved in removed factors markedKeys.insert(markedRemoveKeys.begin(), markedRemoveKeys.end()); // Add to the overall set of marked keys } // Also mark any provided extra re-eliminate keys if(extraReelimKeys) { BOOST_FOREACH(Key key, *extraReelimKeys) { - markedKeys.insert(ordering_.at(key)); + markedKeys.insert(key); } } // Observed keys for detailed results if(params_.enableDetailedResults) { - BOOST_FOREACH(Index index, markedKeys) { - result.detail->variableStatus[ordering_.key(index)].isObserved = true; + BOOST_FOREACH(Key key, markedKeys) { + result.detail->variableStatus[key].isObserved = true; } } // NOTE: we use assign instead of the iterator constructor here because this // is a vector of size_t, so the constructor unintentionally resolves to // vector(size_t count, Index value) instead of the iterator constructor. - FastVector observedKeys; observedKeys.reserve(markedKeys.size()); + vector observedKeys; observedKeys.reserve(markedKeys.size()); BOOST_FOREACH(Index index, markedKeys) { if(unusedIndices.find(index) == unusedIndices.end()) // Only add if not unused observedKeys.push_back(index); // Make a copy of these, as we'll soon add to them @@ -675,47 +643,50 @@ ISAM2Result ISAM2::update( gttic(gather_relinearize_keys); // 4. Mark keys in \Delta above threshold \beta: J=\{\Delta_{j}\in\Delta|\Delta_{j}\geq\beta\}. if(params_.enablePartialRelinearizationCheck) - relinKeys = Impl::CheckRelinearizationPartial(root_, delta_, ordering_, params_.relinearizeThreshold); + relinKeys = Impl::CheckRelinearizationPartial(roots_, delta_, params_.relinearizeThreshold); else - relinKeys = Impl::CheckRelinearizationFull(delta_, ordering_, params_.relinearizeThreshold); - if(disableReordering) relinKeys = Impl::CheckRelinearizationFull(delta_, ordering_, 0.0); // This is used for debugging + relinKeys = Impl::CheckRelinearizationFull(delta_, params_.relinearizeThreshold); + if(disableReordering) relinKeys = Impl::CheckRelinearizationFull(delta_, 0.0); // This is used for debugging // Remove from relinKeys any keys whose linearization points are fixed BOOST_FOREACH(Key key, fixedVariables_) { - relinKeys.erase(ordering_[key]); + relinKeys.erase(key); } if(noRelinKeys) { BOOST_FOREACH(Key key, *noRelinKeys) { - relinKeys.erase(ordering_[key]); + relinKeys.erase(key); } } // Above relin threshold keys for detailed results if(params_.enableDetailedResults) { - BOOST_FOREACH(Index index, relinKeys) { - result.detail->variableStatus[ordering_.key(index)].isAboveRelinThreshold = true; - result.detail->variableStatus[ordering_.key(index)].isRelinearized = true; } } + BOOST_FOREACH(Key key, relinKeys) { + result.detail->variableStatus[key].isAboveRelinThreshold = true; + result.detail->variableStatus[key].isRelinearized = true; } } // Add the variables being relinearized to the marked keys - vector markedRelinMask(ordering_.size(), false); - BOOST_FOREACH(const Index j, relinKeys) { markedRelinMask[j] = true; } + FastSet markedRelinMask; + BOOST_FOREACH(const Key key, relinKeys) + markedRelinMask.insert(key); markedKeys.insert(relinKeys.begin(), relinKeys.end()); gttoc(gather_relinearize_keys); gttic(fluid_find_all); // 5. Mark all cliques that involve marked variables \Theta_{J} and all their ancestors. - if (!relinKeys.empty() && this->root()) { - // add other cliques that have the marked ones in the separator - Impl::FindAll(this->root(), markedKeys, markedRelinMask); + if (!relinKeys.empty()) { + BOOST_FOREACH(const sharedClique& root, roots_) + // add other cliques that have the marked ones in the separator + Impl::FindAll(root, markedKeys, markedRelinMask); // Relin involved keys for detailed results if(params_.enableDetailedResults) { FastSet involvedRelinKeys; - Impl::FindAll(this->root(), involvedRelinKeys, markedRelinMask); - BOOST_FOREACH(Index index, involvedRelinKeys) { - if(!result.detail->variableStatus[ordering_.key(index)].isAboveRelinThreshold) { - result.detail->variableStatus[ordering_.key(index)].isRelinearizeInvolved = true; - result.detail->variableStatus[ordering_.key(index)].isRelinearized = true; } } + BOOST_FOREACH(const sharedClique& root, roots_) + Impl::FindAll(root, involvedRelinKeys, markedRelinMask); + BOOST_FOREACH(Key key, involvedRelinKeys) { + if(!result.detail->variableStatus[key].isAboveRelinThreshold) { + result.detail->variableStatus[key].isRelinearizeInvolved = true; + result.detail->variableStatus[key].isRelinearized = true; } } } } gttoc(fluid_find_all); @@ -723,7 +694,7 @@ ISAM2Result ISAM2::update( gttic(expmap); // 6. Update linearization point for marked variables: \Theta_{J}:=\Theta_{J}+\Delta_{J}. if (!relinKeys.empty()) - Impl::ExpmapMasked(theta_, delta_, ordering_, markedRelinMask, delta_); + Impl::ExpmapMasked(theta_, delta_, markedRelinMask, delta_); gttoc(expmap); result.variablesRelinearized = markedKeys.size(); @@ -735,39 +706,29 @@ ISAM2Result ISAM2::update( // 7. Linearize new factors if(params_.cacheLinearizedFactors) { gttic(linearize); - FactorGraph::shared_ptr linearFactors = newFactors.linearize(theta_, ordering_); + GaussianFactorGraph::shared_ptr linearFactors = newFactors.linearize(theta_); linearFactors_.push_back(*linearFactors); assert(nonlinearFactors_.size() == linearFactors_.size()); gttoc(linearize); gttic(augment_VI); // Augment the variable index with the new factors - variableIndex_.augment(*linearFactors); + variableIndex_.augment(*linearFactors); // TODO: move this to a better place now gttoc(augment_VI); } else { - variableIndex_.augment(*newFactors.symbolic(ordering_)); + variableIndex_.augment(newFactors); } gttoc(linearize_new); gttic(recalculate); // 8. Redo top of Bayes tree - // Convert constrained symbols to indices - boost::optional > constrainedIndices; - if(constrainedKeys) { - constrainedIndices = FastMap(); - typedef pair Key_Group; - BOOST_FOREACH(Key_Group key_group, *constrainedKeys) { - constrainedIndices->insert(make_pair(ordering_[key_group.first], key_group.second)); - } - } - boost::shared_ptr > replacedKeys; + boost::shared_ptr > replacedKeys; if(!markedKeys.empty() || !observedKeys.empty()) - replacedKeys = recalculate(markedKeys, relinKeys, observedKeys, unusedIndices, constrainedIndices, result); + replacedKeys = recalculate(markedKeys, relinKeys, observedKeys, unusedIndices, constrainedKeys, result); // Update replaced keys mask (accumulates until back-substitution takes place) - if(replacedKeys) { - BOOST_FOREACH(const Index var, *replacedKeys) { - deltaReplacedMask_[var] = true; } } + if(replacedKeys) + deltaReplacedMask_.insert(replacedKeys->begin(), replacedKeys->end()); gttoc(recalculate); // After the top of the tree has been redone and may have index gaps from @@ -775,8 +736,8 @@ ISAM2Result ISAM2::update( // in all data structures. if(!unusedKeys.empty()) { gttic(remove_variables); - Impl::RemoveVariables(unusedKeys, root_, theta_, variableIndex_, delta_, deltaNewton_, RgProd_, - deltaReplacedMask_, ordering_, Base::nodes_, linearFactors_, fixedVariables_); + Impl::RemoveVariables(unusedKeys, roots_, theta_, variableIndex_, delta_, deltaNewton_, RgProd_, + deltaReplacedMask_, Base::nodes_, fixedVariables_); gttoc(remove_variables); } result.cliques = this->nodes().size(); @@ -792,28 +753,24 @@ ISAM2Result ISAM2::update( } /* ************************************************************************* */ -void ISAM2::marginalizeLeaves(const FastList& leafKeys, boost::optional&> marginalFactorsIndices, - boost::optional&> deletedFactorsIndices) +void ISAM2::marginalizeLeaves(const FastList& leafKeysList) { - // Convert set of keys into a set of indices - FastSet indices; - BOOST_FOREACH(Key key, leafKeys) { - indices.insert(ordering_[key]); - } + // Convert to ordered set + FastSet leafKeys(leafKeysList.begin(), leafKeysList.end()); // Keep track of marginal factors - map from clique to the marginal factors // that should be incorporated into it, passed up from it's children. multimap marginalFactors; // Remove each variable and its subtrees - BOOST_REVERSE_FOREACH(Index j, indices) { - if(nodes_[j]) { // If the index was not already removed by removing another subtree + BOOST_REVERSE_FOREACH(Key j, leafKeys) { + if(nodes_.exists(j)) { // If the index was not already removed by removing another subtree sharedClique clique = nodes_[j]; // See if we should remove the whole clique bool marginalizeEntireClique = true; - BOOST_FOREACH(Index frontal, clique->conditional()->frontals()) { - if(indices.find(frontal) == indices.end()) { + BOOST_FOREACH(Key frontal, clique->conditional()->frontals()) { + if(!leafKeys.exists(frontal)) { marginalizeEntireClique = false; break; } } @@ -831,8 +788,8 @@ void ISAM2::marginalizeLeaves(const FastList& leafKeys, boost::optionalremoveSubtree(clique); // Remove the subtree and throw away the cliques BOOST_FOREACH(const sharedClique& removedClique, removedCliques) { marginalFactors.erase(removedClique); - BOOST_FOREACH(Index indexInClique, removedClique->conditional()->frontals()) { - if(indices.find(indexInClique) == indices.end()) + BOOST_FOREACH(Key frontal, removedClique->conditional()->frontals()) { + if(!leafKeys.exists(frontal)) throw runtime_error("Requesting to marginalize variables that are not leaves, the ISAM2 object is now in an inconsistent state so should no longer be used."); } } } @@ -847,10 +804,10 @@ void ISAM2::marginalizeLeaves(const FastList& leafKeys, boost::optional factorsInSubtreeRoot; Cliques subtreesToRemove; - BOOST_FOREACH(const sharedClique& child, clique->children()) { + BOOST_FOREACH(const sharedClique& child, clique->children) { // Remove subtree if child depends on any marginalized keys - BOOST_FOREACH(Index parentIndex, child->conditional()->parents()) { - if(indices.find(parentIndex) != indices.end()) { + BOOST_FOREACH(Key parent, child->conditional()->parents()) { + if(leafKeys.exists(parent)) { subtreesToRemove.push_back(child); graph.push_back(child->cachedFactor()); // Add child marginal break; @@ -863,28 +820,28 @@ void ISAM2::marginalizeLeaves(const FastList& leafKeys, boost::optionalconditional()->frontals()) { - if(indices.find(indexInClique) == indices.end()) + BOOST_FOREACH(Key frontal, removedClique->conditional()->frontals()) { + if(!leafKeys.exists(frontal)) throw runtime_error("Requesting to marginalize variables that are not leaves, the ISAM2 object is now in an inconsistent state so should no longer be used."); } } } // Gather remaining children after we removed marginalized subtrees - vector orphans(clique->children().begin(), clique->children().end()); + vector orphans(clique->children.begin(), clique->children.end()); // Add the factors that are pulled into the current clique by the marginalized variables. // These are the factors that involve *marginalized* frontal variables in this clique // but do not involve frontal variables of any of its children. FastSet factorsFromMarginalizedInClique; - BOOST_FOREACH(Index indexInClique, clique->conditional()->frontals()) { - if(indices.find(indexInClique) != indices.end()) - factorsFromMarginalizedInClique.insert(variableIndex_[indexInClique].begin(), variableIndex_[indexInClique].end()); } + BOOST_FOREACH(Key frontal, clique->conditional()->frontals()) { + if(leafKeys.exists(frontal)) + factorsFromMarginalizedInClique.insert(variableIndex_[frontal].begin(), variableIndex_[frontal].end()); } BOOST_FOREACH(const sharedClique& removedChild, childrenRemoved) { BOOST_FOREACH(Index indexInClique, removedChild->conditional()->frontals()) { BOOST_FOREACH(size_t factorInvolving, variableIndex_[indexInClique]) { factorsFromMarginalizedInClique.erase(factorInvolving); } } } BOOST_FOREACH(size_t i, factorsFromMarginalizedInClique) { - graph.push_back(nonlinearFactors_[i]->linearize(theta_, ordering_)); } + graph.push_back(nonlinearFactors_[i]->linearize(theta_)); } // Remove the current clique sharedClique parent = clique->parent(); @@ -893,34 +850,31 @@ void ISAM2::marginalizeLeaves(const FastList& leafKeys, boost::optional cliqueFrontals(clique->conditional()->beginFrontals(), clique->conditional()->endFrontals()); FastSet cliqueFrontalsToEliminate; - std::set_intersection(cliqueFrontals.begin(), cliqueFrontals.end(), indices.begin(), indices.end(), + std::set_intersection(cliqueFrontals.begin(), cliqueFrontals.end(), leafKeys.begin(), leafKeys.end(), std::inserter(cliqueFrontalsToEliminate, cliqueFrontalsToEliminate.end())); vector cliqueFrontalsToEliminateV(cliqueFrontalsToEliminate.begin(), cliqueFrontalsToEliminate.end()); - pair eliminationResult1 = - graph.eliminate(cliqueFrontalsToEliminateV, - params_.factorization==ISAM2Params::QR ? EliminateQR : EliminatePreferCholesky); + pair eliminationResult1 = + params_.getEliminationFunction()(graph, Ordering(cliqueFrontalsToEliminateV)); // Add the resulting marginal - BOOST_FOREACH(const GaussianFactor::shared_ptr& marginal, eliminationResult1.second) { - if(marginal) - marginalFactors.insert(make_pair(clique, marginal)); } + if(eliminationResult1.second) + marginalFactors.insert(make_pair(clique, eliminationResult1.second)); // Recover the conditional on the remaining subset of frontal variables - // of this clique being martially marginalized. - size_t nToEliminate = std::find(clique->conditional()->beginFrontals(), clique->conditional()->endFrontals(), j) - clique->conditional()->begin() + 1; + // of this clique being partially marginalized. + GaussianConditional::iterator jPosition = std::find( + clique->conditional()->beginFrontals(), clique->conditional()->endFrontals(), j); GaussianFactorGraph graph2; - graph2.push_back(clique->conditional()->toFactor()); + graph2.push_back(clique->conditional()); GaussianFactorGraph::EliminationResult eliminationResult2 = - params_.factorization == ISAM2Params::QR ? - EliminateQR(graph2, nToEliminate) : - EliminatePreferCholesky(graph2, nToEliminate); + params_.getEliminationFunction()(graph2, Ordering( + clique->conditional()->beginFrontals(), jPosition + 1)); GaussianFactorGraph graph3; graph3.push_back(eliminationResult2.second); GaussianFactorGraph::EliminationResult eliminationResult3 = - params_.factorization == ISAM2Params::QR ? - EliminateQR(graph3, clique->conditional()->nrFrontals() - nToEliminate) : - EliminatePreferCholesky(graph3, clique->conditional()->nrFrontals() - nToEliminate); - sharedClique newClique = boost::make_shared(make_pair(eliminationResult3.first, clique->cachedFactor())); + params_.getEliminationFunction()(graph3, Ordering(jPosition + 1, clique->conditional()->endFrontals())); + sharedClique newClique = boost::make_shared(); + newClique->setEliminationResult(make_pair(eliminationResult3.first, clique->cachedFactor())); // Add the marginalized clique to the BayesTree this->addClique(newClique, parent); @@ -938,40 +892,35 @@ void ISAM2::marginalizeLeaves(const FastList& leafKeys, boost::optional Clique_Factor; BOOST_FOREACH(const Clique_Factor& clique_factor, marginalFactors) { - if(clique_factor.second) { + if(clique_factor.second) factorsToAdd.push_back(clique_factor.second); - if(marginalFactorsIndices) marginalFactorsIndices->push_back(nonlinearFactors_.size()); - nonlinearFactors_.push_back(boost::make_shared( - clique_factor.second, ordering_)); - if(params_.cacheLinearizedFactors) { - linearFactors_.push_back(clique_factor.second); - } - BOOST_FOREACH(Index factorIndex, *clique_factor.second) { - fixedVariables_.insert(ordering_.key(factorIndex)); - } - } + nonlinearFactors_.push_back(boost::make_shared( + clique_factor.second)); + if(params_.cacheLinearizedFactors) + linearFactors_.push_back(clique_factor.second); + BOOST_FOREACH(Key factorKey, *clique_factor.second) { + fixedVariables_.insert(factorKey); } } variableIndex_.augment(factorsToAdd); // Augment the variable index // Remove the factors to remove that have been summarized in the newly-added marginal factors FastSet factorIndicesToRemove; - BOOST_FOREACH(Index j, indices) { + BOOST_FOREACH(Key j, leafKeys) { factorIndicesToRemove.insert(variableIndex_[j].begin(), variableIndex_[j].end()); } - vector removedFactorsIndices; - SymbolicFactorGraph removedFactors; + vector removedFactorIndices; + NonlinearFactorGraph removedFactors; BOOST_FOREACH(size_t i, factorIndicesToRemove) { - if(deletedFactorsIndices) deletedFactorsIndices->push_back(i); - removedFactorsIndices.push_back(i); - removedFactors.push_back(nonlinearFactors_[i]->symbolic(ordering_)); + removedFactorIndices.push_back(i); + removedFactors.push_back(nonlinearFactors_[i]); nonlinearFactors_.remove(i); if(params_.cacheLinearizedFactors) linearFactors_.remove(i); } - variableIndex_.remove(removedFactorsIndices, removedFactors); + variableIndex_.remove(removedFactorIndices.begin(), removedFactorIndices.end(), removedFactors); // Remove the marginalized variables - Impl::RemoveVariables(FastSet(leafKeys.begin(), leafKeys.end()), root_, theta_, variableIndex_, delta_, deltaNewton_, RgProd_, - deltaReplacedMask_, ordering_, nodes_, linearFactors_, fixedVariables_); + Impl::RemoveVariables(FastSet(leafKeys.begin(), leafKeys.end()), roots_, theta_, variableIndex_, delta_, deltaNewton_, RgProd_, + deltaReplacedMask_, nodes_, fixedVariables_); } /* ************************************************************************* */ @@ -983,7 +932,7 @@ void ISAM2::updateDelta(bool forceFullSolve) const { boost::get(params_.optimizationParams); const double effectiveWildfireThreshold = forceFullSolve ? 0.0 : gaussNewtonParams.wildfireThreshold; gttic(Wildfire_update); - lastBacksubVariableCount = Impl::UpdateDelta(this->root(), deltaReplacedMask_, delta_, effectiveWildfireThreshold); + lastBacksubVariableCount = Impl::UpdateDelta(roots_, deltaReplacedMask_, delta_, effectiveWildfireThreshold); gttoc(Wildfire_update); } else if(params_.optimizationParams.type() == typeid(ISAM2DoglegParams)) { @@ -993,8 +942,11 @@ void ISAM2::updateDelta(bool forceFullSolve) const { // Do one Dogleg iteration gttic(Dogleg_Iterate); + VectorValues dx_u = gtsam::optimizeGradientSearch(*this); + VectorValues dx_n = gtsam::optimize(*this); DoglegOptimizerImpl::IterationResult doglegResult(DoglegOptimizerImpl::Iterate( - *doglegDelta_, doglegParams.adaptationMode, *this, nonlinearFactors_, theta_, ordering_, nonlinearFactors_.error(theta_), doglegParams.verbose)); + *doglegDelta_, doglegParams.adaptationMode, dx_u, dx_n, *this, nonlinearFactors_, + theta_, nonlinearFactors_.error(theta_), doglegParams.verbose)); gttoc(Dogleg_Iterate); gttic(Copy_dx_d); @@ -1009,8 +961,6 @@ void ISAM2::updateDelta(bool forceFullSolve) const { /* ************************************************************************* */ Values ISAM2::calculateEstimate() const { - // We use ExpmapMasked here instead of regular expmap because the former - // handles Permuted gttic(Copy_Values); Values ret(theta_); gttoc(Copy_Values); @@ -1018,31 +968,25 @@ Values ISAM2::calculateEstimate() const { const VectorValues& delta(getDelta()); gttoc(getDelta); gttic(Expmap); - vector mask(ordering_.size(), true); - Impl::ExpmapMasked(ret, delta, ordering_, mask); + ret = ret.retract(delta); gttoc(Expmap); return ret; } /* ************************************************************************* */ const Value& ISAM2::calculateEstimate(Key key) const { - const Index index = getOrdering()[key]; - const Vector& delta = getDelta()[index]; + const Vector& delta = getDelta()[key]; return *theta_.at(key).retract_(delta); } /* ************************************************************************* */ Values ISAM2::calculateBestEstimate() const { - VectorValues delta(theta_.dims(ordering_)); - internal::optimizeInPlace(this->root(), delta); - return theta_.retract(delta, ordering_); + return theta_.retract(internal::linearAlgorithms::optimizeBayesTree(*this)); } /* ************************************************************************* */ -Matrix ISAM2::marginalCovariance(Index key) const { - return marginalFactor(ordering_[key], - params_.factorization == ISAM2Params::QR ? EliminateQR : EliminatePreferCholesky) - ->information().inverse(); +Matrix ISAM2::marginalCovariance(Key key) const { + return marginalFactor(key, params_.getEliminationFunction())->information().inverse(); } /* ************************************************************************* */ @@ -1052,11 +996,15 @@ const VectorValues& ISAM2::getDelta() const { return delta_; } +/* ************************************************************************* */ +double ISAM2::error(const VectorValues& x) const +{ + return GaussianFactorGraph(*this).error(x); +} + /* ************************************************************************* */ VectorValues optimize(const ISAM2& isam) { - gttic(allocateVectorValues); - VectorValues delta = *allocateVectorValues(isam); - gttoc(allocateVectorValues); + VectorValues delta; optimizeInPlace(isam, delta); return delta; } @@ -1084,13 +1032,10 @@ void optimizeInPlace(const ISAM2& isam, VectorValues& delta) { } /* ************************************************************************* */ + VectorValues optimizeGradientSearch(const ISAM2& isam) { - gttic(Allocate_VectorValues); - VectorValues grad = *allocateVectorValues(isam); - gttoc(Allocate_VectorValues); - + VectorValues grad; optimizeGradientSearchInPlace(isam, grad); - return grad; } @@ -1119,34 +1064,38 @@ void optimizeGradientSearchInPlace(const ISAM2& isam, VectorValues& grad) { gttic(Compute_minimizing_step_size); // Compute minimizing step size - double RgNormSq = isam.RgProd_.asVector().squaredNorm(); + double RgNormSq = isam.RgProd_.vector().squaredNorm(); double step = -gradientSqNorm / RgNormSq; gttoc(Compute_minimizing_step_size); gttic(Compute_point); // Compute steepest descent point - scal(step, grad); + grad *= step; gttoc(Compute_point); } /* ************************************************************************* */ + VectorValues gradient(const ISAM2& bayesTree, const VectorValues& x0) { - return gradient(FactorGraph(bayesTree), x0); + return GaussianFactorGraph(bayesTree).gradient(x0); } /* ************************************************************************* */ static void gradientAtZeroTreeAdder(const boost::shared_ptr& root, VectorValues& g) { // Loop through variables in each clique, adding contributions - int variablePosition = 0; + DenseIndex variablePosition = 0; for(GaussianConditional::const_iterator jit = root->conditional()->begin(); jit != root->conditional()->end(); ++jit) { - const int dim = root->conditional()->dim(jit); - g[*jit] += root->gradientContribution().segment(variablePosition, dim); + const DenseIndex dim = root->conditional()->getDim(jit); + pair pos_ins = + g.tryInsert(*jit, root->gradientContribution().segment(variablePosition, dim)); + if(!pos_ins.second) + pos_ins.first->second += root->gradientContribution().segment(variablePosition, dim); variablePosition += dim; } // Recursively add contributions from children typedef boost::shared_ptr sharedClique; - BOOST_FOREACH(const sharedClique& child, root->children()) { + BOOST_FOREACH(const sharedClique& child, root->children) { gradientAtZeroTreeAdder(child, g); } } @@ -1157,8 +1106,8 @@ void gradientAtZero(const ISAM2& bayesTree, VectorValues& g) { g.setZero(); // Sum up contributions for each clique - if(bayesTree.root()) - gradientAtZeroTreeAdder(bayesTree.root(), g); + BOOST_FOREACH(const ISAM2::sharedClique& root, bayesTree.roots()) + gradientAtZeroTreeAdder(root, g); } } diff --git a/gtsam/nonlinear/ISAM2.h b/gtsam/nonlinear/ISAM2.h index f3d8a3e20..22322a52c 100644 --- a/gtsam/nonlinear/ISAM2.h +++ b/gtsam/nonlinear/ISAM2.h @@ -232,6 +232,12 @@ struct GTSAM_EXPORT ISAM2Params { Factorization factorizationTranslator(const std::string& str) const; std::string factorizationTranslator(const Factorization& value) const; + + GaussianFactorGraph::Eliminate getEliminationFunction() const { + return factorization == CHOLESKY + ? (GaussianFactorGraph::Eliminate)EliminatePreferCholesky + : (GaussianFactorGraph::Eliminate)EliminateQR; + } }; @@ -342,10 +348,11 @@ struct GTSAM_EXPORT ISAM2Result { * Specialized Clique structure for ISAM2, incorporating caching and gradient contribution * TODO: more documentation */ -class GTSAM_EXPORT ISAM2Clique : public BayesTreeCliqueBase { +class GTSAM_EXPORT ISAM2Clique : public BayesTreeCliqueBase +{ public: typedef ISAM2Clique This; - typedef BayesTreeCliqueBase Base; + typedef BayesTreeCliqueBase Base; typedef boost::shared_ptr shared_ptr; typedef boost::weak_ptr weak_ptr; typedef GaussianConditional ConditionalType; @@ -354,29 +361,8 @@ public: Base::FactorType::shared_ptr cachedFactor_; Vector gradientContribution_; - /** Construct from a conditional */ - ISAM2Clique(const sharedConditional& conditional) : Base(conditional) { - throw std::runtime_error("ISAM2Clique should always be constructed with the elimination result constructor"); } - - /** Construct from an elimination result */ - ISAM2Clique(const std::pair >& result) : - Base(result.first), cachedFactor_(result.second), - gradientContribution_(result.first->get_R().cols() + result.first->get_S().cols()) { - // Compute gradient contribution - const ConditionalType& conditional(*result.first); - // Rewrite -(R * P')'*d as -(d' * R * P')' for computational speed reasons - gradientContribution_ << -conditional.get_R().transpose() * conditional.get_d(), - -conditional.get_S().transpose() * conditional.get_d(); - } - - /** Produce a deep copy, copying the cached factor and gradient contribution */ - shared_ptr clone() const { - shared_ptr copy(new ISAM2Clique(std::make_pair( - sharedConditional(new ConditionalType(*Base::conditional_)), - cachedFactor_ ? cachedFactor_->clone() : Base::FactorType::shared_ptr()))); - copy->gradientContribution_ = gradientContribution_; - return copy; - } + /// Overridden to also store the remaining factor and gradient contribution + void setEliminationResult(const FactorGraphType::EliminationResult& eliminationResult); /** Access the cached factor */ Base::FactorType::shared_ptr& cachedFactor() { return cachedFactor_; } @@ -384,35 +370,10 @@ public: /** Access the gradient contribution */ const Vector& gradientContribution() const { return gradientContribution_; } - bool equals(const This& other, double tol=1e-9) const { - return Base::equals(other) && - ((!cachedFactor_ && !other.cachedFactor_) - || (cachedFactor_ && other.cachedFactor_ - && cachedFactor_->equals(*other.cachedFactor_, tol))); - } + bool equals(const This& other, double tol=1e-9) const; /** print this node */ - void print(const std::string& s = "", - const IndexFormatter& formatter = DefaultIndexFormatter) const { - Base::print(s,formatter); - if(cachedFactor_) - cachedFactor_->print(s + "Cached: ", formatter); - else - std::cout << s << "Cached empty" << std::endl; - if(gradientContribution_.rows() != 0) - gtsam::print(gradientContribution_, "Gradient contribution: "); - } - - void permuteWithInverse(const Permutation& inversePermutation) { - if(cachedFactor_) cachedFactor_->permuteWithInverse(inversePermutation); - Base::permuteWithInverse(inversePermutation); - } - - bool reduceSeparatorWithInverse(const internal::Reduction& inverseReduction) { - bool changed = Base::reduceSeparatorWithInverse(inverseReduction); - if(changed) if(cachedFactor_) cachedFactor_->reduceWithInverse(inverseReduction); - return changed; - } + void print(const std::string& s = "", const KeyFormatter& formatter = DefaultKeyFormatter) const; private: @@ -436,7 +397,7 @@ private: * estimate of all variables. * */ -class ISAM2: public BayesTree { +class GTSAM_EXPORT ISAM2: public BayesTree { protected: @@ -479,7 +440,7 @@ protected: * This is \c mutable because it is used internally to not update delta_ * until it is needed. */ - mutable std::vector deltaReplacedMask_; + mutable FastSet deltaReplacedMask_; // TODO: Make sure accessed in the right way /** All original nonlinear factors are stored here to use during relinearization */ NonlinearFactorGraph nonlinearFactors_; @@ -487,10 +448,6 @@ protected: /** The current linear factors, which are only updated as needed */ mutable GaussianFactorGraph linearFactors_; - /** The current elimination ordering Symbols to Index (integer) keys. - * We keep it up to date as we add and reorder variables. */ - Ordering ordering_; - /** The current parameters */ ISAM2Params params_; @@ -504,24 +461,20 @@ protected: public: typedef ISAM2 This; ///< This class - typedef BayesTree Base; ///< The BayesTree base class - - /** Create an empty ISAM2 instance */ - GTSAM_EXPORT ISAM2(const ISAM2Params& params); - - /** Create an empty ISAM2 instance using the default set of parameters (see ISAM2Params) */ - GTSAM_EXPORT ISAM2(); - - /** Copy constructor */ - GTSAM_EXPORT ISAM2(const ISAM2& other); - - /** Assignment operator */ - GTSAM_EXPORT ISAM2& operator=(const ISAM2& rhs); - + typedef BayesTree Base; ///< The BayesTree base class typedef Base::Clique Clique; ///< A clique typedef Base::sharedClique sharedClique; ///< Shared pointer to a clique typedef Base::Cliques Cliques; ///< List of Clique typedef from base class + /** Create an empty ISAM2 instance */ + ISAM2(const ISAM2Params& params); + + /** Create an empty ISAM2 instance using the default set of parameters (see ISAM2Params) */ + ISAM2(); + + /** Compare equality */ + bool equals(const ISAM2& other, double tol = 1e-9) const; + /** * Add new factors, updating the solution and relinearizing as needed. * @@ -550,8 +503,8 @@ public: * of the size of the linear delta. This allows the provided keys to be reordered. * @return An ISAM2Result struct containing information about the update */ - GTSAM_EXPORT ISAM2Result update(const NonlinearFactorGraph& newFactors = NonlinearFactorGraph(), const Values& newTheta = Values(), - const FastVector& removeFactorIndices = FastVector(), + ISAM2Result update(const NonlinearFactorGraph& newFactors = NonlinearFactorGraph(), const Values& newTheta = Values(), + const std::vector& removeFactorIndices = std::vector(), const boost::optional >& constrainedKeys = boost::none, const boost::optional >& noRelinKeys = boost::none, const boost::optional >& extraReelimKeys = boost::none, @@ -571,18 +524,24 @@ public: * If provided, 'deletedFactorsIndices' will be augmented with the factor graph * indices of any factor that was removed during the 'marginalizeLeaves' call */ - GTSAM_EXPORT void marginalizeLeaves(const FastList& leafKeys, - boost::optional&> marginalFactorsIndices = boost::none, - boost::optional&> deletedFactorsIndices = boost::none); + void marginalizeLeaves(const FastList& leafKeys); /** Access the current linearization point */ const Values& getLinearizationPoint() const { return theta_; } + /// Compute the current solution. This is the "standard" function for computing the solution that + /// uses: + /// - Partial relinearization and backsubstitution using the thresholds provided in ISAM2Params. + /// - Dogleg trust-region step, if enabled in ISAM2Params. + /// - Equivalent to getLinearizationPoint().retract(getDelta()) + /// The solution returned is in general not the same as that returned by getLinearizationPoint(). + Values optimize() const; + /** Compute an estimate from the incomplete linear delta computed during the last update. * This delta is incomplete because it was not updated below wildfire_threshold. If only * a single variable is needed, it is faster to call calculateEstimate(const KEY&). */ - GTSAM_EXPORT Values calculateEstimate() const; + Values calculateEstimate() const; /** Compute an estimate for a single variable using its incomplete linear delta computed * during the last update. This is faster than calling the no-argument version of @@ -600,10 +559,10 @@ public: * @param key * @return */ - GTSAM_EXPORT const Value& calculateEstimate(Key key) const; + const Value& calculateEstimate(Key key) const; /** Return marginal on any variable as a covariance matrix */ - GTSAM_EXPORT Matrix marginalCovariance(Index key) const; + Matrix marginalCovariance(Index key) const; /// @name Public members for non-typical usage /// @{ @@ -613,22 +572,22 @@ public: /** Compute an estimate using a complete delta computed by a full back-substitution. */ - GTSAM_EXPORT Values calculateBestEstimate() const; + Values calculateBestEstimate() const; /** Access the current delta, computed during the last call to update */ - GTSAM_EXPORT const VectorValues& getDelta() const; + const VectorValues& getDelta() const; + + /** Compute the linear error */ + double error(const VectorValues& x) const; /** Access the set of nonlinear factors */ - GTSAM_EXPORT const NonlinearFactorGraph& getFactorsUnsafe() const { return nonlinearFactors_; } - - /** Access the current ordering */ - GTSAM_EXPORT const Ordering& getOrdering() const { return ordering_; } + const NonlinearFactorGraph& getFactorsUnsafe() const { return nonlinearFactors_; } /** Access the nonlinear variable index */ - GTSAM_EXPORT const VariableIndex& getVariableIndex() const { return variableIndex_; } + const VariableIndex& getVariableIndex() const { return variableIndex_; } /** Access the nonlinear variable index */ - GTSAM_EXPORT const FastSet& getFixedVariables() const { return fixedVariables_; } + const FastSet& getFixedVariables() const { return fixedVariables_; } size_t lastAffectedVariableCount; size_t lastAffectedFactorCount; @@ -637,26 +596,26 @@ public: mutable size_t lastBacksubVariableCount; size_t lastNnzTop; - GTSAM_EXPORT const ISAM2Params& params() const { return params_; } + const ISAM2Params& params() const { return params_; } /** prints out clique statistics */ - GTSAM_EXPORT void printStats() const { getCliqueData().getStats().print(); } + void printStats() const { getCliqueData().getStats().print(); } /// @} private: - GTSAM_EXPORT FastList getAffectedFactors(const FastList& keys) const; - GTSAM_EXPORT FactorGraph::shared_ptr relinearizeAffectedFactors(const FastList& affectedKeys, const FastSet& relinKeys) const; - GTSAM_EXPORT GaussianFactorGraph getCachedBoundaryFactors(Cliques& orphans); + FastList getAffectedFactors(const FastList& keys) const; + GaussianFactorGraph::shared_ptr relinearizeAffectedFactors(const FastList& affectedKeys, const FastSet& relinKeys) const; + GaussianFactorGraph getCachedBoundaryFactors(Cliques& orphans); - GTSAM_EXPORT boost::shared_ptr > recalculate(const FastSet& markedKeys, const FastSet& relinKeys, - const FastVector& observedKeys, const FastSet& unusedIndices, const boost::optional >& constrainKeys, ISAM2Result& result); + boost::shared_ptr > recalculate(const FastSet& markedKeys, const FastSet& relinKeys, + const std::vector& observedKeys, const FastSet& unusedIndices, const boost::optional >& constrainKeys, ISAM2Result& result); // void linear_update(const GaussianFactorGraph& newFactors); - GTSAM_EXPORT void updateDelta(bool forceFullSolve = false) const; + void updateDelta(bool forceFullSolve = false) const; - GTSAM_EXPORT friend void optimizeInPlace(const ISAM2&, VectorValues&); - GTSAM_EXPORT friend void optimizeGradientSearchInPlace(const ISAM2&, VectorValues&); + friend GTSAM_EXPORT void optimizeInPlace(const ISAM2&, VectorValues&); + friend GTSAM_EXPORT void optimizeGradientSearchInPlace(const ISAM2&, VectorValues&); }; // ISAM2 @@ -678,12 +637,12 @@ GTSAM_EXPORT void optimizeInPlace(const ISAM2& isam, VectorValues& delta); /// variables are contained in the subtree. /// @return The number of variables that were solved for template -int optimizeWildfire(const boost::shared_ptr& root, - double threshold, const std::vector& replaced, VectorValues& delta); +size_t optimizeWildfire(const boost::shared_ptr& root, + double threshold, const FastSet& replaced, VectorValues& delta); template -int optimizeWildfireNonRecursive(const boost::shared_ptr& root, - double threshold, const std::vector& replaced, VectorValues& delta); +size_t optimizeWildfireNonRecursive(const boost::shared_ptr& root, + double threshold, const FastSet& replaced, VectorValues& delta); /** * Optimize along the gradient direction, with a closed-form computation to diff --git a/gtsam/nonlinear/LevenbergMarquardtOptimizer.cpp b/gtsam/nonlinear/LevenbergMarquardtOptimizer.cpp index 8505834b5..bb7e065dc 100644 --- a/gtsam/nonlinear/LevenbergMarquardtOptimizer.cpp +++ b/gtsam/nonlinear/LevenbergMarquardtOptimizer.cpp @@ -19,6 +19,8 @@ #include #include +#include +#include #include #include @@ -74,7 +76,7 @@ void LevenbergMarquardtOptimizer::iterate() { gttic(LM_iterate); // Linearize graph - GaussianFactorGraph::shared_ptr linear = graph_.linearize(state_.values, *params_.ordering); + GaussianFactorGraph::shared_ptr linear = graph_.linearize(state_.values); // Pull out parameters we'll use const NonlinearOptimizerParams::Verbosity nloVerbosity = params_.verbosity; @@ -88,18 +90,17 @@ void LevenbergMarquardtOptimizer::iterate() { // Add prior-factors // TODO: replace this dampening with a backsubstitution approach gttic(damp); - GaussianFactorGraph dampedSystem(*linear); + GaussianFactorGraph dampedSystem = *linear; { double sigma = 1.0 / std::sqrt(state_.lambda); - dampedSystem.reserve(dampedSystem.size() + dimensions_.size()); + dampedSystem.reserve(dampedSystem.size() + state_.values.size()); // for each of the variables, add a prior - for(Index j=0; j(key_value.key, A, b, model); } } gttoc(damp); @@ -115,7 +116,7 @@ void LevenbergMarquardtOptimizer::iterate() { // update values gttic(retract); - Values newValues = state_.values.retract(delta, *params_.ordering); + Values newValues = state_.values.retract(delta); gttoc(retract); // create new optimization state with more adventurous lambda @@ -143,6 +144,7 @@ void LevenbergMarquardtOptimizer::iterate() { } } } catch(const IndeterminantLinearSystemException& e) { + (void) e; // Prevent unused variable warning if(lmVerbosity >= LevenbergMarquardtParams::LAMBDA) cout << "Negative matrix, increasing lambda" << endl; // Either we're not cautious, or the same lambda was worse than the current error. @@ -167,4 +169,14 @@ void LevenbergMarquardtOptimizer::iterate() { ++state_.iterations; } +/* ************************************************************************* */ +LevenbergMarquardtParams LevenbergMarquardtOptimizer::ensureHasOrdering( + LevenbergMarquardtParams params, const NonlinearFactorGraph& graph) const +{ + if(!params.ordering) + params.ordering = Ordering::COLAMD(graph); + return params; +} + } /* namespace gtsam */ + diff --git a/gtsam/nonlinear/LevenbergMarquardtOptimizer.h b/gtsam/nonlinear/LevenbergMarquardtOptimizer.h index 5c769ea7b..46db598d8 100644 --- a/gtsam/nonlinear/LevenbergMarquardtOptimizer.h +++ b/gtsam/nonlinear/LevenbergMarquardtOptimizer.h @@ -94,7 +94,6 @@ class GTSAM_EXPORT LevenbergMarquardtOptimizer : public NonlinearOptimizer { protected: LevenbergMarquardtParams params_; ///< LM parameters LevenbergMarquardtState state_; ///< optimization state - std::vector dimensions_; ///< undocumented public: typedef boost::shared_ptr shared_ptr; @@ -112,8 +111,8 @@ public: */ LevenbergMarquardtOptimizer(const NonlinearFactorGraph& graph, const Values& initialValues, const LevenbergMarquardtParams& params = LevenbergMarquardtParams()) : - NonlinearOptimizer(graph), params_(ensureHasOrdering(params, graph, initialValues)), - state_(graph, initialValues, params_), dimensions_(initialValues.dims(*params_.ordering)) {} + NonlinearOptimizer(graph), params_(ensureHasOrdering(params, graph)), + state_(graph, initialValues, params_) {} /** Standard constructor, requires a nonlinear factor graph, initial * variable assignments, and optimization parameters. For convenience this @@ -123,7 +122,7 @@ public: * @param initialValues The initial variable assignments */ LevenbergMarquardtOptimizer(const NonlinearFactorGraph& graph, const Values& initialValues, const Ordering& ordering) : - NonlinearOptimizer(graph), dimensions_(initialValues.dims(ordering)) { + NonlinearOptimizer(graph) { params_.ordering = ordering; state_ = LevenbergMarquardtState(graph, initialValues, params_); } @@ -172,11 +171,7 @@ protected: virtual const NonlinearOptimizerState& _state() const { return state_; } /** Internal function for computing a COLAMD ordering if no ordering is specified */ - LevenbergMarquardtParams ensureHasOrdering(LevenbergMarquardtParams params, const NonlinearFactorGraph& graph, const Values& values) const { - if(!params.ordering) - params.ordering = *graph.orderingCOLAMD(values); - return params; - } + LevenbergMarquardtParams ensureHasOrdering(LevenbergMarquardtParams params, const NonlinearFactorGraph& graph) const; }; } diff --git a/gtsam/nonlinear/LinearContainerFactor.cpp b/gtsam/nonlinear/LinearContainerFactor.cpp index afbc4cfc7..6fda8e7ed 100644 --- a/gtsam/nonlinear/LinearContainerFactor.cpp +++ b/gtsam/nonlinear/LinearContainerFactor.cpp @@ -6,20 +6,15 @@ */ #include +#include +#include +#include +#include #include namespace gtsam { -/* ************************************************************************* */ -void LinearContainerFactor::rekeyFactor(const Ordering& ordering) { - BOOST_FOREACH(Index& idx, factor_->keys()) { - Key fullKey = ordering.key(idx); - idx = fullKey; - keys_.push_back(fullKey); - } -} - /* ************************************************************************* */ void LinearContainerFactor::initializeLinearizationPoint(const Values& linearizationPoint) { if (!linearizationPoint.empty()) { @@ -35,48 +30,27 @@ void LinearContainerFactor::initializeLinearizationPoint(const Values& lineariza /* ************************************************************************* */ LinearContainerFactor::LinearContainerFactor(const GaussianFactor::shared_ptr& factor, const boost::optional& linearizationPoint) -: factor_(factor), linearizationPoint_(linearizationPoint) { - // Extract keys stashed in linear factor - BOOST_FOREACH(const Index& idx, factor_->keys()) - keys_.push_back(idx); +: NonlinearFactor(factor->keys()), factor_(factor), linearizationPoint_(linearizationPoint) { } /* ************************************************************************* */ LinearContainerFactor::LinearContainerFactor( - const JacobianFactor& factor, const Ordering& ordering, - const Values& linearizationPoint) -: factor_(factor.clone()) { - rekeyFactor(ordering); + const JacobianFactor& factor, const Values& linearizationPoint) +: NonlinearFactor(factor.keys()), factor_(factor.clone()) { initializeLinearizationPoint(linearizationPoint); } /* ************************************************************************* */ LinearContainerFactor::LinearContainerFactor( - const HessianFactor& factor, const Ordering& ordering, - const Values& linearizationPoint) -: factor_(factor.clone()) { - rekeyFactor(ordering); + const HessianFactor& factor, const Values& linearizationPoint) +: NonlinearFactor(factor.keys()), factor_(factor.clone()) { initializeLinearizationPoint(linearizationPoint); } /* ************************************************************************* */ LinearContainerFactor::LinearContainerFactor( - const GaussianFactor::shared_ptr& factor, const Ordering& ordering, - const Values& linearizationPoint) -: factor_(factor->clone()) { - rekeyFactor(ordering); - initializeLinearizationPoint(linearizationPoint); -} - -/* ************************************************************************* */ -LinearContainerFactor::LinearContainerFactor( - const GaussianFactor::shared_ptr& factor, - const Values& linearizationPoint) -: factor_(factor->clone()) -{ - // Extract keys stashed in linear factor - BOOST_FOREACH(const Index& idx, factor_->keys()) - keys_.push_back(idx); + const GaussianFactor::shared_ptr& factor, const Values& linearizationPoint) +: NonlinearFactor(factor->keys()), factor_(factor->clone()) { initializeLinearizationPoint(linearizationPoint); } @@ -112,19 +86,11 @@ double LinearContainerFactor::error(const Values& c) const { csub.insert(key, c.at(key)); // create dummy ordering for evaluation - Ordering ordering = *csub.orderingArbitrary(); - VectorValues delta = linearizationPoint_->localCoordinates(csub, ordering); - - // Change keys on stored factor - BOOST_FOREACH(gtsam::Index& index, factor_->keys()) - index = ordering[index]; + VectorValues delta = linearizationPoint_->localCoordinates(csub); // compute error double error = factor_->error(delta); - // change keys back - factor_->keys() = keys(); - return error; } @@ -137,36 +103,20 @@ size_t LinearContainerFactor::dim() const { } /* ************************************************************************* */ -GaussianFactor::shared_ptr LinearContainerFactor::order(const Ordering& ordering) const { - // clone factor - boost::shared_ptr result = factor_->clone(); - - // rekey - BOOST_FOREACH(Index& key, result->keys()) - key = ordering[key]; - - return result; -} - -/* ************************************************************************* */ -GaussianFactor::shared_ptr LinearContainerFactor::linearize( - const Values& c, const Ordering& ordering) const { +GaussianFactor::shared_ptr LinearContainerFactor::linearize(const Values& c) const { if (!hasLinearizationPoint()) - return order(ordering); + return factor_; // Extract subset of values Values subsetC; BOOST_FOREACH(const gtsam::Key& key, this->keys()) subsetC.insert(key, c.at(key)); - // Create a temp ordering for this factor - Ordering localOrdering = *subsetC.orderingArbitrary(); - // Determine delta between linearization points using new ordering - VectorValues delta = linearizationPoint_->localCoordinates(subsetC, localOrdering); + VectorValues delta = linearizationPoint_->localCoordinates(subsetC); // clone and reorder linear factor to final ordering - GaussianFactor::shared_ptr linFactor = order(localOrdering); + GaussianFactor::shared_ptr linFactor = factor_->clone(); if (isJacobian()) { JacobianFactor::shared_ptr jacFactor = boost::dynamic_pointer_cast(linFactor); jacFactor->getb() = -jacFactor->unweighted_error(delta); @@ -174,26 +124,23 @@ GaussianFactor::shared_ptr LinearContainerFactor::linearize( HessianFactor::shared_ptr hesFactor = boost::dynamic_pointer_cast(linFactor); size_t dim = hesFactor->linearTerm().size(); Eigen::Block Gview = hesFactor->info().block(0, 0, dim, dim); - Vector deltaVector = delta.asVector(); + Vector deltaVector = delta.vector(); Vector G_delta = Gview.selfadjointView() * deltaVector; hesFactor->constantTerm() += deltaVector.dot(G_delta) - 2.0 * deltaVector.dot(hesFactor->linearTerm()); hesFactor->linearTerm() -= G_delta; } - // reset ordering - BOOST_FOREACH(Index& idx, linFactor->keys()) - idx = ordering[localOrdering.key(idx)]; return linFactor; } /* ************************************************************************* */ bool LinearContainerFactor::isJacobian() const { - return boost::dynamic_pointer_cast(factor_); + return dynamic_cast(factor_.get()) != 0; } /* ************************************************************************* */ bool LinearContainerFactor::isHessian() const { - return boost::dynamic_pointer_cast(factor_); + return dynamic_cast(factor_.get()) != 0; } /* ************************************************************************* */ @@ -207,33 +154,29 @@ HessianFactor::shared_ptr LinearContainerFactor::toHessian() const { } /* ************************************************************************* */ -GaussianFactor::shared_ptr LinearContainerFactor::negate(const Ordering& ordering) const { +GaussianFactor::shared_ptr LinearContainerFactor::negateToGaussian() const { GaussianFactor::shared_ptr result = factor_->negate(); - BOOST_FOREACH(Key& key, result->keys()) - key = ordering[key]; return result; } /* ************************************************************************* */ -NonlinearFactor::shared_ptr LinearContainerFactor::negate() const { +NonlinearFactor::shared_ptr LinearContainerFactor::negateToNonlinear() const { GaussianFactor::shared_ptr antifactor = factor_->negate(); // already has keys in place - return NonlinearFactor::shared_ptr(new LinearContainerFactor(antifactor,linearizationPoint_)); + return NonlinearFactor::shared_ptr(new LinearContainerFactor(antifactor, linearizationPoint_)); } /* ************************************************************************* */ NonlinearFactorGraph LinearContainerFactor::convertLinearGraph( - const GaussianFactorGraph& linear_graph, const Ordering& ordering, - const Values& linearizationPoint) { + const GaussianFactorGraph& linear_graph, const Values& linearizationPoint) +{ NonlinearFactorGraph result; BOOST_FOREACH(const GaussianFactor::shared_ptr& f, linear_graph) if (f) result.push_back(NonlinearFactorGraph::sharedFactor( - new LinearContainerFactor(f, ordering, linearizationPoint))); + new LinearContainerFactor(f, linearizationPoint))); return result; } /* ************************************************************************* */ } // \namespace gtsam - - diff --git a/gtsam/nonlinear/LinearContainerFactor.h b/gtsam/nonlinear/LinearContainerFactor.h index cc87e278a..379821cfe 100644 --- a/gtsam/nonlinear/LinearContainerFactor.h +++ b/gtsam/nonlinear/LinearContainerFactor.h @@ -9,11 +9,15 @@ #pragma once -#include #include + namespace gtsam { + // Forward declarations + class JacobianFactor; + class HessianFactor; + /** * Dummy version of a generic linear factor to be injected into a nonlinear factor graph * @@ -30,26 +34,18 @@ protected: LinearContainerFactor() {} /** direct copy constructor */ - LinearContainerFactor(const GaussianFactor::shared_ptr& factor, - const boost::optional& linearizationPoint); + LinearContainerFactor(const GaussianFactor::shared_ptr& factor, const boost::optional& linearizationPoint); public: - /** Primary constructor: store a linear factor and decode the ordering */ - LinearContainerFactor(const JacobianFactor& factor, const Ordering& ordering, - const Values& linearizationPoint = Values()); + /** Primary constructor: store a linear factor with optional linearization point */ + LinearContainerFactor(const JacobianFactor& factor, const Values& linearizationPoint = Values()); - /** Primary constructor: store a linear factor and decode the ordering */ - LinearContainerFactor(const HessianFactor& factor, const Ordering& ordering, - const Values& linearizationPoint = Values()); + /** Primary constructor: store a linear factor with optional linearization point */ + LinearContainerFactor(const HessianFactor& factor, const Values& linearizationPoint = Values()); /** Constructor from shared_ptr */ - LinearContainerFactor(const GaussianFactor::shared_ptr& factor, const Ordering& ordering, - const Values& linearizationPoint = Values()); - - /** Constructor from re-keyed factor: all indices assumed replaced with Key */ - LinearContainerFactor(const GaussianFactor::shared_ptr& factor, - const Values& linearizationPoint = Values()); + LinearContainerFactor(const GaussianFactor::shared_ptr& factor, const Values& linearizationPoint = Values()); // Access @@ -81,9 +77,6 @@ public: /** Extract the linearization point used in recalculating error */ const boost::optional& linearizationPoint() const { return linearizationPoint_; } - /** Apply the ordering to a graph - same as linearize(), but without needing a linearization point */ - GaussianFactor::shared_ptr order(const Ordering& ordering) const; - /** * Linearize to a GaussianFactor, with method depending on the presence of a linearizationPoint * - With no linearization point, returns a reordered, but numerically identical, @@ -101,18 +94,18 @@ public: * TODO: better approximation of relinearization * TODO: switchable modes for approximation technique */ - GaussianFactor::shared_ptr linearize(const Values& c, const Ordering& ordering) const; + GaussianFactor::shared_ptr linearize(const Values& c) const; /** * Creates an anti-factor directly and performs rekeying due to ordering */ - GaussianFactor::shared_ptr negate(const Ordering& ordering) const; + GaussianFactor::shared_ptr negateToGaussian() const; /** * Creates the equivalent anti-factor as another LinearContainerFactor, * so it remains independent of ordering. */ - NonlinearFactor::shared_ptr negate() const; + NonlinearFactor::shared_ptr negateToNonlinear() const; /** * Creates a shared_ptr clone of the factor - needs to be specialized to allow @@ -135,20 +128,19 @@ public: bool isHessian() const; /** Casts to JacobianFactor */ - JacobianFactor::shared_ptr toJacobian() const; + boost::shared_ptr toJacobian() const; /** Casts to HessianFactor */ - HessianFactor::shared_ptr toHessian() const; + boost::shared_ptr toHessian() const; /** * Utility function for converting linear graphs to nonlinear graphs * consisting of LinearContainerFactors. */ static NonlinearFactorGraph convertLinearGraph(const GaussianFactorGraph& linear_graph, - const Ordering& ordering, const Values& linearizationPoint = Values()); + const Values& linearizationPoint = Values()); protected: - void rekeyFactor(const Ordering& ordering); void initializeLinearizationPoint(const Values& linearizationPoint); private: @@ -167,4 +159,3 @@ private: } // \namespace gtsam - diff --git a/gtsam/nonlinear/Marginals.cpp b/gtsam/nonlinear/Marginals.cpp index 466a20a84..ec227298c 100644 --- a/gtsam/nonlinear/Marginals.cpp +++ b/gtsam/nonlinear/Marginals.cpp @@ -17,8 +17,8 @@ */ #include -#include -#include +#include +#include #include using namespace std; @@ -26,14 +26,12 @@ using namespace std; namespace gtsam { /* ************************************************************************* */ -Marginals::Marginals(const NonlinearFactorGraph& graph, const Values& solution, Factorization factorization) { +Marginals::Marginals(const NonlinearFactorGraph& graph, const Values& solution, Factorization factorization) +{ gttic(MarginalsConstructor); - // Compute COLAMD ordering - ordering_ = *graph.orderingCOLAMD(solution); - // Linearize graph - graph_ = *graph.linearize(solution, ordering_); + graph_ = *graph.linearize(solution); // Store values values_ = solution; @@ -41,14 +39,14 @@ Marginals::Marginals(const NonlinearFactorGraph& graph, const Values& solution, // Compute BayesTree factorization_ = factorization; if(factorization_ == CHOLESKY) - bayesTree_ = *GaussianMultifrontalSolver(graph_, false).eliminate(); + bayesTree_ = *graph_.eliminateMultifrontal(boost::none, EliminatePreferCholesky); else if(factorization_ == QR) - bayesTree_ = *GaussianMultifrontalSolver(graph_, true).eliminate(); + bayesTree_ = *graph_.eliminateMultifrontal(boost::none, EliminateQR); } /* ************************************************************************* */ -void Marginals::print(const std::string& str, const KeyFormatter& keyFormatter) const { - ordering_.print(str+"Ordering: ", keyFormatter); +void Marginals::print(const std::string& str, const KeyFormatter& keyFormatter) const +{ graph_.print(str+"Graph: "); values_.print(str+"Solution: ", keyFormatter); bayesTree_.print(str+"Bayes Tree: "); @@ -62,38 +60,23 @@ Matrix Marginals::marginalCovariance(Key variable) const { /* ************************************************************************* */ Matrix Marginals::marginalInformation(Key variable) const { gttic(marginalInformation); - // Get linear key - Index index = ordering_[variable]; // Compute marginal GaussianFactor::shared_ptr marginalFactor; if(factorization_ == CHOLESKY) - marginalFactor = bayesTree_.marginalFactor(index, EliminatePreferCholesky); + marginalFactor = bayesTree_.marginalFactor(variable, EliminatePreferCholesky); else if(factorization_ == QR) - marginalFactor = bayesTree_.marginalFactor(index, EliminateQR); + marginalFactor = bayesTree_.marginalFactor(variable, EliminateQR); // Get information matrix (only store upper-right triangle) gttic(AsMatrix); return marginalFactor->information(); } -/* ************************************************************************* */ -JointMarginal::JointMarginal(const JointMarginal& other) : - blockView_(fullMatrix_) { - *this = other; -} - -/* ************************************************************************* */ -JointMarginal& JointMarginal::operator=(const JointMarginal& rhs) { - indices_ = rhs.indices_; - blockView_.assignNoalias(rhs.blockView_); - return *this; -} - /* ************************************************************************* */ JointMarginal Marginals::jointMarginalCovariance(const std::vector& variables) const { JointMarginal info = jointMarginalInformation(variables); - info.fullMatrix_ = info.fullMatrix_.inverse(); + info.blockMatrix_.full() = info.blockMatrix_.full().inverse(); return info; } @@ -102,61 +85,41 @@ JointMarginal Marginals::jointMarginalInformation(const std::vector& variab // If 2 variables, we can use the BayesTree::joint function, otherwise we // have to use sequential elimination. - if(variables.size() == 1) { + if(variables.size() == 1) + { Matrix info = marginalInformation(variables.front()); std::vector dims; dims.push_back(info.rows()); - Ordering indices; - indices.insert(variables.front(), 0); - return JointMarginal(info, dims, indices); - - } else { - // Obtain requested variables as ordered indices - vector indices(variables.size()); - for(size_t i=0; i usedIndices; - for(size_t i=0; i Index_Key; - BOOST_FOREACH(const Index_Key& index_key, usedIndices) { - variableConversion.insert(index_key.second, slot); - ++ slot; - } + jointFG = GaussianFactorGraph(*graph_.marginalMultifrontalBayesTree(Ordering(variables), boost::none, EliminateQR)); } // Get dimensions from factor graph - std::vector dims(indices.size(), 0); + std::vector dims; + dims.reserve(variables.size()); BOOST_FOREACH(Key key, variables) { - dims[variableConversion[key]] = values_.at(key).dim(); + dims.push_back(values_.at(key).dim()); } // Get information matrix Matrix augmentedInfo = jointFG.augmentedHessian(); Matrix info = augmentedInfo.topLeftCorner(augmentedInfo.rows()-1, augmentedInfo.cols()-1); - return JointMarginal(info, dims, variableConversion); + return JointMarginal(info, dims, variables); } } @@ -164,12 +127,12 @@ JointMarginal Marginals::jointMarginalInformation(const std::vector& variab void JointMarginal::print(const std::string& s, const KeyFormatter& formatter) const { cout << s << "Joint marginal on keys "; bool first = true; - BOOST_FOREACH(const Ordering::value_type& key_index, indices_) { + BOOST_FOREACH(Key key, keys_) { if(!first) cout << ", "; else first = false; - cout << formatter(key_index.first); + cout << formatter(key); } cout << ". Use 'at' or 'operator()' to query matrix blocks." << endl; } diff --git a/gtsam/nonlinear/Marginals.h b/gtsam/nonlinear/Marginals.h index 94876dcf9..fbcfcf15b 100644 --- a/gtsam/nonlinear/Marginals.h +++ b/gtsam/nonlinear/Marginals.h @@ -18,7 +18,6 @@ #pragma once -#include #include #include #include @@ -43,7 +42,6 @@ public: protected: GaussianFactorGraph graph_; - Ordering ordering_; Values values_; Factorization factorization_; GaussianBayesTree bayesTree_; @@ -81,12 +79,9 @@ public: class GTSAM_EXPORT JointMarginal { protected: - - typedef SymmetricBlockView BlockView; - - Matrix fullMatrix_; - BlockView blockView_; - Ordering indices_; + SymmetricBlockMatrix blockMatrix_; + std::vector keys_; + FastMap indices_; public: /** A block view of the joint marginal - this stores a reference to the @@ -94,7 +89,7 @@ public: * while this block view is needed, otherwise assign this block object to a * Matrix to store it. */ - typedef BlockView::constBlock Block; + typedef SymmetricBlockMatrix::constBlock Block; /** Access a block, corresponding to a pair of variables, of the joint * marginal. Each block is accessed by its "vertical position", @@ -110,7 +105,7 @@ public: * @param jVariable The nonlinear Key specifying the "horizontal position" of the requested block */ Block operator()(Key iVariable, Key jVariable) const { - return blockView_(indices_[iVariable], indices_[jVariable]); } + return blockMatrix_(indices_.at(iVariable), indices_.at(jVariable)); } /** Synonym for operator() */ Block at(Key iVariable, Key jVariable) const { @@ -121,20 +116,14 @@ public: * in scope while this view is needed. Otherwise assign this block object to a Matrix * to store it. */ - const Matrix& fullMatrix() const { return fullMatrix_; } - - /** Copy constructor */ - JointMarginal(const JointMarginal& other); - - /** Assignment operator */ - JointMarginal& operator=(const JointMarginal& rhs); + const Matrix& fullMatrix() const { return blockMatrix_.matrix(); } /** Print */ void print(const std::string& s = "", const KeyFormatter& formatter = DefaultKeyFormatter) const; protected: - JointMarginal(const Matrix& fullMatrix, const std::vector& dims, const Ordering& indices) : - fullMatrix_(fullMatrix), blockView_(fullMatrix_, dims.begin(), dims.end()), indices_(indices) {} + JointMarginal(const Matrix& fullMatrix, const std::vector& dims, const std::vector& keys) : + blockMatrix_(dims, fullMatrix), keys_(keys), indices_(Ordering(keys).invert()) {} friend class Marginals; diff --git a/gtsam/nonlinear/NonlinearConjugateGradientOptimizer.cpp b/gtsam/nonlinear/NonlinearConjugateGradientOptimizer.cpp index 90f663639..e5a501b15 100644 --- a/gtsam/nonlinear/NonlinearConjugateGradientOptimizer.cpp +++ b/gtsam/nonlinear/NonlinearConjugateGradientOptimizer.cpp @@ -6,7 +6,6 @@ */ #include -#include #include #include #include @@ -19,18 +18,10 @@ namespace gtsam { /* Return the gradient vector of a nonlinear factor given a linearization point and a variable ordering * Can be moved to NonlinearFactorGraph.h if desired */ -void gradientInPlace(const NonlinearFactorGraph &nfg, const Values &values, const Ordering &ordering, VectorValues &g) { +VectorValues gradientInPlace(const NonlinearFactorGraph &nfg, const Values &values) { // Linearize graph - GaussianFactorGraph::shared_ptr linear = nfg.linearize(values, ordering); - FactorGraph jfg; jfg.reserve(linear->size()); - BOOST_FOREACH(const GaussianFactorGraph::sharedFactor& factor, *linear) { - if(boost::shared_ptr jf = boost::dynamic_pointer_cast(factor)) - jfg.push_back((jf)); - else - jfg.push_back(boost::make_shared(*factor)); - } - // compute the gradient direction - gradientAtZero(jfg, g); + GaussianFactorGraph::shared_ptr linear = nfg.linearize(values); + return linear->gradientAtZero(); } double NonlinearConjugateGradientOptimizer::System::error(const State &state) const { @@ -38,27 +29,26 @@ double NonlinearConjugateGradientOptimizer::System::error(const State &state) co } NonlinearConjugateGradientOptimizer::System::Gradient NonlinearConjugateGradientOptimizer::System::gradient(const State &state) const { - Gradient result = state.zeroVectors(ordering_); - gradientInPlace(graph_, state, ordering_, result); - return result; + return gradientInPlace(graph_, state); } NonlinearConjugateGradientOptimizer::System::State NonlinearConjugateGradientOptimizer::System::advance(const State ¤t, const double alpha, const Gradient &g) const { Gradient step = g; - scal(alpha, step); - return current.retract(step, ordering_); + step *= alpha; + return current.retract(step); } void NonlinearConjugateGradientOptimizer::iterate() { size_t dummy ; - boost::tie(state_.values, dummy) = nonlinearConjugateGradient(System(graph_, *ordering_), state_.values, params_, true /* single iterations */); + boost::tie(state_.values, dummy) = nonlinearConjugateGradient(System(graph_), state_.values, params_, true /* single iterations */); ++state_.iterations; state_.error = graph_.error(state_.values); } const Values& NonlinearConjugateGradientOptimizer::optimize() { - boost::tie(state_.values, state_.iterations) = nonlinearConjugateGradient(System(graph_, *ordering_), state_.values, params_, false /* up to convergent */); + boost::tie(state_.values, state_.iterations) = nonlinearConjugateGradient(System(graph_), state_.values, params_, false /* up to convergent */); state_.error = graph_.error(state_.values); return state_.values; } } /* namespace gtsam */ + diff --git a/gtsam/nonlinear/NonlinearConjugateGradientOptimizer.h b/gtsam/nonlinear/NonlinearConjugateGradientOptimizer.h index 4cb5be573..4b12eae6f 100644 --- a/gtsam/nonlinear/NonlinearConjugateGradientOptimizer.h +++ b/gtsam/nonlinear/NonlinearConjugateGradientOptimizer.h @@ -31,10 +31,9 @@ class GTSAM_EXPORT NonlinearConjugateGradientOptimizer : public NonlinearOptimiz protected: const NonlinearFactorGraph &graph_; - const Ordering &ordering_; public: - System(const NonlinearFactorGraph &graph, const Ordering &ordering): graph_(graph), ordering_(ordering) {} + System(const NonlinearFactorGraph &graph): graph_(graph) {} double error(const State &state) const ; Gradient gradient(const State &state) const ; State advance(const State ¤t, const double alpha, const Gradient &g) const ; @@ -49,15 +48,12 @@ public: protected: States state_; Parameters params_; - Ordering::shared_ptr ordering_; - VectorValues::shared_ptr gradient_; public: NonlinearConjugateGradientOptimizer(const NonlinearFactorGraph& graph, const Values& initialValues, const Parameters& params = Parameters()) - : Base(graph), state_(graph, initialValues), params_(params), ordering_(initialValues.orderingArbitrary()), - gradient_(new VectorValues(initialValues.zeroVectors(*ordering_))){} + : Base(graph), state_(graph, initialValues), params_(params) {} virtual ~NonlinearConjugateGradientOptimizer() {} virtual void iterate(); @@ -185,9 +181,5 @@ boost::tuple nonlinearConjugateGradient(const S &system, const V &ini return boost::tie(currentValues, iteration); } - - - - - } + diff --git a/gtsam/nonlinear/NonlinearEquality.h b/gtsam/nonlinear/NonlinearEquality.h index faee280b1..5d3319d97 100644 --- a/gtsam/nonlinear/NonlinearEquality.h +++ b/gtsam/nonlinear/NonlinearEquality.h @@ -153,12 +153,12 @@ namespace gtsam { } // Linearize is over-written, because base linearization tries to whiten - virtual GaussianFactor::shared_ptr linearize(const Values& x, const Ordering& ordering) const { + virtual GaussianFactor::shared_ptr linearize(const Values& x) const { const T& xj = x.at(this->key()); Matrix A; Vector b = evaluateError(xj, A); SharedDiagonal model = noiseModel::Constrained::All(b.size()); - return GaussianFactor::shared_ptr(new JacobianFactor(ordering[this->key()], A, b, model)); + return GaussianFactor::shared_ptr(new JacobianFactor(this->key(), A, b, model)); } /// @return a deep copy of this factor diff --git a/gtsam/nonlinear/NonlinearFactor.h b/gtsam/nonlinear/NonlinearFactor.h index 8b9b2a5da..010341804 100644 --- a/gtsam/nonlinear/NonlinearFactor.h +++ b/gtsam/nonlinear/NonlinearFactor.h @@ -20,19 +20,14 @@ #pragma once -#include -#include - #include -#include - -#include -#include -#include -#include +#include #include -#include +#include +#include +#include + /** * Macro to add a standard clone function to a derived factor @@ -45,6 +40,8 @@ namespace gtsam { +using boost::assign::cref_list_of; + /* ************************************************************************* */ /** * Nonlinear factor base class @@ -54,51 +51,39 @@ namespace gtsam { * which are objects in non-linear manifolds (Lie groups). * \nosubgrouping */ -class NonlinearFactor: public Factor { +class NonlinearFactor: public Factor { protected: // Some handy typedefs - typedef Factor Base; + typedef Factor Base; typedef NonlinearFactor This; public: - typedef boost::shared_ptr shared_ptr; + typedef boost::shared_ptr shared_ptr; /// @name Standard Constructors /// @{ /** Default constructor for I/O only */ - NonlinearFactor() { - } + NonlinearFactor() {} /** - * Constructor from a vector of the keys involved in this factor + * Constructor from a collection of the keys involved in this factor */ - NonlinearFactor(const std::vector& keys) : + template + NonlinearFactor(const CONTAINER& keys) : Base(keys) {} - /** - * Constructor from iterators over the keys involved in this factor - */ - template - NonlinearFactor(ITERATOR beginKeys, ITERATOR endKeys) : - Base(beginKeys, endKeys) {} - - NonlinearFactor(Key key) : Base(key) {} ///< Convenience constructor for 1 key - NonlinearFactor(Key key1, Key key2) : Base(key1, key2) {} ///< Convenience constructor for 2 keys - NonlinearFactor(Key key1, Key key2, Key key3) : Base(key1, key2, key3) {} ///< Convenience constructor for 3 keys - NonlinearFactor(Key key1, Key key2, Key key3, Key key4) : Base(key1, key2, key3, key4) {} ///< Convenience constructor for 4 keys - NonlinearFactor(Key key1, Key key2, Key key3, Key key4, Key key5) : Base(key1, key2, key3, key4, key5) {} ///< Convenience constructor for 5 keys - NonlinearFactor(Key key1, Key key2, Key key3, Key key4, Key key5, Key key6) : Base(key1, key2, key3, key4, key5, key6) {} ///< Convenience constructor for 6 keys - /// @} /// @name Testable /// @{ /** print */ - virtual void print(const std::string& s = "", const KeyFormatter& keyFormatter = DefaultKeyFormatter) const { + virtual void print(const std::string& s = "", + const KeyFormatter& keyFormatter = DefaultKeyFormatter) const + { std::cout << s << " keys = { "; BOOST_FOREACH(Key key, this->keys()) { std::cout << keyFormatter(key) << " "; } std::cout << "}" << std::endl; @@ -141,18 +126,18 @@ public: /** linearize to a GaussianFactor */ virtual boost::shared_ptr - linearize(const Values& c, const Ordering& ordering) const = 0; + linearize(const Values& c) const = 0; /** * Create a symbolic factor using the given ordering to determine the * variable indices. */ - virtual IndexFactor::shared_ptr symbolic(const Ordering& ordering) const { - std::vector indices(this->size()); - for(size_t j=0; jsize(); ++j) - indices[j] = ordering[this->keys()[j]]; - return IndexFactor::shared_ptr(new IndexFactor(indices)); - } + //virtual IndexFactor::shared_ptr symbolic(const Ordering& ordering) const { + // std::vector indices(this->size()); + // for(size_t j=0; jsize(); ++j) + // indices[j] = ordering[this->keys()[j]]; + // return IndexFactor::shared_ptr(new IndexFactor(indices)); + //} /** * Creates a shared_ptr clone of the factor - needs to be specialized to allow @@ -218,11 +203,10 @@ protected: public: - typedef boost::shared_ptr shared_ptr; + typedef boost::shared_ptr shared_ptr; /** Default constructor for I/O only */ - NoiseModelFactor() { - } + NoiseModelFactor() {} /** Destructor */ virtual ~NoiseModelFactor() {} @@ -230,17 +214,9 @@ public: /** * Constructor */ - template - NoiseModelFactor(const SharedNoiseModel& noiseModel, ITERATOR beginKeys, ITERATOR endKeys) - : Base(beginKeys, endKeys), noiseModel_(noiseModel) { - } - - NoiseModelFactor(const SharedNoiseModel& noiseModel, Key key) : Base(key), noiseModel_(noiseModel) {} ///< Convenience constructor for 1 key - NoiseModelFactor(const SharedNoiseModel& noiseModel, Key key1, Key key2) : Base(key1, key2), noiseModel_(noiseModel) {} ///< Convenience constructor for 2 keys - NoiseModelFactor(const SharedNoiseModel& noiseModel, Key key1, Key key2, Key key3) : Base(key1, key2, key3), noiseModel_(noiseModel) {} ///< Convenience constructor for 3 keys - NoiseModelFactor(const SharedNoiseModel& noiseModel, Key key1, Key key2, Key key3, Key key4) : Base(key1, key2, key3, key4), noiseModel_(noiseModel) {} ///< Convenience constructor for 4 keys - NoiseModelFactor(const SharedNoiseModel& noiseModel, Key key1, Key key2, Key key3, Key key4, Key key5) : Base(key1, key2, key3, key4, key5), noiseModel_(noiseModel) {} ///< Convenience constructor for 5 keys - NoiseModelFactor(const SharedNoiseModel& noiseModel, Key key1, Key key2, Key key3, Key key4, Key key5, Key key6) : Base(key1, key2, key3, key4, key5, key6), noiseModel_(noiseModel) {} ///< Convenience constructor for 6 keys + template + NoiseModelFactor(const SharedNoiseModel& noiseModel, const CONTAINER& keys) : + Base(keys), noiseModel_(noiseModel) {} protected: @@ -252,7 +228,9 @@ protected: public: /** Print */ - virtual void print(const std::string& s = "", const KeyFormatter& keyFormatter = DefaultKeyFormatter) const { + virtual void print(const std::string& s = "", + const KeyFormatter& keyFormatter = DefaultKeyFormatter) const + { Base::print(s, keyFormatter); this->noiseModel_->print(" noise model: "); } @@ -314,7 +292,7 @@ public: * \f$ Ax-b \approx h(x+\delta x)-z = h(x) + A \delta x - z \f$ * Hence \f$ b = z - h(x) = - \mathtt{error\_vector}(x) \f$ */ - boost::shared_ptr linearize(const Values& x, const Ordering& ordering) const { + boost::shared_ptr linearize(const Values& x) const { // Only linearize if the factor is active if (!this->active(x)) return boost::shared_ptr(); @@ -329,10 +307,10 @@ public: this->noiseModel_->WhitenSystem(A,b); - std::vector > terms(this->size()); + std::vector > terms(this->size()); // Fill in terms for(size_t j=0; jsize(); ++j) { - terms[j].first = ordering[this->keys()[j]]; + terms[j].first = this->keys()[j]; terms[j].second.swap(A[j]); } @@ -340,11 +318,9 @@ public: noiseModel::Constrained::shared_ptr constrained = boost::dynamic_pointer_cast(this->noiseModel_); if(constrained) - return GaussianFactor::shared_ptr( - new JacobianFactor(terms, b, constrained->unit())); + return GaussianFactor::shared_ptr(new JacobianFactor(terms, b, constrained->unit())); else - return GaussianFactor::shared_ptr( - new JacobianFactor(terms, b, noiseModel::Unit::Create(b.size()))); + return GaussianFactor::shared_ptr(new JacobianFactor(terms, b)); } private: @@ -392,7 +368,7 @@ public: * @param key1 by which to look up X value in Values */ NoiseModelFactor1(const SharedNoiseModel& noiseModel, Key key1) : - Base(noiseModel, key1) {} + Base(noiseModel, cref_list_of<1>(key1)) {} /** Calls the 1-key specific version of evaluateError, which is pure virtual * so must be implemented in the derived class. @@ -461,7 +437,7 @@ public: * @param j2 key of the second variable */ NoiseModelFactor2(const SharedNoiseModel& noiseModel, Key j1, Key j2) : - Base(noiseModel, j1, j2) {} + Base(noiseModel, cref_list_of<2>(j1)(j2)) {} virtual ~NoiseModelFactor2() {} @@ -538,7 +514,7 @@ public: * @param j3 key of the third variable */ NoiseModelFactor3(const SharedNoiseModel& noiseModel, Key j1, Key j2, Key j3) : - Base(noiseModel, j1, j2, j3) {} + Base(noiseModel, cref_list_of<3>(j1)(j2)(j3)) {} virtual ~NoiseModelFactor3() {} @@ -617,7 +593,7 @@ public: * @param j4 key of the fourth variable */ NoiseModelFactor4(const SharedNoiseModel& noiseModel, Key j1, Key j2, Key j3, Key j4) : - Base(noiseModel, j1, j2, j3, j4) {} + Base(noiseModel, cref_list_of<4>(j1)(j2)(j3)(j4)) {} virtual ~NoiseModelFactor4() {} @@ -700,7 +676,7 @@ public: * @param j5 key of the fifth variable */ NoiseModelFactor5(const SharedNoiseModel& noiseModel, Key j1, Key j2, Key j3, Key j4, Key j5) : - Base(noiseModel, j1, j2, j3, j4, j5) {} + Base(noiseModel, cref_list_of<5>(j1)(j2)(j3)(j4)(j5)) {} virtual ~NoiseModelFactor5() {} @@ -787,7 +763,7 @@ public: * @param j6 key of the fifth variable */ NoiseModelFactor6(const SharedNoiseModel& noiseModel, Key j1, Key j2, Key j3, Key j4, Key j5, Key j6) : - Base(noiseModel, j1, j2, j3, j4, j5, j6) {} + Base(noiseModel, cref_list_of<6>(j1)(j2)(j3)(j4)(j5)(j6)) {} virtual ~NoiseModelFactor6() {} diff --git a/gtsam/nonlinear/NonlinearFactorGraph.cpp b/gtsam/nonlinear/NonlinearFactorGraph.cpp index 66a658f25..555e7a22d 100644 --- a/gtsam/nonlinear/NonlinearFactorGraph.cpp +++ b/gtsam/nonlinear/NonlinearFactorGraph.cpp @@ -20,16 +20,21 @@ #include #include #include -#include -#include +#include #include #include +#include +#include +#include #include using namespace std; namespace gtsam { +// Instantiate base classes +template class FactorGraph; + /* ************************************************************************* */ double NonlinearFactorGraph::probPrime(const Values& c) const { return exp(-0.5 * error(c)); @@ -45,6 +50,11 @@ void NonlinearFactorGraph::print(const std::string& str, const KeyFormatter& key } } +/* ************************************************************************* */ +bool NonlinearFactorGraph::equals(const NonlinearFactorGraph& other, double tol) const { + return Base::equals(other, tol); +} + /* ************************************************************************* */ void NonlinearFactorGraph::saveGraph(std::ostream &stm, const Values& values, const GraphvizFormatting& graphvizFormatting, @@ -207,107 +217,61 @@ FastSet NonlinearFactorGraph::keys() const { } /* ************************************************************************* */ -Ordering::shared_ptr NonlinearFactorGraph::orderingCOLAMD( - const Values& config) const +Ordering NonlinearFactorGraph::orderingCOLAMD() const { - gttic(NonlinearFactorGraph_orderingCOLAMD); - - // Create symbolic graph and initial (iterator) ordering - SymbolicFactorGraph::shared_ptr symbolic; - Ordering::shared_ptr ordering; - boost::tie(symbolic, ordering) = this->symbolic(config); - - // Compute the VariableIndex (column-wise index) - VariableIndex variableIndex(*symbolic, ordering->size()); - if (config.size() != variableIndex.size()) throw std::runtime_error( - "orderingCOLAMD: some variables in the graph are not constrained!"); - - // Compute a fill-reducing ordering with COLAMD - Permutation::shared_ptr colamdPerm(inference::PermutationCOLAMD( - variableIndex)); - - // Permute the Ordering with the COLAMD ordering - ordering->permuteInPlace(*colamdPerm); - return ordering; + return Ordering::COLAMD(*this); } /* ************************************************************************* */ -Ordering::shared_ptr NonlinearFactorGraph::orderingCOLAMDConstrained(const Values& config, - const std::map& constraints) const +Ordering NonlinearFactorGraph::orderingCOLAMDConstrained(const FastMap& constraints) const { - gttic(NonlinearFactorGraph_orderingCOLAMDConstrained); - - // Create symbolic graph and initial (iterator) ordering - SymbolicFactorGraph::shared_ptr symbolic; - Ordering::shared_ptr ordering; - boost::tie(symbolic, ordering) = this->symbolic(config); - - // Compute the VariableIndex (column-wise index) - VariableIndex variableIndex(*symbolic, ordering->size()); - if (config.size() != variableIndex.size()) throw std::runtime_error( - "orderingCOLAMD: some variables in the graph are not constrained!"); - - // Create a constraint list with correct indices - typedef std::map::value_type constraint_type; - std::map index_constraints; - BOOST_FOREACH(const constraint_type& p, constraints) - index_constraints[ordering->at(p.first)] = p.second; - - // Compute a constrained fill-reducing ordering with COLAMD - Permutation::shared_ptr colamdPerm(inference::PermutationCOLAMDGrouped( - variableIndex, index_constraints)); - - // Permute the Ordering with the COLAMD ordering - ordering->permuteInPlace(*colamdPerm); - return ordering; + return Ordering::COLAMDConstrained(*this, constraints); } /* ************************************************************************* */ -SymbolicFactorGraph::shared_ptr NonlinearFactorGraph::symbolic(const Ordering& ordering) const { - gttic(NonlinearFactorGraph_symbolic_from_Ordering); - - // Generate the symbolic factor graph - SymbolicFactorGraph::shared_ptr symbolicfg(new SymbolicFactorGraph); - symbolicfg->reserve(this->size()); - - BOOST_FOREACH(const sharedFactor& factor, this->factors_) { - if(factor) - symbolicfg->push_back(factor->symbolic(ordering)); - else - symbolicfg->push_back(SymbolicFactorGraph::sharedFactor()); - } - - return symbolicfg; -} +//SymbolicFactorGraph::shared_ptr NonlinearFactorGraph::symbolic(const Ordering& ordering) const { +// gttic(NonlinearFactorGraph_symbolic_from_Ordering); +// +// // Generate the symbolic factor graph +// SymbolicFactorGraph::shared_ptr symbolicfg(new SymbolicFactorGraph); +// symbolicfg->reserve(this->size()); +// +// BOOST_FOREACH(const sharedFactor& factor, this->factors_) { +// if(factor) +// symbolicfg->push_back(factor->symbolic(ordering)); +// else +// symbolicfg->push_back(SymbolicFactorGraph::sharedFactor()); +// } +// +// return symbolicfg; +//} /* ************************************************************************* */ -pair NonlinearFactorGraph::symbolic( - const Values& config) const -{ - gttic(NonlinearFactorGraph_symbolic_from_Values); - - // Generate an initial key ordering in iterator order - Ordering::shared_ptr ordering(config.orderingArbitrary()); - return make_pair(symbolic(*ordering), ordering); -} +//pair NonlinearFactorGraph::symbolic( +// const Values& config) const +//{ +// gttic(NonlinearFactorGraph_symbolic_from_Values); +// +// // Generate an initial key ordering in iterator order +// Ordering::shared_ptr ordering(config.orderingArbitrary()); +// return make_pair(symbolic(*ordering), ordering); +//} /* ************************************************************************* */ -GaussianFactorGraph::shared_ptr NonlinearFactorGraph::linearize( - const Values& config, const Ordering& ordering) const +GaussianFactorGraph::shared_ptr NonlinearFactorGraph::linearize(const Values& linearizationPoint) const { gttic(NonlinearFactorGraph_linearize); // create an empty linear FG - GaussianFactorGraph::shared_ptr linearFG(new GaussianFactorGraph); + GaussianFactorGraph::shared_ptr linearFG = boost::make_shared(); linearFG->reserve(this->size()); // linearize all factors BOOST_FOREACH(const sharedFactor& factor, this->factors_) { if(factor) { - GaussianFactor::shared_ptr lf = factor->linearize(config, ordering); - if (lf) linearFG->push_back(lf); + (*linearFG) += factor->linearize(linearizationPoint); } else - linearFG->push_back(GaussianFactor::shared_ptr()); + (*linearFG) += GaussianFactor::shared_ptr(); } return linearFG; diff --git a/gtsam/nonlinear/NonlinearFactorGraph.h b/gtsam/nonlinear/NonlinearFactorGraph.h index 0bb9e4bde..9746f33ad 100644 --- a/gtsam/nonlinear/NonlinearFactorGraph.h +++ b/gtsam/nonlinear/NonlinearFactorGraph.h @@ -22,12 +22,16 @@ #pragma once #include -#include -#include #include +#include namespace gtsam { + // Forward declarations + class Values; + class Ordering; + class GaussianFactorGraph; + /** * Formatting options when saving in GraphViz format using * NonlinearFactorGraph::saveGraph. @@ -58,45 +62,53 @@ namespace gtsam { * tangent vector space at the linearization point. Because the tangent space is a true * vector space, the config type will be an VectorValues in that linearized factor graph. */ - class NonlinearFactorGraph: public FactorGraph { + class GTSAM_EXPORT NonlinearFactorGraph: public FactorGraph { public: typedef FactorGraph Base; - typedef boost::shared_ptr shared_ptr; - typedef boost::shared_ptr sharedFactor; + typedef NonlinearFactorGraph This; + typedef boost::shared_ptr shared_ptr; + + /** Default constructor */ + NonlinearFactorGraph() {} + + /** Construct from iterator over factors */ + template + NonlinearFactorGraph(ITERATOR firstFactor, ITERATOR lastFactor) : Base(firstFactor, lastFactor) {} + + /** Construct from container of factors (shared_ptr or plain objects) */ + template + explicit NonlinearFactorGraph(const CONTAINER& factors) : Base(factors) {} + + /** Implicit copy/downcast constructor to override explicit template container constructor */ + template + NonlinearFactorGraph(const FactorGraph& graph) : Base(graph) {} /** print just calls base class */ - GTSAM_EXPORT void print(const std::string& str = "NonlinearFactorGraph: ", const KeyFormatter& keyFormatter = DefaultKeyFormatter) const; + void print(const std::string& str = "NonlinearFactorGraph: ", const KeyFormatter& keyFormatter = DefaultKeyFormatter) const; + + /** Test equality */ + bool equals(const NonlinearFactorGraph& other, double tol = 1e-9) const; /** Write the graph in GraphViz format for visualization */ - GTSAM_EXPORT void saveGraph(std::ostream& stm, const Values& values = Values(), + void saveGraph(std::ostream& stm, const Values& values = Values(), const GraphvizFormatting& graphvizFormatting = GraphvizFormatting(), const KeyFormatter& keyFormatter = DefaultKeyFormatter) const; /** return keys as an ordered set - ordering is by key value */ - GTSAM_EXPORT FastSet keys() const; + FastSet keys() const; /** unnormalized error, \f$ 0.5 \sum_i (h_i(X_i)-z)^2/\sigma^2 \f$ in the most common case */ - GTSAM_EXPORT double error(const Values& c) const; + double error(const Values& c) const; /** Unnormalized probability. O(n) */ - GTSAM_EXPORT double probPrime(const Values& c) const; - - /// Add a factor by value - copies the factor object - void add(const NonlinearFactor& factor) { - this->push_back(factor.clone()); - } - - /// Add a factor by pointer - stores pointer without copying factor object - void add(const sharedFactor& factor) { - this->push_back(factor); - } + double probPrime(const Values& c) const; /** * Create a symbolic factor graph using an existing ordering */ - GTSAM_EXPORT SymbolicFactorGraph::shared_ptr symbolic(const Ordering& ordering) const; + //SymbolicFactorGraph::shared_ptr symbolic() const; /** * Create a symbolic factor graph and initial variable ordering that can @@ -104,13 +116,13 @@ namespace gtsam { * The graph and ordering should be permuted after such a fill-reducing * ordering is found. */ - GTSAM_EXPORT std::pair - symbolic(const Values& config) const; + //std::pair + // symbolic(const Values& config) const; /** * Compute a fill-reducing ordering using COLAMD. */ - GTSAM_EXPORT Ordering::shared_ptr orderingCOLAMD(const Values& config) const; + Ordering orderingCOLAMD() const; /** * Compute a fill-reducing ordering with constraints using CCOLAMD @@ -120,19 +132,17 @@ namespace gtsam { * indices need to appear in the constraints, unconstrained is assumed for all * other variables */ - GTSAM_EXPORT Ordering::shared_ptr orderingCOLAMDConstrained(const Values& config, - const std::map& constraints) const; + Ordering orderingCOLAMDConstrained(const FastMap& constraints) const; /** * linearize a nonlinear factor graph */ - GTSAM_EXPORT boost::shared_ptr - linearize(const Values& config, const Ordering& ordering) const; + boost::shared_ptr linearize(const Values& linearizationPoint) const; /** * Clone() performs a deep-copy of the graph, including all of the factors */ - GTSAM_EXPORT NonlinearFactorGraph clone() const; + NonlinearFactorGraph clone() const; /** * Rekey() performs a deep-copy of all of the factors, and changes @@ -143,7 +153,7 @@ namespace gtsam { * @param rekey_mapping is a map of old->new keys * @result a cloned graph with updated keys */ - GTSAM_EXPORT NonlinearFactorGraph rekey(const std::map& rekey_mapping) const; + NonlinearFactorGraph rekey(const std::map& rekey_mapping) const; private: diff --git a/gtsam/nonlinear/NonlinearISAM.cpp b/gtsam/nonlinear/NonlinearISAM.cpp index 214c1c887..b8b06e2b0 100644 --- a/gtsam/nonlinear/NonlinearISAM.cpp +++ b/gtsam/nonlinear/NonlinearISAM.cpp @@ -18,8 +18,8 @@ #include #include -#include -#include +#include +#include #include @@ -32,12 +32,11 @@ namespace gtsam { /* ************************************************************************* */ void NonlinearISAM::saveGraph(const string& s, const KeyFormatter& keyFormatter) const { - isam_.saveGraph(s, OrderingIndexFormatter(ordering_, keyFormatter)); + isam_.saveGraph(s, keyFormatter); } /* ************************************************************************* */ -void NonlinearISAM::update(const NonlinearFactorGraph& newFactors, - const Values& initialValues) { +void NonlinearISAM::update(const NonlinearFactorGraph& newFactors, const Values& initialValues) { if(newFactors.size() > 0) { @@ -53,15 +52,10 @@ void NonlinearISAM::update(const NonlinearFactorGraph& newFactors, // TODO: optimize for whole config? linPoint_.insert(initialValues); - // Augment ordering - // TODO: allow for ordering constraints within the new variables - BOOST_FOREACH(const Values::ConstKeyValuePair& key_value, initialValues) - ordering_.insert(key_value.key, ordering_.size()); - - boost::shared_ptr linearizedNewFactors = newFactors.linearize(linPoint_, ordering_); + boost::shared_ptr linearizedNewFactors = newFactors.linearize(linPoint_); // Update ISAM - isam_.update(*linearizedNewFactors); + isam_.update(*linearizedNewFactors, eliminationFunction_); } } @@ -76,16 +70,10 @@ void NonlinearISAM::reorder_relinearize() { isam_.clear(); - // Compute an ordering - // TODO: allow for constrained ordering here - ordering_ = *factors_.orderingCOLAMD(newLinPoint); - - // Create a linear factor graph at the new linearization point - // TODO: decouple relinearization and reordering to avoid - boost::shared_ptr gfg = factors_.linearize(newLinPoint, ordering_); - // Just recreate the whole BayesTree - isam_.update(*gfg); + // TODO: allow for constrained ordering here + // TODO: decouple relinearization and reordering to avoid + isam_.update(*factors_.linearize(newLinPoint), eliminationFunction_); // Update linearization point linPoint_ = newLinPoint; @@ -95,14 +83,14 @@ void NonlinearISAM::reorder_relinearize() { /* ************************************************************************* */ Values NonlinearISAM::estimate() const { if(isam_.size() > 0) - return linPoint_.retract(optimize(isam_), ordering_); + return linPoint_.retract(isam_.optimize()); else return linPoint_; } /* ************************************************************************* */ Matrix NonlinearISAM::marginalCovariance(Key key) const { - return isam_.marginalCovariance(ordering_[key]); + return isam_.marginalCovariance(key); } /* ************************************************************************* */ @@ -110,7 +98,6 @@ void NonlinearISAM::print(const string& s, const KeyFormatter& keyFormatter) con cout << s << "ReorderInterval: " << reorderInterval_ << " Current Count: " << reorderCounter_ << endl; isam_.print("GaussianISAM:\n"); linPoint_.print("Linearization Point:\n", keyFormatter); - ordering_.print("System Ordering:\n", keyFormatter); factors_.print("Nonlinear Graph:\n", keyFormatter); } diff --git a/gtsam/nonlinear/NonlinearISAM.h b/gtsam/nonlinear/NonlinearISAM.h index ba7393d81..9c4b3d3c4 100644 --- a/gtsam/nonlinear/NonlinearISAM.h +++ b/gtsam/nonlinear/NonlinearISAM.h @@ -33,9 +33,6 @@ protected: /** The current linearization point */ Values linPoint_; - /** The ordering */ - gtsam::Ordering ordering_; - /** The original factors, used when relinearizing */ NonlinearFactorGraph factors_; @@ -43,6 +40,9 @@ protected: int reorderInterval_; int reorderCounter_; + /** The elimination function */ + GaussianFactorGraph::Eliminate eliminationFunction_; + public: /// @name Standard Constructors @@ -55,7 +55,9 @@ public: * 1 (default) reorders every time, in worse case is batch every update * typical values are 50 or 100 */ - NonlinearISAM(int reorderInterval = 1) : reorderInterval_(reorderInterval), reorderCounter_(0) {} + NonlinearISAM(int reorderInterval = 1, + const GaussianFactorGraph::Eliminate& eliminationFunction = GaussianFactorGraph::EliminationTraitsType::DefaultEliminate) : + reorderInterval_(reorderInterval), reorderCounter_(0), eliminationFunction_(eliminationFunction) {} /// @} /// @name Standard Interface @@ -75,9 +77,6 @@ public: /** Return the current linearization point */ const Values& getLinearizationPoint() const { return linPoint_; } - /** Get the ordering */ - const gtsam::Ordering& getOrdering() const { return ordering_; } - /** get underlying nonlinear graph */ const NonlinearFactorGraph& getFactorsUnsafe() const { return factors_; } @@ -104,15 +103,8 @@ public: /** Relinearization and reordering of variables */ void reorder_relinearize(); - /** manually add a key to the end of the ordering */ - void addKey(Key key) { ordering_.push_back(key); } - - /** replace the current ordering */ - void setOrdering(const Ordering& new_ordering) { ordering_ = new_ordering; } - /// @} }; } // \namespace gtsam - diff --git a/gtsam/nonlinear/Ordering.cpp b/gtsam/nonlinear/Ordering.cpp deleted file mode 100644 index 4ab3a2b75..000000000 --- a/gtsam/nonlinear/Ordering.cpp +++ /dev/null @@ -1,130 +0,0 @@ -/* ---------------------------------------------------------------------------- - - * GTSAM Copyright 2010, Georgia Tech Research Corporation, - * Atlanta, Georgia 30332-0415 - * All Rights Reserved - * Authors: Frank Dellaert, et al. (see THANKS for the full author list) - - * See LICENSE for the license information - - * -------------------------------------------------------------------------- */ - -/** - * @file Ordering.cpp - * @author Richard Roberts - * @date Sep 2, 2010 - */ - -#include "Ordering.h" - -#include -#include -#include - -using namespace std; - -namespace gtsam { - -/* ************************************************************************* */ -Ordering::Ordering(const std::list & L) { - int i = 0; - BOOST_FOREACH( Key s, L ) - insert(s, i++) ; -} - -/* ************************************************************************* */ -Ordering::Ordering(const Ordering& other) : order_(other.order_), orderingIndex_(other.size()) { - for(iterator item = order_.begin(); item != order_.end(); ++item) - orderingIndex_[item->second] = item; -} - -/* ************************************************************************* */ -Ordering& Ordering::operator=(const Ordering& rhs) { - order_ = rhs.order_; - orderingIndex_.resize(rhs.size()); - for(iterator item = order_.begin(); item != order_.end(); ++item) - orderingIndex_[item->second] = item; - return *this; -} - -/* ************************************************************************* */ -void Ordering::permuteInPlace(const Permutation& permutation) { - gttic(Ordering_permuteInPlace); - // Allocate new index and permute in map iterators - OrderingIndex newIndex(permutation.size()); - for(size_t j = 0; j < newIndex.size(); ++j) { - newIndex[j] = orderingIndex_[permutation[j]]; // Assign the iterator - newIndex[j]->second = j; // Change the stored index to its permuted value - } - // Swap the new index into this Ordering class - orderingIndex_.swap(newIndex); -} - -/* ************************************************************************* */ -void Ordering::permuteInPlace(const Permutation& selector, const Permutation& permutation) { - if(selector.size() != permutation.size()) - throw invalid_argument("Ordering::permuteInPlace (partial permutation version) called with selector and permutation of different sizes."); - // Create new index the size of the permuted entries - OrderingIndex newIndex(selector.size()); - // Permute the affected entries into the new index - for(size_t dstSlot = 0; dstSlot < selector.size(); ++dstSlot) - newIndex[dstSlot] = orderingIndex_[selector[permutation[dstSlot]]]; - // Put the affected entries back in the new order and fix the indices - for(size_t slot = 0; slot < selector.size(); ++slot) { - orderingIndex_[selector[slot]] = newIndex[slot]; - orderingIndex_[selector[slot]]->second = selector[slot]; - } -} - -/* ************************************************************************* */ -void Ordering::print(const string& str, const KeyFormatter& keyFormatter) const { - cout << str; - // Print ordering in index order - // Print the ordering with varsPerLine ordering entries printed on each line, - // for compactness. - static const size_t varsPerLine = 10; - bool endedOnNewline = false; - BOOST_FOREACH(const Map::iterator& index_key, orderingIndex_) { - if(index_key->second % varsPerLine != 0) - cout << ", "; - cout << index_key->second<< ":" << keyFormatter(index_key->first); - if(index_key->second % varsPerLine == varsPerLine - 1) { - cout << "\n"; - endedOnNewline = true; - } else { - endedOnNewline = false; - } - } - if(!endedOnNewline) - cout << "\n"; - cout.flush(); -} - -/* ************************************************************************* */ -bool Ordering::equals(const Ordering& rhs, double tol) const { - return order_ == rhs.order_; -} - -/* ************************************************************************* */ -Ordering::value_type Ordering::pop_back() { - iterator lastItem = orderingIndex_.back(); // Get the map iterator to the highest-index entry - value_type removed = *lastItem; // Save the key-index pair to return - order_.erase(lastItem); // Erase the entry from the map - orderingIndex_.pop_back(); // Erase the entry from the index - return removed; // Return the removed item -} - -/* ************************************************************************* */ -void Unordered::print(const string& s) const { - cout << s << " (" << size() << "):"; - BOOST_FOREACH(Index key, *this) - cout << " " << key; - cout << endl; -} - -/* ************************************************************************* */ -bool Unordered::equals(const Unordered &other, double tol) const { - return *this == other; -} - -} diff --git a/gtsam/nonlinear/Ordering.h b/gtsam/nonlinear/Ordering.h deleted file mode 100644 index bc69ff1bd..000000000 --- a/gtsam/nonlinear/Ordering.h +++ /dev/null @@ -1,277 +0,0 @@ -/* ---------------------------------------------------------------------------- - - * GTSAM Copyright 2010, Georgia Tech Research Corporation, - * Atlanta, Georgia 30332-0415 - * All Rights Reserved - * Authors: Frank Dellaert, et al. (see THANKS for the full author list) - - * See LICENSE for the license information - - * -------------------------------------------------------------------------- */ - -/** - * @file Ordering.h - * @author Richard Roberts - * @date Sep 2, 2010 - */ - -#pragma once - -#include -#include -#include - -#include -#include -#include -#include - -namespace gtsam { - -/** - * An ordering is a map from symbols (non-typed keys) to integer indices - * \nosubgrouping - */ -class GTSAM_EXPORT Ordering { -protected: - typedef FastMap Map; - typedef std::vector OrderingIndex; - Map order_; - OrderingIndex orderingIndex_; - -public: - - typedef boost::shared_ptr shared_ptr; - - typedef std::pair value_type; - typedef Map::iterator iterator; - typedef Map::const_iterator const_iterator; - - /// @name Standard Constructors - /// @{ - - /// Default constructor for empty ordering - Ordering() {} - - /// Copy constructor - Ordering(const Ordering& other); - - /// Construct from list, assigns order indices sequentially to list items. - Ordering(const std::list & L); - - /// Assignment operator - Ordering& operator=(const Ordering& rhs); - - /// @} - /// @name Standard Interface - /// @{ - - /** The actual number of variables in this ordering, i.e. not including missing indices in the count. See also nVars(). */ - Index size() const { return orderingIndex_.size(); } - - const_iterator begin() const { return order_.begin(); } /**< Iterator in order of sorted symbols, not in elimination/index order! */ - const_iterator end() const { return order_.end(); } /**< Iterator in order of sorted symbols, not in elimination/index order! */ - - Index at(Key key) const { return operator[](key); } ///< Synonym for operator[](Key) const - Key key(Index index) const { - if(index >= orderingIndex_.size()) - throw std::out_of_range("Attempting to access out-of-range index in Ordering"); - else - return orderingIndex_[index]->first; } - - /** Assigns the ordering index of the requested \c key into \c index if the symbol - * is present in the ordering, otherwise does not modify \c index. The - * return value indicates whether the symbol is in fact present in the - * ordering. - * @param key The key whose index you request - * @param [out] index Reference into which to write the index of the requested key, if the key is present. - * @return true if the key is present and \c index was modified, false otherwise. - */ - bool tryAt(Key key, Index& index) const { - const_iterator i = order_.find(key); - if(i != order_.end()) { - index = i->second; - return true; - } else - return false; } - - /// Access the index for the requested key, throws std::out_of_range if the - /// key is not present in the ordering (note that this differs from the - /// behavior of std::map) - Index& operator[](Key key) { - iterator i=order_.find(key); - if(i == order_.end()) throw std::out_of_range( - std::string("Attempting to access a key from an ordering that does not contain that key:") + DefaultKeyFormatter(key)); - else return i->second; } - - /// Access the index for the requested key, throws std::out_of_range if the - /// key is not present in the ordering (note that this differs from the - /// behavior of std::map) - Index operator[](Key key) const { - const_iterator i=order_.find(key); - if(i == order_.end()) throw std::out_of_range( - std::string("Attempting to access a key from an ordering that does not contain that key:") + DefaultKeyFormatter(key)); - else return i->second; } - - /** Returns an iterator pointing to the symbol/index pair with the requested, - * or the end iterator if it does not exist. - * - * @return An iterator pointing to the symbol/index pair with the requested, - * or the end iterator if it does not exist. - */ - iterator find(Key key) { return order_.find(key); } - - /** Returns an iterator pointing to the symbol/index pair with the requested, - * or the end iterator if it does not exist. - * - * @return An iterator pointing to the symbol/index pair with the requested, - * or the end iterator if it does not exist. - */ - const_iterator find(Key key) const { return order_.find(key); } - - /** Insert a key-index pair, but will fail if the key is already present */ - iterator insert(const value_type& key_order) { - std::pair it_ok(order_.insert(key_order)); - if(it_ok.second) { - if(key_order.second >= orderingIndex_.size()) - orderingIndex_.resize(key_order.second + 1); - orderingIndex_[key_order.second] = it_ok.first; - return it_ok.first; - } else - throw std::invalid_argument(std::string("Attempting to insert a key into an ordering that already contains that key")); } - - /// Test if the key exists in the ordering. - bool exists(Key key) const { return order_.count(key) > 0; } - - /** Insert a key-index pair, but will fail if the key is already present */ - iterator insert(Key key, Index order) { return insert(std::make_pair(key,order)); } - - /// Adds a new key to the ordering with an index of one greater than the current highest index. - Index push_back(Key key) { return insert(std::make_pair(key, orderingIndex_.size()))->second; } - - /// @} - /// @name Advanced Interface - /// @{ - - /** - * Iterator in order of sorted symbols, not in elimination/index order! - */ - iterator begin() { return order_.begin(); } - - /** - * Iterator in order of sorted symbols, not in elimination/index order! - */ - iterator end() { return order_.end(); } - - /** Remove the last (last-ordered, not highest-sorting key) symbol/index pair - * from the ordering (this version is \f$ O(n) \f$, use it when you do not - * know the last-ordered key). - * - * If you already know the last-ordered symbol, call popback(Key) - * that accepts this symbol as an argument. - * - * @return The symbol and index that were removed. - */ - value_type pop_back(); - - /** - * += operator allows statements like 'ordering += x0,x1,x2,x3;', which are - * very useful for unit tests. This functionality is courtesy of - * boost::assign. - */ - inline boost::assign::list_inserter, Key> - operator+=(Key key) { - return boost::assign::make_list_inserter(boost::assign_detail::call_push_back(*this))(key); } - - /** - * Reorder the variables with a permutation. This is typically used - * internally, permuting an initial key-sorted ordering into a fill-reducing - * ordering. - */ - void permuteInPlace(const Permutation& permutation); - - void permuteInPlace(const Permutation& selector, const Permutation& permutation); - - /// Synonym for operator[](Key) - Index& at(Key key) { return operator[](key); } - - /// @} - /// @name Testable - /// @{ - - /** print (from Testable) for testing and debugging */ - void print(const std::string& str = "Ordering:\n", const KeyFormatter& keyFormatter = DefaultKeyFormatter) const; - - /** equals (from Testable) for testing and debugging */ - bool equals(const Ordering& rhs, double tol = 0.0) const; - - /// @} - -private: - - /** Serialization function */ - friend class boost::serialization::access; - template - void save(ARCHIVE & ar, const unsigned int version) const - { - ar & BOOST_SERIALIZATION_NVP(order_); - size_t size_ = orderingIndex_.size(); // Save only the size but not the iterators - ar & BOOST_SERIALIZATION_NVP(size_); - } - template - void load(ARCHIVE & ar, const unsigned int version) - { - ar & BOOST_SERIALIZATION_NVP(order_); - size_t size_; - ar & BOOST_SERIALIZATION_NVP(size_); - orderingIndex_.resize(size_); - for(iterator item = order_.begin(); item != order_.end(); ++item) - orderingIndex_[item->second] = item; // Assign the iterators - } - BOOST_SERIALIZATION_SPLIT_MEMBER() -}; // \class Ordering - -/** - * @class Unordered - * @brief a set of unordered indices - */ -class Unordered: public std::set { -public: - /** Default constructor creates empty ordering */ - Unordered() { } - - /** Create from a single symbol */ - Unordered(Index key) { insert(key); } - - /** Copy constructor */ - Unordered(const std::set& keys_in) : std::set(keys_in) {} - - /** whether a key exists */ - bool exists(const Index& key) { return find(key) != end(); } - - // Testable - GTSAM_EXPORT void print(const std::string& s = "Unordered") const; - GTSAM_EXPORT bool equals(const Unordered &t, double tol=0) const; -}; - -// Create an index formatter that looks up the Key in an inverse ordering, then -// formats the key using the provided key formatter, used in saveGraph. -class GTSAM_EXPORT OrderingIndexFormatter { -private: - Ordering ordering_; - KeyFormatter keyFormatter_; -public: - OrderingIndexFormatter(const Ordering& ordering, const KeyFormatter& keyFormatter) : - ordering_(ordering), keyFormatter_(keyFormatter) {} - std::string operator()(Index index) { - return keyFormatter_(ordering_.key(index)); } -}; - -/// Version of orderingIndexFormatter using multi-robot formatter -struct GTSAM_EXPORT MultiRobotLinearFormatter : gtsam::OrderingIndexFormatter { - MultiRobotLinearFormatter(const gtsam::Ordering& ordering) - : gtsam::OrderingIndexFormatter(ordering, MultiRobotKeyFormatter) {} -}; - -} // \namespace gtsam - diff --git a/gtsam/nonlinear/SuccessiveLinearizationOptimizer.cpp b/gtsam/nonlinear/SuccessiveLinearizationOptimizer.cpp index 0e8e4d748..4b06bc3f7 100644 --- a/gtsam/nonlinear/SuccessiveLinearizationOptimizer.cpp +++ b/gtsam/nonlinear/SuccessiveLinearizationOptimizer.cpp @@ -6,8 +6,8 @@ */ #include -#include -#include +#include +#include #include #include #include @@ -53,20 +53,19 @@ void SuccessiveLinearizationParams::print(const std::string& str) const { std::cout.flush(); } -VectorValues solveGaussianFactorGraph(const GaussianFactorGraph &gfg, const SuccessiveLinearizationParams ¶ms) { +VectorValues solveGaussianFactorGraph(const GaussianFactorGraph &gfg, const SuccessiveLinearizationParams ¶ms) +{ gttic(solveGaussianFactorGraph); VectorValues delta; if (params.isMultifrontal()) { - delta = GaussianJunctionTree(gfg).optimize(params.getEliminationFunction()); + delta = gfg.optimize(*params.ordering, params.getEliminationFunction()); } else if(params.isSequential()) { - const boost::shared_ptr gbn = - EliminationTree::Create(gfg)->eliminate(params.getEliminationFunction()); - delta = gtsam::optimize(*gbn); + delta = gfg.eliminateSequential(*params.ordering, params.getEliminationFunction())->optimize(); } else if ( params.isCG() ) { if ( !params.iterativeParams ) throw std::runtime_error("solveGaussianFactorGraph: cg parameter has to be assigned ..."); if ( boost::dynamic_pointer_cast(params.iterativeParams) ) { - SubgraphSolver solver (gfg, *boost::dynamic_pointer_cast(params.iterativeParams)); + SubgraphSolver solver(gfg, dynamic_cast(*params.iterativeParams), *params.ordering); delta = solver.optimize(); } else { diff --git a/gtsam/nonlinear/SuccessiveLinearizationOptimizer.h b/gtsam/nonlinear/SuccessiveLinearizationOptimizer.h index bdedd1987..3ee305fe2 100644 --- a/gtsam/nonlinear/SuccessiveLinearizationOptimizer.h +++ b/gtsam/nonlinear/SuccessiveLinearizationOptimizer.h @@ -20,9 +20,16 @@ #include #include +#include +#include +#include +#include +#include namespace gtsam { + // Forward declarations + class GTSAM_EXPORT SuccessiveLinearizationParams : public NonlinearOptimizerParams { public: /** See SuccessiveLinearizationParams::linearSolverType */ @@ -66,15 +73,13 @@ public: default: throw std::runtime_error("Nonlinear optimization parameter \"factorization\" is invalid"); - return EliminateQR; - break; } } std::string getLinearSolverType() const { return linearSolverTranslator(linearSolverType); } void setLinearSolverType(const std::string& solver) { linearSolverType = linearSolverTranslator(solver); } - void setIterativeParams(const SubgraphSolverParameters ¶ms); + void setIterativeParams(const SubgraphSolverParameters& params); void setOrdering(const Ordering& ordering) { this->ordering = ordering; } private: @@ -101,6 +106,6 @@ private: }; /* a wrapper for solving a GaussianFactorGraph according to the parameters */ -GTSAM_EXPORT VectorValues solveGaussianFactorGraph(const GaussianFactorGraph &gfg, const SuccessiveLinearizationParams ¶ms) ; +GTSAM_EXPORT VectorValues solveGaussianFactorGraph(const GaussianFactorGraph &gfg, const SuccessiveLinearizationParams ¶ms); } /* namespace gtsam */ diff --git a/gtsam/nonlinear/Symbol.h b/gtsam/nonlinear/Symbol.h index 2c406be34..6963905e0 100644 --- a/gtsam/nonlinear/Symbol.h +++ b/gtsam/nonlinear/Symbol.h @@ -18,9 +18,10 @@ #pragma once -#include #include +#include + namespace gtsam { /** diff --git a/gtsam/nonlinear/Values-inl.h b/gtsam/nonlinear/Values-inl.h index 162f353e8..1c96eed64 100644 --- a/gtsam/nonlinear/Values-inl.h +++ b/gtsam/nonlinear/Values-inl.h @@ -26,6 +26,8 @@ #include +#include + #include #include // Only so Eclipse finds class definition diff --git a/gtsam/nonlinear/Values.cpp b/gtsam/nonlinear/Values.cpp index b95e8932a..47ad0ca6f 100644 --- a/gtsam/nonlinear/Values.cpp +++ b/gtsam/nonlinear/Values.cpp @@ -23,6 +23,7 @@ */ #include +#include #include @@ -76,51 +77,41 @@ namespace gtsam { } /* ************************************************************************* */ - VectorValues Values::zeroVectors(const Ordering& ordering) const { - return VectorValues::Zero(this->dims(ordering)); - } - - /* ************************************************************************* */ - Values Values::retract(const VectorValues& delta, const Ordering& ordering) const { + Values Values::retract(const VectorValues& delta) const + { Values result; for(const_iterator key_value = begin(); key_value != end(); ++key_value) { - const Vector& singleDelta = delta[ordering[key_value->key]]; // Delta for this value + VectorValues::const_iterator vector_item = delta.find(key_value->key); Key key = key_value->key; // Non-const duplicate to deal with non-const insert argument - Value* retractedValue(key_value->value.retract_(singleDelta)); // Retract - result.values_.insert(key, retractedValue); // Add retracted result directly to result values + if(vector_item != delta.end()) { +// const Vector& singleDelta = delta[key_value->key]; // Delta for this value + const Vector& singleDelta = vector_item->second; + Value* retractedValue(key_value->value.retract_(singleDelta)); // Retract + result.values_.insert(key, retractedValue); // Add retracted result directly to result values + } else { + result.values_.insert(key, key_value->value.clone_()); // Add original version to result values + } } return result; } /* ************************************************************************* */ - VectorValues Values::localCoordinates(const Values& cp, const Ordering& ordering) const { - VectorValues result(this->dims(ordering)); + VectorValues Values::localCoordinates(const Values& cp) const { if(this->size() != cp.size()) throw DynamicValuesMismatched(); + VectorValues result; for(const_iterator it1=this->begin(), it2=cp.begin(); it1!=this->end(); ++it1, ++it2) { if(it1->key != it2->key) throw DynamicValuesMismatched(); // If keys do not match // Will throw a dynamic_cast exception if types do not match // NOTE: this is separate from localCoordinates(cp, ordering, result) due to at() vs. insert - result.at(ordering[it1->key]) = it1->value.localCoordinates_(it2->value); + result.insert(it1->key, it1->value.localCoordinates_(it2->value)); } return result; } - /* ************************************************************************* */ - void Values::localCoordinates(const Values& cp, const Ordering& ordering, VectorValues& result) const { - if(this->size() != cp.size()) - throw DynamicValuesMismatched(); - for(const_iterator it1=this->begin(), it2=cp.begin(); it1!=this->end(); ++it1, ++it2) { - if(it1->key != it2->key) - throw DynamicValuesMismatched(); // If keys do not match - // Will throw a dynamic_cast exception if types do not match - result.insert(ordering[it1->key], it1->value.localCoordinates_(it2->value)); - } - } - /* ************************************************************************* */ const Value& Values::at(Key j) const { // Find the item @@ -192,16 +183,6 @@ namespace gtsam { return *this; } - /* ************************************************************************* */ - vector Values::dims(const Ordering& ordering) const { - assert(ordering.size() == this->size()); // reads off of end of array if difference in size - vector result(values_.size()); - BOOST_FOREACH(const ConstKeyValuePair& key_value, *this) { - result[ordering[key_value.key]] = key_value.value.dim(); - } - return result; - } - /* ************************************************************************* */ size_t Values::dim() const { size_t result = 0; @@ -212,12 +193,11 @@ namespace gtsam { } /* ************************************************************************* */ - Ordering::shared_ptr Values::orderingArbitrary(Index firstVar) const { - Ordering::shared_ptr ordering(new Ordering); - for(const_iterator key_value = begin(); key_value != end(); ++key_value) { - ordering->insert(key_value->key, firstVar++); - } - return ordering; + VectorValues Values::zeroVectors() const { + VectorValues result; + BOOST_FOREACH(const ConstKeyValuePair& key_value, *this) + result.insert(key_value.key, Vector::Zero(key_value.value.dim())); + return result; } /* ************************************************************************* */ diff --git a/gtsam/nonlinear/Values.h b/gtsam/nonlinear/Values.h index e1f8def88..9ba1282b8 100644 --- a/gtsam/nonlinear/Values.h +++ b/gtsam/nonlinear/Values.h @@ -24,12 +24,7 @@ #pragma once -#include -#include -#include -#include -#include - +#include #include #include #include @@ -49,9 +44,14 @@ #include #include +#include +#include +#include + namespace gtsam { // Forward declarations / utilities + class VectorValues; class ValueCloneAllocator; class ValueAutomaticCasting; template static bool _truePredicate(const T&) { return true; } @@ -206,9 +206,6 @@ namespace gtsam { /** whether the config is empty */ bool empty() const { return values_.empty(); } - /** Get a zero VectorValues of the correct structure */ - VectorValues zeroVectors(const Ordering& ordering) const; - const_iterator begin() const { return boost::make_transform_iterator(values_.begin(), &make_const_deref_pair); } const_iterator end() const { return boost::make_transform_iterator(values_.end(), &make_const_deref_pair); } iterator begin() { return boost::make_transform_iterator(values_.begin(), &make_deref_pair); } @@ -222,13 +219,10 @@ namespace gtsam { /// @{ /** Add a delta config to current config and returns a new config */ - Values retract(const VectorValues& delta, const Ordering& ordering) const; + Values retract(const VectorValues& delta) const; /** Get a delta config about a linearization point c0 (*this) */ - VectorValues localCoordinates(const Values& cp, const Ordering& ordering) const; - - /** Get a delta config about a linearization point c0 (*this) - assumes uninitialized delta */ - void localCoordinates(const Values& cp, const Ordering& ordering, VectorValues& delta) const; + VectorValues localCoordinates(const Values& cp) const; ///@} @@ -262,19 +256,11 @@ namespace gtsam { /** Remove all variables from the config */ void clear() { values_.clear(); } - /** Create an array of variable dimensions using the given ordering (\f$ O(n) \f$) */ - std::vector dims(const Ordering& ordering) const; - /** Compute the total dimensionality of all values (\f$ O(n) \f$) */ size_t dim() const; - /** - * Generate a default ordering, simply in key sort order. To instead - * create a fill-reducing ordering, use - * NonlinearFactorGraph::orderingCOLAMD(). Alternatively, you may permute - * this ordering yourself (as orderingCOLAMD() does internally). - */ - Ordering::shared_ptr orderingArbitrary(Index firstVar = 0) const; + /** Return a VectorValues of zero vectors for each variable in this Values */ + VectorValues zeroVectors() const; /** * Return a filtered view of this Values class, without copying any data. diff --git a/gtsam/nonlinear/WhiteNoiseFactor.h b/gtsam/nonlinear/WhiteNoiseFactor.h index 910444cf8..160728e2f 100644 --- a/gtsam/nonlinear/WhiteNoiseFactor.h +++ b/gtsam/nonlinear/WhiteNoiseFactor.h @@ -144,22 +144,21 @@ namespace gtsam { * Create a symbolic factor using the given ordering to determine the * variable indices. */ - virtual IndexFactor::shared_ptr symbolic(const Ordering& ordering) const { - const Index j1 = ordering[meanKey_], j2 = ordering[precisionKey_]; - return IndexFactor::shared_ptr(new IndexFactor(j1, j2)); - } +// virtual IndexFactor::shared_ptr symbolic(const Ordering& ordering) const { +// const Index j1 = ordering[meanKey_], j2 = ordering[precisionKey_]; +// return IndexFactor::shared_ptr(new IndexFactor(j1, j2)); +// } /// @} /// @name Advanced Interface /// @{ /// linearize returns a Hessianfactor that is an approximation of error(p) - virtual boost::shared_ptr linearize(const Values& x, - const Ordering& ordering) const { + virtual boost::shared_ptr linearize(const Values& x) const { double u = x.at(meanKey_); double p = x.at(precisionKey_); - Index j1 = ordering[meanKey_]; - Index j2 = ordering[precisionKey_]; + Index j1 = meanKey_; + Index j2 = precisionKey_; return linearize(z_, u, p, j1, j2); } diff --git a/gtsam/nonlinear/nonlinearExceptions.h b/gtsam/nonlinear/nonlinearExceptions.h index 6f512f65f..413610f82 100644 --- a/gtsam/nonlinear/nonlinearExceptions.h +++ b/gtsam/nonlinear/nonlinearExceptions.h @@ -17,10 +17,11 @@ */ #pragma once -#include #include #include +#include + namespace gtsam { /** diff --git a/gtsam/nonlinear/summarization.cpp b/gtsam/nonlinear/summarization.cpp index acd222d43..e38226c9b 100644 --- a/gtsam/nonlinear/summarization.cpp +++ b/gtsam/nonlinear/summarization.cpp @@ -5,7 +5,6 @@ * @author Alex Cunningham */ -#include #include #include @@ -14,35 +13,29 @@ using namespace std; namespace gtsam { /* ************************************************************************* */ -std::pair -summarize(const NonlinearFactorGraph& graph, const Values& values, +GaussianFactorGraph summarize(const NonlinearFactorGraph& graph, const Values& values, const KeySet& saved_keys, SummarizationMode mode) { const size_t nrEliminatedKeys = values.size() - saved_keys.size(); + GaussianFactorGraph full_graph = *graph.linearize(values); // If we aren't eliminating anything, linearize and return - if (!nrEliminatedKeys || saved_keys.empty()) { - Ordering ordering = *values.orderingArbitrary(); - GaussianFactorGraph linear_graph = *graph.linearize(values, ordering); - return make_pair(linear_graph, ordering); - } + if (!nrEliminatedKeys || saved_keys.empty()) + return full_graph; - // Compute a constrained ordering with variables grouped to end - std::map ordering_constraints; + std::vector saved_keys_vec(saved_keys.begin(), saved_keys.end()); - // group all saved variables together - BOOST_FOREACH(const gtsam::Key& key, saved_keys) - ordering_constraints.insert(make_pair(key, 1)); - - Ordering ordering = *graph.orderingCOLAMDConstrained(values, ordering_constraints); +// // Compute a constrained ordering with variables grouped to end +// std::map ordering_constraints; +// +// // group all saved variables together +// BOOST_FOREACH(const gtsam::Key& key, saved_keys) +// ordering_constraints.insert(make_pair(key, 1)); +// +// Ordering ordering = *graph.orderingCOLAMDConstrained(values, ordering_constraints); // Linearize the system - GaussianFactorGraph full_graph = *graph.linearize(values, ordering); GaussianFactorGraph summarized_system; - std::vector indices; - BOOST_FOREACH(const Key& k, saved_keys) - indices.push_back(ordering[k]); - // PARTIAL_QR = 0, /// Uses QR solver to eliminate, does not require fully constrained system // PARTIAL_CHOLESKY = 1, /// Uses Cholesky solver, does not require fully constrained system // SEQUENTIAL_QR = 2, /// Uses QR to compute full joint graph (needs fully constrained system) @@ -50,35 +43,35 @@ summarize(const NonlinearFactorGraph& graph, const Values& values, switch (mode) { case PARTIAL_QR: { - summarized_system.push_back(EliminateQR(full_graph, nrEliminatedKeys).second); +// summarized_system.push_back(EliminateQR(full_graph, nrEliminatedKeys).second); break; } case PARTIAL_CHOLESKY: { - summarized_system.push_back(EliminateCholesky(full_graph, nrEliminatedKeys).second); +// summarized_system.push_back(EliminateCholesky(full_graph, nrEliminatedKeys).second); break; } case SEQUENTIAL_QR: { - GaussianSequentialSolver solver(full_graph, true); - summarized_system.push_back(*solver.jointFactorGraph(indices)); + summarized_system.push_back(*full_graph.marginal(saved_keys_vec, EliminateQR)); +// GaussianSequentialSolver solver(full_graph, true); +// summarized_system.push_back(*solver.jointFactorGraph(indices)); break; } case SEQUENTIAL_CHOLESKY: { - GaussianSequentialSolver solver(full_graph, false); - summarized_system.push_back(*solver.jointFactorGraph(indices)); + summarized_system.push_back(*full_graph.marginal(saved_keys_vec, EliminateCholesky)); +// GaussianSequentialSolver solver(full_graph, false); +// summarized_system.push_back(*solver.jointFactorGraph(indices)); break; } } - return make_pair(summarized_system, ordering); + return summarized_system; } /* ************************************************************************* */ NonlinearFactorGraph summarizeAsNonlinearContainer( const NonlinearFactorGraph& graph, const Values& values, const KeySet& saved_keys, SummarizationMode mode) { - GaussianFactorGraph summarized_graph; - Ordering ordering; - boost::tie(summarized_graph, ordering) = summarize(graph, values, saved_keys, mode); - return LinearContainerFactor::convertLinearGraph(summarized_graph, ordering); + GaussianFactorGraph summarized_graph = summarize(graph, values, saved_keys, mode); + return LinearContainerFactor::convertLinearGraph(summarized_graph); } /* ************************************************************************* */ diff --git a/gtsam/nonlinear/summarization.h b/gtsam/nonlinear/summarization.h index c25d5d934..cfed16330 100644 --- a/gtsam/nonlinear/summarization.h +++ b/gtsam/nonlinear/summarization.h @@ -11,9 +11,7 @@ #include #include - #include -#include namespace gtsam { @@ -38,9 +36,8 @@ typedef enum { * @param mode controls what elimination technique and requirements to use * @return a pair of the remaining graph and the ordering used for linearization */ -std::pair GTSAM_EXPORT -summarize(const NonlinearFactorGraph& graph, const Values& values, - const KeySet& saved_keys, SummarizationMode mode = PARTIAL_QR); +GaussianFactorGraph GTSAM_EXPORT summarize(const NonlinearFactorGraph& graph, + const Values& values, const KeySet& saved_keys, SummarizationMode mode = PARTIAL_QR); /** * Performs the same summarization technique used in summarize(), but returns the @@ -57,5 +54,3 @@ NonlinearFactorGraph GTSAM_EXPORT summarizeAsNonlinearContainer( const KeySet& saved_keys, SummarizationMode mode = PARTIAL_QR); } // \namespace gtsam - - diff --git a/gtsam/nonlinear/tests/testLinearContainerFactor.cpp b/gtsam/nonlinear/tests/testLinearContainerFactor.cpp index 6c4617171..549fccd3d 100644 --- a/gtsam/nonlinear/tests/testLinearContainerFactor.cpp +++ b/gtsam/nonlinear/tests/testLinearContainerFactor.cpp @@ -8,6 +8,8 @@ #include #include #include +#include +#include #include #include #include @@ -28,8 +30,6 @@ Pose2 poseA1(0.0, 0.0, 0.0), poseA2(2.0, 0.0, 0.0); /* ************************************************************************* */ TEST( testLinearContainerFactor, generic_jacobian_factor ) { - Ordering initOrdering; initOrdering += x1, x2, l1, l2; - Matrix A1 = Matrix_(2,2, 2.74222, -0.0067457, 0.0, 2.63624); @@ -39,9 +39,9 @@ TEST( testLinearContainerFactor, generic_jacobian_factor ) { Vector b = Vector_(2, 0.0277052, -0.0533393); - JacobianFactor expLinFactor(initOrdering[l1], A1, initOrdering[l2], A2, b, diag_model2); + JacobianFactor expLinFactor(l1, A1, l2, A2, b, diag_model2); - LinearContainerFactor actFactor(expLinFactor, initOrdering); + LinearContainerFactor actFactor(expLinFactor); EXPECT_LONGS_EQUAL(2, actFactor.size()); EXPECT(actFactor.isJacobian()); EXPECT(!actFactor.isHessian()); @@ -56,22 +56,14 @@ TEST( testLinearContainerFactor, generic_jacobian_factor ) { values.insert(x1, poseA1); values.insert(x2, poseA2); - // Check reconstruction from same ordering - GaussianFactor::shared_ptr actLinearizationA = actFactor.linearize(values, initOrdering); + // Check reconstruction + GaussianFactor::shared_ptr actLinearizationA = actFactor.linearize(values); EXPECT(assert_equal(*expLinFactor.clone(), *actLinearizationA, tol)); - - // Check reconstruction from new ordering - Ordering newOrdering; newOrdering += x1, l1, x2, l2; - GaussianFactor::shared_ptr actLinearizationB = actFactor.linearize(values, newOrdering); - JacobianFactor expLinFactor2(newOrdering[l1], A1, newOrdering[l2], A2, b, diag_model2); - EXPECT(assert_equal(*expLinFactor2.clone(), *actLinearizationB, tol)); } /* ************************************************************************* */ TEST( testLinearContainerFactor, jacobian_factor_withlinpoints ) { - Ordering ordering; ordering += x1, x2, l1, l2; - Matrix A1 = Matrix_(2,2, 2.74222, -0.0067457, 0.0, 2.63624); @@ -81,7 +73,7 @@ TEST( testLinearContainerFactor, jacobian_factor_withlinpoints ) { Vector b = Vector_(2, 0.0277052, -0.0533393); - JacobianFactor expLinFactor(ordering[l1], A1, ordering[l2], A2, b, diag_model2); + JacobianFactor expLinFactor(l1, A1, l2, A2, b, diag_model2); Values values; values.insert(l1, landmark1); @@ -89,8 +81,8 @@ TEST( testLinearContainerFactor, jacobian_factor_withlinpoints ) { values.insert(x1, poseA1); values.insert(x2, poseA2); - LinearContainerFactor actFactor(expLinFactor, ordering, values); - LinearContainerFactor actFactorNolin(expLinFactor, ordering); + LinearContainerFactor actFactor(expLinFactor, values); + LinearContainerFactor actFactorNolin(expLinFactor); EXPECT(assert_equal(actFactor, actFactor, tol)); EXPECT(assert_inequal(actFactor, actFactorNolin, tol)); @@ -108,19 +100,18 @@ TEST( testLinearContainerFactor, jacobian_factor_withlinpoints ) { Vector delta_l1 = Vector_(2, 1.0, 2.0); Vector delta_l2 = Vector_(2, 3.0, 4.0); - VectorValues delta = values.zeroVectors(ordering); - delta.at(ordering[l1]) = delta_l1; - delta.at(ordering[l2]) = delta_l2; - Values noisyValues = values.retract(delta, ordering); + VectorValues delta = values.zeroVectors(); + delta.at(l1) = delta_l1; + delta.at(l2) = delta_l2; + Values noisyValues = values.retract(delta); double expError = expLinFactor.error(delta); EXPECT_DOUBLES_EQUAL(expError, actFactor.error(noisyValues), tol); - EXPECT_DOUBLES_EQUAL(expLinFactor.error(values.zeroVectors(ordering)), actFactor.error(values), tol); + EXPECT_DOUBLES_EQUAL(expLinFactor.error(values.zeroVectors()), actFactor.error(values), tol); // Check linearization with corrections for updated linearization point - Ordering newOrdering; newOrdering += x1, l1, x2, l2; - GaussianFactor::shared_ptr actLinearizationB = actFactor.linearize(noisyValues, newOrdering); + GaussianFactor::shared_ptr actLinearizationB = actFactor.linearize(noisyValues); Vector bprime = b - A1 * delta_l1 - A2 * delta_l2; - JacobianFactor expLinFactor2(newOrdering[l1], A1, newOrdering[l2], A2, bprime, diag_model2); + JacobianFactor expLinFactor2(l1, A1, l2, A2, bprime, diag_model2); EXPECT(assert_equal(*expLinFactor2.clone(), *actLinearizationB, tol)); } @@ -145,8 +136,7 @@ TEST( testLinearContainerFactor, generic_hessian_factor ) { double f = 10.0; - Ordering initOrdering; initOrdering += x1, x2, l1, l2; - HessianFactor initFactor(initOrdering[x1], initOrdering[x2], initOrdering[l1], + HessianFactor initFactor(x1, x2, l1, G11, G12, G13, g1, G22, G23, g2, G33, g3, f); Values values; @@ -155,18 +145,12 @@ TEST( testLinearContainerFactor, generic_hessian_factor ) { values.insert(x1, poseA1); values.insert(x2, poseA2); - LinearContainerFactor actFactor(initFactor, initOrdering); + LinearContainerFactor actFactor(initFactor); EXPECT(!actFactor.isJacobian()); EXPECT(actFactor.isHessian()); - GaussianFactor::shared_ptr actLinearization1 = actFactor.linearize(values, initOrdering); + GaussianFactor::shared_ptr actLinearization1 = actFactor.linearize(values); EXPECT(assert_equal(*initFactor.clone(), *actLinearization1, tol)); - - Ordering newOrdering; newOrdering += l1, x1, x2, l2; - HessianFactor expLinFactor(newOrdering[x1], newOrdering[x2], newOrdering[l1], - G11, G12, G13, g1, G22, G23, g2, G33, g3, f); - GaussianFactor::shared_ptr actLinearization2 = actFactor.linearize(values, newOrdering); - EXPECT(assert_equal(*expLinFactor.clone(), *actLinearization2, tol)); } /* ************************************************************************* */ @@ -196,8 +180,7 @@ TEST( testLinearContainerFactor, hessian_factor_withlinpoints ) { Matrix G(5,5); G << G11, G12, Matrix::Zero(2,3), G22; - Ordering ordering; ordering += x1, x2, l1; - HessianFactor initFactor(ordering[x1], ordering[l1], G11, G12, g1, G22, g2, f); + HessianFactor initFactor(x1, l1, G11, G12, g1, G22, g2, f); Values linearizationPoint, expLinPoints; linearizationPoint.insert(l1, landmark1); @@ -205,7 +188,7 @@ TEST( testLinearContainerFactor, hessian_factor_withlinpoints ) { expLinPoints = linearizationPoint; linearizationPoint.insert(x2, poseA2); - LinearContainerFactor actFactor(initFactor, ordering, linearizationPoint); + LinearContainerFactor actFactor(initFactor, linearizationPoint); EXPECT(!actFactor.isJacobian()); EXPECT(actFactor.isHessian()); @@ -219,16 +202,16 @@ TEST( testLinearContainerFactor, hessian_factor_withlinpoints ) { Vector delta_x2 = Vector_(3, 6.0, 7.0, 0.3); // Check error calculation - VectorValues delta = linearizationPoint.zeroVectors(ordering); - delta.at(ordering[l1]) = delta_l1; - delta.at(ordering[x1]) = delta_x1; - delta.at(ordering[x2]) = delta_x2; + VectorValues delta = linearizationPoint.zeroVectors(); + delta.at(l1) = delta_l1; + delta.at(x1) = delta_x1; + delta.at(x2) = delta_x2; EXPECT(assert_equal(Vector_(5, 3.0, 4.0, 0.5, 1.0, 2.0), delta.vector(initFactor.keys()))); - Values noisyValues = linearizationPoint.retract(delta, ordering); + Values noisyValues = linearizationPoint.retract(delta); double expError = initFactor.error(delta); EXPECT_DOUBLES_EQUAL(expError, actFactor.error(noisyValues), tol); - EXPECT_DOUBLES_EQUAL(initFactor.error(linearizationPoint.zeroVectors(ordering)), actFactor.error(linearizationPoint), tol); + EXPECT_DOUBLES_EQUAL(initFactor.error(linearizationPoint.zeroVectors()), actFactor.error(linearizationPoint), tol); // Compute updated versions Vector dv = Vector_(5, 3.0, 4.0, 0.5, 1.0, 2.0); @@ -239,8 +222,8 @@ TEST( testLinearContainerFactor, hessian_factor_withlinpoints ) { Vector g1_prime = g_prime.head(3); Vector g2_prime = g_prime.tail(2); double f_prime = f + dv.transpose() * G.selfadjointView() * dv - 2.0 * dv.transpose() * g; - HessianFactor expNewFactor(ordering[x1], ordering[l1], G11, G12, g1_prime, G22, g2_prime, f_prime); - EXPECT(assert_equal(*expNewFactor.clone(), *actFactor.linearize(noisyValues, ordering), tol)); + HessianFactor expNewFactor(x1, l1, G11, G12, g1_prime, G22, g2_prime, f_prime); + EXPECT(assert_equal(*expNewFactor.clone(), *actFactor.linearize(noisyValues), tol)); } /* ************************************************************************* */ @@ -251,14 +234,10 @@ TEST( testLinearContainerFactor, creation ) { l5 = 15, l6 = 16, l7 = 17, l8 = 18; - // creating an ordering to decode the linearized factor - Ordering ordering; - ordering += l1,l2,l3,l4,l5,l6,l7,l8; - // create a linear factor SharedDiagonal model = noiseModel::Unit::Create(2); JacobianFactor::shared_ptr linear_factor(new JacobianFactor( - ordering[l3], eye(2,2), ordering[l5], 2.0 * eye(2,2), zero(2), model)); + l3, eye(2,2), l5, 2.0 * eye(2,2), zero(2), model)); // create a set of values - build with full set of values gtsam::Values full_values, exp_values; @@ -267,7 +246,7 @@ TEST( testLinearContainerFactor, creation ) { exp_values = full_values; full_values.insert(l1, Point2(3.0, 7.0)); - LinearContainerFactor actual(linear_factor, ordering, full_values); + LinearContainerFactor actual(linear_factor, full_values); // Verify the keys std::vector expKeys; @@ -284,9 +263,6 @@ TEST( testLinearContainerFactor, jacobian_relinearize ) // Create a Between Factor from a Point3. This is actually a linear factor. gtsam::Key key1(1); gtsam::Key key2(2); - gtsam::Ordering ordering; - ordering.push_back(key1); - ordering.push_back(key2); gtsam::Values linpoint1; linpoint1.insert(key1, gtsam::Point3(-22.4, +8.5, +2.4)); linpoint1.insert(key2, gtsam::Point3(-21.0, +5.0, +21.0)); @@ -296,8 +272,8 @@ TEST( testLinearContainerFactor, jacobian_relinearize ) gtsam::BetweenFactor betweenFactor(key1, key2, measured, model); // Create a jacobian container factor at linpoint 1 - gtsam::JacobianFactor::shared_ptr jacobian(new gtsam::JacobianFactor(*betweenFactor.linearize(linpoint1, ordering))); - gtsam::LinearContainerFactor jacobianContainer(jacobian, ordering, linpoint1); + gtsam::JacobianFactor::shared_ptr jacobian(new gtsam::JacobianFactor(*betweenFactor.linearize(linpoint1))); + gtsam::LinearContainerFactor jacobianContainer(jacobian, linpoint1); // Create a second linearization point gtsam::Values linpoint2; @@ -310,8 +286,8 @@ TEST( testLinearContainerFactor, jacobian_relinearize ) EXPECT_DOUBLES_EQUAL(expected_error, actual_error, 1e-9 ); // Re-linearize around the new point and check the factors - gtsam::GaussianFactor::shared_ptr expected_factor = betweenFactor.linearize(linpoint2, ordering); - gtsam::GaussianFactor::shared_ptr actual_factor = jacobianContainer.linearize(linpoint2, ordering); + gtsam::GaussianFactor::shared_ptr expected_factor = betweenFactor.linearize(linpoint2); + gtsam::GaussianFactor::shared_ptr actual_factor = jacobianContainer.linearize(linpoint2); CHECK(gtsam::assert_equal(*expected_factor, *actual_factor)); } @@ -321,9 +297,6 @@ TEST( testLinearContainerFactor, hessian_relinearize ) // Create a Between Factor from a Point3. This is actually a linear factor. gtsam::Key key1(1); gtsam::Key key2(2); - gtsam::Ordering ordering; - ordering.push_back(key1); - ordering.push_back(key2); gtsam::Values linpoint1; linpoint1.insert(key1, gtsam::Point3(-22.4, +8.5, +2.4)); linpoint1.insert(key2, gtsam::Point3(-21.0, +5.0, +21.0)); @@ -333,8 +306,8 @@ TEST( testLinearContainerFactor, hessian_relinearize ) gtsam::BetweenFactor betweenFactor(key1, key2, measured, model); // Create a hessian container factor at linpoint 1 - gtsam::HessianFactor::shared_ptr hessian(new gtsam::HessianFactor(*betweenFactor.linearize(linpoint1, ordering))); - gtsam::LinearContainerFactor hessianContainer(hessian, ordering, linpoint1); + gtsam::HessianFactor::shared_ptr hessian(new gtsam::HessianFactor(*betweenFactor.linearize(linpoint1))); + gtsam::LinearContainerFactor hessianContainer(hessian, linpoint1); // Create a second linearization point gtsam::Values linpoint2; @@ -347,8 +320,8 @@ TEST( testLinearContainerFactor, hessian_relinearize ) EXPECT_DOUBLES_EQUAL(expected_error, actual_error, 1e-9 ); // Re-linearize around the new point and check the factors - gtsam::GaussianFactor::shared_ptr expected_factor = gtsam::HessianFactor::shared_ptr(new gtsam::HessianFactor(*betweenFactor.linearize(linpoint2, ordering))); - gtsam::GaussianFactor::shared_ptr actual_factor = hessianContainer.linearize(linpoint2, ordering); + gtsam::GaussianFactor::shared_ptr expected_factor = gtsam::HessianFactor::shared_ptr(new gtsam::HessianFactor(*betweenFactor.linearize(linpoint2))); + gtsam::GaussianFactor::shared_ptr actual_factor = hessianContainer.linearize(linpoint2); CHECK(gtsam::assert_equal(*expected_factor, *actual_factor)); } diff --git a/gtsam/nonlinear/tests/testOrdering.cpp b/gtsam/nonlinear/tests/testOrdering.cpp deleted file mode 100644 index a62278d1e..000000000 --- a/gtsam/nonlinear/tests/testOrdering.cpp +++ /dev/null @@ -1,113 +0,0 @@ -/* ---------------------------------------------------------------------------- - - * GTSAM Copyright 2010, Georgia Tech Research Corporation, - * Atlanta, Georgia 30332-0415 - * All Rights Reserved - * Authors: Frank Dellaert, et al. (see THANKS for the full author list) - - * See LICENSE for the license information - - * -------------------------------------------------------------------------- */ - -/** - * @file testOrdering - * @author Alex Cunningham - */ - -#include -#include -#include -#include - -using namespace std; -using namespace gtsam; - -/* ************************************************************************* */ -TEST( Ordering, simple_modifications ) { - Ordering ordering; - - // create an ordering - Symbol x1('x', 1), x2('x', 2), x3('x', 3), x4('x', 4); - ordering += x1, x2, x3, x4; - - EXPECT_LONGS_EQUAL(0, ordering[x1]); - EXPECT_LONGS_EQUAL(1, ordering[x2]); - EXPECT_LONGS_EQUAL(2, ordering[x3]); - EXPECT_LONGS_EQUAL(3, ordering[x4]); - EXPECT_LONGS_EQUAL(Key(x1), ordering.key(0)); - EXPECT_LONGS_EQUAL(Key(x2), ordering.key(1)); - EXPECT_LONGS_EQUAL(Key(x3), ordering.key(2)); - EXPECT_LONGS_EQUAL(Key(x4), ordering.key(3)); - - // pop the last two elements - Ordering::value_type x4p = ordering.pop_back(); - EXPECT_LONGS_EQUAL(3, ordering.size()); - EXPECT(assert_equal(x4, x4p.first)); - - Index x3p = ordering.pop_back().second; - EXPECT_LONGS_EQUAL(2, ordering.size()); - EXPECT_LONGS_EQUAL(2, (int)x3p); - - // reassemble back make the ordering 1, 2, 4, 3 - EXPECT_LONGS_EQUAL(2, ordering.push_back(x4)); - EXPECT_LONGS_EQUAL(3, ordering.push_back(x3)); - - EXPECT_LONGS_EQUAL(2, ordering[x4]); - EXPECT_LONGS_EQUAL(3, ordering[x3]); - - EXPECT_LONGS_EQUAL(Key(x4), ordering.key(2)); - EXPECT_LONGS_EQUAL(Key(x3), ordering.key(3)); - - // verify - Ordering expectedFinal; - expectedFinal += x1, x2, x4, x3; - EXPECT(assert_equal(expectedFinal, ordering)); -} - -/* ************************************************************************* */ -TEST(Ordering, permute) { - Ordering ordering; - ordering += 2, 4, 6, 8; - - Ordering expected; - expected += 2, 8, 6, 4; - - Permutation permutation(4); - permutation[0] = 0; - permutation[1] = 3; - permutation[2] = 2; - permutation[3] = 1; - - Ordering actual = ordering; - actual.permuteInPlace(permutation); - - EXPECT(assert_equal(expected, actual)); - - EXPECT_LONGS_EQUAL(0, actual[2]); - EXPECT_LONGS_EQUAL(1, actual[8]); - EXPECT_LONGS_EQUAL(2, actual[6]); - EXPECT_LONGS_EQUAL(3, actual[4]); - EXPECT_LONGS_EQUAL(2, actual.key(0)); - EXPECT_LONGS_EQUAL(8, actual.key(1)); - EXPECT_LONGS_EQUAL(6, actual.key(2)); - EXPECT_LONGS_EQUAL(4, actual.key(3)); -} - -/* ************************************************************************* */ -TEST( Ordering, invert ) { - // creates a map with the opposite mapping: Index->Key - Ordering ordering; - - // create an ordering - Symbol x1('x', 1), x2('x', 2), x3('x', 3), x4('x', 4); - ordering += x1, x2, x3, x4; - - EXPECT_LONGS_EQUAL(Key(x1), ordering.key(0)); - EXPECT_LONGS_EQUAL(Key(x2), ordering.key(1)); - EXPECT_LONGS_EQUAL(Key(x3), ordering.key(2)); - EXPECT_LONGS_EQUAL(Key(x4), ordering.key(3)); -} - -/* ************************************************************************* */ -int main() { TestResult tr; return TestRegistry::runAllTests(tr); } -/* ************************************************************************* */ diff --git a/gtsam/nonlinear/tests/testValues.cpp b/gtsam/nonlinear/tests/testValues.cpp index 227f20834..cb4973ef5 100644 --- a/gtsam/nonlinear/tests/testValues.cpp +++ b/gtsam/nonlinear/tests/testValues.cpp @@ -16,6 +16,7 @@ #include #include +#include #include #include #include @@ -24,6 +25,7 @@ #include #include // for operator += +#include using namespace boost::assign; #include #include @@ -168,16 +170,15 @@ TEST(Values, expmap_a) config0.insert(key1, LieVector(3, 1.0, 2.0, 3.0)); config0.insert(key2, LieVector(3, 5.0, 6.0, 7.0)); - Ordering ordering(*config0.orderingArbitrary()); - VectorValues increment(config0.dims(ordering)); - increment[ordering[key1]] = Vector_(3, 1.0, 1.1, 1.2); - increment[ordering[key2]] = Vector_(3, 1.3, 1.4, 1.5); + VectorValues increment = pair_list_of + (key1, Vector_(3, 1.0, 1.1, 1.2)) + (key2, Vector_(3, 1.3, 1.4, 1.5)); Values expected; expected.insert(key1, LieVector(3, 2.0, 3.1, 4.2)); expected.insert(key2, LieVector(3, 6.3, 7.4, 8.5)); - CHECK(assert_equal(expected, config0.retract(increment, ordering))); + CHECK(assert_equal(expected, config0.retract(increment))); } /* ************************************************************************* */ @@ -187,15 +188,14 @@ TEST(Values, expmap_b) config0.insert(key1, LieVector(3, 1.0, 2.0, 3.0)); config0.insert(key2, LieVector(3, 5.0, 6.0, 7.0)); - Ordering ordering(*config0.orderingArbitrary()); - VectorValues increment(VectorValues::Zero(config0.dims(ordering))); - increment[ordering[key2]] = LieVector(3, 1.3, 1.4, 1.5); + VectorValues increment = pair_list_of + (key2, LieVector(3, 1.3, 1.4, 1.5)); Values expected; expected.insert(key1, LieVector(3, 1.0, 2.0, 3.0)); expected.insert(key2, LieVector(3, 6.3, 7.4, 8.5)); - CHECK(assert_equal(expected, config0.retract(increment, ordering))); + CHECK(assert_equal(expected, config0.retract(increment))); } /* ************************************************************************* */ @@ -241,15 +241,13 @@ TEST(Values, localCoordinates) valuesA.insert(key1, LieVector(3, 1.0, 2.0, 3.0)); valuesA.insert(key2, LieVector(3, 5.0, 6.0, 7.0)); - Ordering ordering = *valuesA.orderingArbitrary(); + VectorValues expDelta = pair_list_of + (key1, Vector_(3, 0.1, 0.2, 0.3)) + (key2, Vector_(3, 0.4, 0.5, 0.6)); - VectorValues expDelta = valuesA.zeroVectors(ordering); -// expDelta.at(ordering[key1]) = Vector_(3, 0.1, 0.2, 0.3); -// expDelta.at(ordering[key2]) = Vector_(3, 0.4, 0.5, 0.6); + Values valuesB = valuesA.retract(expDelta); - Values valuesB = valuesA.retract(expDelta, ordering); - - EXPECT(assert_equal(expDelta, valuesA.localCoordinates(valuesB, ordering))); + EXPECT(assert_equal(expDelta, valuesA.localCoordinates(valuesB))); } /* ************************************************************************* */ diff --git a/gtsam/slam/AntiFactor.h b/gtsam/slam/AntiFactor.h index fe1b93bde..ae794db6a 100644 --- a/gtsam/slam/AntiFactor.h +++ b/gtsam/slam/AntiFactor.h @@ -47,7 +47,7 @@ namespace gtsam { AntiFactor() {} /** constructor - Creates the equivalent AntiFactor from an existing factor */ - AntiFactor(NonlinearFactor::shared_ptr factor) : Base(factor->begin(), factor->end()), factor_(factor) {} + AntiFactor(NonlinearFactor::shared_ptr factor) : Base(factor->keys()), factor_(factor) {} virtual ~AntiFactor() {} @@ -94,11 +94,10 @@ namespace gtsam { * returns a Jacobian instead of a full Hessian), but with the opposite sign. This * effectively cancels the effect of the original factor on the factor graph. */ - boost::shared_ptr - linearize(const Values& c, const Ordering& ordering) const { + boost::shared_ptr linearize(const Values& c) const { // Generate the linearized factor from the contained nonlinear factor - GaussianFactor::shared_ptr gaussianFactor = factor_->linearize(c, ordering); + GaussianFactor::shared_ptr gaussianFactor = factor_->linearize(c); // return the negated version of the factor return gaussianFactor->negate(); diff --git a/gtsam/slam/BetweenFactor.h b/gtsam/slam/BetweenFactor.h index fd53ab1c7..1b630374c 100644 --- a/gtsam/slam/BetweenFactor.h +++ b/gtsam/slam/BetweenFactor.h @@ -20,7 +20,6 @@ #include #include #include -#include namespace gtsam { diff --git a/gtsam/slam/dataset.cpp b/gtsam/slam/dataset.cpp index e22d0b42c..36d26f308 100644 --- a/gtsam/slam/dataset.cpp +++ b/gtsam/slam/dataset.cpp @@ -179,7 +179,7 @@ pair load2D( noiseModel::Diagonal::shared_ptr measurementNoise = noiseModel::Diagonal::Sigmas(Vector_(2, bearing_std, range_std)); - graph->add(BearingRangeFactor(id1, id2, bearing, range, measurementNoise)); + *graph += BearingRangeFactor(id1, id2, bearing, range, measurementNoise); // Insert poses or points if they do not exist yet if (!initial->exists(id1)) @@ -222,7 +222,7 @@ pair load2D( } // Add to graph - graph->add(BearingRangeFactor(id1, L(id2), bearing, range, measurementNoise)); + *graph += BearingRangeFactor(id1, L(id2), bearing, range, measurementNoise); } is.ignore(LINESIZE, '\n'); } @@ -386,7 +386,7 @@ pair load2D_robust( noiseModel::Diagonal::shared_ptr measurementNoise = noiseModel::Diagonal::Sigmas(Vector_(2, bearing_std, range_std)); - graph->add(BearingRangeFactor(id1, id2, bearing, range, measurementNoise)); + *graph += BearingRangeFactor(id1, id2, bearing, range, measurementNoise); // Insert poses or points if they do not exist yet if (!initial->exists(id1)) diff --git a/gtsam/slam/tests/testAntiFactor.cpp b/gtsam/slam/tests/testAntiFactor.cpp index d918dbd99..1a455001b 100644 --- a/gtsam/slam/tests/testAntiFactor.cpp +++ b/gtsam/slam/tests/testAntiFactor.cpp @@ -23,7 +23,10 @@ #include #include #include -#include +#include +#include +#include +#include #include using namespace std; @@ -41,21 +44,15 @@ TEST( AntiFactor, NegativeHessian) SharedNoiseModel sigma(noiseModel::Unit::Create(Pose3::Dim())); // Create a configuration corresponding to the ground truth - boost::shared_ptr values(new Values()); - values->insert(1, pose1); - values->insert(2, pose2); - - // Define an elimination ordering - Ordering::shared_ptr ordering(new Ordering()); - ordering->insert(1, 0); - ordering->insert(2, 1); - + Values values; + values.insert(1, pose1); + values.insert(2, pose2); // Create a "standard" factor BetweenFactor::shared_ptr originalFactor(new BetweenFactor(1, 2, z, sigma)); // Linearize it into a Jacobian Factor - GaussianFactor::shared_ptr originalJacobian = originalFactor->linearize(*values, *ordering); + GaussianFactor::shared_ptr originalJacobian = originalFactor->linearize(values); // Convert it to a Hessian Factor HessianFactor::shared_ptr originalHessian = HessianFactor::shared_ptr(new HessianFactor(*originalJacobian)); @@ -64,7 +61,7 @@ TEST( AntiFactor, NegativeHessian) AntiFactor::shared_ptr antiFactor(new AntiFactor(originalFactor)); // Linearize the AntiFactor into a Hessian Factor - GaussianFactor::shared_ptr antiGaussian = antiFactor->linearize(*values, *ordering); + GaussianFactor::shared_ptr antiGaussian = antiFactor->linearize(values); HessianFactor::shared_ptr antiHessian = boost::dynamic_pointer_cast(antiGaussian); @@ -97,39 +94,39 @@ TEST( AntiFactor, EquivalentBayesNet) Pose3 z(Rot3(), Point3(1, 1, 1)); SharedNoiseModel sigma(noiseModel::Unit::Create(Pose3::Dim())); - NonlinearFactorGraph::shared_ptr graph(new NonlinearFactorGraph()); - graph->add(PriorFactor(1, pose1, sigma)); - graph->add(BetweenFactor(1, 2, pose1.between(pose2), sigma)); + NonlinearFactorGraph graph; + graph.push_back(PriorFactor(1, pose1, sigma)); + graph.push_back(BetweenFactor(1, 2, pose1.between(pose2), sigma)); // Create a configuration corresponding to the ground truth - Values::shared_ptr values(new Values()); - values->insert(1, pose1); - values->insert(2, pose2); + Values values; + values.insert(1, pose1); + values.insert(2, pose2); // Define an elimination ordering - Ordering::shared_ptr ordering = graph->orderingCOLAMD(*values); + Ordering ordering = graph.orderingCOLAMD(); // Eliminate into a BayesNet - GaussianSequentialSolver solver1(*graph->linearize(*values, *ordering)); - GaussianBayesNet::shared_ptr expectedBayesNet = solver1.eliminate(); + GaussianFactorGraph lin_graph = *graph.linearize(values); + GaussianBayesNet::shared_ptr expectedBayesNet = lin_graph.eliminateSequential(ordering); // Back-substitute to find the optimal deltas - VectorValues expectedDeltas = optimize(*expectedBayesNet); + VectorValues expectedDeltas = expectedBayesNet->optimize(); // Add an additional factor between Pose1 and Pose2 BetweenFactor::shared_ptr f1(new BetweenFactor(1, 2, z, sigma)); - graph->push_back(f1); + graph.push_back(f1); // Add the corresponding AntiFactor between Pose1 and Pose2 AntiFactor::shared_ptr f2(new AntiFactor(f1)); - graph->push_back(f2); + graph.push_back(f2); // Again, Eliminate into a BayesNet - GaussianSequentialSolver solver2(*graph->linearize(*values, *ordering)); - GaussianBayesNet::shared_ptr actualBayesNet = solver2.eliminate(); + GaussianFactorGraph lin_graph1 = *graph.linearize(values); + GaussianBayesNet::shared_ptr actualBayesNet = lin_graph1.eliminateSequential(ordering); // Back-substitute to find the optimal deltas - VectorValues actualDeltas = optimize(*actualBayesNet); + VectorValues actualDeltas = actualBayesNet->optimize(); // Verify the BayesNets are identical CHECK(assert_equal(*expectedBayesNet, *actualBayesNet, 1e-5)); diff --git a/gtsam/slam/tests/testBetweenFactor.cpp b/gtsam/slam/tests/testBetweenFactor.cpp index 299363dcd..8b449e037 100644 --- a/gtsam/slam/tests/testBetweenFactor.cpp +++ b/gtsam/slam/tests/testBetweenFactor.cpp @@ -29,19 +29,19 @@ TEST(BetweenFactor, Rot3) { Vector actual = factor.evaluateError(R1, R2, actualH1, actualH2); Vector expected = Rot3::Logmap(measured.inverse() * R1.between(R2)); - CHECK(assert_equal(expected,actual, 1e-100)); + EXPECT(assert_equal(expected,actual, 1e-100)); Matrix numericalH1 = numericalDerivative21( boost::function(boost::bind( &BetweenFactor::evaluateError, factor, _1, _2, boost::none, boost::none)), R1, R2, 1e-5); - CHECK(assert_equal(numericalH1,actualH1)); + EXPECT(assert_equal(numericalH1,actualH1)); Matrix numericalH2 = numericalDerivative22( boost::function(boost::bind( &BetweenFactor::evaluateError, factor, _1, _2, boost::none, boost::none)), R1, R2, 1e-5); - CHECK(assert_equal(numericalH2,actualH2)); + EXPECT(assert_equal(numericalH2,actualH2)); } /* ************************************************************************* */ diff --git a/gtsam/slam/tests/testGeneralSFMFactor.cpp b/gtsam/slam/tests/testGeneralSFMFactor.cpp index 0aeadfa6a..e6d134ee6 100644 --- a/gtsam/slam/tests/testGeneralSFMFactor.cpp +++ b/gtsam/slam/tests/testGeneralSFMFactor.cpp @@ -371,7 +371,7 @@ TEST( GeneralSFMFactor, optimize_varK_BA ) { graph.addCameraConstraint(0, cameras[0]); // Constrain the scale of the problem with a soft range factor of 1m between the cameras - graph.add(RangeFactor(X(0), X(1), 2.0, noiseModel::Isotropic::Sigma(1, 10.0))); + graph.push_back(RangeFactor(X(0), X(1), 2.0, noiseModel::Isotropic::Sigma(1, 10.0))); const double reproj_error = 1e-5 ; @@ -386,8 +386,8 @@ TEST(GeneralSFMFactor, GeneralCameraPoseRange) { // Tests range factor between a GeneralCamera and a Pose3 Graph graph; graph.addCameraConstraint(0, GeneralCamera()); - graph.add(RangeFactor(X(0), X(1), 2.0, noiseModel::Isotropic::Sigma(1, 1.0))); - graph.add(PriorFactor(X(1), Pose3(Rot3(), Point3(1.0, 0.0, 0.0)), noiseModel::Isotropic::Sigma(6, 1.0))); + graph.push_back(RangeFactor(X(0), X(1), 2.0, noiseModel::Isotropic::Sigma(1, 1.0))); + graph.push_back(PriorFactor(X(1), Pose3(Rot3(), Point3(1.0, 0.0, 0.0)), noiseModel::Isotropic::Sigma(6, 1.0))); Values init; init.insert(X(0), GeneralCamera()); @@ -410,9 +410,9 @@ TEST(GeneralSFMFactor, GeneralCameraPoseRange) { TEST(GeneralSFMFactor, CalibratedCameraPoseRange) { // Tests range factor between a CalibratedCamera and a Pose3 NonlinearFactorGraph graph; - graph.add(PriorFactor(X(0), CalibratedCamera(), noiseModel::Isotropic::Sigma(6, 1.0))); - graph.add(RangeFactor(X(0), X(1), 2.0, noiseModel::Isotropic::Sigma(1, 1.0))); - graph.add(PriorFactor(X(1), Pose3(Rot3(), Point3(1.0, 0.0, 0.0)), noiseModel::Isotropic::Sigma(6, 1.0))); + graph.push_back(PriorFactor(X(0), CalibratedCamera(), noiseModel::Isotropic::Sigma(6, 1.0))); + graph.push_back(RangeFactor(X(0), X(1), 2.0, noiseModel::Isotropic::Sigma(1, 1.0))); + graph.push_back(PriorFactor(X(1), Pose3(Rot3(), Point3(1.0, 0.0, 0.0)), noiseModel::Isotropic::Sigma(6, 1.0))); Values init; init.insert(X(0), CalibratedCamera()); diff --git a/gtsam/slam/tests/testGeneralSFMFactor_Cal3Bundler.cpp b/gtsam/slam/tests/testGeneralSFMFactor_Cal3Bundler.cpp index 6d8f36634..98ae401fc 100644 --- a/gtsam/slam/tests/testGeneralSFMFactor_Cal3Bundler.cpp +++ b/gtsam/slam/tests/testGeneralSFMFactor_Cal3Bundler.cpp @@ -20,7 +20,6 @@ #include #include #include -#include #include #include #include @@ -369,7 +368,7 @@ TEST( GeneralSFMFactor_Cal3Bundler, optimize_varK_BA ) { graph.addCameraConstraint(0, cameras[0]); // Constrain the scale of the problem with a soft range factor of 1m between the cameras - graph.add(RangeFactor(X(0), X(1), 2.0, noiseModel::Isotropic::Sigma(1, 10.0))); + graph.push_back(RangeFactor(X(0), X(1), 2.0, noiseModel::Isotropic::Sigma(1, 10.0))); const double reproj_error = 1e-5 ; diff --git a/gtsam/slam/tests/testProjectionFactor.cpp b/gtsam/slam/tests/testProjectionFactor.cpp index b53fde536..fc1d9a51b 100644 --- a/gtsam/slam/tests/testProjectionFactor.cpp +++ b/gtsam/slam/tests/testProjectionFactor.cpp @@ -23,6 +23,7 @@ #include #include #include +#include #include using namespace std; diff --git a/gtsam/slam/tests/testRangeFactor.cpp b/gtsam/slam/tests/testRangeFactor.cpp index 75fe40aa8..b4cda70ff 100644 --- a/gtsam/slam/tests/testRangeFactor.cpp +++ b/gtsam/slam/tests/testRangeFactor.cpp @@ -24,6 +24,7 @@ #include #include #include +#include #include using namespace std; diff --git a/gtsam/slam/tests/testReferenceFrameFactor.cpp b/gtsam/slam/tests/testReferenceFrameFactor.cpp index 815ce391a..5735ac21c 100644 --- a/gtsam/slam/tests/testReferenceFrameFactor.cpp +++ b/gtsam/slam/tests/testReferenceFrameFactor.cpp @@ -116,7 +116,7 @@ TEST( ReferenceFrameFactor, jacobians_zero ) { } /* ************************************************************************* */ -TEST_UNSAFE( ReferenceFrameFactor, converge_trans ) { +TEST( ReferenceFrameFactor, converge_trans ) { // initial points Point2 local1(2.0, 2.0), local2(4.0, 5.0), @@ -136,15 +136,15 @@ TEST_UNSAFE( ReferenceFrameFactor, converge_trans ) { // Pose2 trans = transIdeal * Pose2(-200.0, 100.0, 2.0); // beyond pi/2 - fails NonlinearFactorGraph graph; - graph.add(PointReferenceFrameFactor(lB1, tA1, lA1)); - graph.add(PointReferenceFrameFactor(lB2, tA1, lA2)); + graph.push_back(PointReferenceFrameFactor(lB1, tA1, lA1)); + graph.push_back(PointReferenceFrameFactor(lB2, tA1, lA2)); // hard constraints on points double error_gain = 1000.0; - graph.add(NonlinearEquality(lA1, local1, error_gain)); - graph.add(NonlinearEquality(lA2, local2, error_gain)); - graph.add(NonlinearEquality(lB1, global1, error_gain)); - graph.add(NonlinearEquality(lB2, global2, error_gain)); + graph.push_back(NonlinearEquality(lA1, local1, error_gain)); + graph.push_back(NonlinearEquality(lA2, local2, error_gain)); + graph.push_back(NonlinearEquality(lB1, global1, error_gain)); + graph.push_back(NonlinearEquality(lB2, global2, error_gain)); // create initial estimate Values init; @@ -186,9 +186,9 @@ TEST( ReferenceFrameFactor, converge_local ) { NonlinearFactorGraph graph; double error_gain = 1000.0; - graph.add(PointReferenceFrameFactor(lB1, tA1, lA1)); - graph.add(NonlinearEquality(lB1, global, error_gain)); - graph.add(NonlinearEquality(tA1, trans, error_gain)); + graph.push_back(PointReferenceFrameFactor(lB1, tA1, lA1)); + graph.push_back(NonlinearEquality(lB1, global, error_gain)); + graph.push_back(NonlinearEquality(tA1, trans, error_gain)); // create initial estimate Values init; @@ -222,9 +222,9 @@ TEST( ReferenceFrameFactor, converge_global ) { NonlinearFactorGraph graph; double error_gain = 1000.0; - graph.add(PointReferenceFrameFactor(lB1, tA1, lA1)); - graph.add(NonlinearEquality(lA1, local, error_gain)); - graph.add(NonlinearEquality(tA1, trans, error_gain)); + graph.push_back(PointReferenceFrameFactor(lB1, tA1, lA1)); + graph.push_back(NonlinearEquality(lA1, local, error_gain)); + graph.push_back(NonlinearEquality(tA1, trans, error_gain)); // create initial estimate Values init; diff --git a/gtsam/slam/tests/testStereoFactor.cpp b/gtsam/slam/tests/testStereoFactor.cpp index 3936d8f45..3c28f6da9 100644 --- a/gtsam/slam/tests/testStereoFactor.cpp +++ b/gtsam/slam/tests/testStereoFactor.cpp @@ -189,11 +189,11 @@ TEST( StereoFactor, singlePoint) { NonlinearFactorGraph graph; - graph.add(NonlinearEquality(X(1), camera1)); + graph.push_back(NonlinearEquality(X(1), camera1)); StereoPoint2 measurement(320, 320.0-50, 240); // arguments: measurement, sigma, cam#, measurement #, K, baseline (m) - graph.add(GenericStereoFactor(measurement, model, X(1), L(1), K)); + graph.push_back(GenericStereoFactor(measurement, model, X(1), L(1), K)); // Create a configuration corresponding to the ground truth Values values; diff --git a/gtsam/symbolic/CMakeLists.txt b/gtsam/symbolic/CMakeLists.txt new file mode 100644 index 000000000..cf16b03aa --- /dev/null +++ b/gtsam/symbolic/CMakeLists.txt @@ -0,0 +1,29 @@ +# Install headers +file(GLOB symbolic_headers "*.h") +install(FILES ${symbolic_headers} DESTINATION include/gtsam/symbolic) + +# Components to link tests in this subfolder against +set(symbolic_local_libs + inference + geometry + base + ccolamd + symbolic +) + +# Files to exclude from compilation of tests and timing scripts +set(symbolic_excluded_files +# "${CMAKE_CURRENT_SOURCE_DIR}/tests/testTypedDiscreteFactor.cpp" # Example of excluding a test + "" # Add to this list, with full path, to exclude +) + +# Build tests +if (GTSAM_BUILD_TESTS) + gtsam_add_subdir_tests(symbolic "${symbolic_local_libs}" "${gtsam-default}" "${symbolic_excluded_files}") +endif(GTSAM_BUILD_TESTS) + +# Build timing scripts +if (GTSAM_BUILD_TIMING) + gtsam_add_subdir_timing(symbolic "${symbolic_local_libs}" "${gtsam-default}" "${symbolic_excluded_files}") +endif(GTSAM_BUILD_TIMING) + diff --git a/gtsam/symbolic/SymbolicBayesNet.cpp b/gtsam/symbolic/SymbolicBayesNet.cpp new file mode 100644 index 000000000..c5a04eb0d --- /dev/null +++ b/gtsam/symbolic/SymbolicBayesNet.cpp @@ -0,0 +1,55 @@ +/* ---------------------------------------------------------------------------- + + * GTSAM Copyright 2010, Georgia Tech Research Corporation, + * Atlanta, Georgia 30332-0415 + * All Rights Reserved + * Authors: Frank Dellaert, et al. (see THANKS for the full author list) + + * See LICENSE for the license information + + * -------------------------------------------------------------------------- */ + +/** + * @file SymbolicBayesNet.cpp + * @date Oct 29, 2009 + * @author Frank Dellaert + * @author Richard Roberts + */ + +#include +#include +#include + +#include + +namespace gtsam { + + // Instantiate base class + template class FactorGraph; + + /* ************************************************************************* */ + bool SymbolicBayesNet::equals(const This& bn, double tol) const + { + return Base::equals(bn, tol); + } + + /* ************************************************************************* */ + void SymbolicBayesNet::saveGraph(const std::string &s, const KeyFormatter& keyFormatter) const + { + std::ofstream of(s.c_str()); + of << "digraph G{\n"; + + BOOST_REVERSE_FOREACH(const sharedConditional& conditional, *this) { + SymbolicConditional::Frontals frontals = conditional->frontals(); + Key me = frontals.front(); + SymbolicConditional::Parents parents = conditional->parents(); + BOOST_FOREACH(Key p, parents) + of << p << "->" << me << std::endl; + } + + of << "}"; + of.close(); + } + + +} diff --git a/gtsam/symbolic/SymbolicBayesNet.h b/gtsam/symbolic/SymbolicBayesNet.h new file mode 100644 index 000000000..2e59a98a4 --- /dev/null +++ b/gtsam/symbolic/SymbolicBayesNet.h @@ -0,0 +1,84 @@ +/* ---------------------------------------------------------------------------- + + * GTSAM Copyright 2010, Georgia Tech Research Corporation, + * Atlanta, Georgia 30332-0415 + * All Rights Reserved + * Authors: Frank Dellaert, et al. (see THANKS for the full author list) + + * See LICENSE for the license information + + * -------------------------------------------------------------------------- */ + +/** + * @file SymbolicBayesNet.h + * @date Oct 29, 2009 + * @author Frank Dellaert + * @author Richard Roberts + */ + +#pragma once + +#include +#include +#include + +namespace gtsam { + + /** Symbolic Bayes Net + * \nosubgrouping + */ + class GTSAM_EXPORT SymbolicBayesNet : public FactorGraph { + + public: + + typedef FactorGraph Base; + typedef SymbolicBayesNet This; + typedef SymbolicConditional ConditionalType; + typedef boost::shared_ptr shared_ptr; + typedef boost::shared_ptr sharedConditional; + + /// @name Standard Constructors + /// @{ + + /** Construct empty factor graph */ + SymbolicBayesNet() {} + + /** Construct from iterator over conditionals */ + template + SymbolicBayesNet(ITERATOR firstConditional, ITERATOR lastConditional) : Base(firstConditional, lastConditional) {} + + /** Construct from container of factors (shared_ptr or plain objects) */ + template + explicit SymbolicBayesNet(const CONTAINER& conditionals) : Base(conditionals) {} + + /** Implicit copy/downcast constructor to override explicit template container constructor */ + template + SymbolicBayesNet(const FactorGraph& graph) : Base(graph) {} + + /// @} + + /// @name Testable + /// @{ + + /** Check equality */ + bool equals(const This& bn, double tol = 1e-9) const; + + /// @} + + /// @name Standard Interface + /// @{ + + void saveGraph(const std::string &s, const KeyFormatter& keyFormatter = DefaultKeyFormatter) const; + + /// @} + + private: + /** Serialization function */ + friend class boost::serialization::access; + template + void serialize(ARCHIVE & ar, const unsigned int version) { + ar & BOOST_SERIALIZATION_BASE_OBJECT_NVP(Base); + } +}; + +} // namespace gtsam diff --git a/gtsam/symbolic/SymbolicBayesTree.cpp b/gtsam/symbolic/SymbolicBayesTree.cpp new file mode 100644 index 000000000..8797ba363 --- /dev/null +++ b/gtsam/symbolic/SymbolicBayesTree.cpp @@ -0,0 +1,40 @@ +/* ---------------------------------------------------------------------------- + + * GTSAM Copyright 2010, Georgia Tech Research Corporation, + * Atlanta, Georgia 30332-0415 + * All Rights Reserved + * Authors: Frank Dellaert, et al. (see THANKS for the full author list) + + * See LICENSE for the license information + + * -------------------------------------------------------------------------- */ + +/** + * @file SymbolicBayesTree.h + * @date Oct 29, 2009 + * @author Frank Dellaert + * @author Richard Roberts + */ + +#include + +#include +#include +#include +#include +#include +#include + +namespace gtsam { + + // Instantiate base classes + template class BayesTreeCliqueBase; + template class BayesTree; + + /* ************************************************************************* */\ + bool SymbolicBayesTree::equals(const This& other, double tol /* = 1e-9 */) const + { + return Base::equals(other, tol); + } + +} diff --git a/gtsam/symbolic/SymbolicBayesTree.h b/gtsam/symbolic/SymbolicBayesTree.h new file mode 100644 index 000000000..526120a30 --- /dev/null +++ b/gtsam/symbolic/SymbolicBayesTree.h @@ -0,0 +1,73 @@ +/* ---------------------------------------------------------------------------- + + * GTSAM Copyright 2010, Georgia Tech Research Corporation, + * Atlanta, Georgia 30332-0415 + * All Rights Reserved + * Authors: Frank Dellaert, et al. (see THANKS for the full author list) + + * See LICENSE for the license information + + * -------------------------------------------------------------------------- */ + +/** + * @file SymbolicBayesTree.h + * @date Oct 29, 2009 + * @author Frank Dellaert + * @author Richard Roberts + */ + +#pragma once + +#include +#include +#include +#include + +namespace gtsam { + + // Forward declarations + class SymbolicConditional; + + /* ************************************************************************* */ + /// A clique in a SymbolicBayesTree + class GTSAM_EXPORT SymbolicBayesTreeClique : + public BayesTreeCliqueBase + { + public: + typedef SymbolicBayesTreeClique This; + typedef BayesTreeCliqueBase Base; + typedef boost::shared_ptr shared_ptr; + typedef boost::weak_ptr weak_ptr; + SymbolicBayesTreeClique() {} + SymbolicBayesTreeClique(const boost::shared_ptr& conditional) : Base(conditional) {} + }; + + /* ************************************************************************* */ + /// A Bayes tree that represents the connectivity between variables but is not associated with any + /// probability functions. + class GTSAM_EXPORT SymbolicBayesTree : + public BayesTree + { + private: + typedef BayesTree Base; + + public: + typedef SymbolicBayesTree This; + typedef boost::shared_ptr shared_ptr; + + /** Default constructor, creates an empty Bayes tree */ + SymbolicBayesTree() {} + + /** check equality */ + bool equals(const This& other, double tol = 1e-9) const; + + private: + /** Serialization function */ + friend class boost::serialization::access; + template + void serialize(ARCHIVE & ar, const unsigned int version) { + ar & BOOST_SERIALIZATION_BASE_OBJECT_NVP(Base); + } + }; + +} diff --git a/gtsam/symbolic/SymbolicConditional.cpp b/gtsam/symbolic/SymbolicConditional.cpp new file mode 100644 index 000000000..d733c0937 --- /dev/null +++ b/gtsam/symbolic/SymbolicConditional.cpp @@ -0,0 +1,37 @@ +/* ---------------------------------------------------------------------------- + + * GTSAM Copyright 2010, Georgia Tech Research Corporation, + * Atlanta, Georgia 30332-0415 + * All Rights Reserved + * Authors: Frank Dellaert, et al. (see THANKS for the full author list) + + * See LICENSE for the license information + + * -------------------------------------------------------------------------- */ + +/** + * @file SymbolicConditional.cpp + * @author Richard Roberts + * @date Oct 17, 2010 + */ + +#include +#include + +namespace gtsam { + + using namespace std; + + /* ************************************************************************* */ + void SymbolicConditional::print(const std::string& str, const KeyFormatter& keyFormatter) const + { + BaseConditional::print(str, keyFormatter); + } + + /* ************************************************************************* */ + bool SymbolicConditional::equals(const This& c, double tol) const + { + return BaseFactor::equals(c) && BaseConditional::equals(c); + } + +} diff --git a/gtsam/symbolic/SymbolicConditional.h b/gtsam/symbolic/SymbolicConditional.h new file mode 100644 index 000000000..76c7583a7 --- /dev/null +++ b/gtsam/symbolic/SymbolicConditional.h @@ -0,0 +1,106 @@ +/* ---------------------------------------------------------------------------- + + * GTSAM Copyright 2010, Georgia Tech Research Corporation, + * Atlanta, Georgia 30332-0415 + * All Rights Reserved + * Authors: Frank Dellaert, et al. (see THANKS for the full author list) + + * See LICENSE for the license information + + * -------------------------------------------------------------------------- */ + +/** + * @file SymbolicConditional.h + * @author Richard Roberts + * @date Oct 17, 2010 + */ + +#pragma once + +#include +#include +#include + +namespace gtsam { + + /** + * SymbolicConditional is a conditional with keys but no probability + * data, produced by symbolic elimination of SymbolicFactor. + * + * It is also a SymbolicFactor, and thus derives from it. It + * derives also from Conditional, which is a generic interface + * class for conditionals. + * \nosubgrouping + */ + class GTSAM_EXPORT SymbolicConditional : + public SymbolicFactor, + public Conditional { + + public: + typedef SymbolicConditional This; /// Typedef to this class + typedef SymbolicFactor BaseFactor; /// Typedef to the factor base class + typedef Conditional BaseConditional; /// Typedef to the conditional base class + typedef boost::shared_ptr shared_ptr; /// Boost shared_ptr to this class + typedef BaseFactor::iterator iterator; /// iterator to keys + typedef BaseFactor::const_iterator const_iterator; /// const_iterator to keys + + /// @name Standard Constructors + /// @{ + + /** Empty Constructor to make serialization possible */ + SymbolicConditional() {} + + /** No parents */ + SymbolicConditional(Index j) : BaseFactor(j), BaseConditional(1) {} + + /** Single parent */ + SymbolicConditional(Index j, Index parent) : BaseFactor(j, parent), BaseConditional(1) {} + + /** Two parents */ + SymbolicConditional(Index j, Index parent1, Index parent2) : BaseFactor(j, parent1, parent2), BaseConditional(1) {} + + /** Three parents */ + SymbolicConditional(Index j, Index parent1, Index parent2, Index parent3) : BaseFactor(j, parent1, parent2, parent3), BaseConditional(1) {} + + /** Named constructor from an arbitrary number of keys and frontals */ + template + static SymbolicConditional FromIterators(ITERATOR firstKey, ITERATOR lastKey, size_t nrFrontals) + { + SymbolicConditional result; + (BaseFactor&)result = BaseFactor::FromIterators(firstKey, lastKey); + result.nrFrontals_ = nrFrontals; + return result; } + + /** Named constructor from an arbitrary number of keys and frontals */ + template + static SymbolicConditional FromKeys(const CONTAINER& keys, size_t nrFrontals) { + return FromIterators(keys.begin(), keys.end(), nrFrontals); } + + virtual ~SymbolicConditional() {} + + /// Copy this object as its actual derived type. + SymbolicFactor::shared_ptr clone() const { return boost::make_shared(*this); } + + /// @} + + /// @name Testable + + /** Print with optional formatter */ + void print(const std::string& str = "", const KeyFormatter& keyFormatter = DefaultKeyFormatter) const; + + /** Check equality */ + bool equals(const This& c, double tol = 1e-9) const; + + /// @} + + private: + /** Serialization function */ + friend class boost::serialization::access; + template + void serialize(Archive & ar, const unsigned int version) { + ar & BOOST_SERIALIZATION_BASE_OBJECT_NVP(BaseFactor); + ar & BOOST_SERIALIZATION_BASE_OBJECT_NVP(BaseConditional); + } + }; + +} diff --git a/gtsam/symbolic/SymbolicEliminationTree.cpp b/gtsam/symbolic/SymbolicEliminationTree.cpp new file mode 100644 index 000000000..bd11274bd --- /dev/null +++ b/gtsam/symbolic/SymbolicEliminationTree.cpp @@ -0,0 +1,44 @@ +/* ---------------------------------------------------------------------------- + + * GTSAM Copyright 2010, Georgia Tech Research Corporation, + * Atlanta, Georgia 30332-0415 + * All Rights Reserved + * Authors: Frank Dellaert, et al. (see THANKS for the full author list) + + * See LICENSE for the license information + + * -------------------------------------------------------------------------- */ + +/** + * @file SymbolicEliminationTree.cpp + * @date Mar 29, 2013 + * @author Frank Dellaert + * @author Richard Roberts + */ + +#include +#include + +namespace gtsam { + + // Instantiate base class + template class EliminationTree; + + /* ************************************************************************* */ + SymbolicEliminationTree::SymbolicEliminationTree( + const SymbolicFactorGraph& factorGraph, const VariableIndex& structure, + const Ordering& order) : + Base(factorGraph, structure, order) {} + + /* ************************************************************************* */ + SymbolicEliminationTree::SymbolicEliminationTree( + const SymbolicFactorGraph& factorGraph, const Ordering& order) : + Base(factorGraph, order) {} + + /* ************************************************************************* */ + bool SymbolicEliminationTree::equals(const This& other, double tol) const + { + return Base::equals(other, tol); + } + +} diff --git a/gtsam/symbolic/SymbolicEliminationTree.h b/gtsam/symbolic/SymbolicEliminationTree.h new file mode 100644 index 000000000..5a44d451d --- /dev/null +++ b/gtsam/symbolic/SymbolicEliminationTree.h @@ -0,0 +1,62 @@ +/* ---------------------------------------------------------------------------- + + * GTSAM Copyright 2010, Georgia Tech Research Corporation, + * Atlanta, Georgia 30332-0415 + * All Rights Reserved + * Authors: Frank Dellaert, et al. (see THANKS for the full author list) + + * See LICENSE for the license information + + * -------------------------------------------------------------------------- */ + +/** + * @file SymbolicEliminationTree.h + * @date Mar 29, 2013 + * @author Frank Dellaert + * @author Richard Roberts + */ + +#pragma once + +#include +#include +#include + +namespace gtsam { + + class GTSAM_EXPORT SymbolicEliminationTree : + public EliminationTree + { + public: + typedef EliminationTree Base; ///< Base class + typedef SymbolicEliminationTree This; ///< This class + typedef boost::shared_ptr shared_ptr; ///< Shared pointer to this class + + /** Build the elimination tree of a factor graph using pre-computed column structure. + * @param factorGraph The factor graph for which to build the elimination tree + * @param structure The set of factors involving each variable. If this is not precomputed, + * you can call the Create(const FactorGraph&) named constructor instead. + * @return The elimination tree */ + SymbolicEliminationTree(const SymbolicFactorGraph& factorGraph, + const VariableIndex& structure, const Ordering& order); + + /** Build the elimination tree of a factor graph. Note that this has to compute the column + * structure as a VariableIndex, so if you already have this precomputed, use the other + * constructor instead. + * @param factorGraph The factor graph for which to build the elimination tree */ + SymbolicEliminationTree(const SymbolicFactorGraph& factorGraph, + const Ordering& order); + + /** Test whether the tree is equal to another */ + bool equals(const This& other, double tol = 1e-9) const; + + private: + + /** Private default constructor for testing */ + SymbolicEliminationTree() {} + + friend class ::EliminationTreeTester; + + }; + +} diff --git a/gtsam/symbolic/SymbolicFactor.cpp b/gtsam/symbolic/SymbolicFactor.cpp new file mode 100644 index 000000000..4b475a26a --- /dev/null +++ b/gtsam/symbolic/SymbolicFactor.cpp @@ -0,0 +1,77 @@ +/* ---------------------------------------------------------------------------- + + * GTSAM Copyright 2010, Georgia Tech Research Corporation, + * Atlanta, Georgia 30332-0415 + * All Rights Reserved + * Authors: Frank Dellaert, et al. (see THANKS for the full author list) + + * See LICENSE for the license information + + * -------------------------------------------------------------------------- */ + +/** + * @file SymbolicFactor.cpp + * @author Richard Roberts + * @date Oct 17, 2010 + */ + +#include +#include + +#include +#include +#include +#include + +using namespace std; + +namespace gtsam { + + /* ************************************************************************* */ + std::pair, boost::shared_ptr > + EliminateSymbolic(const SymbolicFactorGraph& factors, const Ordering& keys) + { + // Gather all keys + FastSet allKeys; + BOOST_FOREACH(const SymbolicFactor::shared_ptr& factor, factors) { + allKeys.insert(factor->begin(), factor->end()); } + + // Check keys + BOOST_FOREACH(Key key, keys) { + if(allKeys.find(key) == allKeys.end()) + throw std::runtime_error("Requested to eliminate a key that is not in the factors"); + } + + // Sort frontal keys + FastSet frontals(keys); + const size_t nFrontals = keys.size(); + + // Build a key vector with the frontals followed by the separator + vector orderedKeys(allKeys.size()); + std::copy(keys.begin(), keys.end(), orderedKeys.begin()); + std::set_difference(allKeys.begin(), allKeys.end(), frontals.begin(), frontals.end(), orderedKeys.begin() + nFrontals); + + // Return resulting conditional and factor + return make_pair( + boost::make_shared( + SymbolicConditional::FromKeys(orderedKeys, nFrontals)), + boost::make_shared( + SymbolicFactor::FromIterators(orderedKeys.begin() + nFrontals, orderedKeys.end()))); + } + + /* ************************************************************************* */ + bool SymbolicFactor::equals(const This& other, double tol) const + { + return Base::equals(other, tol); + } + + /* ************************************************************************* */ + std::pair, boost::shared_ptr > + SymbolicFactor::eliminate(const Ordering& keys) const + { + SymbolicFactorGraph graph; + graph += *this; // TODO: Is there a way to avoid copying this factor? + return EliminateSymbolic(graph, keys); + } + +} // gtsam diff --git a/gtsam/symbolic/SymbolicFactor.h b/gtsam/symbolic/SymbolicFactor.h new file mode 100644 index 000000000..a9551d5f9 --- /dev/null +++ b/gtsam/symbolic/SymbolicFactor.h @@ -0,0 +1,145 @@ +/* ---------------------------------------------------------------------------- + + * GTSAM Copyright 2010, Georgia Tech Research Corporation, + * Atlanta, Georgia 30332-0415 + * All Rights Reserved + * Authors: Frank Dellaert, et al. (see THANKS for the full author list) + + * See LICENSE for the license information + + * -------------------------------------------------------------------------- */ + +/** + * @file SymbolicFactor.h + * @author Richard Roberts + * @date Oct 17, 2010 + */ + +#pragma once + +#include +#include +#include +#include + +#include +#include + +namespace gtsam { + + // Forward declarations + class SymbolicConditional; + class Ordering; + + /** SymbolicFactor represents a symbolic factor that specifies graph topology but is not + * associated with any numerical function. + * \nosubgrouping */ + class GTSAM_EXPORT SymbolicFactor : public Factor { + + public: + + typedef SymbolicFactor This; + typedef Factor Base; + typedef SymbolicConditional ConditionalType; + + /** Overriding the shared_ptr typedef */ + typedef boost::shared_ptr shared_ptr; + + /// @name Standard Interface + /// @{ + + /** Default constructor for I/O */ + SymbolicFactor() {} + + /** Construct unary factor */ + SymbolicFactor(Key j) : + Base(boost::assign::cref_list_of<1>(j)) {} + + /** Construct binary factor */ + SymbolicFactor(Key j1, Key j2) : + Base(boost::assign::cref_list_of<2>(j1)(j2)) {} + + /** Construct ternary factor */ + SymbolicFactor(Key j1, Key j2, Key j3) : + Base(boost::assign::cref_list_of<3>(j1)(j2)(j3)) {} + + /** Construct 4-way factor */ + SymbolicFactor(Key j1, Key j2, Key j3, Key j4) : + Base(boost::assign::cref_list_of<4>(j1)(j2)(j3)(j4)) {} + + /** Construct 5-way factor */ + SymbolicFactor(Key j1, Key j2, Key j3, Key j4, Key j5) : + Base(boost::assign::cref_list_of<5>(j1)(j2)(j3)(j4)(j5)) {} + + /** Construct 6-way factor */ + SymbolicFactor(Key j1, Key j2, Key j3, Key j4, Key j5, Key j6) : + Base(boost::assign::cref_list_of<6>(j1)(j2)(j3)(j4)(j5)(j6)) {} + + virtual ~SymbolicFactor() {} + + /// Copy this object as its actual derived type. + SymbolicFactor::shared_ptr clone() const { return boost::make_shared(*this); } + + /// @} + + /// @name Testable + /// @{ + + bool equals(const This& other, double tol = 1e-9) const; + + /// @} + + /// @name Advanced Constructors + /// @{ + private: + explicit SymbolicFactor(const Base& base) : + Base(base) {} + + public: + /** Constructor from a collection of keys */ + template + static SymbolicFactor FromIterators(KEYITERATOR beginKey, KEYITERATOR endKey) { + return SymbolicFactor(Base::FromIterators(beginKey, endKey)); } + + /** Constructor from a collection of keys - compatible with boost::assign::list_of and + * boost::assign::cref_list_of */ + template + static SymbolicFactor FromKeys(const CONTAINER& keys) { + return SymbolicFactor(Base::FromKeys(keys)); } + + /// @} + + /// @name Standard Interface + /// @{ + + /** Whether the factor is empty (involves zero variables). */ + bool empty() const { return keys_.empty(); } + + /** Eliminate the variables in \c keys, in the order specified in \c keys, returning a + * conditional and marginal. */ + std::pair, boost::shared_ptr > + eliminate(const Ordering& keys) const; + + /// @} + + private: + /** Serialization function */ + friend class boost::serialization::access; + template + void serialize(ARCHIVE & ar, const unsigned int version) { + ar & BOOST_SERIALIZATION_BASE_OBJECT_NVP(Base); + } + }; // IndexFactor + + // Forward declarations + class SymbolicFactorGraph; + class Ordering; + + /** Dense elimination function for symbolic factors. This is usually provided as an argument to + * one of the factor graph elimination functions (see EliminateableFactorGraph). The factor + * graph elimination functions do sparse variable elimination, and use this function to eliminate + * single variables or variable cliques. */ + GTSAM_EXPORT std::pair, boost::shared_ptr > + EliminateSymbolic(const SymbolicFactorGraph& factors, const Ordering& keys); + +} diff --git a/gtsam/symbolic/SymbolicFactorGraph.cpp b/gtsam/symbolic/SymbolicFactorGraph.cpp new file mode 100644 index 000000000..270930472 --- /dev/null +++ b/gtsam/symbolic/SymbolicFactorGraph.cpp @@ -0,0 +1,110 @@ +/* ---------------------------------------------------------------------------- + + * GTSAM Copyright 2010, Georgia Tech Research Corporation, + * Atlanta, Georgia 30332-0415 + * All Rights Reserved + * Authors: Frank Dellaert, et al. (see THANKS for the full author list) + + * See LICENSE for the license information + + * -------------------------------------------------------------------------- */ + +/** + * @file SymbolicFactorGraph.cpp + * @date Oct 29, 2009 + * @author Frank Dellaert + */ + +#include + +#include +#include +#include +#include +#include +#include +#include + +namespace gtsam { + + // Instantiate base classes + template class FactorGraph; + template class EliminateableFactorGraph; + + using namespace std; + + /* ************************************************************************* */ + bool SymbolicFactorGraph::equals(const This& fg, double tol) const + { + return Base::equals(fg, tol); + } + + /* ************************************************************************* */ + void SymbolicFactorGraph::push_factor(Key key) { + push_back(boost::make_shared(key)); + } + + /* ************************************************************************* */ + void SymbolicFactorGraph::push_factor(Key key1, Key key2) { + push_back(boost::make_shared(key1,key2)); + } + + /* ************************************************************************* */ + void SymbolicFactorGraph::push_factor(Key key1, Key key2, Key key3) { + push_back(boost::make_shared(key1,key2,key3)); + } + + /* ************************************************************************* */ + void SymbolicFactorGraph::push_factor(Key key1, Key key2, Key key3, Key key4) { + push_back(boost::make_shared(key1,key2,key3,key4)); + } + +// /* ************************************************************************* */ +// std::pair +// SymbolicFactorGraph::eliminateFrontals(size_t nFrontals) const +// { +// return FactorGraph::eliminateFrontals(nFrontals, EliminateSymbolic); +// } +// +// /* ************************************************************************* */ +// std::pair +// SymbolicFactorGraph::eliminate(const std::vector& variables) const +// { +// return FactorGraph::eliminate(variables, EliminateSymbolic); +// } +// +// /* ************************************************************************* */ +// std::pair +// SymbolicFactorGraph::eliminateOne(Index variable) const +// { +// return FactorGraph::eliminateOne(variable, EliminateSymbolic); +// } +// +// /* ************************************************************************* */ +// IndexFactor::shared_ptr CombineSymbolic( +// const FactorGraph& factors, const FastMap >& variableSlots) { +// IndexFactor::shared_ptr combined(Combine (factors, variableSlots)); +//// combined->assertInvariants(); +// return combined; +// } +// +// /* ************************************************************************* */ +// pair // +// EliminateSymbolic(const FactorGraph& factors, size_t nrFrontals) { +// +// FastSet keys; +// BOOST_FOREACH(const IndexFactor::shared_ptr& factor, factors) +// BOOST_FOREACH(Index var, *factor) +// keys.insert(var); +// +// if (keys.size() < nrFrontals) throw invalid_argument( +// "EliminateSymbolic requested to eliminate more variables than exist in graph."); +// +// vector newKeys(keys.begin(), keys.end()); +// return make_pair(boost::make_shared(newKeys, nrFrontals), +// boost::make_shared(newKeys.begin() + nrFrontals, newKeys.end())); +// } + + /* ************************************************************************* */ +} diff --git a/gtsam/symbolic/SymbolicFactorGraph.h b/gtsam/symbolic/SymbolicFactorGraph.h new file mode 100644 index 000000000..37aae2400 --- /dev/null +++ b/gtsam/symbolic/SymbolicFactorGraph.h @@ -0,0 +1,119 @@ +/* ---------------------------------------------------------------------------- + + * GTSAM Copyright 2010, Georgia Tech Research Corporation, + * Atlanta, Georgia 30332-0415 + * All Rights Reserved + * Authors: Frank Dellaert, et al. (see THANKS for the full author list) + + * See LICENSE for the license information + + * -------------------------------------------------------------------------- */ + +/** + * @file SymbolicFactorGraph.h + * @date Oct 29, 2009 + * @author Frank Dellaert + * @author Richard Roberts + */ + +#pragma once + +#include +#include +#include +#include + +namespace gtsam { + + class SymbolicFactorGraph; + class SymbolicConditional; + class SymbolicBayesNet; + class SymbolicEliminationTree; + class SymbolicBayesTree; + class SymbolicJunctionTree; + + /* ************************************************************************* */ + template<> struct EliminationTraits + { + typedef SymbolicFactor FactorType; ///< Type of factors in factor graph + typedef SymbolicFactorGraph FactorGraphType; ///< Type of the factor graph (e.g. GaussianFactorGraph) + typedef SymbolicConditional ConditionalType; ///< Type of conditionals from elimination + typedef SymbolicBayesNet BayesNetType; ///< Type of Bayes net from sequential elimination + typedef SymbolicEliminationTree EliminationTreeType; ///< Type of elimination tree + typedef SymbolicBayesTree BayesTreeType; ///< Type of Bayes tree + typedef SymbolicJunctionTree JunctionTreeType; ///< Type of Junction tree + /// The default dense elimination function + static std::pair, boost::shared_ptr > + DefaultEliminate(const FactorGraphType& factors, const Ordering& keys) { + return EliminateSymbolic(factors, keys); } + }; + + /* ************************************************************************* */ + /** Symbolic Factor Graph + * \nosubgrouping + */ + class GTSAM_EXPORT SymbolicFactorGraph : + public FactorGraph, + public EliminateableFactorGraph + { + public: + + typedef SymbolicFactorGraph This; ///< Typedef to this class + typedef FactorGraph Base; ///< Typedef to base factor graph type + typedef EliminateableFactorGraph BaseEliminateable; ///< Typedef to base elimination class + typedef boost::shared_ptr shared_ptr; ///< shared_ptr to this class + + /// @name Standard Constructors + /// @{ + + /** Construct empty factor graph */ + SymbolicFactorGraph() {} + + /** Construct from iterator over factors */ + template + SymbolicFactorGraph(ITERATOR firstFactor, ITERATOR lastFactor) : Base(firstFactor, lastFactor) {} + + /** Construct from container of factors (shared_ptr or plain objects) */ + template + explicit SymbolicFactorGraph(const CONTAINER& factors) : Base(factors) {} + + /** Implicit copy/downcast constructor to override explicit template container constructor */ + template + SymbolicFactorGraph(const FactorGraph& graph) : Base(graph) {} + + /// @} + + /// @name Testable + /// @{ + + bool equals(const This& fg, double tol = 1e-9) const; + + /// @} + + /// @name Standard Interface + /// @{ + + /** Push back unary factor */ + void push_factor(Key key); + + /** Push back binary factor */ + void push_factor(Key key1, Key key2); + + /** Push back ternary factor */ + void push_factor(Key key1, Key key2, Key key3); + + /** Push back 4-way factor */ + void push_factor(Key key1, Key key2, Key key3, Key key4); + + /// @} + + private: + /** Serialization function */ + friend class boost::serialization::access; + template + void serialize(ARCHIVE & ar, const unsigned int version) { + ar & BOOST_SERIALIZATION_BASE_OBJECT_NVP(Base); + } + }; + +} // namespace gtsam diff --git a/gtsam/linear/GaussianFactor.cpp b/gtsam/symbolic/SymbolicISAM.cpp similarity index 52% rename from gtsam/linear/GaussianFactor.cpp rename to gtsam/symbolic/SymbolicISAM.cpp index 1bf47ec51..d7af82e5c 100644 --- a/gtsam/linear/GaussianFactor.cpp +++ b/gtsam/symbolic/SymbolicISAM.cpp @@ -10,21 +10,25 @@ * -------------------------------------------------------------------------- */ /** - * @file GaussianFactor.cpp - * @brief Linear Factor....A Gaussian - * @brief linearFactor - * @author Richard Roberts, Christian Potthast + * @file SymbolicISAM.cpp + * @date July 29, 2013 + * @author Frank Dellaert + * @author Richard Roberts */ -#include -#include +#include +#include namespace gtsam { - using namespace std; + // Instantiate base class + template class ISAM; /* ************************************************************************* */ - GaussianFactor::GaussianFactor(const GaussianConditional& c) : - IndexFactor(c) {} + SymbolicISAM::SymbolicISAM() {} + + /* ************************************************************************* */ + SymbolicISAM::SymbolicISAM(const SymbolicBayesTree& bayesTree) : + Base(bayesTree) {} } diff --git a/gtsam/symbolic/SymbolicISAM.h b/gtsam/symbolic/SymbolicISAM.h new file mode 100644 index 000000000..3f85facbf --- /dev/null +++ b/gtsam/symbolic/SymbolicISAM.h @@ -0,0 +1,46 @@ +/* ---------------------------------------------------------------------------- + + * GTSAM Copyright 2010, Georgia Tech Research Corporation, + * Atlanta, Georgia 30332-0415 + * All Rights Reserved + * Authors: Frank Dellaert, et al. (see THANKS for the full author list) + + * See LICENSE for the license information + + * -------------------------------------------------------------------------- */ + +/** + * @file SymbolicISAM.h + * @date July 29, 2013 + * @author Frank Dellaert + * @author Richard Roberts + */ + +#pragma once + +#include +#include + +namespace gtsam { + + class GTSAM_EXPORT SymbolicISAM : public ISAM + { + public: + typedef ISAM Base; + typedef SymbolicISAM This; + typedef boost::shared_ptr shared_ptr; + + /// @name Standard Constructors + /// @{ + + /** Create an empty Bayes Tree */ + SymbolicISAM(); + + /** Copy constructor */ + SymbolicISAM(const SymbolicBayesTree& bayesTree); + + /// @} + + }; + +} diff --git a/gtsam/symbolic/SymbolicJunctionTree.cpp b/gtsam/symbolic/SymbolicJunctionTree.cpp new file mode 100644 index 000000000..a8e9803fc --- /dev/null +++ b/gtsam/symbolic/SymbolicJunctionTree.cpp @@ -0,0 +1,33 @@ +/* ---------------------------------------------------------------------------- + + * GTSAM Copyright 2010, Georgia Tech Research Corporation, + * Atlanta, Georgia 30332-0415 + * All Rights Reserved + * Authors: Frank Dellaert, et al. (see THANKS for the full author list) + + * See LICENSE for the license information + + * -------------------------------------------------------------------------- */ + +/** + * @file SymbolicJunctionTree.cpp + * @date Mar 29, 2013 + * @author Frank Dellaert + * @author Richard Roberts + */ + +#include +#include +#include + +namespace gtsam { + + // Instantiate base class + template class JunctionTree; + + /* ************************************************************************* */ + SymbolicJunctionTree::SymbolicJunctionTree( + const SymbolicEliminationTree& eliminationTree) : + Base(Base::FromEliminationTree(eliminationTree)) {} + +} diff --git a/gtsam/symbolic/SymbolicJunctionTree.h b/gtsam/symbolic/SymbolicJunctionTree.h new file mode 100644 index 000000000..709488cbf --- /dev/null +++ b/gtsam/symbolic/SymbolicJunctionTree.h @@ -0,0 +1,66 @@ +/* ---------------------------------------------------------------------------- + + * GTSAM Copyright 2010, Georgia Tech Research Corporation, + * Atlanta, Georgia 30332-0415 + * All Rights Reserved + * Authors: Frank Dellaert, et al. (see THANKS for the full author list) + + * See LICENSE for the license information + + * -------------------------------------------------------------------------- */ + +/** + * @file SymbolicJunctionTree.h + * @date Mar 29, 2013 + * @author Frank Dellaert + * @author Richard Roberts + */ + +#include +#include +#include + +namespace gtsam { + + // Forward declarations + class SymbolicEliminationTree; + + /** + * A ClusterTree, i.e., a set of variable clusters with factors, arranged in a tree, with + * the additional property that it represents the clique tree associated with a Bayes net. + * + * In GTSAM a junction tree is an intermediate data structure in multifrontal + * variable elimination. Each node is a cluster of factors, along with a + * clique of variables that are eliminated all at once. In detail, every node k represents + * a clique (maximal fully connected subset) of an associated chordal graph, such as a + * chordal Bayes net resulting from elimination. + * + * The difference with the BayesTree is that a JunctionTree stores factors, whereas a + * BayesTree stores conditionals, that are the product of eliminating the factors in the + * corresponding JunctionTree cliques. + * + * The tree structure and elimination method are exactly analagous to the EliminationTree, + * except that in the JunctionTree, at each node multiple variables are eliminated at a time. + * + * \addtogroup Multifrontal + * \nosubgrouping + */ + class GTSAM_EXPORT SymbolicJunctionTree : + public JunctionTree { + public: + typedef JunctionTree Base; ///< Base class + typedef SymbolicJunctionTree This; ///< This class + typedef boost::shared_ptr shared_ptr; ///< Shared pointer to this class + + /** + * Build the elimination tree of a factor graph using pre-computed column structure. + * @param factorGraph The factor graph for which to build the elimination tree + * @param structure The set of factors involving each variable. If this is not + * precomputed, you can call the Create(const FactorGraph&) + * named constructor instead. + * @return The elimination tree + */ + SymbolicJunctionTree(const SymbolicEliminationTree& eliminationTree); + }; + +} diff --git a/gtsam/symbolic/tests/symbolicExampleGraphs.h b/gtsam/symbolic/tests/symbolicExampleGraphs.h new file mode 100644 index 000000000..c49633b53 --- /dev/null +++ b/gtsam/symbolic/tests/symbolicExampleGraphs.h @@ -0,0 +1,126 @@ +/* ---------------------------------------------------------------------------- + + * GTSAM Copyright 2010, Georgia Tech Research Corporation, + * Atlanta, Georgia 30332-0415 + * All Rights Reserved + * Authors: Frank Dellaert, et al. (see THANKS for the full author list) + + * See LICENSE for the license information + + * -------------------------------------------------------------------------- */ + +/* + * @file symbolicExampleGraphs.cpp + * @date sept 15, 2012 + * @author Frank Dellaert + * @author Michael Kaess + * @author Viorela Ila + * @author Richard Roberts + */ + +#pragma once + +#include +#include +#include +#include +#include +#include +#include + +namespace gtsam { + namespace { + + const SymbolicFactorGraph simpleTestGraph1 = boost::assign::list_of + (boost::make_shared(0,1)) + (boost::make_shared(0,2)) + (boost::make_shared(1,4)) + (boost::make_shared(2,4)) + (boost::make_shared(3,4)); + + const SymbolicBayesNet simpleTestGraph1BayesNet = boost::assign::list_of + (boost::make_shared(0,1,2)) + (boost::make_shared(1,2,4)) + (boost::make_shared(2,4)) + (boost::make_shared(3,4)) + (boost::make_shared(4)); + + const SymbolicFactorGraph simpleTestGraph2 = boost::assign::list_of + (boost::make_shared(0,1)) + (boost::make_shared(0,2)) + (boost::make_shared(1,3)) + (boost::make_shared(1,4)) + (boost::make_shared(2,3)) + (boost::make_shared(4,5)); + + /** 1 - 0 - 2 - 3 */ + const SymbolicFactorGraph simpleChain = boost::assign::list_of + (boost::make_shared(1,0)) + (boost::make_shared(0,2)) + (boost::make_shared(2,3)); + + /* ************************************************************************* * + * 2 3 + * 0 1 : 2 + ****************************************************************************/ + SymbolicBayesTree __simpleChainBayesTree() { + SymbolicBayesTree result; + result.insertRoot(boost::make_shared( + boost::make_shared( + SymbolicConditional::FromKeys(boost::assign::list_of(2)(3), 2)))); + result.addClique(boost::make_shared( + boost::make_shared( + SymbolicConditional::FromKeys(boost::assign::list_of(0)(1)(2), 2))), + result.roots().front()); + return result; + } + + const SymbolicBayesTree simpleChainBayesTree = __simpleChainBayesTree(); + + /* ************************************************************************* */ + // Keys for ASIA example from the tutorial with A and D evidence + const Key _X_=gtsam::symbol_shorthand::X(0), _T_=gtsam::symbol_shorthand::T(0), + _S_=gtsam::symbol_shorthand::S(0), _E_=gtsam::symbol_shorthand::E(0), + _L_=gtsam::symbol_shorthand::L(0), _B_=gtsam::symbol_shorthand::B(0); + + // Factor graph for Asia example + const SymbolicFactorGraph asiaGraph = boost::assign::list_of + (boost::make_shared(_T_)) + (boost::make_shared(_S_)) + (boost::make_shared(_T_, _E_, _L_)) + (boost::make_shared(_L_, _S_)) + (boost::make_shared(_S_, _B_)) + (boost::make_shared(_E_, _B_)) + (boost::make_shared(_E_, _X_)); + + const SymbolicBayesNet asiaBayesNet = boost::assign::list_of + (boost::make_shared(_T_, _E_, _L_)) + (boost::make_shared(_X_, _E_)) + (boost::make_shared(_E_, _B_, _L_)) + (boost::make_shared(_S_, _B_, _L_)) + (boost::make_shared(_L_, _B_)) + (boost::make_shared(_B_)); + + SymbolicBayesTree __asiaBayesTree() { + SymbolicBayesTree result; + result.insertRoot(boost::make_shared( + boost::make_shared( + SymbolicConditional::FromKeys(boost::assign::list_of(_S_)(_E_)(_L_)(_B_), 4)))); + result.addClique(boost::make_shared( + boost::make_shared( + SymbolicConditional::FromKeys(boost::assign::list_of(_T_)(_E_)(_L_), 1))), + result.roots().front()); + result.addClique(boost::make_shared( + boost::make_shared( + SymbolicConditional::FromKeys(boost::assign::list_of(_X_)(_E_), 1))), + result.roots().front()); + return result; + } + + const SymbolicBayesTree asiaBayesTree = __asiaBayesTree(); + + /* ************************************************************************* */ + const Ordering asiaOrdering = boost::assign::list_of(_X_)(_T_)(_S_)(_E_)(_L_)(_B_); + + } +} diff --git a/gtsam/symbolic/tests/testSerializationSymbolic.cpp b/gtsam/symbolic/tests/testSerializationSymbolic.cpp new file mode 100644 index 000000000..64b06deec --- /dev/null +++ b/gtsam/symbolic/tests/testSerializationSymbolic.cpp @@ -0,0 +1,51 @@ +/* ---------------------------------------------------------------------------- + + * GTSAM Copyright 2010, Georgia Tech Research Corporation, + * Atlanta, Georgia 30332-0415 + * All Rights Reserved + * Authors: Frank Dellaert, et al. (see THANKS for the full author list) + + * See LICENSE for the license information + + * -------------------------------------------------------------------------- */ + +/** + * @file testSerializationInference.cpp + * @brief + * @author Richard Roberts + * @date Feb 7, 2012 + */ + +#include + +#include +#include + +using namespace std; +using namespace gtsam; +using namespace gtsam::serializationTestHelpers; + +/* ************************************************************************* */ +TEST (Serialization, symbolic_graph) { + EXPECT(equalsObj(asiaGraph)); + EXPECT(equalsXML(asiaGraph)); + EXPECT(equalsBinary(asiaGraph)); +} + +/* ************************************************************************* */ +TEST (Serialization, symbolic_bn) { + EXPECT(equalsObj(asiaBayesNet)); + EXPECT(equalsXML(asiaBayesNet)); + EXPECT(equalsBinary(asiaBayesNet)); +} + +/* ************************************************************************* */ +TEST (Serialization, symbolic_bayes_tree ) { + EXPECT(equalsObj(asiaBayesTree)); + EXPECT(equalsXML(asiaBayesTree)); + EXPECT(equalsBinary(asiaBayesTree)); +} + +/* ************************************************************************* */ +int main() { TestResult tr; return TestRegistry::runAllTests(tr); } +/* ************************************************************************* */ diff --git a/gtsam/symbolic/tests/testSymbolicBayesNet.cpp b/gtsam/symbolic/tests/testSymbolicBayesNet.cpp new file mode 100644 index 000000000..537733286 --- /dev/null +++ b/gtsam/symbolic/tests/testSymbolicBayesNet.cpp @@ -0,0 +1,100 @@ +/* ---------------------------------------------------------------------------- + + * GTSAM Copyright 2010, Georgia Tech Research Corporation, + * Atlanta, Georgia 30332-0415 + * All Rights Reserved + * Authors: Frank Dellaert, et al. (see THANKS for the full author list) + + * See LICENSE for the license information + + * -------------------------------------------------------------------------- */ + +/** + * @file testSymbolicBayesNet.cpp + * @brief Unit tests for a symbolic Bayes chain + * @author Frank Dellaert + */ + +#include + +#include + +#include +#include +#include + +using namespace std; +using namespace gtsam; + +static const Key _L_ = 0; +static const Key _A_ = 1; +static const Key _B_ = 2; +static const Key _C_ = 3; +static const Key _D_ = 4; +static const Key _E_ = 5; + +static SymbolicConditional::shared_ptr + B(new SymbolicConditional(_B_)), + L(new SymbolicConditional(_L_, _B_)); + +/* ************************************************************************* */ +TEST( SymbolicBayesNet, equals ) +{ + SymbolicBayesNet f1; + f1.push_back(B); + f1.push_back(L); + SymbolicBayesNet f2; + f2.push_back(L); + f2.push_back(B); + CHECK(f1.equals(f1)); + CHECK(!f1.equals(f2)); +} + +/* ************************************************************************* */ +TEST( SymbolicBayesNet, combine ) +{ + SymbolicConditional::shared_ptr + A(new SymbolicConditional(_A_,_B_,_C_)), + B(new SymbolicConditional(_B_,_C_)), + C(new SymbolicConditional(_C_)); + + // p(A|BC) + SymbolicBayesNet p_ABC; + p_ABC.push_back(A); + + // P(BC)=P(B|C)P(C) + SymbolicBayesNet p_BC; + p_BC.push_back(B); + p_BC.push_back(C); + + // P(ABC) = P(A|BC) P(BC) + p_ABC.push_back(p_BC); + + SymbolicBayesNet expected; + expected.push_back(A); + expected.push_back(B); + expected.push_back(C); + + CHECK(assert_equal(expected,p_ABC)); +} + +/* ************************************************************************* */ +TEST(SymbolicBayesNet, saveGraph) { + SymbolicBayesNet bn; + bn += SymbolicConditional(_A_, _B_); + std::vector keys; + keys.push_back(_B_); + keys.push_back(_C_); + keys.push_back(_D_); + bn += SymbolicConditional::FromKeys(keys,2); + bn += SymbolicConditional(_D_); + + bn.saveGraph("SymbolicBayesNet.dot"); +} + +/* ************************************************************************* */ +int main() { + TestResult tr; + return TestRegistry::runAllTests(tr); +} +/* ************************************************************************* */ diff --git a/gtsam/symbolic/tests/testSymbolicBayesTree.cpp b/gtsam/symbolic/tests/testSymbolicBayesTree.cpp new file mode 100644 index 000000000..43598cb16 --- /dev/null +++ b/gtsam/symbolic/tests/testSymbolicBayesTree.cpp @@ -0,0 +1,662 @@ +/* ---------------------------------------------------------------------------- + + * GTSAM Copyright 2010, Georgia Tech Research Corporation, + * Atlanta, Georgia 30332-0415 + * All Rights Reserved + * Authors: Frank Dellaert, et al. (see THANKS for the full author list) + + * See LICENSE for the license information + + * -------------------------------------------------------------------------- */ + +/* + * @file testSymbolicBayesTree.cpp + * @date sept 15, 2012 + * @author Frank Dellaert + * @author Michael Kaess + * @author Viorela Ila + */ + +#include +#include +#include +#include + +#include +#include +#include +#include +using namespace boost::assign; +using boost::adaptors::indirected; + +#include +#include + +using namespace std; +using namespace gtsam; +using namespace gtsam::symbol_shorthand; + +static bool debug = false; + +namespace { + + /* ************************************************************************* */ + // Helper functions for below + template + SymbolicBayesTreeClique::shared_ptr MakeClique(const KEYS& keys, DenseIndex nrFrontals) + { + return boost::make_shared( + boost::make_shared( + SymbolicConditional::FromKeys(keys, nrFrontals))); + } + + template + SymbolicBayesTreeClique::shared_ptr MakeClique( + const KEYS& keys, DenseIndex nrFrontals, const CHILDREN& children) + { + SymbolicBayesTreeClique::shared_ptr clique = + boost::make_shared( + boost::make_shared( + SymbolicConditional::FromKeys(keys, nrFrontals))); + clique->children.assign(children.begin(), children.end()); + BOOST_FOREACH(const SymbolicBayesTreeClique::shared_ptr& child, children) + child->parent_ = clique; + return clique; + } + +} + +/* ************************************************************************* */ +TEST(SymbolicBayesTree, clear) +{ + SymbolicBayesTree bayesTree = asiaBayesTree; + bayesTree.clear(); + + SymbolicBayesTree expected; + + // Check whether cleared BayesTree is equal to a new BayesTree + CHECK(assert_equal(expected, bayesTree)); +} + +/* ************************************************************************* * +Bayes Tree for testing conversion to a forest of orphans needed for incremental. + A,B + C|A E|B + D|C F|E + */ +/* ************************************************************************* */ +TEST( BayesTree, removePath ) +{ + const Key _A_=A(0), _B_=B(0), _C_=C(0), _D_=D(0), _E_=E(0), _F_=F(0); + + SymbolicBayesTree bayesTreeOrig; + bayesTreeOrig.insertRoot( + MakeClique(list_of(_A_)(_B_), 2, list_of + (MakeClique(list_of(_C_)(_A_), 1, list_of + (MakeClique(list_of(_D_)(_C_), 1)))) + (MakeClique(list_of(_E_)(_B_), 1, list_of + (MakeClique(list_of(_F_)(_E_), 1)))))); + + SymbolicBayesTree bayesTree = bayesTreeOrig; + + // remove C, expected outcome: factor graph with ABC, + // Bayes Tree now contains two orphan trees: D|C and E|B,F|E + SymbolicFactorGraph expected; + expected += SymbolicFactor(_A_,_B_); + expected += SymbolicFactor(_C_,_A_); + SymbolicBayesTree::Cliques expectedOrphans; + expectedOrphans += bayesTree[_D_], bayesTree[_E_]; + + SymbolicBayesNet bn; + SymbolicBayesTree::Cliques orphans; + bayesTree.removePath(bayesTree[_C_], bn, orphans); + SymbolicFactorGraph factors(bn); + CHECK(assert_equal(expected, factors)); + CHECK(assert_container_equal(expectedOrphans|indirected, orphans|indirected)); + + bayesTree = bayesTreeOrig; + + // remove E: factor graph with EB; E|B removed from second orphan tree + SymbolicFactorGraph expected2; + expected2 += SymbolicFactor(_A_,_B_); + expected2 += SymbolicFactor(_E_,_B_); + SymbolicBayesTree::Cliques expectedOrphans2; + expectedOrphans2 += bayesTree[_F_]; + expectedOrphans2 += bayesTree[_C_]; + + SymbolicBayesNet bn2; + SymbolicBayesTree::Cliques orphans2; + bayesTree.removePath(bayesTree[_E_], bn2, orphans2); + SymbolicFactorGraph factors2(bn2); + CHECK(assert_equal(expected2, factors2)); + CHECK(assert_container_equal(expectedOrphans2|indirected, orphans2|indirected)); +} + +/* ************************************************************************* */ +TEST( BayesTree, removePath2 ) +{ + SymbolicBayesTree bayesTree = asiaBayesTree; + + // Call remove-path with clique B + SymbolicBayesNet bn; + SymbolicBayesTree::Cliques orphans; + bayesTree.removePath(bayesTree[_B_], bn, orphans); + SymbolicFactorGraph factors(bn); + + // Check expected outcome + SymbolicFactorGraph expected; + expected += SymbolicFactor(_S_,_E_,_L_,_B_); + CHECK(assert_equal(expected, factors)); + SymbolicBayesTree::Cliques expectedOrphans; + expectedOrphans += bayesTree[_T_], bayesTree[_X_]; + CHECK(assert_container_equal(expectedOrphans|indirected, orphans|indirected)); +} + +/* ************************************************************************* */ +TEST( BayesTree, removePath3 ) +{ + SymbolicBayesTree bayesTree = asiaBayesTree; + + // Call remove-path with clique S + SymbolicBayesNet bn; + SymbolicBayesTree::Cliques orphans; + bayesTree.removePath(bayesTree[_T_], bn, orphans); + SymbolicFactorGraph factors(bn); + + // Check expected outcome + SymbolicFactorGraph expected; + expected += SymbolicFactor(_S_,_E_,_L_,_B_); + expected += SymbolicFactor(_T_,_E_,_L_); + CHECK(assert_equal(expected, factors)); + SymbolicBayesTree::Cliques expectedOrphans; + expectedOrphans += bayesTree[_X_]; + CHECK(assert_container_equal(expectedOrphans|indirected, orphans|indirected)); +} + +void getAllCliques(const SymbolicBayesTree::sharedClique& subtree, SymbolicBayesTree::Cliques& cliques) { + // Check if subtree exists + if (subtree) { + cliques.push_back(subtree); + // Recursive call over all child cliques + BOOST_FOREACH(SymbolicBayesTree::sharedClique& childClique, subtree->children) { + getAllCliques(childClique,cliques); + } + } +} + +/* ************************************************************************* */ +TEST( BayesTree, shortcutCheck ) +{ + const Key _A_=6, _B_=5, _C_=4, _D_=3, _E_=2, _F_=1, _G_=0; + SymbolicFactorGraph chain = list_of + (SymbolicFactor(_A_)) + (SymbolicFactor(_B_, _A_)) + (SymbolicFactor(_C_, _A_)) + (SymbolicFactor(_D_, _C_)) + (SymbolicFactor(_E_, _B_)) + (SymbolicFactor(_F_, _E_)) + (SymbolicFactor(_G_, _F_)); + SymbolicBayesTree bayesTree = *chain.eliminateMultifrontal( + Ordering(list_of(_G_)(_F_)(_E_)(_D_)(_C_)(_B_)(_A_))); + + //bayesTree.saveGraph("BT1.dot"); + + SymbolicBayesTree::sharedClique rootClique = bayesTree.roots().front(); + //rootClique->printTree(); + SymbolicBayesTree::Cliques allCliques; + getAllCliques(rootClique,allCliques); + + BOOST_FOREACH(SymbolicBayesTree::sharedClique& clique, allCliques) { + //clique->print("Clique#"); + SymbolicBayesNet bn = clique->shortcut(rootClique); + //bn.print("Shortcut:\n"); + //cout << endl; + } + + // Check if all the cached shortcuts are cleared + rootClique->deleteCachedShortcuts(); + BOOST_FOREACH(SymbolicBayesTree::sharedClique& clique, allCliques) { + bool notCleared = clique->cachedSeparatorMarginal(); + CHECK( notCleared == false); + } + EXPECT_LONGS_EQUAL(0, (long)rootClique->numCachedSeparatorMarginals()); + +// BOOST_FOREACH(SymbolicBayesTree::sharedClique& clique, allCliques) { +// clique->print("Clique#"); +// if(clique->cachedShortcut()){ +// bn = clique->cachedShortcut().get(); +// bn.print("Shortcut:\n"); +// } +// else +// cout << "Not Initialized" << endl; +// cout << endl; +// } +} + +/* ************************************************************************* */ +TEST( BayesTree, removeTop ) +{ + SymbolicBayesTree bayesTree = asiaBayesTree; + + // create a new factor to be inserted + //boost::shared_ptr newFactor(new IndexFactor(_S_,_B_)); + + // Remove the contaminated part of the Bayes tree + SymbolicBayesNet bn; + SymbolicBayesTree::Cliques orphans; + bayesTree.removeTop(list_of(_B_)(_S_), bn, orphans); + + // Check expected outcome + SymbolicBayesNet expected; + expected += SymbolicConditional::FromKeys(list_of(_S_)(_E_)(_L_)(_B_), 4); + CHECK(assert_equal(expected, bn)); + + SymbolicBayesTree::Cliques expectedOrphans; + expectedOrphans += bayesTree[_T_], bayesTree[_X_]; + CHECK(assert_container_equal(expectedOrphans|indirected, orphans|indirected)); + + // Try removeTop again with a factor that should not change a thing + //boost::shared_ptr newFactor2(new IndexFactor(_B_)); + SymbolicBayesNet bn2; + SymbolicBayesTree::Cliques orphans2; + bayesTree.removeTop(list_of(_B_), bn2, orphans2); + SymbolicFactorGraph factors2(bn2); + SymbolicFactorGraph expected2; + CHECK(assert_equal(expected2, factors2)); + SymbolicBayesTree::Cliques expectedOrphans2; + CHECK(assert_container_equal(expectedOrphans2|indirected, orphans2|indirected)); +} + +/* ************************************************************************* */ +TEST( BayesTree, removeTop2 ) +{ + SymbolicBayesTree bayesTree = asiaBayesTree; + + // create two factors to be inserted + //SymbolicFactorGraph newFactors; + //newFactors.push_factor(_B_); + //newFactors.push_factor(_S_); + + // Remove the contaminated part of the Bayes tree + SymbolicBayesNet bn; + SymbolicBayesTree::Cliques orphans; + bayesTree.removeTop(list_of(_T_), bn, orphans); + + // Check expected outcome + SymbolicBayesNet expected = list_of + (SymbolicConditional::FromKeys(list_of(_S_)(_E_)(_L_)(_B_), 4)) + (SymbolicConditional::FromKeys(list_of(_T_)(_E_)(_L_), 1)); + CHECK(assert_equal(expected, bn)); + + SymbolicBayesTree::Cliques expectedOrphans; + expectedOrphans += bayesTree[_X_]; + CHECK(assert_container_equal(expectedOrphans|indirected, orphans|indirected)); +} + +/* ************************************************************************* */ +TEST( BayesTree, removeTop3 ) +{ + SymbolicFactorGraph graph = list_of + (SymbolicFactor(L(5))) + (SymbolicFactor(X(4), L(5))) + (SymbolicFactor(X(2), X(4))) + (SymbolicFactor(X(3), X(2))); + SymbolicBayesTree bayesTree = *graph.eliminateMultifrontal( + Ordering(list_of (X(3)) (X(2)) (X(4)) (L(5)) )); + + // remove all + SymbolicBayesNet bn; + SymbolicBayesTree::Cliques orphans; + bayesTree.removeTop(list_of(L(5))(X(4))(X(2))(X(3)), bn, orphans); + + SymbolicBayesNet expectedBn = list_of + (SymbolicConditional::FromKeys(list_of(X(4))(L(5)), 2)) + (SymbolicConditional(X(2), X(4))) + (SymbolicConditional(X(3), X(2))); + EXPECT(assert_equal(expectedBn, bn)); + EXPECT(orphans.empty()); +} + +/* ************************************************************************* */ +TEST( BayesTree, removeTop4 ) +{ + SymbolicFactorGraph graph = list_of + (SymbolicFactor(L(5))) + (SymbolicFactor(X(4), L(5))) + (SymbolicFactor(X(2), X(4))) + (SymbolicFactor(X(3), X(2))); + SymbolicBayesTree bayesTree = *graph.eliminateMultifrontal( + Ordering(list_of (X(3)) (X(2)) (X(4)) (L(5)) )); + + // remove all + SymbolicBayesNet bn; + SymbolicBayesTree::Cliques orphans; + bayesTree.removeTop(list_of(X(2))(L(5))(X(4))(X(3)), bn, orphans); + + SymbolicBayesNet expectedBn = list_of + (SymbolicConditional::FromKeys(list_of(X(4))(L(5)), 2)) + (SymbolicConditional(X(2), X(4))) + (SymbolicConditional(X(3), X(2))); + EXPECT(assert_equal(expectedBn, bn)); + EXPECT(orphans.empty()); +} + +/* ************************************************************************* */ +TEST( BayesTree, removeTop5 ) +{ + // Remove top called with variables that are not in the Bayes tree + SymbolicFactorGraph graph = list_of + (SymbolicFactor(L(5))) + (SymbolicFactor(X(4), L(5))) + (SymbolicFactor(X(2), X(4))) + (SymbolicFactor(X(3), X(2))); + SymbolicBayesTree bayesTree = *graph.eliminateMultifrontal( + Ordering(list_of (X(3)) (X(2)) (X(4)) (L(5)) )); + + // Remove nonexistant + SymbolicBayesNet bn; + SymbolicBayesTree::Cliques orphans; + bayesTree.removeTop(list_of(X(10)), bn, orphans); + + SymbolicBayesNet expectedBn; + EXPECT(assert_equal(expectedBn, bn)); + EXPECT(orphans.empty()); +} + +/* ************************************************************************* */ +TEST( SymbolicBayesTree, thinTree ) { + + // create a thin-tree Bayesnet, a la Jean-Guillaume + SymbolicBayesNet bayesNet; + bayesNet.push_back(boost::make_shared(14)); + + bayesNet.push_back(boost::make_shared(13, 14)); + bayesNet.push_back(boost::make_shared(12, 14)); + + bayesNet.push_back(boost::make_shared(11, 13, 14)); + bayesNet.push_back(boost::make_shared(10, 13, 14)); + bayesNet.push_back(boost::make_shared(9, 12, 14)); + bayesNet.push_back(boost::make_shared(8, 12, 14)); + + bayesNet.push_back(boost::make_shared(7, 11, 13)); + bayesNet.push_back(boost::make_shared(6, 11, 13)); + bayesNet.push_back(boost::make_shared(5, 10, 13)); + bayesNet.push_back(boost::make_shared(4, 10, 13)); + + bayesNet.push_back(boost::make_shared(3, 9, 12)); + bayesNet.push_back(boost::make_shared(2, 9, 12)); + bayesNet.push_back(boost::make_shared(1, 8, 12)); + bayesNet.push_back(boost::make_shared(0, 8, 12)); + + if (debug) { + GTSAM_PRINT(bayesNet); + bayesNet.saveGraph("/tmp/symbolicBayesNet.dot"); + } + + // create a BayesTree out of a Bayes net + SymbolicBayesTree bayesTree = *SymbolicFactorGraph(bayesNet).eliminateMultifrontal(); + if (debug) { + GTSAM_PRINT(bayesTree); + bayesTree.saveGraph("/tmp/SymbolicBayesTree.dot"); + } + + SymbolicBayesTree::Clique::shared_ptr R = bayesTree.roots().front(); + + { + // check shortcut P(S9||R) to root + SymbolicBayesTree::Clique::shared_ptr c = bayesTree[9]; + SymbolicBayesNet shortcut = c->shortcut(R); + SymbolicBayesNet expected = list_of(SymbolicConditional(14, 11, 13)); + EXPECT(assert_equal(expected, shortcut)); + } + + { + // check shortcut P(S8||R) to root + SymbolicBayesTree::Clique::shared_ptr c = bayesTree[8]; + SymbolicBayesNet shortcut = c->shortcut(R); + SymbolicBayesNet expected = list_of(SymbolicConditional(14, 11, 13)); + EXPECT(assert_equal(expected, shortcut)); + } + + { + // check shortcut P(S4||R) to root + SymbolicBayesTree::Clique::shared_ptr c = bayesTree[4]; + SymbolicBayesNet shortcut = c->shortcut(R); + SymbolicBayesNet expected = list_of(SymbolicConditional(10, 11, 13)); + EXPECT(assert_equal(expected, shortcut)); + } + + { + // check shortcut P(S2||R) to root + SymbolicBayesTree::Clique::shared_ptr c = bayesTree[2]; + SymbolicBayesNet shortcut = c->shortcut(R); + SymbolicBayesNet expected = list_of(SymbolicConditional(9, 11, 12, 13)) + (SymbolicConditional(12, 11, 13)); + EXPECT(assert_equal(expected, shortcut)); + } + + { + // check shortcut P(S0||R) to root + SymbolicBayesTree::Clique::shared_ptr c = bayesTree[0]; + SymbolicBayesNet shortcut = c->shortcut(R); + SymbolicBayesNet expected = list_of(SymbolicConditional(8, 11, 12, 13)) + (SymbolicConditional(12, 11, 13)); + EXPECT(assert_equal(expected, shortcut)); + } + + SymbolicBayesNet::shared_ptr actualJoint; + + // Check joint P(8,2) + if (false) { // TODO, not disjoint + actualJoint = bayesTree.jointBayesNet(8, 2); + SymbolicBayesNet expected; + expected.push_back(boost::make_shared(8)); + expected.push_back(boost::make_shared(2, 8)); + EXPECT(assert_equal(expected, *actualJoint)); + } + + // Check joint P(1,2) + if (false) { // TODO, not disjoint + actualJoint = bayesTree.jointBayesNet(1, 2); + SymbolicBayesNet expected; + expected.push_back(boost::make_shared(2)); + expected.push_back(boost::make_shared(1, 2)); + EXPECT(assert_equal(expected, *actualJoint)); + } + + // Check joint P(2,6) + if (true) { + actualJoint = bayesTree.jointBayesNet(2, 6); + SymbolicBayesNet expected; + expected.push_back(boost::make_shared(2, 6)); + expected.push_back(boost::make_shared(6)); + EXPECT(assert_equal(expected, *actualJoint)); + } + + // Check joint P(4,6) + if (false) { // TODO, not disjoint + actualJoint = bayesTree.jointBayesNet(4, 6); + SymbolicBayesNet expected; + expected.push_back(boost::make_shared(6)); + expected.push_back(boost::make_shared(4, 6)); + EXPECT(assert_equal(expected, *actualJoint)); + } +} + +/* ************************************************************************* */ +TEST(SymbolicBayesTree, forest_joint) +{ + // Create forest + SymbolicBayesTreeClique::shared_ptr root1 = MakeClique(list_of(1), 1); + SymbolicBayesTreeClique::shared_ptr root2 = MakeClique(list_of(2), 1); + SymbolicBayesTree bayesTree; + bayesTree.insertRoot(root1); + bayesTree.insertRoot(root2); + + // Check joint + SymbolicBayesNet expected = list_of + (SymbolicConditional(1)) + (SymbolicConditional(2)); + SymbolicBayesNet actual = *bayesTree.jointBayesNet(1, 2); + + EXPECT(assert_equal(expected, actual)); +} + +/* ************************************************************************* * + Bayes tree for smoother with "natural" ordering: + C1 5 6 + C2 4 : 5 + C3 3 : 4 + C4 2 : 3 + C5 1 : 2 + C6 0 : 1 + **************************************************************************** */ + +TEST( SymbolicBayesTree, linear_smoother_shortcuts ) { + // Create smoother with 7 nodes + SymbolicFactorGraph smoother; + smoother.push_factor(0); + smoother.push_factor(0, 1); + smoother.push_factor(1, 2); + smoother.push_factor(2, 3); + smoother.push_factor(3, 4); + smoother.push_factor(4, 5); + smoother.push_factor(5, 6); + + // Eliminate in numerical order 0..6 + Ordering ordering(smoother.keys()); + SymbolicBayesNet bayesNet = *smoother.eliminateSequential(ordering); + + if (debug) { + GTSAM_PRINT(bayesNet); + bayesNet.saveGraph("/tmp/symbolicBayesNet.dot"); + } + + // create a BayesTree + SymbolicBayesTree bayesTree = *smoother.eliminateMultifrontal(ordering); + if (debug) { + GTSAM_PRINT(bayesTree); + bayesTree.saveGraph("/tmp/SymbolicBayesTree.dot"); + } + + SymbolicBayesTree::Clique::shared_ptr R = bayesTree.roots().front(); + + { + // check shortcut P(S2||R) to root + SymbolicBayesTree::Clique::shared_ptr c = bayesTree[4]; // 4 is frontal in C2 + SymbolicBayesNet shortcut = c->shortcut(R); + SymbolicBayesNet expected; + EXPECT(assert_equal(expected, shortcut)); + } + + { + // check shortcut P(S3||R) to root + SymbolicBayesTree::Clique::shared_ptr c = bayesTree[3]; // 3 is frontal in C3 + SymbolicBayesNet shortcut = c->shortcut(R); + SymbolicBayesNet expected = list_of(SymbolicConditional(4, 5)); + EXPECT(assert_equal(expected, shortcut)); + } + + { + // check shortcut P(S4||R) to root + SymbolicBayesTree::Clique::shared_ptr c = bayesTree[2]; // 2 is frontal in C4 + SymbolicBayesNet shortcut = c->shortcut(R); + SymbolicBayesNet expected = list_of(SymbolicConditional(3, 5)); + EXPECT(assert_equal(expected, shortcut)); + } +} + +/* ************************************************************************* */ +// from testSymbolicJunctionTree, which failed at one point +TEST(SymbolicBayesTree, complicatedMarginal) +{ + // Create the conditionals to go in the BayesTree + SymbolicBayesTreeClique::shared_ptr cur; + SymbolicBayesTreeClique::shared_ptr root = MakeClique(list_of(11)(12), 2); + cur = root; + + root->children += MakeClique(list_of(9)(10)(11)(12), 2); + root->children.back()->parent_ = root; + + root->children += MakeClique(list_of(7)(8)(11), 2); + root->children.back()->parent_ = root; + cur = root->children.back(); + + cur->children += MakeClique(list_of(5)(6)(7)(8), 2); + cur->children.back()->parent_ = cur; + cur = cur->children.back(); + + cur->children += MakeClique(list_of(3)(4)(6), 2); + cur->children.back()->parent_ = cur; + + cur->children += MakeClique(list_of(1)(2)(5), 2); + cur->children.back()->parent_ = cur; + + // Create Bayes Tree + SymbolicBayesTree bt; + bt.insertRoot(root); + if (debug) { + GTSAM_PRINT(bt); + bt.saveGraph("/tmp/SymbolicBayesTree.dot"); + } + + // Shortcut on 9 + { + SymbolicBayesTree::Clique::shared_ptr c = bt[9]; + SymbolicBayesNet shortcut = c->shortcut(root); + EXPECT(assert_equal(SymbolicBayesNet(), shortcut)); + } + + // Shortcut on 7 + { + SymbolicBayesTree::Clique::shared_ptr c = bt[7]; + SymbolicBayesNet shortcut = c->shortcut(root); + EXPECT(assert_equal(SymbolicBayesNet(), shortcut)); + } + + // Shortcut on 5 + { + SymbolicBayesTree::Clique::shared_ptr c = bt[5]; + SymbolicBayesNet shortcut = c->shortcut(root); + SymbolicBayesNet expected = list_of + (SymbolicConditional(7, 8, 11)) + (SymbolicConditional(8, 11)); + EXPECT(assert_equal(expected, shortcut)); + } + + // Shortcut on 3 + { + SymbolicBayesTree::Clique::shared_ptr c = bt[3]; + SymbolicBayesNet shortcut = c->shortcut(root); + SymbolicBayesNet expected = list_of(SymbolicConditional(6, 11)); + EXPECT(assert_equal(expected, shortcut)); + } + + // Shortcut on 1 + { + SymbolicBayesTree::Clique::shared_ptr c = bt[1]; + SymbolicBayesNet shortcut = c->shortcut(root); + SymbolicBayesNet expected = list_of(SymbolicConditional(5, 11)); + EXPECT(assert_equal(expected, shortcut)); + } + + // Marginal on 5 + { + SymbolicFactor::shared_ptr actual = bt.marginalFactor(5); + EXPECT(assert_equal(SymbolicFactor(5), *actual, 1e-1)); + } + + // Shortcut on 6 + { + SymbolicFactor::shared_ptr actual = bt.marginalFactor(6); + EXPECT(assert_equal(SymbolicFactor(6), *actual, 1e-1)); + } + +} +/* ************************************************************************* */ +int main() { + TestResult tr; + return TestRegistry::runAllTests(tr); +} +/* ************************************************************************* */ + diff --git a/gtsam/inference/tests/testConditional.cpp b/gtsam/symbolic/tests/testSymbolicConditional.cpp similarity index 67% rename from gtsam/inference/tests/testConditional.cpp rename to gtsam/symbolic/tests/testSymbolicConditional.cpp index 5ee695bb5..c173f4a74 100644 --- a/gtsam/inference/tests/testConditional.cpp +++ b/gtsam/symbolic/tests/testSymbolicConditional.cpp @@ -10,86 +10,84 @@ * -------------------------------------------------------------------------- */ /** - * @file testConditional.cpp - * @brief Unit tests for IndexConditional class + * @file testSymbolicConditional.cpp + * @brief Unit tests for SymbolicConditional class * @author Frank Dellaert */ -#include // for operator += -#include // for operator += +#include using namespace boost::assign; +#include #include -#include -#include +#include using namespace std; using namespace gtsam; /* ************************************************************************* */ -TEST( IndexConditional, empty ) +TEST( SymbolicConditional, empty ) { - IndexConditional c0; + SymbolicConditional c0; LONGS_EQUAL(0,c0.nrFrontals()) LONGS_EQUAL(0,c0.nrParents()) } /* ************************************************************************* */ -TEST( IndexConditional, noParents ) +TEST( SymbolicConditional, noParents ) { - IndexConditional c0(0); + SymbolicConditional c0(0); LONGS_EQUAL(1,c0.nrFrontals()) LONGS_EQUAL(0,c0.nrParents()) } /* ************************************************************************* */ -TEST( IndexConditional, oneParents ) +TEST( SymbolicConditional, oneParents ) { - IndexConditional c0(0,1); + SymbolicConditional c0(0,1); LONGS_EQUAL(1,c0.nrFrontals()) LONGS_EQUAL(1,c0.nrParents()) } /* ************************************************************************* */ -TEST( IndexConditional, twoParents ) +TEST( SymbolicConditional, twoParents ) { - IndexConditional c0(0,1,2); + SymbolicConditional c0(0,1,2); LONGS_EQUAL(1,c0.nrFrontals()) LONGS_EQUAL(2,c0.nrParents()) } /* ************************************************************************* */ -TEST( IndexConditional, threeParents ) +TEST( SymbolicConditional, threeParents ) { - IndexConditional c0(0,1,2,3); + SymbolicConditional c0(0,1,2,3); LONGS_EQUAL(1,c0.nrFrontals()) LONGS_EQUAL(3,c0.nrParents()) } /* ************************************************************************* */ -TEST( IndexConditional, fourParents ) +TEST( SymbolicConditional, fourParents ) { - vector parents; - parents += 1,2,3,4; - IndexConditional c0(0,parents); + SymbolicConditional c0 = SymbolicConditional::FromKeys( + list_of(0)(1)(2)(3)(4), 1); LONGS_EQUAL(1,c0.nrFrontals()) LONGS_EQUAL(4,c0.nrParents()) } /* ************************************************************************* */ -TEST( IndexConditional, FromRange ) +TEST( SymbolicConditional, FromRange ) { - vector keys; - keys += 1,2,3,4,5; - IndexConditional::shared_ptr c0(new IndexConditional(keys,2)); + SymbolicConditional::shared_ptr c0 = + boost::make_shared( + SymbolicConditional::FromKeys(list_of(1)(2)(3)(4)(5), 2)); LONGS_EQUAL(2,c0->nrFrontals()) LONGS_EQUAL(3,c0->nrParents()) } /* ************************************************************************* */ -TEST( IndexConditional, equals ) +TEST( SymbolicConditional, equals ) { - IndexConditional c0(0, 1, 2), c1(0, 1, 2), c2(1, 2, 3), c3(3,4); + SymbolicConditional c0(0, 1, 2), c1(0, 1, 2), c2(1, 2, 3), c3(3,4); CHECK(c0.equals(c1)); CHECK(c1.equals(c0)); CHECK(!c0.equals(c2)); diff --git a/gtsam/symbolic/tests/testSymbolicEliminationTree.cpp b/gtsam/symbolic/tests/testSymbolicEliminationTree.cpp new file mode 100644 index 000000000..c6d4c8937 --- /dev/null +++ b/gtsam/symbolic/tests/testSymbolicEliminationTree.cpp @@ -0,0 +1,89 @@ +/* ---------------------------------------------------------------------------- + + * GTSAM Copyright 2010, Georgia Tech Research Corporation, + * Atlanta, Georgia 30332-0415 + * All Rights Reserved + * Authors: Frank Dellaert, et al. (see THANKS for the full author list) + + * See LICENSE for the license information + + * -------------------------------------------------------------------------- */ + +/** + * @file testSymbolicEliminationTree.cpp + * @brief + * @author Richard Roberts + * @date Oct 14, 2010 + */ + +#include + +#include +#include + +#include +#include +#include + +#include "symbolicExampleGraphs.h" + +using namespace gtsam; +using namespace gtsam::symbol_shorthand; +using namespace std; + +class EliminationTreeTester { +public: + // build hardcoded tree + static SymbolicEliminationTree buildHardcodedTree(const SymbolicFactorGraph& fg) { + + SymbolicEliminationTree::sharedNode leaf0(new SymbolicEliminationTree::Node); + leaf0->key = 0; + leaf0->factors.push_back(fg[0]); + leaf0->factors.push_back(fg[1]); + + SymbolicEliminationTree::sharedNode node1(new SymbolicEliminationTree::Node); + node1->key = 1; + node1->factors.push_back(fg[2]); + node1->children.push_back(leaf0); + + SymbolicEliminationTree::sharedNode node2(new SymbolicEliminationTree::Node); + node2->key = 2; + node2->factors.push_back(fg[3]); + node2->children.push_back(node1); + + SymbolicEliminationTree::sharedNode leaf3(new SymbolicEliminationTree::Node); + leaf3->key = 3; + leaf3->factors.push_back(fg[4]); + + SymbolicEliminationTree::sharedNode root(new SymbolicEliminationTree::Node); + root->key = 4; + root->children.push_back(leaf3); + root->children.push_back(node2); + + SymbolicEliminationTree tree; + tree.roots_.push_back(root); + return tree; + } +}; + + +/* ************************************************************************* */ +TEST(EliminationTree, Create) +{ + SymbolicEliminationTree expected = + EliminationTreeTester::buildHardcodedTree(simpleTestGraph1); + + // Build from factor graph + Ordering order; + order += 0,1,2,3,4; + SymbolicEliminationTree actual(simpleTestGraph1, order); + + CHECK(assert_equal(expected, actual)); +} + +/* ************************************************************************* */ +int main() { + TestResult tr; + return TestRegistry::runAllTests(tr); +} +/* ************************************************************************* */ diff --git a/gtsam/symbolic/tests/testSymbolicFactor.cpp b/gtsam/symbolic/tests/testSymbolicFactor.cpp new file mode 100644 index 000000000..a9f01e2d9 --- /dev/null +++ b/gtsam/symbolic/tests/testSymbolicFactor.cpp @@ -0,0 +1,79 @@ +/* ---------------------------------------------------------------------------- + + * GTSAM Copyright 2010, Georgia Tech Research Corporation, + * Atlanta, Georgia 30332-0415 + * All Rights Reserved + * Authors: Frank Dellaert, et al. (see THANKS for the full author list) + + * See LICENSE for the license information + + * -------------------------------------------------------------------------- */ + +/** + * @file testSymbolicFactor.cpp + * @brief Unit tests for a symbolic IndexFactor + * @author Frank Dellaert + */ + +#include +#include +#include +#include +#include + +#include +#include +#include +#include + +using namespace std; +using namespace gtsam; +using namespace boost::assign; + +/* ************************************************************************* */ +#ifdef TRACK_ELIMINATE +TEST(SymbolicFactor, eliminate) { + vector keys; keys += 2, 3, 4, 6, 7, 9, 10, 11; + IndexFactor actual(keys.begin(), keys.end()); + BayesNet fragment = *actual.eliminate(3); + + IndexFactor expected(keys.begin()+3, keys.end()); + IndexConditional::shared_ptr expected0 = IndexConditional::FromRange(keys.begin(), keys.end(), 1); + IndexConditional::shared_ptr expected1 = IndexConditional::FromRange(keys.begin()+1, keys.end(), 1); + IndexConditional::shared_ptr expected2 = IndexConditional::FromRange(keys.begin()+2, keys.end(), 1); + + CHECK(assert_equal(fragment.size(), size_t(3))); + CHECK(assert_equal(expected, actual)); + BayesNet::const_iterator fragmentCond = fragment.begin(); + CHECK(assert_equal(**fragmentCond++, *expected0)); + CHECK(assert_equal(**fragmentCond++, *expected1)); + CHECK(assert_equal(**fragmentCond++, *expected2)); +} +#endif +/* ************************************************************************* */ +TEST(SymbolicFactor, EliminateSymbolic) +{ + const SymbolicFactorGraph factors = list_of + (SymbolicFactor(2,4,6)) + (SymbolicFactor(1,2,5)) + (SymbolicFactor(0,3)); + + const SymbolicFactor expectedFactor(4,5,6); + const SymbolicConditional expectedConditional = + SymbolicConditional::FromKeys(list_of(0)(1)(2)(3)(4)(5)(6), 4); + + SymbolicFactor::shared_ptr actualFactor; + SymbolicConditional::shared_ptr actualConditional; + boost::tie(actualConditional, actualFactor) = + EliminateSymbolic(factors, list_of(0)(1)(2)(3)); + + CHECK(assert_equal(expectedConditional, *actualConditional)); + CHECK(assert_equal(expectedFactor, *actualFactor)); +} + +/* ************************************************************************* */ +int main() { + TestResult tr; + return TestRegistry::runAllTests(tr); +} +/* ************************************************************************* */ diff --git a/gtsam/symbolic/tests/testSymbolicFactorGraph.cpp b/gtsam/symbolic/tests/testSymbolicFactorGraph.cpp new file mode 100644 index 000000000..c2eabdd20 --- /dev/null +++ b/gtsam/symbolic/tests/testSymbolicFactorGraph.cpp @@ -0,0 +1,309 @@ +/* ---------------------------------------------------------------------------- + + * GTSAM Copyright 2010, Georgia Tech Research Corporation, + * Atlanta, Georgia 30332-0415 + * All Rights Reserved + * Authors: Frank Dellaert, et al. (see THANKS for the full author list) + + * See LICENSE for the license information + + * -------------------------------------------------------------------------- */ + +/** + * @file testSymbolicFactorGraph.cpp + * @brief Unit tests for symbolic factor graphs + * @author Christian Potthast + **/ + +#include + +#include +#include +#include +#include +#include + +using namespace std; +using namespace gtsam; +using namespace boost::assign; + +/* ************************************************************************* */ +TEST(SymbolicFactorGraph, eliminateFullSequential) +{ + // Test with simpleTestGraph1 + Ordering order; + order += 0,1,2,3,4; + SymbolicBayesNet actual1 = *simpleTestGraph1.eliminateSequential(order); + EXPECT(assert_equal(simpleTestGraph1BayesNet, actual1)); + + // Test with Asia graph + SymbolicBayesNet actual2 = *asiaGraph.eliminateSequential(asiaOrdering); + EXPECT(assert_equal(asiaBayesNet, actual2)); +} + +/* ************************************************************************* */ +TEST(SymbolicFactorGraph, eliminatePartialSequential) +{ + // Eliminate 0 and 1 + const Ordering order = list_of(0)(1); + + const SymbolicBayesNet expectedBayesNet = list_of + (SymbolicConditional(0,1,2)) + (SymbolicConditional(1,2,3,4)); + + const SymbolicFactorGraph expectedSfg = list_of + (SymbolicFactor(2,3)) + (SymbolicFactor(4,5)) + (SymbolicFactor(2,3,4)); + + SymbolicBayesNet::shared_ptr actualBayesNet; + SymbolicFactorGraph::shared_ptr actualSfg; + boost::tie(actualBayesNet, actualSfg) = + simpleTestGraph2.eliminatePartialSequential(Ordering(list_of(0)(1))); + + EXPECT(assert_equal(expectedSfg, *actualSfg)); + EXPECT(assert_equal(expectedBayesNet, *actualBayesNet)); + + SymbolicBayesNet::shared_ptr actualBayesNet2; + SymbolicFactorGraph::shared_ptr actualSfg2; + boost::tie(actualBayesNet2, actualSfg2) = + simpleTestGraph2.eliminatePartialSequential(list_of(0)(1).convert_to_container >()); + + EXPECT(assert_equal(expectedSfg, *actualSfg2)); + EXPECT(assert_equal(expectedBayesNet, *actualBayesNet2)); +} + +/* ************************************************************************* */ +TEST(SymbolicFactorGraph, eliminateFullMultifrontal) +{ + Ordering ordering; ordering += 0,1,2,3; + SymbolicBayesTree actual1 = + *simpleChain.eliminateMultifrontal(ordering); + EXPECT(assert_equal(simpleChainBayesTree, actual1)); + + SymbolicBayesTree actual2 = + *asiaGraph.eliminateMultifrontal(asiaOrdering); + EXPECT(assert_equal(asiaBayesTree, actual2)); +} + +/* ************************************************************************* */ +TEST(SymbolicFactorGraph, eliminatePartialMultifrontal) +{ + SymbolicBayesTree expectedBayesTree; + SymbolicConditional::shared_ptr root = boost::make_shared( + SymbolicConditional::FromKeys(list_of(4)(5)(1), 2)); + expectedBayesTree.insertRoot(boost::make_shared(root)); + + SymbolicFactorGraph expectedFactorGraph = list_of + (SymbolicFactor(0,1)) + (SymbolicFactor(0,2)) + (SymbolicFactor(1,3)) + (SymbolicFactor(2,3)) + (SymbolicFactor(1)); + + SymbolicBayesTree::shared_ptr actualBayesTree; + SymbolicFactorGraph::shared_ptr actualFactorGraph; + boost::tie(actualBayesTree, actualFactorGraph) = + simpleTestGraph2.eliminatePartialMultifrontal(Ordering(list_of(4)(5))); + + EXPECT(assert_equal(expectedFactorGraph, *actualFactorGraph)); + EXPECT(assert_equal(expectedBayesTree, *actualBayesTree)); + + SymbolicBayesTree expectedBayesTree2; + SymbolicBayesTreeClique::shared_ptr root2 = boost::make_shared( + boost::make_shared(4,1)); + root2->children.push_back(boost::make_shared( + boost::make_shared(5,4))); + expectedBayesTree2.insertRoot(root2); + + SymbolicBayesTree::shared_ptr actualBayesTree2; + SymbolicFactorGraph::shared_ptr actualFactorGraph2; + boost::tie(actualBayesTree2, actualFactorGraph2) = + simpleTestGraph2.eliminatePartialMultifrontal(list_of(4)(5).convert_to_container >()); + + EXPECT(assert_equal(expectedFactorGraph, *actualFactorGraph2)); + EXPECT(assert_equal(expectedBayesTree2, *actualBayesTree2)); +} + +/* ************************************************************************* */ +TEST(SymbolicFactorGraph, marginalMultifrontalBayesNet) +{ + SymbolicBayesNet expectedBayesNet = list_of + (SymbolicConditional(0, 1, 2)) + (SymbolicConditional(1, 2, 3)) + (SymbolicConditional(2, 3)) + (SymbolicConditional(3)); + + SymbolicBayesNet actual1 = *simpleTestGraph2.marginalMultifrontalBayesNet( + Ordering(list_of(0)(1)(2)(3))); + EXPECT(assert_equal(expectedBayesNet, actual1)); +} + +/* ************************************************************************* */ +TEST(SymbolicFactorGraph, eliminate_disconnected_graph) { + SymbolicFactorGraph fg; + fg.push_factor(0, 1); + fg.push_factor(0, 2); + fg.push_factor(1, 2); + fg.push_factor(3, 4); + + // create expected Chordal bayes Net + SymbolicBayesNet expected; + expected.push_back(boost::make_shared(0,1,2)); + expected.push_back(boost::make_shared(1,2)); + expected.push_back(boost::make_shared(2)); + expected.push_back(boost::make_shared(3,4)); + expected.push_back(boost::make_shared(4)); + + Ordering order; + order += 0,1,2,3,4; + SymbolicBayesNet actual = *fg.eliminateSequential(order); + + EXPECT(assert_equal(expected,actual)); +} + +/* ************************************************************************* */ +//TEST(SymbolicFactorGraph, marginals) +//{ +// // Create factor graph +// SymbolicFactorGraph fg; +// fg.push_factor(0, 1); +// fg.push_factor(0, 2); +// fg.push_factor(1, 4); +// fg.push_factor(2, 4); +// fg.push_factor(3, 4); +// +// // eliminate +// SymbolicSequentialSolver solver(fg); +// SymbolicBayesNet::shared_ptr actual = solver.eliminate(); +// SymbolicBayesNet expected; +// expected.push_front(boost::make_shared(4)); +// expected.push_front(boost::make_shared(3, 4)); +// expected.push_front(boost::make_shared(2, 4)); +// expected.push_front(boost::make_shared(1, 2, 4)); +// expected.push_front(boost::make_shared(0, 1, 2)); +// EXPECT(assert_equal(expected,*actual)); +// +// { +// // jointBayesNet +// vector js; +// js.push_back(0); +// js.push_back(4); +// js.push_back(3); +// SymbolicBayesNet::shared_ptr actualBN = solver.jointBayesNet(js); +// SymbolicBayesNet expectedBN; +// expectedBN.push_front(boost::make_shared(3)); +// expectedBN.push_front(boost::make_shared(4, 3)); +// expectedBN.push_front(boost::make_shared(0, 4)); +// EXPECT( assert_equal(expectedBN,*actualBN)); +// +// // jointFactorGraph +// SymbolicFactorGraph::shared_ptr actualFG = solver.jointFactorGraph(js); +// SymbolicFactorGraph expectedFG; +// expectedFG.push_factor(0, 4); +// expectedFG.push_factor(4, 3); +// expectedFG.push_factor(3); +// EXPECT( assert_equal(expectedFG,(SymbolicFactorGraph)(*actualFG))); +// } +// +// { +// // jointBayesNet +// vector js; +// js.push_back(0); +// js.push_back(2); +// js.push_back(3); +// SymbolicBayesNet::shared_ptr actualBN = solver.jointBayesNet(js); +// SymbolicBayesNet expectedBN; +// expectedBN.push_front(boost::make_shared(2)); +// expectedBN.push_front(boost::make_shared(3, 2)); +// expectedBN.push_front(boost::make_shared(0, 3, 2)); +// EXPECT( assert_equal(expectedBN,*actualBN)); +// +// // jointFactorGraph +// SymbolicFactorGraph::shared_ptr actualFG = solver.jointFactorGraph(js); +// SymbolicFactorGraph expectedFG; +// expectedFG.push_factor(0, 3, 2); +// expectedFG.push_factor(3, 2); +// expectedFG.push_factor(2); +// EXPECT( assert_equal(expectedFG,(SymbolicFactorGraph)(*actualFG))); +// } +// +// { +// // conditionalBayesNet +// vector js; +// js.push_back(0); +// js.push_back(2); +// js.push_back(3); +// size_t nrFrontals = 2; +// SymbolicBayesNet::shared_ptr actualBN = // +// solver.conditionalBayesNet(js, nrFrontals); +// SymbolicBayesNet expectedBN; +// expectedBN.push_front(boost::make_shared(2, 3)); +// expectedBN.push_front(boost::make_shared(0, 2, 3)); +// EXPECT( assert_equal(expectedBN,*actualBN)); +// } +//} + +/* ************************************************************************* */ +TEST( SymbolicFactorGraph, constructFromBayesNet ) +{ + // create expected factor graph + SymbolicFactorGraph expected; + expected.push_factor(0, 1, 2); + expected.push_factor(1, 2); + expected.push_factor(1); + + // create Bayes Net + SymbolicBayesNet bayesNet; + bayesNet += SymbolicConditional(0, 1, 2); + bayesNet += SymbolicConditional(1, 2); + bayesNet += SymbolicConditional(1); + + // create actual factor graph from a Bayes Net + SymbolicFactorGraph actual(bayesNet); + + CHECK(assert_equal(expected, actual)); +} + +/* ************************************************************************* */ +TEST( SymbolicFactorGraph, constructFromBayesTree ) +{ + // create expected factor graph + SymbolicFactorGraph expected; + expected.push_factor(_S_, _E_, _L_, _B_); + expected.push_factor(_T_, _E_, _L_); + expected.push_factor(_X_, _E_); + + // create actual factor graph + SymbolicFactorGraph actual(asiaBayesTree); + + CHECK(assert_equal(expected, actual)); +} + +/* ************************************************************************* */ +TEST( SymbolicFactorGraph, push_back ) +{ + // Create two factor graphs and expected combined graph + SymbolicFactorGraph fg1, fg2, expected; + + fg1.push_factor(1); + fg1.push_factor(0, 1); + + fg2.push_factor(1, 2); + fg2.push_factor(0, 2); + + expected.push_factor(1); + expected.push_factor(0, 1); + expected.push_factor(1, 2); + expected.push_factor(0, 2); + + // combine + SymbolicFactorGraph actual; + actual.push_back(fg1); + actual.push_back(fg2); + CHECK(assert_equal(expected, actual)); +} + +/* ************************************************************************* */ +int main() { TestResult tr; return TestRegistry::runAllTests(tr); } +/* ************************************************************************* */ diff --git a/gtsam/inference/tests/testISAM.cpp b/gtsam/symbolic/tests/testSymbolicISAM.cpp similarity index 73% rename from gtsam/inference/tests/testISAM.cpp rename to gtsam/symbolic/tests/testSymbolicISAM.cpp index 0e8f1a75d..52dfbacb8 100644 --- a/gtsam/inference/tests/testISAM.cpp +++ b/gtsam/symbolic/tests/testSymbolicISAM.cpp @@ -21,16 +21,12 @@ using namespace boost::assign; #include -#include -#include -#include -#include +#include +#include using namespace std; using namespace gtsam; -typedef ISAM SymbolicISAM; - /* ************************************************************************* */ // Some numbers that should be consistent among all smoother tests @@ -80,42 +76,27 @@ typedef ISAM SymbolicISAM; // bayesTree.insert(X); // return bayesTree; //} -// -///* ************************************************************************* */ -//TEST( ISAM, iSAM ) -//{ -// SymbolicISAM bayesTree = createAsiaSymbolicISAM(); -// -// // Now we modify the Bayes tree by inserting a new factor over B and S -// -// // New conditionals in modified top of the tree -// SymbolicConditional::shared_ptr -// S_(new SymbolicConditional("S")), -// B_(new SymbolicConditional("B", "S")), -// L_(new SymbolicConditional("L", "B", "S")); -// -// // Create expected Bayes tree -// SymbolicISAM expected; -// expected.insert(S_); -// expected.insert(B_); -// expected.insert(L_); -// expected.insert(E); -// expected.insert(T); -// expected.insert(X); -// -// // create new factors to be inserted -// SymbolicFactorGraph factorGraph; -// factorGraph.push_factor("B","S"); -// factorGraph.push_factor("B"); -// -// // do incremental inference -// bayesTree.update(factorGraph); -// -// // Check whether the same -// CHECK(assert_equal(expected,bayesTree)); -//} -// -///* ************************************************************************* */ + +/* ************************************************************************* */ +TEST( SymbolicISAM, iSAM ) +{ + // Now we modify the Bayes tree by inserting a new factor over B and S + + SymbolicFactorGraph fullGraph; + fullGraph += asiaGraph; + fullGraph += SymbolicFactor(_B_, _S_); + + // This ordering is chosen to match the one chosen by COLAMD during the ISAM update + SymbolicBayesTree expected = *fullGraph.eliminateMultifrontal(Ordering(list_of(_X_)(_B_)(_S_)(_E_)(_L_)(_T_))); + + // Add factor on B and S + SymbolicISAM actual = *asiaGraph.eliminateMultifrontal(); + + // Check whether the same + EXPECT(assert_equal(expected, (const SymbolicBayesTree&)actual)); +} + +/* ************************************************************************* */ //TEST( ISAM, iSAM_slam ) //{ // // Create using insert diff --git a/gtsam/symbolic/tests/testSymbolicJunctionTree.cpp b/gtsam/symbolic/tests/testSymbolicJunctionTree.cpp new file mode 100644 index 000000000..07c0c58e7 --- /dev/null +++ b/gtsam/symbolic/tests/testSymbolicJunctionTree.cpp @@ -0,0 +1,65 @@ +/* ---------------------------------------------------------------------------- + + * GTSAM Copyright 2010, Georgia Tech Research Corporation, + * Atlanta, Georgia 30332-0415 + * All Rights Reserved + * Authors: Frank Dellaert, et al. (see THANKS for the full author list) + + * See LICENSE for the license information + + * -------------------------------------------------------------------------- */ + +/** + * @file testJunctionTree.cpp + * @brief Unit tests for Junction Tree + * @author Kai Ni + * @author Frank Dellaert + */ + +#include +#include + +#include +#include +#include + +#include +using namespace boost::assign; + +#include "symbolicExampleGraphs.h" + +using namespace gtsam; +using namespace std; + +/* ************************************************************************* * + * 1 - 0 - 2 - 3 + * 2 3 + * 0 1 : 2 + ****************************************************************************/ +TEST( JunctionTree, constructor ) +{ + Ordering order; order += 0, 1, 2, 3; + + SymbolicJunctionTree actual(SymbolicEliminationTree(simpleChain, order)); + + vector frontal1 = list_of(2)(3); + vector frontal2 = list_of(0)(1); + vector sep1; + vector sep2 = list_of(2); + EXPECT(assert_equal(frontal1, actual.roots().front()->keys)); + //EXPECT(assert_equal(sep1, actual.roots().front()->separator)); + LONGS_EQUAL(1, (long)actual.roots().front()->factors.size()); + EXPECT(assert_equal(frontal2, actual.roots().front()->children.front()->keys)); + //EXPECT(assert_equal(sep2, actual.roots().front()->children.front()->separator)); + LONGS_EQUAL(2, (long)actual.roots().front()->children.front()->factors.size()); + EXPECT(assert_equal(*simpleChain[2], *actual.roots().front()->factors[0])); + EXPECT(assert_equal(*simpleChain[0], *actual.roots().front()->children.front()->factors[0])); + EXPECT(assert_equal(*simpleChain[1], *actual.roots().front()->children.front()->factors[1])); +} + +/* ************************************************************************* */ +int main() { + TestResult tr; + return TestRegistry::runAllTests(tr); +} +/* ************************************************************************* */ diff --git a/gtsam/inference/tests/testVariableIndex.cpp b/gtsam/symbolic/tests/testVariableIndexUnordered.cpp similarity index 86% rename from gtsam/inference/tests/testVariableIndex.cpp rename to gtsam/symbolic/tests/testVariableIndexUnordered.cpp index bc2af6565..5cb0f68b7 100644 --- a/gtsam/inference/tests/testVariableIndex.cpp +++ b/gtsam/symbolic/tests/testVariableIndexUnordered.cpp @@ -16,11 +16,14 @@ * @date Sep 26, 2010 */ +#include +using namespace boost::assign; + #include #include #include -#include +#include using namespace std; using namespace gtsam; @@ -44,8 +47,8 @@ TEST(VariableIndex, augment) { VariableIndex actual(fg1); actual.augment(fg2); - LONGS_EQUAL(16, actual.nEntries()); - LONGS_EQUAL(8, actual.nFactors()); + LONGS_EQUAL(16, (long)actual.nEntries()); + LONGS_EQUAL(8, (long)actual.nFactors()); EXPECT(assert_equal(expected, actual)); } @@ -71,11 +74,13 @@ TEST(VariableIndex, remove) { // The expected VariableIndex has the same factor indices as fgCombined but // with entries from fg1 removed, and still has all 10 variables. - VariableIndex expected(fg2removed, 10); + VariableIndex expected(fg2removed); VariableIndex actual(fgCombined); vector indices; indices.push_back(0); indices.push_back(1); indices.push_back(2); indices.push_back(3); - actual.remove(indices, fg1); + actual.remove(indices.begin(), indices.end(), fg1); + std::list unusedVariables; unusedVariables += 0, 9; + actual.removeUnusedVariables(unusedVariables.begin(), unusedVariables.end()); CHECK(assert_equal(expected, actual)); } @@ -108,7 +113,9 @@ TEST(VariableIndex, deep_copy) { VariableIndex clone(original); vector indices; indices.push_back(0); indices.push_back(1); indices.push_back(2); indices.push_back(3); - clone.remove(indices, fg1); + clone.remove(indices.begin(), indices.end(), fg1); + std::list unusedVariables; unusedVariables += 0, 9; + clone.removeUnusedVariables(unusedVariables.begin(), unusedVariables.end()); // When modifying the clone, the original should have stayed the same EXPECT(assert_equal(expectedOriginal, original)); diff --git a/gtsam_unstable/CMakeLists.txt b/gtsam_unstable/CMakeLists.txt index f59208ef6..e0ce9b3a5 100644 --- a/gtsam_unstable/CMakeLists.txt +++ b/gtsam_unstable/CMakeLists.txt @@ -3,7 +3,7 @@ set (gtsam_unstable_subdirs base geometry - discrete + #discrete linear dynamics nonlinear @@ -18,11 +18,20 @@ add_custom_target(check.unstable COMMAND ${CMAKE_CTEST_COMMAND} --output-on-fail # Add the full name to this list, as in the following example # Sources to remove from builds set (excluded_sources # "") + "${CMAKE_CURRENT_SOURCE_DIR}/nonlinear/BatchFixedLagSmoother.cpp" + "${CMAKE_CURRENT_SOURCE_DIR}/nonlinear/ConcurrentBatchFilter.cpp" + "${CMAKE_CURRENT_SOURCE_DIR}/nonlinear/ConcurrentBatchSmoother.cpp" + "${CMAKE_CURRENT_SOURCE_DIR}/nonlinear/ConcurrentFilteringAndSmoothing.cpp" + "${CMAKE_CURRENT_SOURCE_DIR}/nonlinear/ConcurrentIncrementalFilter.cpp" + "${CMAKE_CURRENT_SOURCE_DIR}/nonlinear/ConcurrentIncrementalSmoother.cpp" + "${CMAKE_CURRENT_SOURCE_DIR}/nonlinear/FixedLagSmoother.cpp" + "${CMAKE_CURRENT_SOURCE_DIR}/nonlinear/IncrementalFixedLagSmoother.cpp" + "${CMAKE_CURRENT_SOURCE_DIR}/slam/serialization.cpp" ) set (excluded_headers # "") - "${CMAKE_CURRENT_SOURCE_DIR}/slam/serialization.h" + "${CMAKE_CURRENT_SOURCE_DIR}/slam/serialization.h" ) # assemble core libaries @@ -48,7 +57,7 @@ endforeach(subdir) set(gtsam_unstable_srcs ${base_srcs} ${geometry_srcs} - ${discrete_srcs} + #${discrete_srcs} ${dynamics_srcs} ${linear_srcs} ${nonlinear_srcs} diff --git a/gtsam_unstable/discrete/CSP.cpp b/gtsam_unstable/discrete/CSP.cpp index 7b27fb866..93fef162c 100644 --- a/gtsam_unstable/discrete/CSP.cpp +++ b/gtsam_unstable/discrete/CSP.cpp @@ -25,7 +25,7 @@ namespace gtsam { void CSP::runArcConsistency(size_t cardinality, size_t nrIterations, bool print) const { // Create VariableIndex - VariableIndex index(*this); + VariableIndexOrdered index(*this); // index.print(); size_t n = index.size(); @@ -46,7 +46,7 @@ namespace gtsam { // keep track of which domains changed changed[v] = false; // loop over all factors/constraints for variable v - const VariableIndex::Factors& factors = index[v]; + const VariableIndexOrdered::Factors& factors = index[v]; BOOST_FOREACH(size_t f,factors) { // if not already a singleton if (!domains[v].isSingleton()) { diff --git a/gtsam_unstable/discrete/tests/testCSP.cpp b/gtsam_unstable/discrete/tests/testCSP.cpp index dd155e817..29cea692c 100644 --- a/gtsam_unstable/discrete/tests/testCSP.cpp +++ b/gtsam_unstable/discrete/tests/testCSP.cpp @@ -128,7 +128,7 @@ TEST_UNSAFE( CSP, WesternUS) // Write out the dual graph for hmetis #ifdef DUAL - VariableIndex index(csp); + VariableIndexOrdered index(csp); index.print("index"); ofstream os("/Users/dellaert/src/hmetis-1.5-osx-i686/US-West-dual.txt"); index.outputMetisFormat(os); diff --git a/gtsam_unstable/discrete/tests/testSudoku.cpp b/gtsam_unstable/discrete/tests/testSudoku.cpp index d2ce16a12..415a73dc1 100644 --- a/gtsam_unstable/discrete/tests/testSudoku.cpp +++ b/gtsam_unstable/discrete/tests/testSudoku.cpp @@ -179,7 +179,7 @@ TEST_UNSAFE( Sudoku, extreme) sudoku.runArcConsistency(9,10,PRINT); #ifdef METIS - VariableIndex index(sudoku); + VariableIndexOrdered index(sudoku); index.print("index"); ofstream os("/Users/dellaert/src/hmetis-1.5-osx-i686/extreme-dual.txt"); index.outputMetisFormat(os); diff --git a/gtsam_unstable/dynamics/tests/testIMUSystem.cpp b/gtsam_unstable/dynamics/tests/testIMUSystem.cpp index 0cefa315c..ddee01b9b 100644 --- a/gtsam_unstable/dynamics/tests/testIMUSystem.cpp +++ b/gtsam_unstable/dynamics/tests/testIMUSystem.cpp @@ -69,13 +69,13 @@ TEST( testIMUSystem, optimize_chain ) { // assemble simple graph with IMU measurements and velocity constraints NonlinearFactorGraph graph; - graph.add(NonlinearEquality(x1, pose1)); - graph.add(IMUFactor(imu12, dt, x1, x2, model)); - graph.add(IMUFactor(imu23, dt, x2, x3, model)); - graph.add(IMUFactor(imu34, dt, x3, x4, model)); - graph.add(VelocityConstraint(x1, x2, dt)); - graph.add(VelocityConstraint(x2, x3, dt)); - graph.add(VelocityConstraint(x3, x4, dt)); + graph += NonlinearEquality(x1, pose1); + graph += IMUFactor(imu12, dt, x1, x2, model); + graph += IMUFactor(imu23, dt, x2, x3, model); + graph += IMUFactor(imu34, dt, x3, x4, model); + graph += VelocityConstraint(x1, x2, dt); + graph += VelocityConstraint(x2, x3, dt); + graph += VelocityConstraint(x3, x4, dt); // ground truth values Values true_values; @@ -116,10 +116,10 @@ TEST( testIMUSystem, optimize_chain_fullfactor ) { // assemble simple graph with IMU measurements and velocity constraints NonlinearFactorGraph graph; - graph.add(NonlinearEquality(x1, pose1)); - graph.add(FullIMUFactor(imu12, dt, x1, x2, model)); - graph.add(FullIMUFactor(imu23, dt, x2, x3, model)); - graph.add(FullIMUFactor(imu34, dt, x3, x4, model)); + graph += NonlinearEquality(x1, pose1); + graph += FullIMUFactor(imu12, dt, x1, x2, model); + graph += FullIMUFactor(imu23, dt, x2, x3, model); + graph += FullIMUFactor(imu34, dt, x3, x4, model); // ground truth values Values true_values; @@ -158,7 +158,7 @@ TEST( testIMUSystem, linear_trajectory) { Values true_traj, init_traj; NonlinearFactorGraph graph; - graph.add(NonlinearEquality(x0, start)); + graph += NonlinearEquality(x0, start); true_traj.insert(x0, start); init_traj.insert(x0, start); @@ -167,7 +167,7 @@ TEST( testIMUSystem, linear_trajectory) { for (size_t i=1; i(accel - g, gyro, dt, xA, xB, model)); + graph += FullIMUFactor(accel - g, gyro, dt, xA, xB, model); true_traj.insert(xB, cur_pose); init_traj.insert(xB, PoseRTV()); } diff --git a/gtsam_unstable/examples/FixedLagSmootherExample.cpp b/gtsam_unstable/examples/FixedLagSmootherExample.cpp index 69712540c..1fcc00c73 100644 --- a/gtsam_unstable/examples/FixedLagSmootherExample.cpp +++ b/gtsam_unstable/examples/FixedLagSmootherExample.cpp @@ -45,7 +45,7 @@ #include // We will use simple integer Keys to uniquely identify each robot pose. -#include +#include // We will use Pose2 variables (x, y, theta) to represent the robot positions #include diff --git a/gtsam_unstable/linear/bayesTreeOperations.cpp b/gtsam_unstable/linear/bayesTreeOperations.cpp index c2e84dec3..d9c9eda32 100644 --- a/gtsam_unstable/linear/bayesTreeOperations.cpp +++ b/gtsam_unstable/linear/bayesTreeOperations.cpp @@ -13,14 +13,6 @@ using namespace std; namespace gtsam { -/* ************************************************************************* */ -std::set keysToIndices(const KeySet& keys, const Ordering& ordering) { - std::set result; - BOOST_FOREACH(const Key& key, keys) - result.insert(ordering[key]); - return result; -} - /* ************************************************************************* */ GaussianFactorGraph splitFactors(const GaussianFactorGraph& fullgraph) { GaussianFactorGraph result; @@ -55,9 +47,9 @@ GaussianFactorGraph splitFactor(const GaussianFactor::shared_ptr& factor) { size_t nrRows = std::min(rowsRemaining, varDim); // Extract submatrices - std::vector > matrices; + std::vector > matrices; for (colIt = rowIt; colIt != jf->end(); ++colIt) { - Index idx = *colIt; + Key idx = *colIt; const Matrix subA = jf->getA(colIt).middleRows(startRow, nrRows); matrices.push_back(make_pair(idx, subA)); } @@ -103,11 +95,11 @@ void findCliques(const GaussianBayesTree::sharedClique& current_clique, /* ************************************************************************* */ std::set findAffectedCliqueConditionals( - const GaussianBayesTree& bayesTree, const std::set& savedIndices) { + const GaussianBayesTree& bayesTree, const std::set& savedIndices) { std::set affected_cliques; // FIXME: track previously found keys more efficiently - BOOST_FOREACH(const Index& index, savedIndices) { - GaussianBayesTree::sharedClique clique = bayesTree.nodes()[index]; + BOOST_FOREACH(const Key& index, savedIndices) { + GaussianBayesTree::sharedClique clique = bayesTree[index]; // add path back to root to affected set findCliques(clique, affected_cliques); diff --git a/gtsam_unstable/linear/bayesTreeOperations.h b/gtsam_unstable/linear/bayesTreeOperations.h index 3f85dc12c..1658c8973 100644 --- a/gtsam_unstable/linear/bayesTreeOperations.h +++ b/gtsam_unstable/linear/bayesTreeOperations.h @@ -12,15 +12,9 @@ #include #include #include -#include namespace gtsam { -// Managing orderings - -/** Converts sets of keys to indices by way of orderings */ -GTSAM_UNSTABLE_EXPORT std::set keysToIndices(const KeySet& keys, const Ordering& ordering); - // Linear Graph Operations /** @@ -47,7 +41,7 @@ GTSAM_UNSTABLE_EXPORT GaussianFactorGraph removePriors(const GaussianFactorGraph * @return the set of conditionals extracted from cliques. */ GTSAM_UNSTABLE_EXPORT std::set findAffectedCliqueConditionals( - const GaussianBayesTree& bayesTree, const std::set& savedIndices); + const GaussianBayesTree& bayesTree, const std::set& savedIndices); /** * Recursively traverses from a given clique in a Bayes Tree and collects all of the conditionals @@ -78,11 +72,11 @@ GaussianFactorGraph liquefy(const typename BAYESTREE::sharedClique& root, bool s if (root && root->conditional()) { GaussianConditional::shared_ptr conditional = root->conditional(); if (!splitConditionals) - result.push_back(conditional->toFactor()); + result.push_back(conditional); else - result.push_back(splitFactor(conditional->toFactor())); + result.push_back(splitFactor(conditional)); } - BOOST_FOREACH(const typename BAYESTREE::sharedClique& child, root->children()) + BOOST_FOREACH(typename BAYESTREE::sharedClique child, root->children) result.push_back(liquefy(child, splitConditionals)); return result; } @@ -94,7 +88,10 @@ GaussianFactorGraph liquefy(const typename BAYESTREE::sharedClique& root, bool s */ template GaussianFactorGraph liquefy(const BAYESTREE& bayesTree, bool splitConditionals = false) { - return liquefy(bayesTree.root(), splitConditionals); + GaussianFactorGraph result; + BOOST_FOREACH(const typename BAYESTREE::sharedClique& root, bayesTree.roots()) + result.push_back(liquefy(root, splitConditionals)); + return result; } } // \namespace gtsam diff --git a/gtsam_unstable/linear/conditioning.cpp b/gtsam_unstable/linear/conditioning.cpp index c02f4d48c..63871776c 100644 --- a/gtsam_unstable/linear/conditioning.cpp +++ b/gtsam_unstable/linear/conditioning.cpp @@ -8,13 +8,16 @@ #include #include +#include + using namespace std; +using namespace boost::assign; namespace gtsam { /* ************************************************************************* */ GaussianConditional::shared_ptr conditionDensity(const GaussianConditional::shared_ptr& initConditional, - const std::set& saved_indices, const VectorValues& solution) { + const std::set& saved_indices, const VectorValues& solution) { const bool verbose = false; if (!initConditional) @@ -26,12 +29,12 @@ GaussianConditional::shared_ptr conditionDensity(const GaussianConditional::shar } // Check for presence of variables to remove - std::set frontalsToRemove, parentsToRemove; - BOOST_FOREACH(const Index& frontal, initConditional->frontals()) + std::set frontalsToRemove, parentsToRemove; + BOOST_FOREACH(const Key& frontal, initConditional->frontals()) if (!saved_indices.count(frontal)) frontalsToRemove.insert(frontal); - BOOST_FOREACH(const Index& parent, initConditional->parents()) + BOOST_FOREACH(const Key& parent, initConditional->parents()) if (!saved_indices.count(parent)) parentsToRemove.insert(parent); @@ -50,14 +53,14 @@ GaussianConditional::shared_ptr conditionDensity(const GaussianConditional::shar size_t oldOffset = 0; vector newDims, oldDims; vector oldColOffsets; - vector newIndices; + vector newIndices; vector newIdxToOldIdx; // Access to arrays, maps from new var to old var - const vector& oldIndices = initConditional->keys(); + const vector& oldIndices = initConditional->keys(); const size_t oldNrFrontals = initConditional->nrFrontals(); GaussianConditional::const_iterator varIt = initConditional->beginFrontals(); size_t oldIdx = 0; for (; varIt != initConditional->endFrontals(); ++varIt) { - size_t varDim = initConditional->dim(varIt); + size_t varDim = initConditional->getDim(varIt); oldDims += varDim; if (!frontalsToRemove.count(*varIt)) { newTotalCols += varDim; @@ -73,7 +76,7 @@ GaussianConditional::shared_ptr conditionDensity(const GaussianConditional::shar } varIt = initConditional->beginParents(); for (; varIt != initConditional->endParents(); ++varIt) { - size_t varDim = initConditional->dim(varIt); + size_t varDim = initConditional->getDim(varIt); oldDims += varDim; if (!parentsToRemove.count(*varIt)) { newTotalCols += varDim; @@ -99,7 +102,7 @@ GaussianConditional::shared_ptr conditionDensity(const GaussianConditional::shar size_t oldColOffset = oldColOffsets.at(newfrontalIdx); // get R block, sliced by row - Eigen::Block rblock = + Eigen::Block rblock = initConditional->get_R().block(oldColOffset, oldColOffset, dim, oldRNrCols-oldColOffset); if (verbose) cout << " rblock\n" << rblock << endl; @@ -113,14 +116,14 @@ GaussianConditional::shared_ptr conditionDensity(const GaussianConditional::shar if (verbose) cout << " full_matrix: set rhs\n" << full_matrix << endl; // set sigmas - sigmas.segment(newColOffset, dim) = initConditional->get_sigmas().segment(oldColOffset, dim); + sigmas.segment(newColOffset, dim) = initConditional->get_model()->sigmas().segment(oldColOffset, dim); // add parents in R block, while updating rhs // Loop through old variable list size_t newParentStartCol = newColOffset + dim; size_t oldParentStartCol = dim; // Copying from Rblock - offset already accounted for for (size_t oldIdx = newIdxToOldIdx[newfrontalIdx]+1; oldIdxbeginParents(); for (; oldParent != initConditional->endParents(); ++oldParent) { - Index parentKey = *oldParent; - size_t parentDim = initConditional->dim(oldParent); + Key parentKey = *oldParent; + size_t parentDim = initConditional->getDim(oldParent); if (verbose) cout << " Adding parents from S: parentKey: " << parentKey << " parentDim: " << parentDim << endl; if (parentsToRemove.count(parentKey)) { // Solve out the variable @@ -169,17 +172,18 @@ GaussianConditional::shared_ptr conditionDensity(const GaussianConditional::shar // Construct a new conditional if (verbose) cout << "backsubSummarize() Complete!" << endl; - GaussianConditional::rsd_type matrices(full_matrix, newDims.begin(), newDims.end()); +// GaussianConditional::rsd_type matrices(full_matrix, newDims.begin(), newDims.end()); + VerticalBlockMatrix matrices(newDims, full_matrix); return GaussianConditional::shared_ptr(new - GaussianConditional(newIndices.begin(), newIndices.end(), newNrFrontals, matrices, sigmas)); + GaussianConditional(newIndices, newNrFrontals, matrices, noiseModel::Diagonal::Sigmas(sigmas))); } /* ************************************************************************* */ GaussianFactorGraph conditionDensity(const GaussianBayesTree& bayesTree, - const std::set& saved_indices) { + const std::set& saved_indices) { const bool verbose = false; - VectorValues solution = optimize(bayesTree); + VectorValues solution = bayesTree.optimize(); // FIXME: set of conditionals does not manage possibility of solving out whole separators std::set affected_cliques = findAffectedCliqueConditionals(bayesTree, saved_indices); @@ -191,7 +195,7 @@ GaussianFactorGraph conditionDensity(const GaussianBayesTree& bayesTree, GaussianConditional::shared_ptr reducedConditional = conditionDensity(conditional, saved_indices, solution); if (reducedConditional) { if (verbose) reducedConditional->print("Final conditional"); - summarized_graph.push_back(reducedConditional->toFactor()); + summarized_graph.push_back(reducedConditional); } else if (verbose) { cout << "Conditional removed after summarization!" << endl; } diff --git a/gtsam_unstable/linear/conditioning.h b/gtsam_unstable/linear/conditioning.h index fbd5d6918..22eeaaeb2 100644 --- a/gtsam_unstable/linear/conditioning.h +++ b/gtsam_unstable/linear/conditioning.h @@ -12,7 +12,6 @@ #include #include #include -#include namespace gtsam { @@ -27,16 +26,18 @@ namespace gtsam { * @param saved_indices is the set of indices that should appear in the result * @param solution is a full solution for the system */ -GTSAM_UNSTABLE_EXPORT gtsam::GaussianConditional::shared_ptr conditionDensity(const gtsam::GaussianConditional::shared_ptr& initConditional, - const std::set& saved_indices, const gtsam::VectorValues& solution); +GTSAM_UNSTABLE_EXPORT GaussianConditional::shared_ptr conditionDensity( + const GaussianConditional::shared_ptr& initConditional, + const std::set& saved_indices, const VectorValues& solution); /** * Backsubstitution-based conditioning for a complete Bayes Tree - reduces * conditionals by solving out variables to eliminate. Traverses the tree to * add the correct dummy factors whenever a separator is eliminated. */ -GTSAM_UNSTABLE_EXPORT gtsam::GaussianFactorGraph conditionDensity(const gtsam::GaussianBayesTree& bayesTree, - const std::set& saved_indices); +GTSAM_UNSTABLE_EXPORT GaussianFactorGraph conditionDensity( + const GaussianBayesTree& bayesTree, + const std::set& saved_indices); } // \namespace gtsam diff --git a/gtsam_unstable/linear/tests/testBayesTreeOperations.cpp b/gtsam_unstable/linear/tests/testBayesTreeOperations.cpp index efc5bba55..64906936a 100644 --- a/gtsam_unstable/linear/tests/testBayesTreeOperations.cpp +++ b/gtsam_unstable/linear/tests/testBayesTreeOperations.cpp @@ -10,7 +10,6 @@ #include #include -#include #include @@ -194,10 +193,10 @@ TEST( testBayesTreeOperations, liquefy ) { // Create smoother with 7 nodes Ordering ordering; ordering += X(1),X(3),X(5),X(7),X(2),X(6),X(4); - GaussianFactorGraph smoother = createSmoother(7, ordering).first; + GaussianFactorGraph smoother = createSmoother(7); // Create the Bayes tree - GaussianBayesTree bayesTree = *GaussianMultifrontalSolver(smoother).eliminate(); + GaussianBayesTree bayesTree = *smoother.eliminateMultifrontal(ordering); // bayesTree.print("Full tree"); SharedDiagonal unit6 = noiseModel::Diagonal::Sigmas(Vector_(ones(6))); @@ -278,6 +277,7 @@ TEST( testBayesTreeOperations, liquefy ) { // Liquefy the tree back into a graph, splitting factors { + CHECK(("*** liquify fails here *** - does not check for null noiseModel", 0)); GaussianFactorGraph actGraph = liquefy(bayesTree, true); GaussianFactorGraph expGraph; diff --git a/gtsam_unstable/linear/tests/testConditioning.cpp b/gtsam_unstable/linear/tests/testConditioning.cpp index 69646fbdc..da8f56117 100644 --- a/gtsam_unstable/linear/tests/testConditioning.cpp +++ b/gtsam_unstable/linear/tests/testConditioning.cpp @@ -12,6 +12,8 @@ #include #include +#include +#include #include @@ -64,7 +66,8 @@ TEST( testConditioning, directed_elimination_singlefrontal ) { Index root_key = 0, removed_key = 1, remaining_parent = 2; Matrix R11 = Matrix_(1,1, 1.0), R22 = Matrix_(1,1, 3.0), S = Matrix_(1,1,-2.0), T = Matrix_(1,1,-3.0); - Vector d0 = d.segment(0,1), d1 = d.segment(1,1), sigmas = Vector_(1, 1.0); + Vector d0 = d.segment(0,1), d1 = d.segment(1,1); + SharedDiagonal sigmas = noiseModel::Unit::Create(1); GaussianConditional::shared_ptr initConditional(new GaussianConditional(root_key, d0, R11, removed_key, S, remaining_parent, T, sigmas)); @@ -95,155 +98,156 @@ TEST( testConditioning, directed_elimination_singlefrontal ) { EXPECT(!actRemoveFrontal); } -/* ************************************************************************* */ -TEST( testConditioning, directed_elimination_multifrontal ) { - // Use top two rows from the previous example - Index root_key = 0, removed_key = 1, remaining_parent = 2; - Matrix R11 = R.topLeftCorner(2,2), S = R.block(0,2,2,1), - Sprime = Matrix_(1,1,-2.0), R11prime = Matrix_(1,1, 1.0); - Vector d1 = d.segment(0,2), sigmas1 = Vector_(1, 1.0), sigmas2 = Vector_(2, 1.0, 1.0); - - - std::list > terms; - terms += make_pair(root_key, Matrix(R11.col(0))); - terms += make_pair(removed_key, Matrix(R11.col(1))); - terms += make_pair(remaining_parent, S); - GaussianConditional::shared_ptr initConditional(new GaussianConditional(terms, 2, d1, sigmas2)); - - VectorValues solution; - solution.insert(0, x.segment(0,1)); - solution.insert(1, x.segment(1,1)); - solution.insert(2, x.segment(2,1)); - - std::set saved_indices; - saved_indices += root_key, remaining_parent; - - GaussianConditional::shared_ptr actSummarized = conditionDensity(initConditional, saved_indices, solution); - GaussianConditional::shared_ptr expSummarized(new - GaussianConditional(root_key, d.segment(0,1) - Sprime*x(1), R11prime, remaining_parent, R.block(0,2,1,1), sigmas1)); - - CHECK(actSummarized); - EXPECT(assert_equal(*expSummarized, *actSummarized, tol)); -} - -/* ************************************************************************* */ -TEST( testConditioning, directed_elimination_multifrontal_multidim ) { - // use larger example, three frontal variables, dim = 2 each, two parents (one removed) - // Vars: 0, 1, 2, 3, 4; frontal: 0, 1, 2. parents: 3, 4; - // Remove 1, 3 - Matrix Rinit = Matrix_(6, 11, - 1.0, 0.0, 2.0, 0.0, 3.0, 0.0, 1.0, 0.0, -1.0, 0.0, 0.1, - 0.0, 1.0, 0.0, 2.0, 0.0, 3.0, 0.0, 1.0, 0.0, 1.0, 0.2, - 0.0, 0.0, 3.0, 0.0, 4.0, 0.0, 0.0,-1.0, 1.0, 0.0, 0.3, - 0.0, 0.0, 0.0, 4.0, 0.0, 4.0, 3.0, 2.0, 0.0, 9.0, 0.4, - 0.0, 0.0, 0.0, 0.0, 5.0, 0.0, 7.0, 0.0, 3.0, 0.0, 0.5, - 0.0, 0.0, 0.0, 0.0, 0.0, 4.0, 0.0, 8.0, 0.0, 6.0, 0.6); - - vector init_dims; init_dims += 2, 2, 2, 2, 2, 1; - GaussianConditional::rsd_type init_matrices(Rinit, init_dims.begin(), init_dims.end()); - Vector sigmas = ones(6); - vector init_keys; init_keys += 0, 1, 2, 3, 4; - GaussianConditional::shared_ptr initConditional(new - GaussianConditional(init_keys.begin(), init_keys.end(), 3, init_matrices, sigmas)); - - // Construct a solution vector - VectorValues solution; - solution.insert(0, zero(2)); - solution.insert(1, zero(2)); - solution.insert(2, zero(2)); - solution.insert(3, Vector_(2, 1.0, 2.0)); - solution.insert(4, Vector_(2, 3.0, 4.0)); - - initConditional->solveInPlace(solution); - - std::set saved_indices; - saved_indices += 0, 2, 4; - - GaussianConditional::shared_ptr actSummarized = conditionDensity(initConditional, saved_indices, solution); - CHECK(actSummarized); - - Matrix Rexp = Matrix_(4, 7, - 1.0, 0.0, 3.0, 0.0, -1.0, 0.0, 0.1, - 0.0, 1.0, 0.0, 3.0, 0.0, 1.0, 0.2, - 0.0, 0.0, 5.0, 0.0, 3.0, 0.0, 0.5, - 0.0, 0.0, 0.0, 4.0, 0.0, 6.0, 0.6); - - // Update rhs - Rexp.block(0, 6, 2, 1) -= Rinit.block(0, 2, 2, 2) * solution.at(1) + Rinit.block(0, 6, 2, 2) * solution.at(3); - Rexp.block(2, 6, 2, 1) -= Rinit.block(4, 6, 2, 2) * solution.at(3); - - vector exp_dims; exp_dims += 2, 2, 2, 1; - GaussianConditional::rsd_type exp_matrices(Rexp, exp_dims.begin(), exp_dims.end()); - Vector exp_sigmas = ones(4); - vector exp_keys; exp_keys += 0, 2, 4; - GaussianConditional expSummarized(exp_keys.begin(), exp_keys.end(), 2, exp_matrices, exp_sigmas); - - EXPECT(assert_equal(expSummarized, *actSummarized, tol)); -} - -/* ************************************************************************* */ -TEST( testConditioning, directed_elimination_multifrontal_multidim2 ) { - // Example from LinearAugmentedSystem - // 4 variables, last two in ordering kept - should be able to do this with no computation. - - vector init_dims; init_dims += 3, 3, 2, 2, 1; - - //Full initial conditional: density on [3] [4] [5] [6] - Matrix Rinit = Matrix_(10, 11, - 8.78422312, -0.0375455118, -0.0387376278, -5.059576, 0.0, 0.0, -0.0887200041, 0.00429643583, -0.130078263, 0.0193260727, 0.0, - 0.0, 8.46951839, 9.51456887, -0.0224291821, -5.24757636, 0.0, 0.0586258904, -0.173455825, 0.11090295, -0.330696013, 0.0, - 0.0, 0.0, 16.5539485, 0.00105159359, -2.35354497, -6.04085484, -0.0212095105, 0.0978729072, 0.00471054272, 0.0694956367, 0.0, - 0.0, 0.0, 0.0, 10.9015885, -0.0105694572, 0.000582715469, -0.0410535006, 0.00162772139, -0.0601433772, 0.0082824087,0.0, - 0.0, 0.0, 0.0, 0.0, 10.5531086, -1.34722553, 0.02438072, -0.0644224578, 0.0561372492, -0.148932792, 0.0, - 0.0, 0.0, 0.0, 0.0, 0.0, 21.4870439, -0.00443305851, 0.0234766354, 0.00484572411, 0.0101997356, 0.0, - 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 2.73892865, 0.0242046766, -0.0459727048, 0.0445071938, 0.0, - 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 2.61246954, 0.02287419, -0.102870789, 0.0, - 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 2.04823446, -0.302033014, 0.0, - 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 2.24068986, 0.0); - Vector dinit = Vector_(10, - -0.00186915, 0.00318554, 0.000592421, -0.000861, 0.00171528, 0.000274123, -0.0284011, 0.0275465, 0.0439795, -0.0222134); - Rinit.rightCols(1) = dinit; - Vector sigmas = ones(10); - - GaussianConditional::rsd_type init_matrices(Rinit, init_dims.begin(), init_dims.end()); - vector init_keys; init_keys += 3, 4, 5, 6; - GaussianConditional::shared_ptr initConditional(new - GaussianConditional(init_keys.begin(), init_keys.end(), 4, init_matrices, sigmas)); - - // Calculate a solution - VectorValues solution; - solution.insert(0, zero(3)); - solution.insert(1, zero(3)); - solution.insert(2, zero(3)); - solution.insert(3, zero(3)); - solution.insert(4, zero(3)); - solution.insert(5, zero(2)); - solution.insert(6, zero(2)); - - initConditional->solveInPlace(solution); - - // Perform summarization - std::set saved_indices; - saved_indices += 5, 6; - - GaussianConditional::shared_ptr actSummarized = conditionDensity(initConditional, saved_indices, solution); - CHECK(actSummarized); - - // Create expected value on [5], [6] - Matrix Rexp = Matrix_(4, 5, - 2.73892865, 0.0242046766, -0.0459727048, 0.0445071938, -0.0284011, - 0.0, 2.61246954, 0.02287419, -0.102870789, 0.0275465, - 0.0, 0.0, 2.04823446, -0.302033014, 0.0439795, - 0.0, 0.0, 0.0, 2.24068986, -0.0222134); - Vector expsigmas = ones(4); - - vector exp_dims; exp_dims += 2, 2, 1; - GaussianConditional::rsd_type exp_matrices(Rexp, exp_dims.begin(), exp_dims.end()); - vector exp_keys; exp_keys += 5, 6; - GaussianConditional expConditional(exp_keys.begin(), exp_keys.end(), 2, exp_matrices, expsigmas); - - EXPECT(assert_equal(expConditional, *actSummarized, tol)); -} +///* ************************************************************************* */ +//TEST( testConditioning, directed_elimination_multifrontal ) { +// // Use top two rows from the previous example +// Index root_key = 0, removed_key = 1, remaining_parent = 2; +// Matrix R11 = R.topLeftCorner(2,2), S = R.block(0,2,2,1), +// Sprime = Matrix_(1,1,-2.0), R11prime = Matrix_(1,1, 1.0); +// Vector d1 = d.segment(0,2); +// SharedDiagonal sigmas1 = noiseModel::Unit::Create(1), sigmas2 = noiseModel::Unit::Create(2); +// +// +// std::list > terms; +// terms += make_pair(root_key, Matrix(R11.col(0))); +// terms += make_pair(removed_key, Matrix(R11.col(1))); +// terms += make_pair(remaining_parent, S); +// GaussianConditional::shared_ptr initConditional(new GaussianConditional(terms, 2, d1, sigmas2)); +// +// VectorValues solution; +// solution.insert(0, x.segment(0,1)); +// solution.insert(1, x.segment(1,1)); +// solution.insert(2, x.segment(2,1)); +// +// std::set saved_indices; +// saved_indices += root_key, remaining_parent; +// +// GaussianConditional::shared_ptr actSummarized = conditionDensity(initConditional, saved_indices, solution); +// GaussianConditional::shared_ptr expSummarized(new +// GaussianConditional(root_key, d.segment(0,1) - Sprime*x(1), R11prime, remaining_parent, R.block(0,2,1,1), sigmas1)); +// +// CHECK(actSummarized); +// EXPECT(assert_equal(*expSummarized, *actSummarized, tol)); +//} +// +///* ************************************************************************* */ +//TEST( testConditioning, directed_elimination_multifrontal_multidim ) { +// // use larger example, three frontal variables, dim = 2 each, two parents (one removed) +// // Vars: 0, 1, 2, 3, 4; frontal: 0, 1, 2. parents: 3, 4; +// // Remove 1, 3 +// Matrix Rinit = Matrix_(6, 11, +// 1.0, 0.0, 2.0, 0.0, 3.0, 0.0, 1.0, 0.0, -1.0, 0.0, 0.1, +// 0.0, 1.0, 0.0, 2.0, 0.0, 3.0, 0.0, 1.0, 0.0, 1.0, 0.2, +// 0.0, 0.0, 3.0, 0.0, 4.0, 0.0, 0.0,-1.0, 1.0, 0.0, 0.3, +// 0.0, 0.0, 0.0, 4.0, 0.0, 4.0, 3.0, 2.0, 0.0, 9.0, 0.4, +// 0.0, 0.0, 0.0, 0.0, 5.0, 0.0, 7.0, 0.0, 3.0, 0.0, 0.5, +// 0.0, 0.0, 0.0, 0.0, 0.0, 4.0, 0.0, 8.0, 0.0, 6.0, 0.6); +// +// vector init_dims; init_dims += 2, 2, 2, 2, 2, 1; +// VerticalBlockMatrix init_matrices(init_dims, Rinit); +// SharedDiagonal sigmas = noiseModel::Unit::Create(6); +// vector init_keys; init_keys += 0, 1, 2, 3, 4; +// GaussianConditional::shared_ptr initConditional(new +// GaussianConditional(init_keys, 3, init_matrices, sigmas)); +// +// // Construct a solution vector +// VectorValues solution; +// solution.insert(0, zero(2)); +// solution.insert(1, zero(2)); +// solution.insert(2, zero(2)); +// solution.insert(3, Vector_(2, 1.0, 2.0)); +// solution.insert(4, Vector_(2, 3.0, 4.0)); +// +// solution = initConditional->solve(solution); +// +// std::set saved_indices; +// saved_indices += 0, 2, 4; +// +// GaussianConditional::shared_ptr actSummarized = conditionDensity(initConditional, saved_indices, solution); +// CHECK(actSummarized); +// +// Matrix Rexp = Matrix_(4, 7, +// 1.0, 0.0, 3.0, 0.0, -1.0, 0.0, 0.1, +// 0.0, 1.0, 0.0, 3.0, 0.0, 1.0, 0.2, +// 0.0, 0.0, 5.0, 0.0, 3.0, 0.0, 0.5, +// 0.0, 0.0, 0.0, 4.0, 0.0, 6.0, 0.6); +// +// // Update rhs +// Rexp.block(0, 6, 2, 1) -= Rinit.block(0, 2, 2, 2) * solution.at(1) + Rinit.block(0, 6, 2, 2) * solution.at(3); +// Rexp.block(2, 6, 2, 1) -= Rinit.block(4, 6, 2, 2) * solution.at(3); +// +// vector exp_dims; exp_dims += 2, 2, 2, 1; +// VerticalBlockMatrix exp_matrices(exp_dims, Rexp); +// SharedDiagonal exp_sigmas = noiseModel::Unit::Create(4); +// vector exp_keys; exp_keys += 0, 2, 4; +// GaussianConditional expSummarized(exp_keys, 2, exp_matrices, exp_sigmas); +// +// EXPECT(assert_equal(expSummarized, *actSummarized, tol)); +//} +// +///* ************************************************************************* */ +//TEST( testConditioning, directed_elimination_multifrontal_multidim2 ) { +// // Example from LinearAugmentedSystem +// // 4 variables, last two in ordering kept - should be able to do this with no computation. +// +// vector init_dims; init_dims += 3, 3, 2, 2, 1; +// +// //Full initial conditional: density on [3] [4] [5] [6] +// Matrix Rinit = Matrix_(10, 11, +// 8.78422312, -0.0375455118, -0.0387376278, -5.059576, 0.0, 0.0, -0.0887200041, 0.00429643583, -0.130078263, 0.0193260727, 0.0, +// 0.0, 8.46951839, 9.51456887, -0.0224291821, -5.24757636, 0.0, 0.0586258904, -0.173455825, 0.11090295, -0.330696013, 0.0, +// 0.0, 0.0, 16.5539485, 0.00105159359, -2.35354497, -6.04085484, -0.0212095105, 0.0978729072, 0.00471054272, 0.0694956367, 0.0, +// 0.0, 0.0, 0.0, 10.9015885, -0.0105694572, 0.000582715469, -0.0410535006, 0.00162772139, -0.0601433772, 0.0082824087,0.0, +// 0.0, 0.0, 0.0, 0.0, 10.5531086, -1.34722553, 0.02438072, -0.0644224578, 0.0561372492, -0.148932792, 0.0, +// 0.0, 0.0, 0.0, 0.0, 0.0, 21.4870439, -0.00443305851, 0.0234766354, 0.00484572411, 0.0101997356, 0.0, +// 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 2.73892865, 0.0242046766, -0.0459727048, 0.0445071938, 0.0, +// 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 2.61246954, 0.02287419, -0.102870789, 0.0, +// 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 2.04823446, -0.302033014, 0.0, +// 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 2.24068986, 0.0); +// Vector dinit = Vector_(10, +// -0.00186915, 0.00318554, 0.000592421, -0.000861, 0.00171528, 0.000274123, -0.0284011, 0.0275465, 0.0439795, -0.0222134); +// Rinit.rightCols(1) = dinit; +// SharedDiagonal sigmas = noiseModel::Unit::Create(10); +// +// VerticalBlockMatrix init_matrices(init_dims, Rinit); +// vector init_keys; init_keys += 3, 4, 5, 6; +// GaussianConditional::shared_ptr initConditional(new +// GaussianConditional(init_keys, 4, init_matrices, sigmas)); +// +// // Calculate a solution +// VectorValues solution; +// solution.insert(0, zero(3)); +// solution.insert(1, zero(3)); +// solution.insert(2, zero(3)); +// solution.insert(3, zero(3)); +// solution.insert(4, zero(3)); +// solution.insert(5, zero(2)); +// solution.insert(6, zero(2)); +// +// solution = initConditional->solve(solution); +// +// // Perform summarization +// std::set saved_indices; +// saved_indices += 5, 6; +// +// GaussianConditional::shared_ptr actSummarized = conditionDensity(initConditional, saved_indices, solution); +// CHECK(actSummarized); +// +// // Create expected value on [5], [6] +// Matrix Rexp = Matrix_(4, 5, +// 2.73892865, 0.0242046766, -0.0459727048, 0.0445071938, -0.0284011, +// 0.0, 2.61246954, 0.02287419, -0.102870789, 0.0275465, +// 0.0, 0.0, 2.04823446, -0.302033014, 0.0439795, +// 0.0, 0.0, 0.0, 2.24068986, -0.0222134); +// SharedDiagonal expsigmas = noiseModel::Unit::Create(4); +// +// vector exp_dims; exp_dims += 2, 2, 1; +// VerticalBlockMatrix exp_matrices(exp_dims, Rexp); +// vector exp_keys; exp_keys += 5, 6; +// GaussianConditional expConditional(exp_keys, 2, exp_matrices, expsigmas); +// +// EXPECT(assert_equal(expConditional, *actSummarized, tol)); +//} /* ************************************************************************* */ int main() { TestResult tr; return TestRegistry::runAllTests(tr); } diff --git a/gtsam_unstable/nonlinear/BatchFixedLagSmoother.cpp b/gtsam_unstable/nonlinear/BatchFixedLagSmoother.cpp index 167215db2..a9601b652 100644 --- a/gtsam_unstable/nonlinear/BatchFixedLagSmoother.cpp +++ b/gtsam_unstable/nonlinear/BatchFixedLagSmoother.cpp @@ -22,7 +22,7 @@ #include #include #include -#include +//#include #include namespace gtsam { diff --git a/gtsam_unstable/nonlinear/BatchFixedLagSmoother.h b/gtsam_unstable/nonlinear/BatchFixedLagSmoother.h index b8597c79c..b609b442f 100644 --- a/gtsam_unstable/nonlinear/BatchFixedLagSmoother.h +++ b/gtsam_unstable/nonlinear/BatchFixedLagSmoother.h @@ -55,7 +55,7 @@ public: * a single variable is needed, it is faster to call calculateEstimate(const KEY&). */ Values calculateEstimate() const { - return theta_.retract(delta_, ordering_); + return theta_.retract(delta_); } /** Compute an estimate for a single variable using its incomplete linear delta computed @@ -66,8 +66,7 @@ public: */ template VALUE calculateEstimate(Key key) const { - const Index index = ordering_.at(key); - const Vector delta = delta_.at(index); + const Vector delta = delta_.at(key); return theta_.at(key).retract(delta); } diff --git a/gtsam_unstable/nonlinear/CMakeLists.txt b/gtsam_unstable/nonlinear/CMakeLists.txt index e40ffdc09..6672a163a 100644 --- a/gtsam_unstable/nonlinear/CMakeLists.txt +++ b/gtsam_unstable/nonlinear/CMakeLists.txt @@ -3,24 +3,33 @@ file(GLOB nonlinear_headers "*.h") install(FILES ${nonlinear_headers} DESTINATION include/gtsam_unstable/nonlinear) # Components to link tests in this subfolder against -set(nonlinear_local_libs - nonlinear_unstable - nonlinear - linear - linear_unstable - inference - geometry - base - ccolamd +set(nonlinear_local_libs +nonlinear_unstable +nonlinear +linear +linear_unstable +inference +geometry +base +ccolamd ) set (nonlinear_full_libs - ${gtsam-default} - ${gtsam_unstable-default}) +${gtsam-default} +${gtsam_unstable-default}) # Exclude tests that don't work -set (nonlinear_excluded_tests "") +set (nonlinear_excluded_tests #"") +"${CMAKE_CURRENT_SOURCE_DIR}/tests/testBatchFixedLagSmoother.cpp" +"${CMAKE_CURRENT_SOURCE_DIR}/tests/testConcurrentBatchFilter.cpp" +"${CMAKE_CURRENT_SOURCE_DIR}/tests/testConcurrentBatchSmoother.cpp" +"${CMAKE_CURRENT_SOURCE_DIR}/tests/testConcurrentIncrementalFilter.cpp" +"${CMAKE_CURRENT_SOURCE_DIR}/tests/testConcurrentIncrementalSmootherDL.cpp" +"${CMAKE_CURRENT_SOURCE_DIR}/tests/testConcurrentIncrementalSmootherGN.cpp" +"${CMAKE_CURRENT_SOURCE_DIR}/tests/testIncrementalFixedLagSmoother.cpp" +) + # Add all tests -gtsam_add_subdir_tests(nonlinear_unstable "${nonlinear_local_libs}" "${nonlinear_full_libs}" "${nonlinear_excluded_tests}") +gtsam_add_subdir_tests(nonlinear_unstable "${nonlinear_local_libs}" "${nonlinear_full_libs}" "${nonlinear_excluded_tests}") add_dependencies(check.unstable check.nonlinear_unstable) diff --git a/gtsam_unstable/nonlinear/ConcurrentBatchFilter.h b/gtsam_unstable/nonlinear/ConcurrentBatchFilter.h index 96752bd4e..5270dd1bd 100644 --- a/gtsam_unstable/nonlinear/ConcurrentBatchFilter.h +++ b/gtsam_unstable/nonlinear/ConcurrentBatchFilter.h @@ -90,7 +90,7 @@ public: * If only a single variable is needed, it may be faster to call calculateEstimate(const KEY&). */ Values calculateEstimate() const { - return theta_.retract(delta_, ordering_); + return theta_.retract(delta_); } /** Compute the current best estimate of a single variable. This is generally faster than @@ -100,8 +100,7 @@ public: */ template VALUE calculateEstimate(Key key) const { - const Index index = ordering_.at(key); - const Vector delta = delta_.at(index); + const Vector delta = delta_.at(key); return theta_.at(key).retract(delta); } @@ -222,7 +221,7 @@ private: const std::string& indent = "", const std::string& title = "", const KeyFormatter& keyFormatter = DefaultKeyFormatter); /** Print just the nonlinear keys in a linear factor */ - static void PrintLinearFactor(const GaussianFactor::shared_ptr& factor, const Ordering& ordering, + static void PrintLinearFactor(const GaussianFactor::shared_ptr& factor, const std::string& indent = "", const KeyFormatter& keyFormatter = DefaultKeyFormatter); /** Print just the nonlinear keys in each linear factor for a whole Gaussian Factor Graph */ diff --git a/gtsam_unstable/nonlinear/ConcurrentBatchSmoother.h b/gtsam_unstable/nonlinear/ConcurrentBatchSmoother.h index 8c6ea7dda..95838382c 100644 --- a/gtsam_unstable/nonlinear/ConcurrentBatchSmoother.h +++ b/gtsam_unstable/nonlinear/ConcurrentBatchSmoother.h @@ -89,7 +89,7 @@ public: * If only a single variable is needed, it may be faster to call calculateEstimate(const KEY&). */ Values calculateEstimate() const { - return theta_.retract(delta_, ordering_); + return theta_.retract(delta_); } /** Compute the current best estimate of a single variable. This is generally faster than diff --git a/gtsam_unstable/nonlinear/FixedLagSmoother.h b/gtsam_unstable/nonlinear/FixedLagSmoother.h index 89dba73ca..686cd239f 100644 --- a/gtsam_unstable/nonlinear/FixedLagSmoother.h +++ b/gtsam_unstable/nonlinear/FixedLagSmoother.h @@ -21,9 +21,9 @@ #pragma once #include +#include #include #include -#include #include namespace gtsam { diff --git a/gtsam_unstable/nonlinear/LinearizedFactor.cpp b/gtsam_unstable/nonlinear/LinearizedFactor.cpp index eb5a83c79..4b95c65b4 100644 --- a/gtsam_unstable/nonlinear/LinearizedFactor.cpp +++ b/gtsam_unstable/nonlinear/LinearizedFactor.cpp @@ -22,38 +22,28 @@ namespace gtsam { /* ************************************************************************* */ -LinearizedGaussianFactor::LinearizedGaussianFactor(const GaussianFactor::shared_ptr& gaussian, const Ordering& ordering, const Values& lin_points) { +LinearizedGaussianFactor::LinearizedGaussianFactor( + const GaussianFactor::shared_ptr& gaussian, const Values& lin_points) +: NonlinearFactor(gaussian->keys()) +{ // Extract the keys and linearization points - BOOST_FOREACH(const Index& idx, gaussian->keys()) { - // find full symbol - if (idx < ordering.size()) { - Key key = ordering.key(idx); - - // extract linearization point - assert(lin_points.exists(key)); - this->lin_points_.insert(key, lin_points.at(key)); - - // store keys - this->keys_.push_back(key); - } else { - throw std::runtime_error("LinearizedGaussianFactor: could not find index in decoder!"); - } + BOOST_FOREACH(const Key& key, gaussian->keys()) { + // extract linearization point + assert(lin_points.exists(key)); + this->lin_points_.insert(key, lin_points.at(key)); } } - - /* ************************************************************************* */ -LinearizedJacobianFactor::LinearizedJacobianFactor() : Ab_(matrix_) { +// LinearizedJacobianFactor +/* ************************************************************************* */ +LinearizedJacobianFactor::LinearizedJacobianFactor() { } /* ************************************************************************* */ -LinearizedJacobianFactor::LinearizedJacobianFactor(const JacobianFactor::shared_ptr& jacobian, - const Ordering& ordering, const Values& lin_points) -: Base(jacobian, ordering, lin_points), Ab_(matrix_) { - - // Get the Ab matrix from the Jacobian factor, with any covariance baked in - AbMatrix fullMatrix = jacobian->matrix_augmented(true); +LinearizedJacobianFactor::LinearizedJacobianFactor( + const JacobianFactor::shared_ptr& jacobian, const Values& lin_points) +: Base(jacobian, lin_points) { // Create the dims array size_t *dims = (size_t *)alloca(sizeof(size_t) * (jacobian->size() + 1)); @@ -64,8 +54,9 @@ LinearizedJacobianFactor::LinearizedJacobianFactor(const JacobianFactor::shared_ dims[index] = 1; // Update the BlockInfo accessor - BlockAb Ab(fullMatrix, dims, dims+jacobian->size()+1); - Ab.swap(Ab_); + Ab_ = VerticalBlockMatrix(dims, dims+jacobian->size()+1, jacobian->rows()); + // Get the Ab matrix from the Jacobian factor, with any covariance baked in + Ab_.matrix() = jacobian->augmentedJacobian(); } /* ************************************************************************* */ @@ -110,12 +101,12 @@ double LinearizedJacobianFactor::error(const Values& c) const { /* ************************************************************************* */ boost::shared_ptr -LinearizedJacobianFactor::linearize(const Values& c, const Ordering& ordering) const { +LinearizedJacobianFactor::linearize(const Values& c) const { // Create the 'terms' data structure for the Jacobian constructor std::vector > terms; BOOST_FOREACH(Key key, keys()) { - terms.push_back(std::make_pair(ordering[key], this->A(key))); + terms.push_back(std::make_pair(key, this->A(key))); } // compute rhs @@ -140,22 +131,16 @@ Vector LinearizedJacobianFactor::error_vector(const Values& c) const { return errorVector; } - - - - - /* ************************************************************************* */ -LinearizedHessianFactor::LinearizedHessianFactor() : info_(matrix_) { +// LinearizedHessianFactor +/* ************************************************************************* */ +LinearizedHessianFactor::LinearizedHessianFactor() { } /* ************************************************************************* */ -LinearizedHessianFactor::LinearizedHessianFactor(const HessianFactor::shared_ptr& hessian, - const Ordering& ordering, const Values& lin_points) -: Base(hessian, ordering, lin_points), info_(matrix_) { - - // Copy the augmented matrix holding G, g, and f - Matrix fullMatrix = hessian->info(); +LinearizedHessianFactor::LinearizedHessianFactor( + const HessianFactor::shared_ptr& hessian, const Values& lin_points) +: Base(hessian, lin_points) { // Create the dims array size_t *dims = (size_t*)alloca(sizeof(size_t)*(hessian->size() + 1)); @@ -166,8 +151,9 @@ LinearizedHessianFactor::LinearizedHessianFactor(const HessianFactor::shared_ptr dims[index] = 1; // Update the BlockInfo accessor - BlockInfo infoMatrix(fullMatrix, dims, dims+hessian->size()+1); - infoMatrix.swap(info_); + info_ = SymmetricBlockMatrix(dims, dims+hessian->size()+1); + // Copy the augmented matrix holding G, g, and f + info_.matrix() = hessian->info(); } /* ************************************************************************* */ @@ -228,18 +214,7 @@ double LinearizedHessianFactor::error(const Values& c) const { /* ************************************************************************* */ boost::shared_ptr -LinearizedHessianFactor::linearize(const Values& c, const Ordering& ordering) const { - - // Use the ordering to convert the keys into indices; - std::vector js; - BOOST_FOREACH(Key key, this->keys()){ - js.push_back(ordering.at(key)); - } - - // Make a copy of the info matrix - Matrix newMatrix; - SymmetricBlockView newInfo(newMatrix); - newInfo.assignNoalias(info_); +LinearizedHessianFactor::linearize(const Values& c) const { // Construct an error vector in key-order from the Values Vector dx = zero(dim()); @@ -260,22 +235,22 @@ LinearizedHessianFactor::linearize(const Values& c, const Ordering& ordering) co //newInfo.rangeColumn(0, this->size(), this->size(), 0) -= squaredTerm().selfadjointView() * dx; Vector g = linearTerm() - squaredTerm().selfadjointView() * dx; std::vector gs; - for(size_t i = 0; i < info_.nBlocks()-1; ++i) { + for(DenseIndex i = 0; i < info_.nBlocks()-1; ++i) { gs.push_back(g.segment(info_.offset(i), info_.offset(i+1) - info_.offset(i))); } // G2 = G1 // Do Nothing std::vector Gs; - for(size_t i = 0; i < info_.nBlocks()-1; ++i) { - for(size_t j = i; j < info_.nBlocks()-1; ++j) { + for(DenseIndex i = 0; i < info_.nBlocks()-1; ++i) { + for(DenseIndex j = i; j < info_.nBlocks()-1; ++j) { Gs.push_back(info_(i,j)); } } // Create a Hessian Factor from the modified info matrix //return boost::shared_ptr(new HessianFactor(js, newInfo)); - return boost::shared_ptr(new HessianFactor(js, Gs, gs, f)); + return boost::shared_ptr(new HessianFactor(keys(), Gs, gs, f)); } } // \namespace aspn diff --git a/gtsam_unstable/nonlinear/LinearizedFactor.h b/gtsam_unstable/nonlinear/LinearizedFactor.h index 330df0c6a..9e2319ae6 100644 --- a/gtsam_unstable/nonlinear/LinearizedFactor.h +++ b/gtsam_unstable/nonlinear/LinearizedFactor.h @@ -19,11 +19,9 @@ #include #include -#include #include #include #include -#include namespace gtsam { @@ -51,10 +49,9 @@ public: /** * @param gaussian: A jacobian or hessian factor - * @param ordering: The full ordering used to linearize this factor * @param lin_points: The linearization points for, at least, the variables used by this factor */ - LinearizedGaussianFactor(const GaussianFactor::shared_ptr& gaussian, const Ordering& ordering, const Values& lin_points); + LinearizedGaussianFactor(const GaussianFactor::shared_ptr& gaussian, const Values& lin_points); virtual ~LinearizedGaussianFactor() {}; @@ -87,12 +84,10 @@ public: /** shared pointer for convenience */ typedef boost::shared_ptr shared_ptr; - typedef Matrix AbMatrix; - typedef VerticalBlockView BlockAb; - typedef BlockAb::Block ABlock; - typedef BlockAb::constBlock constABlock; - typedef BlockAb::Column BVector; - typedef BlockAb::constColumn constBVector; + typedef VerticalBlockMatrix::Block ABlock; + typedef VerticalBlockMatrix::constBlock constABlock; + typedef VerticalBlockMatrix::Block::ColXpr BVector; + typedef VerticalBlockMatrix::constBlock::ConstColXpr constBVector; protected: @@ -101,8 +96,7 @@ protected: // KeyMatrixMap matrices_; // Vector b_; - AbMatrix matrix_; // the full matrix corresponding to the factor - BlockAb Ab_; // the block view of the full matrix + VerticalBlockMatrix Ab_; // the block view of the full matrix public: @@ -111,11 +105,9 @@ public: /** * @param jacobian: A jacobian factor - * @param ordering: The ordering used to linearize this factor * @param lin_points: The linearization points for, at least, the variables used by this factor */ - LinearizedJacobianFactor(const JacobianFactor::shared_ptr& jacobian, - const Ordering& ordering, const Values& lin_points); + LinearizedJacobianFactor(const JacobianFactor::shared_ptr& jacobian, const Values& lin_points); virtual ~LinearizedJacobianFactor() {} @@ -133,7 +125,7 @@ public: virtual bool equals(const NonlinearFactor& expected, double tol = 1e-9) const; // access functions - const constBVector b() const { return Ab_.column(size(), 0); } + const constBVector b() const { return Ab_(size()).col(0); } const constABlock A() const { return Ab_.range(0, size()); }; const constABlock A(Key key) const { return Ab_(std::find(begin(), end(), key) - begin()); } @@ -148,8 +140,7 @@ public: * Reimplemented from NoiseModelFactor to directly copy out the * matrices and only update the RHS b with an updated residual */ - boost::shared_ptr linearize( - const Values& c, const Ordering& ordering) const; + boost::shared_ptr linearize(const Values& c) const; /** (A*x-b) */ Vector error_vector(const Values& c) const; @@ -161,7 +152,6 @@ private: void serialize(ARCHIVE & ar, const unsigned int version) { ar & boost::serialization::make_nvp("LinearizedJacobianFactor", boost::serialization::base_object(*this)); - ar & BOOST_SERIALIZATION_NVP(matrix_); ar & BOOST_SERIALIZATION_NVP(Ab_); } }; @@ -184,17 +174,15 @@ public: typedef boost::shared_ptr shared_ptr; /** hessian block data types */ - typedef Matrix InfoMatrix; ///< The full augmented Hessian - typedef SymmetricBlockView BlockInfo; ///< A blockwise view of the Hessian - typedef BlockInfo::Block Block; ///< A block from the Hessian matrix - typedef BlockInfo::constBlock constBlock; ///< A block from the Hessian matrix (const version) - typedef BlockInfo::Column Column; ///< A column containing the linear term h - typedef BlockInfo::constColumn constColumn; ///< A column containing the linear term h (const version) + typedef SymmetricBlockMatrix::Block Block; ///< A block from the Hessian matrix + typedef SymmetricBlockMatrix::constBlock constBlock; ///< A block from the Hessian matrix (const version) + typedef SymmetricBlockMatrix::Block::ColXpr Column; ///< A column containing the linear term h + typedef SymmetricBlockMatrix::constBlock::ColXpr constColumn; ///< A column containing the linear term h (const version) protected: - InfoMatrix matrix_; ///< The full augmented information matrix, s.t. the quadratic error is 0.5*[x -1]'*H*[x -1] - BlockInfo info_; ///< The block view of the full information matrix. + SymmetricBlockMatrix info_; ///< The block view of the full information matrix, s.t. the quadratic + /// error is 0.5*[x -1]'*H*[x -1] public: @@ -204,11 +192,9 @@ public: /** * Use this constructor with the ordering used to linearize the graph * @param hessian: A hessian factor - * @param ordering: The ordering used to linearize this factor * @param lin_points: The linearization points for, at least, the variables used by this factor */ - LinearizedHessianFactor(const HessianFactor::shared_ptr& hessian, - const Ordering& ordering, const Values& lin_points); + LinearizedHessianFactor(const HessianFactor::shared_ptr& hessian, const Values& lin_points); virtual ~LinearizedHessianFactor() {} @@ -234,11 +220,11 @@ public: * @param j Which block row to get, as an iterator pointing to the slot in this factor. You can * use, for example, begin() + 2 to get the 3rd variable in this factor. * @return The linear term \f$ g \f$ */ - constColumn linearTerm(const_iterator j) const { return info_.column(j - this->begin(), this->size(), 0); } + constColumn linearTerm(const_iterator j) const { return info_(j - this->begin(), this->size()).col(0); } /** Return the complete linear term \f$ g \f$ as described above. * @return The linear term \f$ g \f$ */ - constColumn linearTerm() const { return info_.rangeColumn(0, this->size(), this->size(), 0); }; + constColumn linearTerm() const { return info_.range(0, this->size(), this->size(), this->size() + 1).col(0); }; /** Return a view of the block at (j1,j2) of the upper-triangular part of the * squared term \f$ H \f$, no data is copied. See HessianFactor class documentation @@ -260,7 +246,7 @@ public: /** get the dimension of the factor (number of rows on linearization) */ - size_t dim() const { return matrix_.rows() - 1; }; + size_t dim() const { return info_.rows() - 1; }; /** Calculate the error of the factor */ double error(const Values& c) const; @@ -270,8 +256,7 @@ public: * Reimplemented from NoiseModelFactor to directly copy out the * matrices and only update the RHS b with an updated residual */ - boost::shared_ptr linearize( - const Values& c, const Ordering& ordering) const; + boost::shared_ptr linearize(const Values& c) const; private: /** Serialization function */ @@ -280,7 +265,6 @@ private: void serialize(ARCHIVE & ar, const unsigned int version) { ar & boost::serialization::make_nvp("LinearizedHessianFactor", boost::serialization::base_object(*this)); - ar & BOOST_SERIALIZATION_NVP(matrix_); ar & BOOST_SERIALIZATION_NVP(info_); } }; diff --git a/gtsam_unstable/nonlinear/sequentialSummarization.cpp b/gtsam_unstable/nonlinear/sequentialSummarization.cpp deleted file mode 100644 index fa0b58836..000000000 --- a/gtsam_unstable/nonlinear/sequentialSummarization.cpp +++ /dev/null @@ -1,35 +0,0 @@ -/** - * @file summarization.cpp - * - * @date Jun 22, 2012 - * @author Alex Cunningham - */ - -#include -#include - -#include - -using namespace std; - -namespace gtsam { - -/* ************************************************************************* */ -GaussianFactorGraph::shared_ptr summarizeGraphSequential( - const GaussianFactorGraph& full_graph, const std::vector& indices, bool useQR) { - GaussianSequentialSolver solver(full_graph, useQR); - return solver.jointFactorGraph(indices); -} - -/* ************************************************************************* */ -GaussianFactorGraph::shared_ptr summarizeGraphSequential( - const GaussianFactorGraph& full_graph, const Ordering& ordering, const KeySet& saved_keys, bool useQR) { - std::vector indices; - BOOST_FOREACH(const Key& k, saved_keys) - indices.push_back(ordering[k]); - return summarizeGraphSequential(full_graph, indices, useQR); -} - -/* ************************************************************************* */ -} // \namespace gtsam - diff --git a/gtsam_unstable/nonlinear/sequentialSummarization.h b/gtsam_unstable/nonlinear/sequentialSummarization.h deleted file mode 100644 index 4d783ebe1..000000000 --- a/gtsam_unstable/nonlinear/sequentialSummarization.h +++ /dev/null @@ -1,34 +0,0 @@ -/** - * @file summarization.h - * - * @brief Types and utility functions for summarization - * - * @date Jun 22, 2012 - * @author Alex Cunningham - */ - -#pragma once - -#include -#include - -#include -#include - -namespace gtsam { - -/** - * Summarization function that eliminates a set of variables (does not convert to Jacobians) - * NOTE: uses sequential solver - requires fully constrained system - */ -GaussianFactorGraph::shared_ptr GTSAM_UNSTABLE_EXPORT summarizeGraphSequential( - const GaussianFactorGraph& full_graph, const std::vector& indices, bool useQR = false); - -/** Summarization that also converts keys to indices */ -GaussianFactorGraph::shared_ptr GTSAM_UNSTABLE_EXPORT summarizeGraphSequential( - const GaussianFactorGraph& full_graph, const Ordering& ordering, - const KeySet& saved_keys, bool useQR = false); - -} // \namespace gtsam - - diff --git a/gtsam_unstable/nonlinear/tests/testBatchFixedLagSmoother.cpp b/gtsam_unstable/nonlinear/tests/testBatchFixedLagSmoother.cpp index 3c7405da3..6b109d657 100644 --- a/gtsam_unstable/nonlinear/tests/testBatchFixedLagSmoother.cpp +++ b/gtsam_unstable/nonlinear/tests/testBatchFixedLagSmoother.cpp @@ -18,16 +18,16 @@ #include #include -#include -#include -#include +#include +#include +#include +#include +#include +#include #include #include -#include -#include -#include -#include -#include +#include +#include using namespace std; using namespace gtsam; @@ -35,11 +35,9 @@ using namespace gtsam; /* ************************************************************************* */ bool check_smoother(const NonlinearFactorGraph& fullgraph, const Values& fullinit, const BatchFixedLagSmoother& smoother, const Key& key) { - Ordering ordering = *fullgraph.orderingCOLAMD(fullinit); - GaussianFactorGraph linearized = *fullgraph.linearize(fullinit, ordering); - GaussianBayesNet gbn = *GaussianSequentialSolver(linearized).eliminate(); - VectorValues delta = optimize(gbn); - Values fullfinal = fullinit.retract(delta, ordering); + GaussianFactorGraph linearized = *fullgraph.linearize(fullinit); + VectorValues delta = linearized.optimize(); + Values fullfinal = fullinit.retract(delta); Point2 expected = fullfinal.at(key); Point2 actual = smoother.calculateEstimate(key); @@ -48,7 +46,7 @@ bool check_smoother(const NonlinearFactorGraph& fullgraph, const Values& fullini } /* ************************************************************************* */ -TEST_UNSAFE( BatchFixedLagSmoother, Example ) +TEST( BatchFixedLagSmoother, Example ) { // Test the BatchFixedLagSmoother in a pure linear environment. Thus, full optimization and // the BatchFixedLagSmoother should be identical (even with the linearized approximations at diff --git a/gtsam_unstable/nonlinear/tests/testIncrementalFixedLagSmoother.cpp b/gtsam_unstable/nonlinear/tests/testIncrementalFixedLagSmoother.cpp index 73bb8c0a3..a2941acaa 100644 --- a/gtsam_unstable/nonlinear/tests/testIncrementalFixedLagSmoother.cpp +++ b/gtsam_unstable/nonlinear/tests/testIncrementalFixedLagSmoother.cpp @@ -19,17 +19,17 @@ #include #include +#include +#include +#include +#include +#include +#include #include #include -#include #include #include #include -#include -#include -#include -#include -#include using namespace std; using namespace gtsam; @@ -39,11 +39,9 @@ Key MakeKey(size_t index) { return Symbol('x', index); } /* ************************************************************************* */ bool check_smoother(const NonlinearFactorGraph& fullgraph, const Values& fullinit, const IncrementalFixedLagSmoother& smoother, const Key& key) { - Ordering ordering = *fullgraph.orderingCOLAMD(fullinit); - GaussianFactorGraph linearized = *fullgraph.linearize(fullinit, ordering); - GaussianBayesNet gbn = *GaussianSequentialSolver(linearized).eliminate(); - VectorValues delta = optimize(gbn); - Values fullfinal = fullinit.retract(delta, ordering); + GaussianFactorGraph linearized = *fullgraph.linearize(fullinit); + VectorValues delta = linearized.optimize(); + Values fullfinal = fullinit.retract(delta); Point2 expected = fullfinal.at(key); Point2 actual = smoother.calculateEstimate(key); @@ -52,7 +50,7 @@ bool check_smoother(const NonlinearFactorGraph& fullgraph, const Values& fullini } /* ************************************************************************* */ -TEST_UNSAFE( IncrementalFixedLagSmoother, Example ) +TEST( IncrementalFixedLagSmoother, Example ) { // Test the IncrementalFixedLagSmoother in a pure linear environment. Thus, full optimization and // the IncrementalFixedLagSmoother should be identical (even with the linearized approximations at diff --git a/gtsam_unstable/nonlinear/tests/testLinearizedFactor.cpp b/gtsam_unstable/nonlinear/tests/testLinearizedFactor.cpp index 426683806..21e271bd8 100644 --- a/gtsam_unstable/nonlinear/tests/testLinearizedFactor.cpp +++ b/gtsam_unstable/nonlinear/tests/testLinearizedFactor.cpp @@ -17,12 +17,12 @@ */ #include -#include +#include #include +#include #include #include -#include -#include +#include #include using namespace gtsam; @@ -33,9 +33,6 @@ TEST( LinearizedFactor, equals_jacobian ) // Create a Between Factor from a Point3. This is actually a linear factor. Key x1(1); Key x2(2); - Ordering ordering; - ordering.push_back(x1); - ordering.push_back(x2); Values values; values.insert(x1, Point3(-22.4, +8.5, +2.4)); values.insert(x2, Point3(-21.0, +5.0, +21.0)); @@ -46,9 +43,9 @@ TEST( LinearizedFactor, equals_jacobian ) // Create two identical factors and make sure they're equal - JacobianFactor::shared_ptr jf = boost::static_pointer_cast(betweenFactor.linearize(values, ordering)); - LinearizedJacobianFactor jacobian1(jf, ordering, values); - LinearizedJacobianFactor jacobian2(jf, ordering, values); + JacobianFactor::shared_ptr jf = boost::static_pointer_cast(betweenFactor.linearize(values)); + LinearizedJacobianFactor jacobian1(jf, values); + LinearizedJacobianFactor jacobian2(jf, values); CHECK(assert_equal(jacobian1, jacobian2)); } @@ -59,9 +56,6 @@ TEST( LinearizedFactor, clone_jacobian ) // Create a Between Factor from a Point3. This is actually a linear factor. Key x1(1); Key x2(2); - Ordering ordering; - ordering.push_back(x1); - ordering.push_back(x2); Values values; values.insert(x1, Point3(-22.4, +8.5, +2.4)); values.insert(x2, Point3(-21.0, +5.0, +21.0)); @@ -71,13 +65,13 @@ TEST( LinearizedFactor, clone_jacobian ) BetweenFactor betweenFactor(x1, x2, measured, model); // Create one factor that is a clone of the other and make sure they're equal - JacobianFactor::shared_ptr jf = boost::static_pointer_cast(betweenFactor.linearize(values, ordering)); - LinearizedJacobianFactor jacobian1(jf, ordering, values); + JacobianFactor::shared_ptr jf = boost::static_pointer_cast(betweenFactor.linearize(values)); + LinearizedJacobianFactor jacobian1(jf, values); LinearizedJacobianFactor::shared_ptr jacobian2 = boost::static_pointer_cast(jacobian1.clone()); CHECK(assert_equal(jacobian1, *jacobian2)); - JacobianFactor::shared_ptr jf1 = boost::static_pointer_cast(jacobian1.linearize(values, ordering)); - JacobianFactor::shared_ptr jf2 = boost::static_pointer_cast(jacobian2->linearize(values, ordering)); + JacobianFactor::shared_ptr jf1 = boost::static_pointer_cast(jacobian1.linearize(values)); + JacobianFactor::shared_ptr jf2 = boost::static_pointer_cast(jacobian2->linearize(values)); CHECK(assert_equal(*jf1, *jf2)); Matrix information1 = jf1->augmentedInformation(); @@ -91,9 +85,6 @@ TEST( LinearizedFactor, add_jacobian ) // Create a Between Factor from a Point3. This is actually a linear factor. Key x1(1); Key x2(2); - Ordering ordering; - ordering.push_back(x1); - ordering.push_back(x2); Values values; values.insert(x1, Point3(-22.4, +8.5, +2.4)); values.insert(x2, Point3(-21.0, +5.0, +21.0)); @@ -103,10 +94,10 @@ TEST( LinearizedFactor, add_jacobian ) BetweenFactor betweenFactor(x1, x2, measured, model); // Create two factor graphs, one using 'push_back' and one using 'add' and make sure they're equal - JacobianFactor::shared_ptr jf = boost::static_pointer_cast(betweenFactor.linearize(values, ordering)); - LinearizedJacobianFactor::shared_ptr jacobian(new LinearizedJacobianFactor(jf, ordering, values)); + JacobianFactor::shared_ptr jf = boost::static_pointer_cast(betweenFactor.linearize(values)); + LinearizedJacobianFactor::shared_ptr jacobian(new LinearizedJacobianFactor(jf, values)); NonlinearFactorGraph graph1; graph1.push_back(jacobian); - NonlinearFactorGraph graph2; graph2.add(*jacobian); + NonlinearFactorGraph graph2; graph2.push_back(*jacobian); // TODO: When creating a Jacobian from a cached factor, I experienced a problem in the 'add' version // However, I am currently unable to reproduce the error in this unit test. @@ -133,8 +124,8 @@ TEST( LinearizedFactor, add_jacobian ) // // // // Create a linearized jacobian factors -// JacobianFactor::shared_ptr jf = boost::static_pointer_cast(betweenFactor.linearize(values, ordering)); -// LinearizedJacobianFactor jacobian(jf, ordering, values); +// JacobianFactor::shared_ptr jf = boost::static_pointer_cast(betweenFactor.linearize(values)); +// LinearizedJacobianFactor jacobian(jf, values); // // // for(double x1 = -10; x1 < 10; x1 += 2.0) { @@ -156,8 +147,8 @@ TEST( LinearizedFactor, add_jacobian ) // EXPECT_DOUBLES_EQUAL(expected_error, actual_error, 1e-9 ); // // // Check that the linearized factors are identical -// GaussianFactor::shared_ptr expected_factor = betweenFactor.linearize(linpoint, ordering); -// GaussianFactor::shared_ptr actual_factor = jacobian.linearize(linpoint, ordering); +// GaussianFactor::shared_ptr expected_factor = betweenFactor.linearize(linpoint); +// GaussianFactor::shared_ptr actual_factor = jacobian.linearize(linpoint); // CHECK(assert_equal(*expected_factor, *actual_factor)); // } // } @@ -175,9 +166,6 @@ TEST( LinearizedFactor, equals_hessian ) // Create a Between Factor from a Point3. This is actually a linear factor. Key x1(1); Key x2(2); - Ordering ordering; - ordering.push_back(x1); - ordering.push_back(x2); Values values; values.insert(x1, Point3(-22.4, +8.5, +2.4)); values.insert(x2, Point3(-21.0, +5.0, +21.0)); @@ -188,10 +176,10 @@ TEST( LinearizedFactor, equals_hessian ) // Create two identical factors and make sure they're equal - JacobianFactor::shared_ptr jf = boost::static_pointer_cast(betweenFactor.linearize(values, ordering)); + JacobianFactor::shared_ptr jf = boost::static_pointer_cast(betweenFactor.linearize(values)); HessianFactor::shared_ptr hf(new HessianFactor(*jf)); - LinearizedHessianFactor hessian1(hf, ordering, values); - LinearizedHessianFactor hessian2(hf, ordering, values); + LinearizedHessianFactor hessian1(hf, values); + LinearizedHessianFactor hessian2(hf, values); CHECK(assert_equal(hessian1, hessian2)); } @@ -202,9 +190,6 @@ TEST( LinearizedFactor, clone_hessian ) // Create a Between Factor from a Point3. This is actually a linear factor. Key x1(1); Key x2(2); - Ordering ordering; - ordering.push_back(x1); - ordering.push_back(x2); Values values; values.insert(x1, Point3(-22.4, +8.5, +2.4)); values.insert(x2, Point3(-21.0, +5.0, +21.0)); @@ -215,9 +200,9 @@ TEST( LinearizedFactor, clone_hessian ) // Create two identical factors and make sure they're equal - JacobianFactor::shared_ptr jf = boost::static_pointer_cast(betweenFactor.linearize(values, ordering)); + JacobianFactor::shared_ptr jf = boost::static_pointer_cast(betweenFactor.linearize(values)); HessianFactor::shared_ptr hf(new HessianFactor(*jf)); - LinearizedHessianFactor hessian1(hf, ordering, values); + LinearizedHessianFactor hessian1(hf, values); LinearizedHessianFactor::shared_ptr hessian2 = boost::static_pointer_cast(hessian1.clone()); CHECK(assert_equal(hessian1, *hessian2)); @@ -229,9 +214,6 @@ TEST( LinearizedFactor, add_hessian ) // Create a Between Factor from a Point3. This is actually a linear factor. Key x1(1); Key x2(2); - Ordering ordering; - ordering.push_back(x1); - ordering.push_back(x2); Values values; values.insert(x1, Point3(-22.4, +8.5, +2.4)); values.insert(x2, Point3(-21.0, +5.0, +21.0)); @@ -242,11 +224,11 @@ TEST( LinearizedFactor, add_hessian ) // Create two identical factors and make sure they're equal - JacobianFactor::shared_ptr jf = boost::static_pointer_cast(betweenFactor.linearize(values, ordering)); + JacobianFactor::shared_ptr jf = boost::static_pointer_cast(betweenFactor.linearize(values)); HessianFactor::shared_ptr hf(new HessianFactor(*jf)); - LinearizedHessianFactor::shared_ptr hessian(new LinearizedHessianFactor(hf, ordering, values)); + LinearizedHessianFactor::shared_ptr hessian(new LinearizedHessianFactor(hf, values)); NonlinearFactorGraph graph1; graph1.push_back(hessian); - NonlinearFactorGraph graph2; graph2.add(*hessian); + NonlinearFactorGraph graph2; graph2.push_back(*hessian); CHECK(assert_equal(graph1, graph2)); } @@ -270,9 +252,9 @@ TEST( LinearizedFactor, add_hessian ) // // // // Create a linearized hessian factor -// JacobianFactor::shared_ptr jf = boost::static_pointer_cast(betweenFactor.linearize(values, ordering)); +// JacobianFactor::shared_ptr jf = boost::static_pointer_cast(betweenFactor.linearize(values)); // HessianFactor::shared_ptr hf(new HessianFactor(*jf)); -// LinearizedHessianFactor hessian(hf, ordering, values); +// LinearizedHessianFactor hessian(hf, values); // // // for(double x1 = -10; x1 < 10; x1 += 2.0) { @@ -294,8 +276,8 @@ TEST( LinearizedFactor, add_hessian ) // EXPECT_DOUBLES_EQUAL(expected_error, actual_error, 1e-9 ); // // // Check that the linearized factors are identical -// GaussianFactor::shared_ptr expected_factor = HessianFactor::shared_ptr(new HessianFactor(*betweenFactor.linearize(linpoint, ordering))); -// GaussianFactor::shared_ptr actual_factor = hessian.linearize(linpoint, ordering); +// GaussianFactor::shared_ptr expected_factor = HessianFactor::shared_ptr(new HessianFactor(*betweenFactor.linearize(linpoint))); +// GaussianFactor::shared_ptr actual_factor = hessian.linearize(linpoint); // CHECK(assert_equal(*expected_factor, *actual_factor)); // } // } diff --git a/gtsam_unstable/slam/BetweenFactorEM.h b/gtsam_unstable/slam/BetweenFactorEM.h index 9eb253f22..279ffa6a5 100644 --- a/gtsam_unstable/slam/BetweenFactorEM.h +++ b/gtsam_unstable/slam/BetweenFactorEM.h @@ -68,7 +68,7 @@ namespace gtsam { BetweenFactorEM(Key key1, Key key2, const VALUE& measured, const SharedGaussian& model_inlier, const SharedGaussian& model_outlier, const double prior_inlier, const double prior_outlier) : - Base(key1, key2), key1_(key1), key2_(key2), measured_(measured), + Base(cref_list_of<2>(key1)(key2)), key1_(key1), key2_(key2), measured_(measured), model_inlier_(model_inlier), model_outlier_(model_outlier), prior_inlier_(prior_inlier), prior_outlier_(prior_outlier){ } @@ -119,7 +119,7 @@ namespace gtsam { * Hence \f$ b = z - h(x) = - \mathtt{error\_vector}(x) \f$ */ /* This version of linearize recalculates the noise model each time */ - virtual boost::shared_ptr linearize(const gtsam::Values& x, const gtsam::Ordering& ordering) const { + virtual boost::shared_ptr linearize(const gtsam::Values& x) const { // Only linearize if the factor is active if (!this->active(x)) return boost::shared_ptr(); @@ -132,7 +132,7 @@ namespace gtsam { A2 = A[1]; return gtsam::GaussianFactor::shared_ptr( - new gtsam::JacobianFactor(ordering[key1_], A1, ordering[key2_], A2, b, gtsam::noiseModel::Unit::Create(b.size()))); + new gtsam::JacobianFactor(key1_, A1, key2_, A2, b, gtsam::noiseModel::Unit::Create(b.size()))); } @@ -165,17 +165,17 @@ namespace gtsam { Vector err_wh_eq; err_wh_eq.resize(err_wh_inlier.rows()*2); - err_wh_eq << std::sqrt(p_inlier) * err_wh_inlier.array() , std::sqrt(p_outlier) * err_wh_outlier.array(); + err_wh_eq << sqrt(p_inlier) * err_wh_inlier.array() , sqrt(p_outlier) * err_wh_outlier.array(); if (H){ // stack Jacobians for the two indicators for each of the key - Matrix H1_inlier = std::sqrt(p_inlier)*model_inlier_->Whiten(H1); - Matrix H1_outlier = std::sqrt(p_outlier)*model_outlier_->Whiten(H1); + Matrix H1_inlier = sqrt(p_inlier)*model_inlier_->Whiten(H1); + Matrix H1_outlier = sqrt(p_outlier)*model_outlier_->Whiten(H1); Matrix H1_aug = gtsam::stack(2, &H1_inlier, &H1_outlier); - Matrix H2_inlier = std::sqrt(p_inlier)*model_inlier_->Whiten(H2); - Matrix H2_outlier = std::sqrt(p_outlier)*model_outlier_->Whiten(H2); + Matrix H2_inlier = sqrt(p_inlier)*model_inlier_->Whiten(H2); + Matrix H2_outlier = sqrt(p_outlier)*model_outlier_->Whiten(H2); Matrix H2_aug = gtsam::stack(2, &H2_inlier, &H2_outlier); (*H)[0].resize(H1_aug.rows(),H1_aug.cols()); @@ -232,8 +232,8 @@ namespace gtsam { Matrix invCov_inlier = model_inlier_->R().transpose() * model_inlier_->R(); Matrix invCov_outlier = model_outlier_->R().transpose() * model_outlier_->R(); - double p_inlier = prior_inlier_ * sqrt(invCov_inlier.norm()) * exp( -0.5 * err_wh_inlier.dot(err_wh_inlier) ); - double p_outlier = prior_outlier_ * sqrt(invCov_outlier.norm()) * exp( -0.5 * err_wh_outlier.dot(err_wh_outlier) ); + double p_inlier = prior_inlier_ * std::sqrt(invCov_inlier.norm()) * exp( -0.5 * err_wh_inlier.dot(err_wh_inlier) ); + double p_outlier = prior_outlier_ * std::sqrt(invCov_outlier.norm()) * exp( -0.5 * err_wh_outlier.dot(err_wh_outlier) ); double sumP = p_inlier + p_outlier; p_inlier /= sumP; diff --git a/gtsam_unstable/slam/CMakeLists.txt b/gtsam_unstable/slam/CMakeLists.txt index 27ade7a5e..b9580a85b 100644 --- a/gtsam_unstable/slam/CMakeLists.txt +++ b/gtsam_unstable/slam/CMakeLists.txt @@ -25,7 +25,7 @@ set (slam_full_libs # Exclude tests that don't work set (slam_excluded_tests - "${CMAKE_CURRENT_SOURCE_DIR}/tests/testSerialization.cpp" +# "${CMAKE_CURRENT_SOURCE_DIR}/tests/testSerialization.cpp" # "" # Add to this list, with full path, to exclude ) # Add all tests diff --git a/gtsam_unstable/slam/DummyFactor.cpp b/gtsam_unstable/slam/DummyFactor.cpp index 6f4b3b109..62ada7e41 100644 --- a/gtsam_unstable/slam/DummyFactor.cpp +++ b/gtsam_unstable/slam/DummyFactor.cpp @@ -7,13 +7,16 @@ #include +#include #include +using namespace boost::assign; + namespace gtsam { /* ************************************************************************* */ DummyFactor::DummyFactor(const Key& key1, size_t dim1, const Key& key2, size_t dim2) -: NonlinearFactor(key1, key2) +: NonlinearFactor(cref_list_of<2>(key1)(key2)) { dims_.push_back(dim1); dims_.push_back(dim2); @@ -38,7 +41,7 @@ bool DummyFactor::equals(const NonlinearFactor& f, double tol) const { /* ************************************************************************* */ boost::shared_ptr -DummyFactor::linearize(const Values& c, const Ordering& ordering) const { +DummyFactor::linearize(const Values& c) const { // Only linearize if the factor is active if (!this->active(c)) return boost::shared_ptr(); @@ -46,7 +49,7 @@ DummyFactor::linearize(const Values& c, const Ordering& ordering) const { // Fill in terms with zero matrices std::vector > terms(this->size()); for(size_t j=0; jsize(); ++j) { - terms[j].first = ordering[this->keys()[j]]; + terms[j].first = keys()[j]; terms[j].second = zeros(rowDim_, dims_[j]); } diff --git a/gtsam_unstable/slam/DummyFactor.h b/gtsam_unstable/slam/DummyFactor.h index d2ea2567f..0b0a1bd6c 100644 --- a/gtsam_unstable/slam/DummyFactor.h +++ b/gtsam_unstable/slam/DummyFactor.h @@ -54,8 +54,7 @@ public: virtual size_t dim() const { return rowDim_; } /** linearize to a GaussianFactor */ - virtual boost::shared_ptr - linearize(const Values& c, const Ordering& ordering) const; + virtual boost::shared_ptr linearize(const Values& c) const; /** * Creates a shared_ptr clone of the factor - needs to be specialized to allow diff --git a/gtsam_unstable/slam/SmartRangeFactor.h b/gtsam_unstable/slam/SmartRangeFactor.h index c2a43b019..e9fe1aa3e 100644 --- a/gtsam_unstable/slam/SmartRangeFactor.h +++ b/gtsam_unstable/slam/SmartRangeFactor.h @@ -11,8 +11,9 @@ #include #include -#include +#include #include +#include #include #include diff --git a/gtsam_unstable/slam/tests/testDummyFactor.cpp b/gtsam_unstable/slam/tests/testDummyFactor.cpp index c6a1c8bd3..86b4e5584 100644 --- a/gtsam_unstable/slam/tests/testDummyFactor.cpp +++ b/gtsam_unstable/slam/tests/testDummyFactor.cpp @@ -10,10 +10,11 @@ #include -#include - #include +#include +#include + using namespace gtsam; const double tol = 1e-9; @@ -41,15 +42,12 @@ TEST( testDummyFactor, basics ) { // errors - all zeros DOUBLES_EQUAL(0.0, dummyfactor.error(c), tol); - Ordering ordering; - ordering += key1, key2; - // linearization: all zeros - GaussianFactor::shared_ptr actLinearization = dummyfactor.linearize(c, ordering); + GaussianFactor::shared_ptr actLinearization = dummyfactor.linearize(c); CHECK(actLinearization); noiseModel::Diagonal::shared_ptr model3 = noiseModel::Unit::Create(3); GaussianFactor::shared_ptr expLinearization(new JacobianFactor( - ordering[key1], zeros(3,3), ordering[key2], zeros(3,3), zero(3), model3)); + key1, zeros(3,3), key2, zeros(3,3), zero(3), model3)); EXPECT(assert_equal(*expLinearization, *actLinearization, tol)); } diff --git a/gtsam_unstable/slam/tests/testInvDepthFactor3.cpp b/gtsam_unstable/slam/tests/testInvDepthFactor3.cpp index 9ab38bb37..21f2bc4dc 100644 --- a/gtsam_unstable/slam/tests/testInvDepthFactor3.cpp +++ b/gtsam_unstable/slam/tests/testInvDepthFactor3.cpp @@ -49,7 +49,7 @@ TEST( InvDepthFactor, optimize) { InverseDepthFactor::shared_ptr factor(new InverseDepthFactor(expected_uv, sigma, Symbol('x',1), Symbol('l',1), Symbol('d',1), K)); graph.push_back(factor); - graph.add(PoseConstraint(Symbol('x',1),level_pose)); + graph += PoseConstraint(Symbol('x',1),level_pose); initial.insert(Symbol('x',1), level_pose); initial.insert(Symbol('l',1), inv_landmark); initial.insert(Symbol('d',1), inv_depth); @@ -75,7 +75,7 @@ TEST( InvDepthFactor, optimize) { Symbol('x',2), Symbol('l',1),Symbol('d',1),K)); graph.push_back(factor1); - graph.add(PoseConstraint(Symbol('x',2),right_pose)); + graph += PoseConstraint(Symbol('x',2),right_pose); initial.insert(Symbol('x',2), right_pose); diff --git a/gtsam_unstable/slam/tests/testMultiProjectionFactor.cpp b/gtsam_unstable/slam/tests/testMultiProjectionFactor.cpp index 1cb338b4b..a0f692e9e 100644 --- a/gtsam_unstable/slam/tests/testMultiProjectionFactor.cpp +++ b/gtsam_unstable/slam/tests/testMultiProjectionFactor.cpp @@ -16,7 +16,6 @@ * @date Nov 2009 */ -#include #include #include #include @@ -25,11 +24,10 @@ #include #include #include -#include +#include #include #include -#include -#include +#include #include #include #include @@ -44,7 +42,7 @@ using namespace gtsam; // make a realistic calibration matrix static double fov = 60; // degrees -static size_t w=640,h=480; +static int w=640,h=480; static Cal3_S2::shared_ptr K(new Cal3_S2(fov,w,h)); // Create a noise model for the pixel error @@ -77,7 +75,7 @@ TEST( MultiProjectionFactor, create ){ views.insert(x3); MultiProjectionFactor mpFactor(n_measPixel, noiseProjection, views, l1, K); - graph.add(mpFactor); + graph += mpFactor; } diff --git a/gtsam_unstable/slam/tests/testSerialization.cpp b/gtsam_unstable/slam/tests/testSerialization.cpp index 616b5b6b7..4ade3b05f 100644 --- a/gtsam_unstable/slam/tests/testSerialization.cpp +++ b/gtsam_unstable/slam/tests/testSerialization.cpp @@ -8,6 +8,14 @@ */ #include +#include + +TEST(testSerialization, disabled) +{ + CHECK(("*** testSerialization in gtsam_unstable/slam is disabled *** - Needs conversion for unordered", 0)); +} + +#if 0 #include @@ -117,6 +125,8 @@ TEST( testSerialization, serialization_file ) { EXPECT(assert_equal(values, actValuesXML)); } +#endif + /* ************************************************************************* */ int main() { TestResult tr; return TestRegistry::runAllTests(tr); } /* ************************************************************************* */ diff --git a/gtsam_unstable/slam/tests/testSmartProjectionFactor.cpp b/gtsam_unstable/slam/tests/testSmartProjectionFactor.cpp index cd24a9572..2e277ab76 100644 --- a/gtsam_unstable/slam/tests/testSmartProjectionFactor.cpp +++ b/gtsam_unstable/slam/tests/testSmartProjectionFactor.cpp @@ -16,7 +16,16 @@ * @date Nov 2009 */ -#include +#include +#include + +TEST(SmartProjectionFactor, disabled) +{ + CHECK(("*** testSmartProjectionFactor is disabled *** - Needs conversion for unordered", 0)); +} + +#if 0 + #include #include #include @@ -26,11 +35,8 @@ #include #include #include -#include #include #include -#include -#include #include #include #include @@ -39,10 +45,11 @@ #include #include #include -#include +#include using namespace std; +using namespace boost::assign; using namespace gtsam; // make a realistic calibration matrix @@ -369,8 +376,8 @@ TEST( SmartProjectionFactor, 3poses_smart_projection_factor ){ graph.push_back(smartFactor1); graph.push_back(smartFactor2); graph.push_back(smartFactor3); - graph.add(PriorFactor(x1, pose1, noisePrior)); - graph.add(PriorFactor(x2, pose2, noisePrior)); + graph.push_back(PriorFactor(x1, pose1, noisePrior)); + graph.push_back(PriorFactor(x2, pose2, noisePrior)); // Pose3 noise_pose = Pose3(Rot3::ypr(-M_PI/10, 0., -M_PI/10), gtsam::Point3(0.5,0.1,0.3)); // noise from regular projection factor test below Pose3 noise_pose = Pose3(Rot3::ypr(-M_PI/100, 0., -M_PI/100), gtsam::Point3(0.1,0.1,0.1)); // smaller noise @@ -531,6 +538,7 @@ TEST( SmartProjectionFactor, Hessian ){ } +#endif /* ************************************************************************* */ int main() { TestResult tr; return TestRegistry::runAllTests(tr); } diff --git a/gtsam_unstable/slam/tests/testSmartRangeFactor.cpp b/gtsam_unstable/slam/tests/testSmartRangeFactor.cpp index 8f485738b..7708caf66 100644 --- a/gtsam_unstable/slam/tests/testSmartRangeFactor.cpp +++ b/gtsam_unstable/slam/tests/testSmartRangeFactor.cpp @@ -113,11 +113,11 @@ TEST( SmartRangeFactor, optimization ) { // Create Factor graph NonlinearFactorGraph graph; - graph.add(f); + graph.push_back(f); const noiseModel::Base::shared_ptr // priorNoise = noiseModel::Diagonal::Sigmas(Vector3(1, 1, M_PI)); - graph.add(PriorFactor(1, pose1, priorNoise)); - graph.add(PriorFactor(2, pose2, priorNoise)); + graph.push_back(PriorFactor(1, pose1, priorNoise)); + graph.push_back(PriorFactor(2, pose2, priorNoise)); // Try optimizing LevenbergMarquardtParams params; diff --git a/tests/CMakeLists.txt b/tests/CMakeLists.txt index cc91ea4f3..e07e0a6b0 100644 --- a/tests/CMakeLists.txt +++ b/tests/CMakeLists.txt @@ -5,6 +5,7 @@ set(tests_local_libs linear discrete inference + symbolic geometry base ccolamd diff --git a/tests/smallExample.h b/tests/smallExample.h index eb87e84e2..26ed41a27 100644 --- a/tests/smallExample.h +++ b/tests/smallExample.h @@ -24,19 +24,20 @@ #include #include #include +#include +#include #include +#include namespace gtsam { namespace example { namespace { -typedef NonlinearFactorGraph Graph; - /** * Create small example for non-linear factor graph */ -boost::shared_ptr sharedNonlinearFactorGraph(); -Graph createNonlinearFactorGraph(); +boost::shared_ptr sharedNonlinearFactorGraph(); +NonlinearFactorGraph createNonlinearFactorGraph(); /** * Create values structure to go with it @@ -56,18 +57,18 @@ Values createNoisyValues(); /** * Zero delta config */ -VectorValues createZeroDelta(const Ordering& ordering); +VectorValues createZeroDelta(); /** * Delta config that, when added to noisyValues, returns the ground truth */ -VectorValues createCorrectDelta(const Ordering& ordering); +VectorValues createCorrectDelta(); /** * create a linear factor graph * The non-linear graph above evaluated at NoisyValues */ -GaussianFactorGraph createGaussianFactorGraph(const Ordering& ordering); +GaussianFactorGraph createGaussianFactorGraph(); /** * create small Chordal Bayes Net x <- y @@ -77,21 +78,21 @@ GaussianBayesNet createSmallGaussianBayesNet(); /** * Create really non-linear factor graph (cos/sin) */ -boost::shared_ptr +boost::shared_ptr sharedReallyNonlinearFactorGraph(); -Graph createReallyNonlinearFactorGraph(); +NonlinearFactorGraph createReallyNonlinearFactorGraph(); /** * Create a full nonlinear smoother * @param T number of time-steps */ -std::pair createNonlinearSmoother(int T); +std::pair createNonlinearSmoother(int T); /** * Create a Kalman smoother by linearizing a non-linear factor graph * @param T number of time-steps */ -std::pair, Ordering> createSmoother(int T, boost::optional ordering = boost::none); +GaussianFactorGraph createSmoother(int T); /* ******************************************************* */ // Linear Constrained Examples @@ -165,19 +166,19 @@ static SharedDiagonal sigma0_1 = noiseModel::Isotropic::Sigma(2,0.1); static SharedDiagonal sigma0_2 = noiseModel::Isotropic::Sigma(2,0.2); static SharedDiagonal constraintModel = noiseModel::Constrained::All(2); -static const Index _l1_=0, _x1_=1, _x2_=2; -static const Index _x_=0, _y_=1, _z_=2; +static const Key _l1_=0, _x1_=1, _x2_=2; +static const Key _x_=0, _y_=1, _z_=2; } // \namespace impl /* ************************************************************************* */ -boost::shared_ptr sharedNonlinearFactorGraph() { +boost::shared_ptr sharedNonlinearFactorGraph() { using namespace impl; using symbol_shorthand::X; using symbol_shorthand::L; // Create - boost::shared_ptr nlfg( - new Graph); + boost::shared_ptr nlfg( + new NonlinearFactorGraph); // prior on x1 Point2 mu; @@ -203,7 +204,7 @@ boost::shared_ptr sharedNonlinearFactorGraph() { } /* ************************************************************************* */ -Graph createNonlinearFactorGraph() { +NonlinearFactorGraph createNonlinearFactorGraph() { return *sharedNonlinearFactorGraph(); } @@ -221,10 +222,10 @@ Values createValues() { /* ************************************************************************* */ VectorValues createVectorValues() { using namespace impl; - VectorValues c(std::vector(3, 2)); - c[_l1_] = Vector_(2, 0.0, -1.0); - c[_x1_] = Vector_(2, 0.0, 0.0); - c[_x2_] = Vector_(2, 1.5, 0.0); + VectorValues c = boost::assign::pair_list_of + (_l1_, Vector_(2, 0.0, -1.0)) + (_x1_, Vector_(2, 0.0, 0.0)) + (_x2_, Vector_(2, 1.5, 0.0)); return c; } @@ -245,47 +246,45 @@ Values createNoisyValues() { } /* ************************************************************************* */ -VectorValues createCorrectDelta(const Ordering& ordering) { +VectorValues createCorrectDelta() { using symbol_shorthand::X; using symbol_shorthand::L; - VectorValues c(std::vector(3,2)); - c[ordering[L(1)]] = Vector_(2, -0.1, 0.1); - c[ordering[X(1)]] = Vector_(2, -0.1, -0.1); - c[ordering[X(2)]] = Vector_(2, 0.1, -0.2); + VectorValues c; + c.insert(L(1), Vector_(2, -0.1, 0.1)); + c.insert(X(1), Vector_(2, -0.1, -0.1)); + c.insert(X(2), Vector_(2, 0.1, -0.2)); return c; } /* ************************************************************************* */ -VectorValues createZeroDelta(const Ordering& ordering) { +VectorValues createZeroDelta() { using symbol_shorthand::X; using symbol_shorthand::L; - VectorValues c(std::vector(3,2)); - c[ordering[L(1)]] = zero(2); - c[ordering[X(1)]] = zero(2); - c[ordering[X(2)]] = zero(2); + VectorValues c; + c.insert(L(1), zero(2)); + c.insert(X(1), zero(2)); + c.insert(X(2), zero(2)); return c; } /* ************************************************************************* */ -GaussianFactorGraph createGaussianFactorGraph(const Ordering& ordering) { +GaussianFactorGraph createGaussianFactorGraph() { using symbol_shorthand::X; using symbol_shorthand::L; // Create empty graph GaussianFactorGraph fg; - SharedDiagonal unit2 = noiseModel::Unit::Create(2); - // linearized prior on x1: c[_x1_]+x1=0 i.e. x1=-c[_x1_] - fg.push_back(boost::make_shared(ordering[X(1)], 10*eye(2), -1.0*ones(2), unit2)); + fg += JacobianFactor(X(1), 10*eye(2), -1.0*ones(2)); // odometry between x1 and x2: x2-x1=[0.2;-0.1] - fg.push_back(boost::make_shared(ordering[X(1)], -10*eye(2),ordering[X(2)], 10*eye(2), Vector_(2, 2.0, -1.0), unit2)); + fg += JacobianFactor(X(1), -10*eye(2), X(2), 10*eye(2), Vector_(2, 2.0, -1.0)); // measurement between x1 and l1: l1-x1=[0.0;0.2] - fg.push_back(boost::make_shared(ordering[X(1)], -5*eye(2), ordering[L(1)], 5*eye(2), Vector_(2, 0.0, 1.0), unit2)); + fg += JacobianFactor(X(1), -5*eye(2), L(1), 5*eye(2), Vector_(2, 0.0, 1.0)); // measurement between x2 and l1: l1-x2=[-0.2;0.3] - fg.push_back(boost::make_shared(ordering[X(2)], -5*eye(2), ordering[L(1)], 5*eye(2), Vector_(2, -1.0, 1.5), unit2)); + fg += JacobianFactor(X(2), -5*eye(2), L(1), 5*eye(2), Vector_(2, -1.0, 1.5)); return fg; } @@ -303,12 +302,10 @@ GaussianBayesNet createSmallGaussianBayesNet() { Vector d1(1), d2(1); d1(0) = 9; d2(0) = 5; - Vector tau(1); - tau(0) = 1.0; // define nodes and specify in reverse topological sort (i.e. parents last) - GaussianConditional::shared_ptr Px_y(new GaussianConditional(_x_, d1, R11, _y_, S12, tau)); - GaussianConditional::shared_ptr Py(new GaussianConditional(_y_, d2, R22, tau)); + GaussianConditional::shared_ptr Px_y(new GaussianConditional(_x_, d1, R11, _y_, S12)); + GaussianConditional::shared_ptr Py(new GaussianConditional(_y_, d2, R22)); GaussianBayesNet cbn; cbn.push_back(Px_y); cbn.push_back(Py); @@ -349,10 +346,10 @@ struct UnaryFactor: public gtsam::NoiseModelFactor1 { } /* ************************************************************************* */ -boost::shared_ptr sharedReallyNonlinearFactorGraph() { +boost::shared_ptr sharedReallyNonlinearFactorGraph() { using symbol_shorthand::X; using symbol_shorthand::L; - boost::shared_ptr fg(new Graph); + boost::shared_ptr fg(new NonlinearFactorGraph); Vector z = Vector_(2, 1.0, 0.0); double sigma = 0.1; boost::shared_ptr factor( @@ -361,18 +358,18 @@ boost::shared_ptr sharedReallyNonlinearFactorGraph() { return fg; } -Graph createReallyNonlinearFactorGraph() { +NonlinearFactorGraph createReallyNonlinearFactorGraph() { return *sharedReallyNonlinearFactorGraph(); } /* ************************************************************************* */ -std::pair createNonlinearSmoother(int T) { +std::pair createNonlinearSmoother(int T) { using namespace impl; using symbol_shorthand::X; using symbol_shorthand::L; // Create - Graph nlfg; + NonlinearFactorGraph nlfg; Values poses; // prior on x1 @@ -400,13 +397,12 @@ std::pair createNonlinearSmoother(int T) { } /* ************************************************************************* */ -std::pair, Ordering> createSmoother(int T, boost::optional ordering) { - Graph nlfg; +GaussianFactorGraph createSmoother(int T) { + NonlinearFactorGraph nlfg; Values poses; boost::tie(nlfg, poses) = createNonlinearSmoother(T); - if(!ordering) ordering = *poses.orderingArbitrary(); - return std::make_pair(*nlfg.linearize(poses, *ordering), *ordering); + return *nlfg.linearize(poses); } /* ************************************************************************* */ @@ -443,10 +439,10 @@ VectorValues createSimpleConstraintValues() { using namespace impl; using symbol_shorthand::X; using symbol_shorthand::L; - VectorValues config(std::vector(2,2)); + VectorValues config; Vector v = Vector_(2, 1.0, -1.0); - config[_x_] = v; - config[_y_] = v; + config.insert(_x_, v); + config.insert(_y_, v); return config; } @@ -487,9 +483,9 @@ GaussianFactorGraph createSingleConstraintGraph() { /* ************************************************************************* */ VectorValues createSingleConstraintValues() { using namespace impl; - VectorValues config(std::vector(2,2)); - config[_x_] = Vector_(2, 1.0, -1.0); - config[_y_] = Vector_(2, 0.2, 0.1); + VectorValues config = boost::assign::pair_list_of + (_x_, Vector_(2, 1.0, -1.0)) + (_y_, Vector_(2, 0.2, 0.1)); return config; } @@ -551,17 +547,17 @@ GaussianFactorGraph createMultiConstraintGraph() { /* ************************************************************************* */ VectorValues createMultiConstraintValues() { using namespace impl; - VectorValues config(std::vector(3,2)); - config[_x_] = Vector_(2, -2.0, 2.0); - config[_y_] = Vector_(2, -0.1, 0.4); - config[_z_] = Vector_(2, -4.0, 5.0); + VectorValues config = boost::assign::pair_list_of + (_x_, Vector_(2, -2.0, 2.0)) + (_y_, Vector_(2, -0.1, 0.4)) + (_z_, Vector_(2, -4.0, 5.0)); return config; } /* ************************************************************************* */ // Create key for simulated planar graph namespace impl { -Symbol key(int x, int y) { +Symbol key(size_t x, size_t y) { using symbol_shorthand::X; return X(1000*x+y); } @@ -599,14 +595,13 @@ boost::tuple planarGraph(size_t N) { for (size_t x = 1; x <= N; x++) for (size_t y = 1; y <= N; y++) zeros.insert(key(x, y), Point2()); - Ordering ordering(planarOrdering(N)); - VectorValues xtrue(zeros.dims(ordering)); + VectorValues xtrue; for (size_t x = 1; x <= N; x++) for (size_t y = 1; y <= N; y++) - xtrue[ordering[key(x, y)]] = Point2(x,y).vector(); + xtrue.insert(key(x, y), Point2((double)x, (double)y).vector()); // linearize around zero - boost::shared_ptr gfg = nlfg.linearize(zeros, ordering); + boost::shared_ptr gfg = nlfg.linearize(zeros); return boost::make_tuple(*gfg, xtrue); } diff --git a/tests/testBoundingConstraint.cpp b/tests/testBoundingConstraint.cpp index 516b7e070..65803ecac 100644 --- a/tests/testBoundingConstraint.cpp +++ b/tests/testBoundingConstraint.cpp @@ -116,10 +116,8 @@ TEST( testBoundingConstraint, unary_linearization_inactive) { Point2 pt1(2.0, 3.0); Values config1; config1.insert(key, pt1); - Ordering ordering; - ordering += key; - GaussianFactor::shared_ptr actual1 = constraint1.linearize(config1, ordering); - GaussianFactor::shared_ptr actual2 = constraint2.linearize(config1, ordering); + GaussianFactor::shared_ptr actual1 = constraint1.linearize(config1); + GaussianFactor::shared_ptr actual2 = constraint2.linearize(config1); EXPECT(!actual1); EXPECT(!actual2); } @@ -129,12 +127,10 @@ TEST( testBoundingConstraint, unary_linearization_active) { Point2 pt2(-2.0, -3.0); Values config2; config2.insert(key, pt2); - Ordering ordering; - ordering += key; - GaussianFactor::shared_ptr actual1 = constraint1.linearize(config2, ordering); - GaussianFactor::shared_ptr actual2 = constraint2.linearize(config2, ordering); - JacobianFactor expected1(ordering[key], Matrix_(1, 2, 1.0, 0.0), repeat(1, 3.0), hard_model1); - JacobianFactor expected2(ordering[key], Matrix_(1, 2, 0.0, 1.0), repeat(1, 5.0), hard_model1); + GaussianFactor::shared_ptr actual1 = constraint1.linearize(config2); + GaussianFactor::shared_ptr actual2 = constraint2.linearize(config2); + JacobianFactor expected1(key, Matrix_(1, 2, 1.0, 0.0), repeat(1, 3.0), hard_model1); + JacobianFactor expected2(key, Matrix_(1, 2, 0.0, 1.0), repeat(1, 5.0), hard_model1); EXPECT(assert_equal((const GaussianFactor&)expected1, *actual1, tol)); EXPECT(assert_equal((const GaussianFactor&)expected2, *actual2, tol)); } @@ -148,9 +144,9 @@ TEST( testBoundingConstraint, unary_simple_optimization1) { NonlinearFactorGraph graph; Symbol x1('x',1); - graph.add(iq2D::PoseXInequality(x1, 1.0, true)); - graph.add(iq2D::PoseYInequality(x1, 2.0, true)); - graph.add(simulated2D::Prior(start_pt, soft_model2, x1)); + graph += iq2D::PoseXInequality(x1, 1.0, true); + graph += iq2D::PoseYInequality(x1, 2.0, true); + graph += simulated2D::Prior(start_pt, soft_model2, x1); Values initValues; initValues.insert(x1, start_pt); @@ -169,9 +165,9 @@ TEST( testBoundingConstraint, unary_simple_optimization2) { Point2 start_pt(2.0, 3.0); NonlinearFactorGraph graph; - graph.add(iq2D::PoseXInequality(key, 1.0, false)); - graph.add(iq2D::PoseYInequality(key, 2.0, false)); - graph.add(simulated2D::Prior(start_pt, soft_model2, key)); + graph += iq2D::PoseXInequality(key, 1.0, false); + graph += iq2D::PoseYInequality(key, 2.0, false); + graph += simulated2D::Prior(start_pt, soft_model2, key); Values initValues; initValues.insert(key, start_pt); @@ -199,16 +195,15 @@ TEST( testBoundingConstraint, MaxDistance_basics) { Values config1; config1.insert(key1, pt1); config1.insert(key2, pt1); - Ordering ordering; ordering += key1, key2; EXPECT(!rangeBound.active(config1)); EXPECT(assert_equal(zero(1), rangeBound.unwhitenedError(config1))); - EXPECT(!rangeBound.linearize(config1, ordering)); + EXPECT(!rangeBound.linearize(config1)); EXPECT_DOUBLES_EQUAL(0.0, rangeBound.error(config1), tol); config1.update(key2, pt2); EXPECT(!rangeBound.active(config1)); EXPECT(assert_equal(zero(1), rangeBound.unwhitenedError(config1))); - EXPECT(!rangeBound.linearize(config1, ordering)); + EXPECT(!rangeBound.linearize(config1)); EXPECT_DOUBLES_EQUAL(0.0, rangeBound.error(config1), tol); config1.update(key2, pt3); @@ -229,9 +224,9 @@ TEST( testBoundingConstraint, MaxDistance_simple_optimization) { Symbol x1('x',1), x2('x',2); NonlinearFactorGraph graph; - graph.add(simulated2D::equality_constraints::UnaryEqualityConstraint(pt1, x1)); - graph.add(simulated2D::Prior(pt2_init, soft_model2_alt, x2)); - graph.add(iq2D::PoseMaxDistConstraint(x1, x2, 2.0)); + graph += simulated2D::equality_constraints::UnaryEqualityConstraint(pt1, x1); + graph += simulated2D::Prior(pt2_init, soft_model2_alt, x2); + graph += iq2D::PoseMaxDistConstraint(x1, x2, 2.0); Values initial_state; initial_state.insert(x1, pt1); @@ -255,12 +250,12 @@ TEST( testBoundingConstraint, avoid_demo) { Point2 odo(2.0, 0.0); NonlinearFactorGraph graph; - graph.add(simulated2D::equality_constraints::UnaryEqualityConstraint(x1_pt, x1)); - graph.add(simulated2D::Odometry(odo, soft_model2_alt, x1, x2)); - graph.add(iq2D::LandmarkAvoid(x2, l1, radius)); - graph.add(simulated2D::equality_constraints::UnaryEqualityPointConstraint(l1_pt, l1)); - graph.add(simulated2D::Odometry(odo, soft_model2_alt, x2, x3)); - graph.add(simulated2D::equality_constraints::UnaryEqualityConstraint(x3_pt, x3)); + graph += simulated2D::equality_constraints::UnaryEqualityConstraint(x1_pt, x1); + graph += simulated2D::Odometry(odo, soft_model2_alt, x1, x2); + graph += iq2D::LandmarkAvoid(x2, l1, radius); + graph += simulated2D::equality_constraints::UnaryEqualityPointConstraint(l1_pt, l1); + graph += simulated2D::Odometry(odo, soft_model2_alt, x2, x3); + graph += simulated2D::equality_constraints::UnaryEqualityConstraint(x3_pt, x3); Values init, expected; init.insert(x1, x1_pt); diff --git a/tests/testDoglegOptimizer.cpp b/tests/testDoglegOptimizer.cpp index 008ea443a..f55bd1b49 100644 --- a/tests/testDoglegOptimizer.cpp +++ b/tests/testDoglegOptimizer.cpp @@ -15,17 +15,15 @@ * @author Richard Roberts */ +#include + #include #include #include #include -#include #include -#include #include -#include - #ifdef __GNUC__ #pragma GCC diagnostic push #pragma GCC diagnostic ignored "-Wunused-variable" @@ -45,282 +43,6 @@ using namespace gtsam; using symbol_shorthand::X; using symbol_shorthand::L; -/* ************************************************************************* */ -double computeError(const GaussianBayesNet& gbn, const LieVector& values) { - - // Convert Vector to VectorValues - VectorValues vv = *allocateVectorValues(gbn); - internal::writeVectorValuesSlices(values, vv, - boost::make_counting_iterator(size_t(0)), boost::make_counting_iterator(vv.size())); - - // Convert to factor graph - GaussianFactorGraph gfg(gbn); - return gfg.error(vv); -} - -/* ************************************************************************* */ -double computeErrorBt(const BayesTree& gbt, const LieVector& values) { - - // Convert Vector to VectorValues - VectorValues vv = *allocateVectorValues(gbt); - internal::writeVectorValuesSlices(values, vv, - boost::make_counting_iterator(size_t(0)), boost::make_counting_iterator(vv.size())); - - // Convert to factor graph - GaussianFactorGraph gfg(gbt); - return gfg.error(vv); -} - -/* ************************************************************************* */ -TEST(DoglegOptimizer, ComputeSteepestDescentPoint) { - - // Create an arbitrary Bayes Net - GaussianBayesNet gbn; - gbn += GaussianConditional::shared_ptr(new GaussianConditional( - 0, Vector_(2, 1.0,2.0), Matrix_(2,2, 3.0,4.0,0.0,6.0), - 3, Matrix_(2,2, 7.0,8.0,9.0,10.0), - 4, Matrix_(2,2, 11.0,12.0,13.0,14.0), ones(2))); - gbn += GaussianConditional::shared_ptr(new GaussianConditional( - 1, Vector_(2, 15.0,16.0), Matrix_(2,2, 17.0,18.0,0.0,20.0), - 2, Matrix_(2,2, 21.0,22.0,23.0,24.0), - 4, Matrix_(2,2, 25.0,26.0,27.0,28.0), ones(2))); - gbn += GaussianConditional::shared_ptr(new GaussianConditional( - 2, Vector_(2, 29.0,30.0), Matrix_(2,2, 31.0,32.0,0.0,34.0), - 3, Matrix_(2,2, 35.0,36.0,37.0,38.0), ones(2))); - gbn += GaussianConditional::shared_ptr(new GaussianConditional( - 3, Vector_(2, 39.0,40.0), Matrix_(2,2, 41.0,42.0,0.0,44.0), - 4, Matrix_(2,2, 45.0,46.0,47.0,48.0), ones(2))); - gbn += GaussianConditional::shared_ptr(new GaussianConditional( - 4, Vector_(2, 49.0,50.0), Matrix_(2,2, 51.0,52.0,0.0,54.0), ones(2))); - - // Compute the Hessian numerically - Matrix hessian = numericalHessian( - boost::function(boost::bind(&computeError, gbn, _1)), - LieVector(VectorValues::Zero(*allocateVectorValues(gbn)).asVector())); - - // Compute the gradient numerically - VectorValues gradientValues = *allocateVectorValues(gbn); - Vector gradient = numericalGradient( - boost::function(boost::bind(&computeError, gbn, _1)), - LieVector(VectorValues::Zero(gradientValues).asVector())); - internal::writeVectorValuesSlices(gradient, gradientValues, - boost::make_counting_iterator(size_t(0)), boost::make_counting_iterator(gradientValues.size())); - - // Compute the gradient using dense matrices - Matrix augmentedHessian = GaussianFactorGraph(gbn).augmentedHessian(); - LONGS_EQUAL(11, augmentedHessian.cols()); - VectorValues denseMatrixGradient = *allocateVectorValues(gbn); - internal::writeVectorValuesSlices(-augmentedHessian.col(10).segment(0,10), denseMatrixGradient, - boost::make_counting_iterator(size_t(0)), boost::make_counting_iterator(gradientValues.size())); - EXPECT(assert_equal(gradientValues, denseMatrixGradient, 1e-5)); - - // Compute the steepest descent point - double step = -gradient.squaredNorm() / (gradient.transpose() * hessian * gradient)(0); - VectorValues expected = gradientValues; scal(step, expected); - - // Compute the steepest descent point with the dogleg function - VectorValues actual = optimizeGradientSearch(gbn); - - // Check that points agree - EXPECT(assert_equal(expected, actual, 1e-5)); - - // Check that point causes a decrease in error - double origError = GaussianFactorGraph(gbn).error(VectorValues::Zero(*allocateVectorValues(gbn))); - double newError = GaussianFactorGraph(gbn).error(actual); - EXPECT(newError < origError); -} - -/* ************************************************************************* */ -TEST(DoglegOptimizer, BT_BN_equivalency) { - - // Create an arbitrary Bayes Tree - BayesTree bt; - bt.insert(BayesTree::sharedClique(new BayesTree::Clique( - GaussianConditional::shared_ptr(new GaussianConditional( - boost::assign::pair_list_of - (2, Matrix_(6,2, - 31.0,32.0, - 0.0,34.0, - 0.0,0.0, - 0.0,0.0, - 0.0,0.0, - 0.0,0.0)) - (3, Matrix_(6,2, - 35.0,36.0, - 37.0,38.0, - 41.0,42.0, - 0.0,44.0, - 0.0,0.0, - 0.0,0.0)) - (4, Matrix_(6,2, - 0.0,0.0, - 0.0,0.0, - 45.0,46.0, - 47.0,48.0, - 51.0,52.0, - 0.0,54.0)), - 3, Vector_(6, 29.0,30.0,39.0,40.0,49.0,50.0), ones(6)))))); - bt.insert(BayesTree::sharedClique(new BayesTree::Clique( - GaussianConditional::shared_ptr(new GaussianConditional( - boost::assign::pair_list_of - (0, Matrix_(4,2, - 3.0,4.0, - 0.0,6.0, - 0.0,0.0, - 0.0,0.0)) - (1, Matrix_(4,2, - 0.0,0.0, - 0.0,0.0, - 17.0,18.0, - 0.0,20.0)) - (2, Matrix_(4,2, - 0.0,0.0, - 0.0,0.0, - 21.0,22.0, - 23.0,24.0)) - (3, Matrix_(4,2, - 7.0,8.0, - 9.0,10.0, - 0.0,0.0, - 0.0,0.0)) - (4, Matrix_(4,2, - 11.0,12.0, - 13.0,14.0, - 25.0,26.0, - 27.0,28.0)), - 2, Vector_(4, 1.0,2.0,15.0,16.0), ones(4)))))); - - // Create an arbitrary Bayes Net - GaussianBayesNet gbn; - gbn += GaussianConditional::shared_ptr(new GaussianConditional( - 0, Vector_(2, 1.0,2.0), Matrix_(2,2, 3.0,4.0,0.0,6.0), - 3, Matrix_(2,2, 7.0,8.0,9.0,10.0), - 4, Matrix_(2,2, 11.0,12.0,13.0,14.0), ones(2))); - gbn += GaussianConditional::shared_ptr(new GaussianConditional( - 1, Vector_(2, 15.0,16.0), Matrix_(2,2, 17.0,18.0,0.0,20.0), - 2, Matrix_(2,2, 21.0,22.0,23.0,24.0), - 4, Matrix_(2,2, 25.0,26.0,27.0,28.0), ones(2))); - gbn += GaussianConditional::shared_ptr(new GaussianConditional( - 2, Vector_(2, 29.0,30.0), Matrix_(2,2, 31.0,32.0,0.0,34.0), - 3, Matrix_(2,2, 35.0,36.0,37.0,38.0), ones(2))); - gbn += GaussianConditional::shared_ptr(new GaussianConditional( - 3, Vector_(2, 39.0,40.0), Matrix_(2,2, 41.0,42.0,0.0,44.0), - 4, Matrix_(2,2, 45.0,46.0,47.0,48.0), ones(2))); - gbn += GaussianConditional::shared_ptr(new GaussianConditional( - 4, Vector_(2, 49.0,50.0), Matrix_(2,2, 51.0,52.0,0.0,54.0), ones(2))); - - GaussianFactorGraph expected(gbn); - GaussianFactorGraph actual(bt); - - EXPECT(assert_equal(expected.augmentedHessian(), actual.augmentedHessian())); -} - -/* ************************************************************************* */ -TEST(DoglegOptimizer, ComputeSteepestDescentPointBT) { - - // Create an arbitrary Bayes Tree - BayesTree bt; - bt.insert(BayesTree::sharedClique(new BayesTree::Clique( - GaussianConditional::shared_ptr(new GaussianConditional( - boost::assign::pair_list_of - (2, Matrix_(6,2, - 31.0,32.0, - 0.0,34.0, - 0.0,0.0, - 0.0,0.0, - 0.0,0.0, - 0.0,0.0)) - (3, Matrix_(6,2, - 35.0,36.0, - 37.0,38.0, - 41.0,42.0, - 0.0,44.0, - 0.0,0.0, - 0.0,0.0)) - (4, Matrix_(6,2, - 0.0,0.0, - 0.0,0.0, - 45.0,46.0, - 47.0,48.0, - 51.0,52.0, - 0.0,54.0)), - 3, Vector_(6, 29.0,30.0,39.0,40.0,49.0,50.0), ones(6)))))); - bt.insert(BayesTree::sharedClique(new BayesTree::Clique( - GaussianConditional::shared_ptr(new GaussianConditional( - boost::assign::pair_list_of - (0, Matrix_(4,2, - 3.0,4.0, - 0.0,6.0, - 0.0,0.0, - 0.0,0.0)) - (1, Matrix_(4,2, - 0.0,0.0, - 0.0,0.0, - 17.0,18.0, - 0.0,20.0)) - (2, Matrix_(4,2, - 0.0,0.0, - 0.0,0.0, - 21.0,22.0, - 23.0,24.0)) - (3, Matrix_(4,2, - 7.0,8.0, - 9.0,10.0, - 0.0,0.0, - 0.0,0.0)) - (4, Matrix_(4,2, - 11.0,12.0, - 13.0,14.0, - 25.0,26.0, - 27.0,28.0)), - 2, Vector_(4, 1.0,2.0,15.0,16.0), ones(4)))))); - - // Compute the Hessian numerically - Matrix hessian = numericalHessian( - boost::function(boost::bind(&computeErrorBt, bt, _1)), - LieVector(VectorValues::Zero(*allocateVectorValues(bt)).asVector())); - - // Compute the gradient numerically - VectorValues gradientValues = *allocateVectorValues(bt); - Vector gradient = numericalGradient( - boost::function(boost::bind(&computeErrorBt, bt, _1)), - LieVector(VectorValues::Zero(gradientValues).asVector())); - internal::writeVectorValuesSlices(gradient, gradientValues, - boost::make_counting_iterator(size_t(0)), boost::make_counting_iterator(gradientValues.size())); - - // Compute the gradient using dense matrices - Matrix augmentedHessian = GaussianFactorGraph(bt).augmentedHessian(); - LONGS_EQUAL(11, augmentedHessian.cols()); - VectorValues denseMatrixGradient = *allocateVectorValues(bt); - internal::writeVectorValuesSlices(-augmentedHessian.col(10).segment(0,10), denseMatrixGradient, - boost::make_counting_iterator(size_t(0)), boost::make_counting_iterator(gradientValues.size())); - EXPECT(assert_equal(gradientValues, denseMatrixGradient, 1e-5)); - - // Compute the steepest descent point - double step = -gradient.squaredNorm() / (gradient.transpose() * hessian * gradient)(0); - VectorValues expected = gradientValues; scal(step, expected); - - // Known steepest descent point from Bayes' net version - VectorValues expectedFromBN(5,2); - expectedFromBN[0] = Vector_(2, 0.000129034, 0.000688183); - expectedFromBN[1] = Vector_(2, 0.0109679, 0.0253767); - expectedFromBN[2] = Vector_(2, 0.0680441, 0.114496); - expectedFromBN[3] = Vector_(2, 0.16125, 0.241294); - expectedFromBN[4] = Vector_(2, 0.300134, 0.423233); - - // Compute the steepest descent point with the dogleg function - VectorValues actual = optimizeGradientSearch(bt); - - // Check that points agree - EXPECT(assert_equal(expected, actual, 1e-5)); - EXPECT(assert_equal(expectedFromBN, actual, 1e-5)); - - // Check that point causes a decrease in error - double origError = GaussianFactorGraph(bt).error(VectorValues::Zero(*allocateVectorValues(bt))); - double newError = GaussianFactorGraph(bt).error(actual); - EXPECT(newError < origError); -} - /* ************************************************************************* */ TEST(DoglegOptimizer, ComputeBlend) { // Create an arbitrary Bayes Net @@ -328,33 +50,33 @@ TEST(DoglegOptimizer, ComputeBlend) { gbn += GaussianConditional::shared_ptr(new GaussianConditional( 0, Vector_(2, 1.0,2.0), Matrix_(2,2, 3.0,4.0,0.0,6.0), 3, Matrix_(2,2, 7.0,8.0,9.0,10.0), - 4, Matrix_(2,2, 11.0,12.0,13.0,14.0), ones(2))); + 4, Matrix_(2,2, 11.0,12.0,13.0,14.0))); gbn += GaussianConditional::shared_ptr(new GaussianConditional( 1, Vector_(2, 15.0,16.0), Matrix_(2,2, 17.0,18.0,0.0,20.0), 2, Matrix_(2,2, 21.0,22.0,23.0,24.0), - 4, Matrix_(2,2, 25.0,26.0,27.0,28.0), ones(2))); + 4, Matrix_(2,2, 25.0,26.0,27.0,28.0))); gbn += GaussianConditional::shared_ptr(new GaussianConditional( 2, Vector_(2, 29.0,30.0), Matrix_(2,2, 31.0,32.0,0.0,34.0), - 3, Matrix_(2,2, 35.0,36.0,37.0,38.0), ones(2))); + 3, Matrix_(2,2, 35.0,36.0,37.0,38.0))); gbn += GaussianConditional::shared_ptr(new GaussianConditional( 3, Vector_(2, 39.0,40.0), Matrix_(2,2, 41.0,42.0,0.0,44.0), - 4, Matrix_(2,2, 45.0,46.0,47.0,48.0), ones(2))); + 4, Matrix_(2,2, 45.0,46.0,47.0,48.0))); gbn += GaussianConditional::shared_ptr(new GaussianConditional( - 4, Vector_(2, 49.0,50.0), Matrix_(2,2, 51.0,52.0,0.0,54.0), ones(2))); + 4, Vector_(2, 49.0,50.0), Matrix_(2,2, 51.0,52.0,0.0,54.0))); // Compute steepest descent point - VectorValues xu = optimizeGradientSearch(gbn); + VectorValues xu = gbn.optimizeGradientSearch(); // Compute Newton's method point - VectorValues xn = optimize(gbn); + VectorValues xn = gbn.optimize(); // The Newton's method point should be more "adventurous", i.e. larger, than the steepest descent point - EXPECT(xu.asVector().norm() < xn.asVector().norm()); + EXPECT(xu.vector().norm() < xn.vector().norm()); // Compute blend double Delta = 1.5; VectorValues xb = DoglegOptimizerImpl::ComputeBlend(Delta, xu, xn); - DOUBLES_EQUAL(Delta, xb.asVector().norm(), 1e-10); + DOUBLES_EQUAL(Delta, xb.vector().norm(), 1e-10); } /* ************************************************************************* */ @@ -364,68 +86,64 @@ TEST(DoglegOptimizer, ComputeDoglegPoint) { gbn += GaussianConditional::shared_ptr(new GaussianConditional( 0, Vector_(2, 1.0,2.0), Matrix_(2,2, 3.0,4.0,0.0,6.0), 3, Matrix_(2,2, 7.0,8.0,9.0,10.0), - 4, Matrix_(2,2, 11.0,12.0,13.0,14.0), ones(2))); + 4, Matrix_(2,2, 11.0,12.0,13.0,14.0))); gbn += GaussianConditional::shared_ptr(new GaussianConditional( 1, Vector_(2, 15.0,16.0), Matrix_(2,2, 17.0,18.0,0.0,20.0), 2, Matrix_(2,2, 21.0,22.0,23.0,24.0), - 4, Matrix_(2,2, 25.0,26.0,27.0,28.0), ones(2))); + 4, Matrix_(2,2, 25.0,26.0,27.0,28.0))); gbn += GaussianConditional::shared_ptr(new GaussianConditional( 2, Vector_(2, 29.0,30.0), Matrix_(2,2, 31.0,32.0,0.0,34.0), - 3, Matrix_(2,2, 35.0,36.0,37.0,38.0), ones(2))); + 3, Matrix_(2,2, 35.0,36.0,37.0,38.0))); gbn += GaussianConditional::shared_ptr(new GaussianConditional( 3, Vector_(2, 39.0,40.0), Matrix_(2,2, 41.0,42.0,0.0,44.0), - 4, Matrix_(2,2, 45.0,46.0,47.0,48.0), ones(2))); + 4, Matrix_(2,2, 45.0,46.0,47.0,48.0))); gbn += GaussianConditional::shared_ptr(new GaussianConditional( - 4, Vector_(2, 49.0,50.0), Matrix_(2,2, 51.0,52.0,0.0,54.0), ones(2))); + 4, Vector_(2, 49.0,50.0), Matrix_(2,2, 51.0,52.0,0.0,54.0))); // Compute dogleg point for different deltas double Delta1 = 0.5; // Less than steepest descent - VectorValues actual1 = DoglegOptimizerImpl::ComputeDoglegPoint(Delta1, optimizeGradientSearch(gbn), optimize(gbn)); - DOUBLES_EQUAL(Delta1, actual1.asVector().norm(), 1e-5); + VectorValues actual1 = DoglegOptimizerImpl::ComputeDoglegPoint(Delta1, gbn.optimizeGradientSearch(), gbn.optimize()); + DOUBLES_EQUAL(Delta1, actual1.vector().norm(), 1e-5); double Delta2 = 1.5; // Between steepest descent and Newton's method - VectorValues expected2 = DoglegOptimizerImpl::ComputeBlend(Delta2, optimizeGradientSearch(gbn), optimize(gbn)); - VectorValues actual2 = DoglegOptimizerImpl::ComputeDoglegPoint(Delta2, optimizeGradientSearch(gbn), optimize(gbn)); - DOUBLES_EQUAL(Delta2, actual2.asVector().norm(), 1e-5); + VectorValues expected2 = DoglegOptimizerImpl::ComputeBlend(Delta2, gbn.optimizeGradientSearch(), gbn.optimize()); + VectorValues actual2 = DoglegOptimizerImpl::ComputeDoglegPoint(Delta2, gbn.optimizeGradientSearch(), gbn.optimize()); + DOUBLES_EQUAL(Delta2, actual2.vector().norm(), 1e-5); EXPECT(assert_equal(expected2, actual2)); double Delta3 = 5.0; // Larger than Newton's method point - VectorValues expected3 = optimize(gbn); - VectorValues actual3 = DoglegOptimizerImpl::ComputeDoglegPoint(Delta3, optimizeGradientSearch(gbn), optimize(gbn)); + VectorValues expected3 = gbn.optimize(); + VectorValues actual3 = DoglegOptimizerImpl::ComputeDoglegPoint(Delta3, gbn.optimizeGradientSearch(), gbn.optimize()); EXPECT(assert_equal(expected3, actual3)); } /* ************************************************************************* */ TEST(DoglegOptimizer, Iterate) { // really non-linear factor graph - boost::shared_ptr fg(new example::Graph( - example::createReallyNonlinearFactorGraph())); + NonlinearFactorGraph fg = example::createReallyNonlinearFactorGraph(); // config far from minimum Point2 x0(3,0); - boost::shared_ptr config(new Values); - config->insert(X(1), x0); - - // ordering - boost::shared_ptr ord(new Ordering()); - ord->push_back(X(1)); + Values config; + config.insert(X(1), x0); double Delta = 1.0; for(size_t it=0; it<10; ++it) { - GaussianSequentialSolver solver(*fg->linearize(*config, *ord)); - GaussianBayesNet gbn = *solver.eliminate(); + GaussianBayesNet gbn = *fg.linearize(config)->eliminateSequential(); // Iterate assumes that linear error = nonlinear error at the linearization point, and this should be true - double nonlinearError = fg->error(*config); - double linearError = GaussianFactorGraph(gbn).error(VectorValues::Zero(*allocateVectorValues(gbn))); + double nonlinearError = fg.error(config); + double linearError = GaussianFactorGraph(gbn).error(config.zeroVectors()); DOUBLES_EQUAL(nonlinearError, linearError, 1e-5); // cout << "it " << it << ", Delta = " << Delta << ", error = " << fg->error(*config) << endl; - DoglegOptimizerImpl::IterationResult result = DoglegOptimizerImpl::Iterate(Delta, DoglegOptimizerImpl::SEARCH_EACH_ITERATION, gbn, *fg, *config, *ord, fg->error(*config)); + VectorValues dx_u = gbn.optimizeGradientSearch(); + VectorValues dx_n = gbn.optimize(); + DoglegOptimizerImpl::IterationResult result = DoglegOptimizerImpl::Iterate(Delta, DoglegOptimizerImpl::SEARCH_EACH_ITERATION, dx_u, dx_n, gbn, fg, config, fg.error(config)); Delta = result.Delta; - EXPECT(result.f_error < fg->error(*config)); // Check that error decreases - Values newConfig(config->retract(result.dx_d, *ord)); - (*config) = newConfig; - DOUBLES_EQUAL(fg->error(*config), result.f_error, 1e-5); // Check that error is correctly filled in + EXPECT(result.f_error < fg.error(config)); // Check that error decreases + Values newConfig(config.retract(result.dx_d)); + config = newConfig; + DOUBLES_EQUAL(fg.error(config), result.f_error, 1e-5); // Check that error is correctly filled in } } diff --git a/tests/testExtendedKalmanFilter.cpp b/tests/testExtendedKalmanFilter.cpp index 6bdfa7eee..ac3705cab 100644 --- a/tests/testExtendedKalmanFilter.cpp +++ b/tests/testExtendedKalmanFilter.cpp @@ -32,15 +32,15 @@ using symbol_shorthand::L; /* ************************************************************************* */ TEST( ExtendedKalmanFilter, linear ) { + // Create the TestKeys for our example + Symbol x0('x',0), x1('x',1), x2('x',2), x3('x',3); + // Create the Kalman Filter initialization point Point2 x_initial(0.0, 0.0); SharedDiagonal P_initial = noiseModel::Diagonal::Sigmas(Vector_(2, 0.1, 0.1)); // Create an ExtendedKalmanFilter object - ExtendedKalmanFilter ekf(x_initial, P_initial); - - // Create the TestKeys for our example - Symbol x0('x',0), x1('x',1), x2('x',2), x3('x',3); + ExtendedKalmanFilter ekf(x0, x_initial, P_initial); // Create the controls and measurement properties for our example double dt = 1.0; @@ -256,7 +256,7 @@ public: NonlinearMeasurementModel(){} NonlinearMeasurementModel(const Symbol& TestKey, Vector z) : - Base(noiseModel::Diagonal::Sigmas(Vector_(2, 1.0, 1.0)), TestKey), z_(z), R_(1,1) { + Base(noiseModel::Unit::Create(z.size()), TestKey), z_(z), R_(1,1) { // Initialize nonlinear measurement model parameters: // z(t) = H*x(t) + v @@ -408,7 +408,7 @@ TEST( ExtendedKalmanFilter, nonlinear ) { SharedDiagonal P_initial = noiseModel::Diagonal::Sigmas(Vector_(2, 0.1, 0.1)); // Create an ExtendedKalmanFilter object - ExtendedKalmanFilter ekf(x_initial, P_initial); + ExtendedKalmanFilter ekf(X(0), x_initial, P_initial); // Enter Predict-Update Loop Point2 x_predict, x_update; diff --git a/tests/testGaussianBayesNet.cpp b/tests/testGaussianBayesNet.cpp deleted file mode 100644 index 1cef17341..000000000 --- a/tests/testGaussianBayesNet.cpp +++ /dev/null @@ -1,189 +0,0 @@ -/* ---------------------------------------------------------------------------- - - * GTSAM Copyright 2010, Georgia Tech Research Corporation, - * Atlanta, Georgia 30332-0415 - * All Rights Reserved - * Authors: Frank Dellaert, et al. (see THANKS for the full author list) - - * See LICENSE for the license information - - * -------------------------------------------------------------------------- */ - -/** - * @file testGaussianBayesNet.cpp - * @brief Unit tests for GaussianBayesNet - * @author Frank Dellaert - */ - -// STL/C++ -#include -#include -#include -#include -#include - -#include // for operator += -using namespace boost::assign; - -#include -#include -#include -#include -#include - -using namespace std; -using namespace gtsam; -using namespace example; - -static const Index _x_=0, _y_=1, _z_=2; - -/* ************************************************************************* */ -TEST( GaussianBayesNet, constructor ) -{ - // small Bayes Net x <- y - // x y d - // 1 1 9 - // 1 5 - Matrix R11 = Matrix_(1,1,1.0), S12 = Matrix_(1,1,1.0); - Matrix R22 = Matrix_(1,1,1.0); - Vector d1(1), d2(1); - d1(0) = 9; d2(0) = 5; - Vector sigmas(1); - sigmas(0) = 1.; - - // define nodes and specify in reverse topological sort (i.e. parents last) - GaussianConditional x(_x_,d1,R11,_y_,S12, sigmas), y(_y_,d2,R22, sigmas); - - // check small example which uses constructor - GaussianBayesNet cbn = createSmallGaussianBayesNet(); - EXPECT( x.equals(*cbn[_x_]) ); - EXPECT( y.equals(*cbn[_y_]) ); -} - -/* ************************************************************************* */ -TEST( GaussianBayesNet, matrix ) -{ - // Create a test graph - GaussianBayesNet cbn = createSmallGaussianBayesNet(); - - Matrix R; Vector d; - boost::tie(R,d) = matrix(cbn); // find matrix and RHS - - Matrix R1 = Matrix_(2,2, - 1.0, 1.0, - 0.0, 1.0 - ); - Vector d1 = Vector_(2, 9.0, 5.0); - - EXPECT(assert_equal(R,R1)); - EXPECT(assert_equal(d,d1)); -} - -/* ************************************************************************* */ -TEST( GaussianBayesNet, optimize ) -{ - GaussianBayesNet cbn = createSmallGaussianBayesNet(); - VectorValues actual = optimize(cbn); - - VectorValues expected(vector(2,1)); - expected[_x_] = Vector_(1,4.); - expected[_y_] = Vector_(1,5.); - - EXPECT(assert_equal(expected,actual)); -} - -/* ************************************************************************* */ -TEST( GaussianBayesNet, optimize2 ) -{ - - // Create empty graph - GaussianFactorGraph fg; - SharedDiagonal noise = noiseModel::Unit::Create(1); - - fg.add(_y_, eye(1), 2*ones(1), noise); - - fg.add(_x_, eye(1),_y_, -eye(1), -ones(1), noise); - - fg.add(_y_, eye(1),_z_, -eye(1), -ones(1), noise); - - fg.add(_x_, -eye(1), _z_, eye(1), 2*ones(1), noise); - - VectorValues actual = *GaussianSequentialSolver(fg).optimize(); - - VectorValues expected(vector(3,1)); - expected[_x_] = Vector_(1,1.); - expected[_y_] = Vector_(1,2.); - expected[_z_] = Vector_(1,3.); - - EXPECT(assert_equal(expected,actual)); -} - -/* ************************************************************************* */ -TEST( GaussianBayesNet, optimize3 ) -{ - // y=R*x, x=inv(R)*y - // 9 = 1 1 4 - // 5 1 5 - // NOTE: we are supplying a new RHS here - GaussianBayesNet cbn = createSmallGaussianBayesNet(); - - VectorValues expected(vector(2,1)), x(vector(2,1)); - expected[_x_] = Vector_(1, 4.); - expected[_y_] = Vector_(1, 5.); - - // test functional version - VectorValues actual = optimize(cbn); - EXPECT(assert_equal(expected,actual)); - - // test imperative version - optimizeInPlace(cbn,x); - EXPECT(assert_equal(expected,x)); -} - -/* ************************************************************************* */ -TEST( GaussianBayesNet, backSubstituteTranspose ) -{ - // x=R'*y, y=inv(R')*x - // 2 = 1 2 - // 5 1 1 3 - GaussianBayesNet cbn = createSmallGaussianBayesNet(); - - VectorValues y(vector(2,1)), x(vector(2,1)); - x[_x_] = Vector_(1,2.); - x[_y_] = Vector_(1,5.); - y[_x_] = Vector_(1,2.); - y[_y_] = Vector_(1,3.); - - // test functional version - VectorValues actual = backSubstituteTranspose(cbn,x); - EXPECT(assert_equal(y,actual)); -} - -/* ************************************************************************* */ -// Tests computing Determinant -TEST( GaussianBayesNet, DeterminantTest ) -{ - GaussianBayesNet cbn; - cbn += boost::shared_ptr(new GaussianConditional( - 0, Vector_( 2, 3.0, 4.0 ), Matrix_(2, 2, 1.0, 3.0, 0.0, 4.0 ), - 1, Matrix_(2, 2, 2.0, 1.0, 2.0, 3.0), - ones(2))); - - cbn += boost::shared_ptr(new GaussianConditional( - 1, Vector_( 2, 5.0, 6.0 ), Matrix_(2, 2, 1.0, 1.0, 0.0, 3.0 ), - 2, Matrix_(2, 2, 1.0, 0.0, 5.0, 2.0), - ones(2))); - - cbn += boost::shared_ptr(new GaussianConditional( - 3, Vector_( 2, 7.0, 8.0 ), Matrix_(2, 2, 1.0, 1.0, 0.0, 5.0 ), - ones(2))); - - double expectedDeterminant = 60; - double actualDeterminant = determinant(cbn); - - EXPECT_DOUBLES_EQUAL( expectedDeterminant, actualDeterminant, 1e-9); -} - -/* ************************************************************************* */ -int main() { TestResult tr; return TestRegistry::runAllTests(tr);} -/* ************************************************************************* */ diff --git a/tests/testGaussianBayesTree.cpp b/tests/testGaussianBayesTree.cpp index 85ea792fe..653ca0809 100644 --- a/tests/testGaussianBayesTree.cpp +++ b/tests/testGaussianBayesTree.cpp @@ -16,10 +16,12 @@ */ #include -#include #include -#include -#include +#include +#include +#include +#include +#include #include #include @@ -55,42 +57,40 @@ C6 x1 : x2 TEST( GaussianBayesTree, linear_smoother_shortcuts ) { // Create smoother with 7 nodes - Ordering ordering; - GaussianFactorGraph smoother; - boost::tie(smoother, ordering) = createSmoother(7); + GaussianFactorGraph smoother = createSmoother(7); - GaussianBayesTree bayesTree = *GaussianMultifrontalSolver(smoother).eliminate(); + GaussianBayesTree bayesTree = *smoother.eliminateMultifrontal(); // Create the Bayes tree - LONGS_EQUAL(6, bayesTree.size()); + LONGS_EQUAL(6, (long)bayesTree.size()); // Check the conditional P(Root|Root) GaussianBayesNet empty; - GaussianBayesTree::sharedClique R = bayesTree.root(); - GaussianBayesNet actual1 = R->shortcut(R, EliminateCholesky); + GaussianBayesTree::sharedClique R = bayesTree.roots().front(); + GaussianBayesNet actual1 = R->shortcut(R); EXPECT(assert_equal(empty,actual1,tol)); // Check the conditional P(C2|Root) - GaussianBayesTree::sharedClique C2 = bayesTree[ordering[X(5)]]; - GaussianBayesNet actual2 = C2->shortcut(R, EliminateCholesky); + GaussianBayesTree::sharedClique C2 = bayesTree[X(5)]; + GaussianBayesNet actual2 = C2->shortcut(R); EXPECT(assert_equal(empty,actual2,tol)); // Check the conditional P(C3|Root) double sigma3 = 0.61808; Matrix A56 = Matrix_(2,2,-0.382022,0.,0.,-0.382022); GaussianBayesNet expected3; - push_front(expected3,ordering[X(5)], zero(2), eye(2)/sigma3, ordering[X(6)], A56/sigma3, ones(2)); - GaussianBayesTree::sharedClique C3 = bayesTree[ordering[X(4)]]; - GaussianBayesNet actual3 = C3->shortcut(R, EliminateCholesky); + expected3 += GaussianConditional(X(5), zero(2), eye(2)/sigma3, X(6), A56/sigma3); + GaussianBayesTree::sharedClique C3 = bayesTree[X(4)]; + GaussianBayesNet actual3 = C3->shortcut(R); EXPECT(assert_equal(expected3,actual3,tol)); // Check the conditional P(C4|Root) double sigma4 = 0.661968; Matrix A46 = Matrix_(2,2,-0.146067,0.,0.,-0.146067); GaussianBayesNet expected4; - push_front(expected4, ordering[X(4)], zero(2), eye(2)/sigma4, ordering[X(6)], A46/sigma4, ones(2)); - GaussianBayesTree::sharedClique C4 = bayesTree[ordering[X(3)]]; - GaussianBayesNet actual4 = C4->shortcut(R, EliminateCholesky); + expected4 += GaussianConditional(X(4), zero(2), eye(2)/sigma4, X(6), A46/sigma4); + GaussianBayesTree::sharedClique C4 = bayesTree[X(3)]; + GaussianBayesNet actual4 = C4->shortcut(R); EXPECT(assert_equal(expected4,actual4,tol)); } @@ -116,66 +116,50 @@ TEST( GaussianBayesTree, linear_smoother_shortcuts ) TEST( GaussianBayesTree, balanced_smoother_marginals ) { // Create smoother with 7 nodes - Ordering ordering; - ordering += X(1),X(3),X(5),X(7),X(2),X(6),X(4); - GaussianFactorGraph smoother = createSmoother(7, ordering).first; + GaussianFactorGraph smoother = createSmoother(7); // Create the Bayes tree - GaussianBayesTree bayesTree = *GaussianMultifrontalSolver(smoother).eliminate(); + Ordering ordering; + ordering += X(1),X(3),X(5),X(7),X(2),X(6),X(4); + GaussianBayesTree bayesTree = *smoother.eliminateMultifrontal(ordering); - VectorValues expectedSolution(VectorValues::Zero(7,2)); - VectorValues actualSolution = optimize(bayesTree); + VectorValues actualSolution = bayesTree.optimize(); + VectorValues expectedSolution = VectorValues::Zero(actualSolution); EXPECT(assert_equal(expectedSolution,actualSolution,tol)); - LONGS_EQUAL(4,bayesTree.size()); + LONGS_EQUAL(3, (long)bayesTree.size()); double tol=1e-5; // Check marginal on x1 - GaussianBayesNet expected1 = simpleGaussian(ordering[X(1)], zero(2), sigmax1); - GaussianBayesNet actual1 = *bayesTree.marginalBayesNet(ordering[X(1)], EliminateCholesky); + JacobianFactor expected1 = GaussianDensity::FromMeanAndStddev(X(1), zero(2), sigmax1); + JacobianFactor actual1 = *bayesTree.marginalFactor(X(1)); Matrix expectedCovarianceX1 = eye(2,2) * (sigmax1 * sigmax1); Matrix actualCovarianceX1; - GaussianFactor::shared_ptr m = bayesTree.marginalFactor(ordering[X(1)], EliminateCholesky); - actualCovarianceX1 = bayesTree.marginalFactor(ordering[X(1)], EliminateCholesky)->information().inverse(); + GaussianFactor::shared_ptr m = bayesTree.marginalFactor(X(1), EliminateCholesky); + actualCovarianceX1 = bayesTree.marginalFactor(X(1), EliminateCholesky)->information().inverse(); EXPECT(assert_equal(expectedCovarianceX1, actualCovarianceX1, tol)); EXPECT(assert_equal(expected1,actual1,tol)); // Check marginal on x2 double sigx2 = 0.68712938; // FIXME: this should be corrected analytically - GaussianBayesNet expected2 = simpleGaussian(ordering[X(2)], zero(2), sigx2); - GaussianBayesNet actual2 = *bayesTree.marginalBayesNet(ordering[X(2)], EliminateCholesky); - Matrix expectedCovarianceX2 = eye(2,2) * (sigx2 * sigx2); - Matrix actualCovarianceX2; - actualCovarianceX2 = bayesTree.marginalFactor(ordering[X(2)], EliminateCholesky)->information().inverse(); - EXPECT(assert_equal(expectedCovarianceX2, actualCovarianceX2, tol)); + JacobianFactor expected2 = GaussianDensity::FromMeanAndStddev(X(2), zero(2), sigx2); + JacobianFactor actual2 = *bayesTree.marginalFactor(X(2)); EXPECT(assert_equal(expected2,actual2,tol)); // Check marginal on x3 - GaussianBayesNet expected3 = simpleGaussian(ordering[X(3)], zero(2), sigmax3); - GaussianBayesNet actual3 = *bayesTree.marginalBayesNet(ordering[X(3)], EliminateCholesky); - Matrix expectedCovarianceX3 = eye(2,2) * (sigmax3 * sigmax3); - Matrix actualCovarianceX3; - actualCovarianceX3 = bayesTree.marginalFactor(ordering[X(3)], EliminateCholesky)->information().inverse(); - EXPECT(assert_equal(expectedCovarianceX3, actualCovarianceX3, tol)); + JacobianFactor expected3 = GaussianDensity::FromMeanAndStddev(X(3), zero(2), sigmax3); + JacobianFactor actual3 = *bayesTree.marginalFactor(X(3)); EXPECT(assert_equal(expected3,actual3,tol)); // Check marginal on x4 - GaussianBayesNet expected4 = simpleGaussian(ordering[X(4)], zero(2), sigmax4); - GaussianBayesNet actual4 = *bayesTree.marginalBayesNet(ordering[X(4)], EliminateCholesky); - Matrix expectedCovarianceX4 = eye(2,2) * (sigmax4 * sigmax4); - Matrix actualCovarianceX4; - actualCovarianceX4 = bayesTree.marginalFactor(ordering[X(4)], EliminateCholesky)->information().inverse(); - EXPECT(assert_equal(expectedCovarianceX4, actualCovarianceX4, tol)); + JacobianFactor expected4 = GaussianDensity::FromMeanAndStddev(X(4), zero(2), sigmax4); + JacobianFactor actual4 = *bayesTree.marginalFactor(X(4)); EXPECT(assert_equal(expected4,actual4,tol)); // Check marginal on x7 (should be equal to x1) - GaussianBayesNet expected7 = simpleGaussian(ordering[X(7)], zero(2), sigmax7); - GaussianBayesNet actual7 = *bayesTree.marginalBayesNet(ordering[X(7)], EliminateCholesky); - Matrix expectedCovarianceX7 = eye(2,2) * (sigmax7 * sigmax7); - Matrix actualCovarianceX7; - actualCovarianceX7 = bayesTree.marginalFactor(ordering[X(7)], EliminateCholesky)->information().inverse(); - EXPECT(assert_equal(expectedCovarianceX7, actualCovarianceX7, tol)); + JacobianFactor expected7 = GaussianDensity::FromMeanAndStddev(X(7), zero(2), sigmax7); + JacobianFactor actual7 = *bayesTree.marginalFactor(X(7)); EXPECT(assert_equal(expected7,actual7,tol)); } @@ -183,22 +167,22 @@ TEST( GaussianBayesTree, balanced_smoother_marginals ) TEST( GaussianBayesTree, balanced_smoother_shortcuts ) { // Create smoother with 7 nodes - Ordering ordering; - ordering += X(1),X(3),X(5),X(7),X(2),X(6),X(4); - GaussianFactorGraph smoother = createSmoother(7, ordering).first; + GaussianFactorGraph smoother = createSmoother(7); // Create the Bayes tree - GaussianBayesTree bayesTree = *GaussianMultifrontalSolver(smoother).eliminate(); + Ordering ordering; + ordering += X(1),X(3),X(5),X(7),X(2),X(6),X(4); + GaussianBayesTree bayesTree = *smoother.eliminateMultifrontal(ordering); // Check the conditional P(Root|Root) GaussianBayesNet empty; - GaussianBayesTree::sharedClique R = bayesTree.root(); - GaussianBayesNet actual1 = R->shortcut(R, EliminateCholesky); + GaussianBayesTree::sharedClique R = bayesTree.roots().front(); + GaussianBayesNet actual1 = R->shortcut(R); EXPECT(assert_equal(empty,actual1,tol)); // Check the conditional P(C2|Root) - GaussianBayesTree::sharedClique C2 = bayesTree[ordering[X(3)]]; - GaussianBayesNet actual2 = C2->shortcut(R, EliminateCholesky); + GaussianBayesTree::sharedClique C2 = bayesTree[X(3)]; + GaussianBayesNet actual2 = C2->shortcut(R); EXPECT(assert_equal(empty,actual2,tol)); // Check the conditional P(C3|Root), which should be equal to P(x2|x4) @@ -249,78 +233,56 @@ TEST( GaussianBayesTree, balanced_smoother_joint ) // Create smoother with 7 nodes Ordering ordering; ordering += X(1),X(3),X(5),X(7),X(2),X(6),X(4); - GaussianFactorGraph smoother = createSmoother(7, ordering).first; + GaussianFactorGraph smoother = createSmoother(7); // Create the Bayes tree, expected to look like: // x5 x6 x4 // x3 x2 : x4 // x1 : x2 // x7 : x6 - GaussianBayesTree bayesTree = *GaussianMultifrontalSolver(smoother).eliminate(); + GaussianBayesTree bayesTree = *smoother.eliminateMultifrontal(ordering); // Conditional density elements reused by both tests - const Vector sigma = ones(2); const Matrix I = eye(2), A = -0.00429185*I; // Check the joint density P(x1,x7) factored as P(x1|x7)P(x7) - GaussianBayesNet expected1; - // Why does the sign get flipped on the prior? - GaussianConditional::shared_ptr - parent1(new GaussianConditional(ordering[X(7)], zero(2), -1*I/sigmax7, ones(2))); - expected1.push_front(parent1); - push_front(expected1,ordering[X(1)], zero(2), I/sigmax7, ordering[X(7)], A/sigmax7, sigma); - GaussianBayesNet actual1 = *bayesTree.jointBayesNet(ordering[X(1)],ordering[X(7)], EliminateCholesky); - EXPECT(assert_equal(expected1,actual1,tol)); + GaussianBayesNet expected1 = list_of + // Why does the sign get flipped on the prior? + (GaussianConditional(X(1), zero(2), I/sigmax7, X(7), A/sigmax7)) + (GaussianConditional(X(7), zero(2), -1*I/sigmax7)); + GaussianBayesNet actual1 = *bayesTree.jointBayesNet(X(1),X(7)); + EXPECT(assert_equal(expected1, actual1, tol)); // // Check the joint density P(x7,x1) factored as P(x7|x1)P(x1) // GaussianBayesNet expected2; // GaussianConditional::shared_ptr - // parent2(new GaussianConditional(ordering[X(1)], zero(2), -1*I/sigmax1, ones(2))); + // parent2(new GaussianConditional(X(1), zero(2), -1*I/sigmax1, ones(2))); // expected2.push_front(parent2); - // push_front(expected2,ordering[X(7)], zero(2), I/sigmax1, ordering[X(1)], A/sigmax1, sigma); - // GaussianBayesNet actual2 = *bayesTree.jointBayesNet(ordering[X(7)],ordering[X(1)]); + // push_front(expected2,X(7), zero(2), I/sigmax1, X(1), A/sigmax1, sigma); + // GaussianBayesNet actual2 = *bayesTree.jointBayesNet(X(7),X(1)); // EXPECT(assert_equal(expected2,actual2,tol)); // Check the joint density P(x1,x4), i.e. with a root variable - GaussianBayesNet expected3; - GaussianConditional::shared_ptr - parent3(new GaussianConditional(ordering[X(4)], zero(2), I/sigmax4, ones(2))); - expected3.push_front(parent3); double sig14 = 0.784465; Matrix A14 = -0.0769231*I; - push_front(expected3,ordering[X(1)], zero(2), I/sig14, ordering[X(4)], A14/sig14, sigma); - GaussianBayesNet actual3 = *bayesTree.jointBayesNet(ordering[X(1)],ordering[X(4)], EliminateCholesky); + GaussianBayesNet expected3 = list_of + (GaussianConditional(X(1), zero(2), I/sig14, X(4), A14/sig14)) + (GaussianConditional(X(4), zero(2), I/sigmax4)); + GaussianBayesNet actual3 = *bayesTree.jointBayesNet(X(1),X(4)); EXPECT(assert_equal(expected3,actual3,tol)); // // Check the joint density P(x4,x1), i.e. with a root variable, factored the other way // GaussianBayesNet expected4; // GaussianConditional::shared_ptr - // parent4(new GaussianConditional(ordering[X(1)], zero(2), -1.0*I/sigmax1, ones(2))); + // parent4(new GaussianConditional(X(1), zero(2), -1.0*I/sigmax1, ones(2))); // expected4.push_front(parent4); // double sig41 = 0.668096; // Matrix A41 = -0.055794*I; - // push_front(expected4,ordering[X(4)], zero(2), I/sig41, ordering[X(1)], A41/sig41, sigma); - // GaussianBayesNet actual4 = *bayesTree.jointBayesNet(ordering[X(4)],ordering[X(1)]); + // push_front(expected4,X(4), zero(2), I/sig41, X(1), A41/sig41, sigma); + // GaussianBayesNet actual4 = *bayesTree.jointBayesNet(X(4),X(1)); // EXPECT(assert_equal(expected4,actual4,tol)); } -/* ************************************************************************* */ -TEST(GaussianBayesTree, simpleMarginal) -{ - GaussianFactorGraph gfg; - - Matrix A12 = Rot2::fromDegrees(45.0).matrix(); - - gfg.add(0, eye(2), zero(2), noiseModel::Isotropic::Sigma(2, 1.0)); - gfg.add(0, -eye(2), 1, eye(2), ones(2), noiseModel::Isotropic::Sigma(2, 1.0)); - gfg.add(1, -eye(2), 2, A12, ones(2), noiseModel::Isotropic::Sigma(2, 1.0)); - - Matrix expected(GaussianSequentialSolver(gfg).marginalCovariance(2)); - Matrix actual(GaussianMultifrontalSolver(gfg).marginalCovariance(2)); - - EXPECT(assert_equal(expected, actual)); -} - /* ************************************************************************* */ TEST(GaussianBayesTree, shortcut_overlapping_separator) { @@ -347,13 +309,13 @@ TEST(GaussianBayesTree, shortcut_overlapping_separator) // c(5|6) // c(1,2|5) // c(3,4|5) - GaussianBayesTree bt = *GaussianMultifrontalSolver(fg).eliminate(); + GaussianBayesTree bt = *fg.eliminateMultifrontal(Ordering(fg.keys())); // eliminate in increasing key order, fg.keys() is sorted. GaussianFactorGraph joint = *bt.joint(1,2, EliminateQR); Matrix expectedJointJ = (Matrix(2,3) << - 0, 11, 12, - -5, 0, -6 + 5, 0, 6, + 0, -11, -12 ).finished(); Matrix actualJointJ = joint.augmentedJacobian(); diff --git a/tests/testGaussianFactor.cpp b/tests/testGaussianFactor.cpp deleted file mode 100644 index 62cad80ca..000000000 --- a/tests/testGaussianFactor.cpp +++ /dev/null @@ -1,228 +0,0 @@ -/* ---------------------------------------------------------------------------- - - * GTSAM Copyright 2010, Georgia Tech Research Corporation, - * Atlanta, Georgia 30332-0415 - * All Rights Reserved - * Authors: Frank Dellaert, et al. (see THANKS for the full author list) - - * See LICENSE for the license information - - * -------------------------------------------------------------------------- */ - -/** - * @file testGaussianFactor.cpp - * @brief Unit tests for Linear Factor - * @author Christian Potthast - * @author Frank Dellaert - **/ - -#include -#include -#include -#include -#include -#include - -#include - -#include -#include // for operator += -#include -#include // for insert -using namespace boost::assign; - -#include - -using namespace std; -using namespace gtsam; - -// Convenience for named keys -using symbol_shorthand::X; -using symbol_shorthand::L; - -static SharedDiagonal - sigma0_1 = noiseModel::Isotropic::Sigma(2,0.1), sigma_02 = noiseModel::Isotropic::Sigma(2,0.2), - constraintModel = noiseModel::Constrained::All(2); - -//const Key kx1 = X(1), kx2 = X(2), kl1 = L(1); // FIXME: throws exception - -/* ************************************************************************* */ -TEST( GaussianFactor, linearFactor ) -{ - const Key kx1 = X(1), kx2 = X(2), kl1 = L(1); - Ordering ordering; ordering += kx1,kx2,kl1; - - Matrix I = eye(2); - Vector b = Vector_(2, 2.0, -1.0); - JacobianFactor expected(ordering[kx1], -10*I,ordering[kx2], 10*I, b, noiseModel::Unit::Create(2)); - - // create a small linear factor graph - GaussianFactorGraph fg = example::createGaussianFactorGraph(ordering); - - // get the factor kf2 from the factor graph - JacobianFactor::shared_ptr lf = - boost::dynamic_pointer_cast(fg[1]); - - // check if the two factors are the same - EXPECT(assert_equal(expected,*lf)); -} - -/* ************************************************************************* */ -TEST( GaussianFactor, getDim ) -{ - const Key kx1 = X(1), kx2 = X(2), kl1 = L(1); - // get a factor - Ordering ordering; ordering += kx1,kx2,kl1; - GaussianFactorGraph fg = example::createGaussianFactorGraph(ordering); - GaussianFactor::shared_ptr factor = fg[0]; - - // get the size of a variable - size_t actual = factor->getDim(factor->find(ordering[kx1])); - - // verify - size_t expected = 2; - EXPECT_LONGS_EQUAL(expected, actual); -} - -/* ************************************************************************* */ -TEST( GaussianFactor, error ) -{ - const Key kx1 = X(1), kx2 = X(2), kl1 = L(1); - // create a small linear factor graph - Ordering ordering; ordering += kx1,kx2,kl1; - GaussianFactorGraph fg = example::createGaussianFactorGraph(ordering); - - // get the first factor from the factor graph - GaussianFactor::shared_ptr lf = fg[0]; - - // check the error of the first factor with noisy config - VectorValues cfg = example::createZeroDelta(ordering); - - // calculate the error from the factor kf1 - // note the error is the same as in testNonlinearFactor - double actual = lf->error(cfg); - DOUBLES_EQUAL( 1.0, actual, 0.00000001 ); -} - -/* ************************************************************************* */ -TEST( GaussianFactor, matrix ) -{ - const Key kx1 = X(1), kx2 = X(2), kl1 = L(1); - // create a small linear factor graph - Ordering ordering; ordering += kx1,kx2,kl1; - GaussianFactorGraph fg = example::createGaussianFactorGraph(ordering); - - // get the factor kf2 from the factor graph - //GaussianFactor::shared_ptr lf = fg[1]; // NOTE: using the older version - Vector b2 = Vector_(2, 0.2, -0.1); - Matrix I = eye(2); - // render with a given ordering - Ordering ord; - ord += kx1,kx2; - JacobianFactor::shared_ptr lf(new JacobianFactor(ord[kx1], -I, ord[kx2], I, b2, sigma0_1)); - - // Test whitened version - Matrix A_act1; Vector b_act1; - boost::tie(A_act1,b_act1) = lf->matrix(true); - - Matrix A1 = Matrix_(2,4, - -10.0, 0.0, 10.0, 0.0, - 000.0,-10.0, 0.0, 10.0 ); - Vector b1 = Vector_(2, 2.0, -1.0); - - EQUALITY(A_act1,A1); - EQUALITY(b_act1,b1); - - // Test unwhitened version - Matrix A_act2; Vector b_act2; - boost::tie(A_act2,b_act2) = lf->matrix(false); - - - Matrix A2 = Matrix_(2,4, - -1.0, 0.0, 1.0, 0.0, - 000.0,-1.0, 0.0, 1.0 ); - //Vector b2 = Vector_(2, 2.0, -1.0); - - EQUALITY(A_act2,A2); - EQUALITY(b_act2,b2); - - // Ensure that whitening is consistent - boost::shared_ptr model = lf->get_model(); - model->WhitenSystem(A_act2, b_act2); - EQUALITY(A_act1, A_act2); - EQUALITY(b_act1, b_act2); -} - -/* ************************************************************************* */ -TEST( GaussianFactor, matrix_aug ) -{ - const Key kx1 = X(1), kx2 = X(2), kl1 = L(1); - // create a small linear factor graph - Ordering ordering; ordering += kx1,kx2,kl1; - GaussianFactorGraph fg = example::createGaussianFactorGraph(ordering); - - // get the factor kf2 from the factor graph - //GaussianFactor::shared_ptr lf = fg[1]; - Vector b2 = Vector_(2, 0.2, -0.1); - Matrix I = eye(2); - // render with a given ordering - Ordering ord; - ord += kx1,kx2; - JacobianFactor::shared_ptr lf(new JacobianFactor(ord[kx1], -I, ord[kx2], I, b2, sigma0_1)); - - - // Test unwhitened version - Matrix Ab_act1; - Ab_act1 = lf->matrix_augmented(false); - - Matrix Ab1 = Matrix_(2,5, - -1.0, 0.0, 1.0, 0.0, 0.2, - 00.0,- 1.0, 0.0, 1.0, -0.1 ); - - EQUALITY(Ab_act1,Ab1); - - // Test whitened version - Matrix Ab_act2; - Ab_act2 = lf->matrix_augmented(true); - - Matrix Ab2 = Matrix_(2,5, - -10.0, 0.0, 10.0, 0.0, 2.0, - 00.0, -10.0, 0.0, 10.0, -1.0 ); - - EQUALITY(Ab_act2,Ab2); - - // Ensure that whitening is consistent - boost::shared_ptr model = lf->get_model(); - model->WhitenInPlace(Ab_act1); - EQUALITY(Ab_act1, Ab_act2); -} - -/* ************************************************************************* */ -// small aux. function to print out lists of anything -template -void print(const list& i) { - copy(i.begin(), i.end(), ostream_iterator (cout, ",")); - cout << endl; -} - -/* ************************************************************************* */ -TEST( GaussianFactor, size ) -{ - // create a linear factor graph - const Key kx1 = X(1), kx2 = X(2), kl1 = L(1); - Ordering ordering; ordering += kx1,kx2,kl1; - GaussianFactorGraph fg = example::createGaussianFactorGraph(ordering); - - // get some factors from the graph - boost::shared_ptr factor1 = fg[0]; - boost::shared_ptr factor2 = fg[1]; - boost::shared_ptr factor3 = fg[2]; - - EXPECT_LONGS_EQUAL(1, factor1->size()); - EXPECT_LONGS_EQUAL(2, factor2->size()); - EXPECT_LONGS_EQUAL(2, factor3->size()); -} - -/* ************************************************************************* */ -int main() { TestResult tr; return TestRegistry::runAllTests(tr);} -/* ************************************************************************* */ diff --git a/tests/testGaussianFactorGraphB.cpp b/tests/testGaussianFactorGraphB.cpp index b8bea8c6e..ba889d4e4 100644 --- a/tests/testGaussianFactorGraphB.cpp +++ b/tests/testGaussianFactorGraphB.cpp @@ -18,9 +18,8 @@ #include #include #include -#include +#include #include -#include #include #include #include @@ -33,6 +32,8 @@ #include // for operator += #include // for operator += using namespace boost::assign; +#include +namespace br { using namespace boost::range; using namespace boost::adaptors; } #include #include @@ -49,17 +50,15 @@ using symbol_shorthand::L; /* ************************************************************************* */ TEST( GaussianFactorGraph, equals ) { - Ordering ordering; ordering += X(1),X(2),L(1); - GaussianFactorGraph fg = createGaussianFactorGraph(ordering); - GaussianFactorGraph fg2 = createGaussianFactorGraph(ordering); + GaussianFactorGraph fg = createGaussianFactorGraph(); + GaussianFactorGraph fg2 = createGaussianFactorGraph(); EXPECT(fg.equals(fg2)); } /* ************************************************************************* */ TEST( GaussianFactorGraph, error ) { - Ordering ordering; ordering += X(1),X(2),L(1); - GaussianFactorGraph fg = createGaussianFactorGraph(ordering); - VectorValues cfg = createZeroDelta(ordering); + GaussianFactorGraph fg = createGaussianFactorGraph(); + VectorValues cfg = createZeroDelta(); // note the error is the same as in testNonlinearFactorGraph as a // zero delta config in the linear graph is equivalent to noisy in @@ -71,21 +70,23 @@ TEST( GaussianFactorGraph, error ) { /* ************************************************************************* */ TEST( GaussianFactorGraph, eliminateOne_x1 ) { - Ordering ordering; ordering += X(1),L(1),X(2); - GaussianFactorGraph fg = createGaussianFactorGraph(ordering); + GaussianFactorGraph fg = createGaussianFactorGraph(); GaussianConditional::shared_ptr conditional; - GaussianFactorGraph remaining; - boost::tie(conditional,remaining) = fg.eliminateOne(0, EliminateQR); + pair result = + fg.eliminatePartialSequential(Ordering(list_of(X(1)))); + conditional = result.first->front(); // create expected Conditional Gaussian Matrix I = 15*eye(2), R11 = I, S12 = -0.111111*I, S13 = -0.444444*I; - Vector d = Vector_(2, -0.133333, -0.0222222), sigma = ones(2); - GaussianConditional expected(ordering[X(1)],15*d,R11,ordering[L(1)],S12,ordering[X(2)],S13,sigma); + Vector d = Vector_(2, -0.133333, -0.0222222); + GaussianConditional expected(X(1),15*d,R11,L(1),S12,X(2),S13); EXPECT(assert_equal(expected,*conditional,tol)); } +#if 0 + /* ************************************************************************* */ TEST( GaussianFactorGraph, eliminateOne_x2 ) { @@ -391,9 +392,9 @@ TEST( GaussianFactorGraph, elimination ) Matrix Ap = eye(1), An = eye(1) * -1; Vector b = Vector_(1, 0.0); SharedDiagonal sigma = noiseModel::Isotropic::Sigma(1,2.0); - fg.add(ord[X(1)], An, ord[X(2)], Ap, b, sigma); - fg.add(ord[X(1)], Ap, b, sigma); - fg.add(ord[X(2)], Ap, b, sigma); + fg += ord[X(1)], An, ord[X(2)], Ap, b, sigma; + fg += ord[X(1)], Ap, b, sigma; + fg += ord[X(2)], Ap, b, sigma; // Eliminate GaussianBayesNet bayesNet = *GaussianSequentialSolver(fg).eliminate(); @@ -513,6 +514,8 @@ TEST(GaussianFactorGraph, createSmoother2) CHECK(assert_equal(*p_x1.back(),*p_x3.front())); // should be the same because of symmetry } +#endif + /* ************************************************************************* */ TEST(GaussianFactorGraph, hasConstraints) { @@ -522,8 +525,7 @@ TEST(GaussianFactorGraph, hasConstraints) FactorGraph fgc2 = createSimpleConstraintGraph() ; EXPECT(hasConstraints(fgc2)); - Ordering ordering; ordering += X(1), X(2), L(1); - GaussianFactorGraph fg = createGaussianFactorGraph(ordering); + GaussianFactorGraph fg = createGaussianFactorGraph(); EXPECT(!hasConstraints(fg)); } @@ -531,7 +533,6 @@ TEST(GaussianFactorGraph, hasConstraints) #include #include #include -#include /* ************************************************************************* */ TEST( GaussianFactorGraph, conditional_sigma_failure) { @@ -548,8 +549,8 @@ TEST( GaussianFactorGraph, conditional_sigma_failure) { gtsam::SharedNoiseModel elevationModel = noiseModel::Isotropic::Sigma(1, 3.0); double fov = 60; // degrees - double imgW = 640; // pixels - double imgH = 480; // pixels + int imgW = 640; // pixels + int imgH = 480; // pixels gtsam::Cal3_S2::shared_ptr K(new gtsam::Cal3_S2(fov, imgW, imgH)); typedef GenericProjectionFactor ProjectionFactor; @@ -567,30 +568,28 @@ TEST( GaussianFactorGraph, conditional_sigma_failure) { initValues.insert(l41, Point3(1.61051523, 6.7373052, -0.231582751) ); NonlinearFactorGraph factors; - factors.add(PriorFactor(xC1, + factors += PriorFactor(xC1, Pose3(Rot3( -1., 0.0, 1.2246468e-16, 0.0, 1., 0.0, -1.2246468e-16, 0.0, -1), - Point3(0.511832102, 8.42819594, 5.76841725)), priorModel)); - factors.add(ProjectionFactor(Point2(333.648615, 98.61535), measModel, xC1, l32, K)); - factors.add(ProjectionFactor(Point2(218.508, 83.8022039), measModel, xC1, l41, K)); - factors.add(RangeFactor(xC1, l32, relElevation, elevationModel)); - factors.add(RangeFactor(xC1, l41, relElevation, elevationModel)); - - Ordering orderingC; orderingC += xC1, l32, l41; + Point3(0.511832102, 8.42819594, 5.76841725)), priorModel); + factors += ProjectionFactor(Point2(333.648615, 98.61535), measModel, xC1, l32, K); + factors += ProjectionFactor(Point2(218.508, 83.8022039), measModel, xC1, l41, K); + factors += RangeFactor(xC1, l32, relElevation, elevationModel); + factors += RangeFactor(xC1, l41, relElevation, elevationModel); // Check that sigmas are correct (i.e., unit) - GaussianFactorGraph lfg = *factors.linearize(initValues, orderingC); + GaussianFactorGraph lfg = *factors.linearize(initValues); - GaussianMultifrontalSolver solver(lfg, false); - GaussianBayesTree actBT = *solver.eliminate(); + GaussianBayesTree actBT = *lfg.eliminateMultifrontal(); // Check that all sigmas in an unconstrained bayes tree are set to one - BOOST_FOREACH(const GaussianBayesTree::sharedClique& clique, actBT.nodes()) { + BOOST_FOREACH(const GaussianBayesTree::sharedClique& clique, actBT.nodes() | br::map_values) { GaussianConditional::shared_ptr conditional = clique->conditional(); - size_t dim = conditional->dim(); - EXPECT(assert_equal(gtsam::ones(dim), conditional->get_sigmas(), tol)); + size_t dim = conditional->rows(); + //EXPECT(assert_equal(gtsam::ones(dim), conditional->get_model()->sigmas(), tol)); + EXPECT(!conditional->get_model()); } } diff --git a/tests/testGaussianISAM.cpp b/tests/testGaussianISAM.cpp index 60d5db672..81c68c106 100644 --- a/tests/testGaussianISAM.cpp +++ b/tests/testGaussianISAM.cpp @@ -15,21 +15,18 @@ * @author Michael Kaess */ -#include -#include -#include -#include -#include -#include -#include -#include -#include - #include +#include +#include +#include +#include + #include #include // for operator += using namespace boost::assign; +#include +namespace br { using namespace boost::adaptors; using namespace boost::range; } using namespace std; using namespace gtsam; @@ -38,11 +35,6 @@ using namespace example; using symbol_shorthand::X; using symbol_shorthand::L; -/* ************************************************************************* */ -// Some numbers that should be consistent among all smoother tests - -static const double tol = 1e-4; - /* ************************************************************************* */ TEST( ISAM, iSAM_smoother ) { @@ -50,7 +42,7 @@ TEST( ISAM, iSAM_smoother ) for (int t = 1; t <= 7; t++) ordering += X(t); // Create smoother with 7 nodes - GaussianFactorGraph smoother = createSmoother(7, ordering).first; + GaussianFactorGraph smoother = createSmoother(7); // run iSAM for every factor GaussianISAM actual; @@ -61,22 +53,21 @@ TEST( ISAM, iSAM_smoother ) } // Create expected Bayes Tree by solving smoother with "natural" ordering - BayesTree::shared_ptr bayesTree = GaussianMultifrontalSolver(smoother).eliminate(); - GaussianISAM expected(*bayesTree); + GaussianBayesTree expected = *smoother.eliminateMultifrontal(ordering); // Verify sigmas in the bayes tree - BOOST_FOREACH(const GaussianBayesTree::sharedClique& clique, bayesTree->nodes()) { + BOOST_FOREACH(const GaussianBayesTree::sharedClique& clique, expected.nodes() | br::map_values) { GaussianConditional::shared_ptr conditional = clique->conditional(); - size_t dim = conditional->dim(); - EXPECT(assert_equal(gtsam::ones(dim), conditional->get_sigmas(), tol)); + EXPECT(!conditional->get_model()); } // Check whether BayesTree is correct - EXPECT(assert_equal(expected, actual)); + EXPECT(assert_equal(GaussianFactorGraph(expected).augmentedHessian(), GaussianFactorGraph(actual).augmentedHessian())); // obtain solution - VectorValues e(VectorValues::Zero(7,2)); // expected solution - VectorValues optimized = optimize(actual); // actual solution + VectorValues e; // expected solution + for (int t = 1; t <= 7; t++) e.insert(X(t), Vector::Zero(2)); + VectorValues optimized = actual.optimize(); // actual solution EXPECT(assert_equal(e, optimized)); } diff --git a/tests/testGaussianISAM2.cpp b/tests/testGaussianISAM2.cpp index aa169c3a0..0e68589e7 100644 --- a/tests/testGaussianISAM2.cpp +++ b/tests/testGaussianISAM2.cpp @@ -4,17 +4,18 @@ * @author Michael Kaess */ +#include + #include #include #include +#include #include #include -#include +#include #include -#include #include #include -#include #include #include #include @@ -24,12 +25,11 @@ #include #include -#include - #include -#include // for operator += -#include +#include using namespace boost::assign; +#include +namespace br { using namespace boost::adaptors; using namespace boost::range; } using namespace std; using namespace gtsam; @@ -48,7 +48,8 @@ SharedDiagonal brNoise = noiseModel::Diagonal::Sigmas(Vector_(2, M_PI/100.0, 0.1 ISAM2 createSlamlikeISAM2( boost::optional init_values = boost::none, boost::optional full_graph = boost::none, - const ISAM2Params& params = ISAM2Params(ISAM2GaussNewtonParams(0.001), 0.0, 0, false, true)) { + const ISAM2Params& params = ISAM2Params(ISAM2GaussNewtonParams(0.001), 0.0, 0, false, true), + size_t maxPoses = 10) { // These variables will be reused and accumulate factors and values ISAM2 isam(params); @@ -61,7 +62,7 @@ ISAM2 createSlamlikeISAM2( // Add a prior at time 0 and update isam { NonlinearFactorGraph newfactors; - newfactors.add(PriorFactor(0, Pose2(0.0, 0.0, 0.0), odoNoise)); + newfactors += PriorFactor(0, Pose2(0.0, 0.0, 0.0), odoNoise); fullgraph.push_back(newfactors); Values init; @@ -71,10 +72,13 @@ ISAM2 createSlamlikeISAM2( isam.update(newfactors, init); } + if(i > maxPoses) + goto done; + // Add odometry from time 0 to time 5 for( ; i<5; ++i) { NonlinearFactorGraph newfactors; - newfactors.add(BetweenFactor(i, i+1, Pose2(1.0, 0.0, 0.0), odoNoise)); + newfactors += BetweenFactor(i, i+1, Pose2(1.0, 0.0, 0.0), odoNoise); fullgraph.push_back(newfactors); Values init; @@ -82,14 +86,20 @@ ISAM2 createSlamlikeISAM2( fullinit.insert((i+1), Pose2(double(i+1)+0.1, -0.1, 0.01)); isam.update(newfactors, init); + + if(i > maxPoses) + goto done; } + if(i > maxPoses) + goto done; + // Add odometry from time 5 to 6 and landmark measurement at time 5 { NonlinearFactorGraph newfactors; - newfactors.add(BetweenFactor(i, i+1, Pose2(1.0, 0.0, 0.0), odoNoise)); - newfactors.add(BearingRangeFactor(i, 100, Rot2::fromAngle(M_PI/4.0), 5.0, brNoise)); - newfactors.add(BearingRangeFactor(i, 101, Rot2::fromAngle(-M_PI/4.0), 5.0, brNoise)); + newfactors += BetweenFactor(i, i+1, Pose2(1.0, 0.0, 0.0), odoNoise); + newfactors += BearingRangeFactor(i, 100, Rot2::fromAngle(M_PI/4.0), 5.0, brNoise); + newfactors += BearingRangeFactor(i, 101, Rot2::fromAngle(-M_PI/4.0), 5.0, brNoise); fullgraph.push_back(newfactors); Values init; @@ -104,10 +114,13 @@ ISAM2 createSlamlikeISAM2( ++ i; } + if(i > maxPoses) + goto done; + // Add odometry from time 6 to time 10 for( ; i<10; ++i) { NonlinearFactorGraph newfactors; - newfactors.add(BetweenFactor(i, i+1, Pose2(1.0, 0.0, 0.0), odoNoise)); + newfactors += BetweenFactor(i, i+1, Pose2(1.0, 0.0, 0.0), odoNoise); fullgraph.push_back(newfactors); Values init; @@ -115,14 +128,20 @@ ISAM2 createSlamlikeISAM2( fullinit.insert((i+1), Pose2(double(i+1)+0.1, -0.1, 0.01)); isam.update(newfactors, init); + + if(i > maxPoses) + goto done; } + if(i > maxPoses) + goto done; + // Add odometry from time 10 to 11 and landmark measurement at time 10 { NonlinearFactorGraph newfactors; - newfactors.add(BetweenFactor(i, i+1, Pose2(1.0, 0.0, 0.0), odoNoise)); - newfactors.add(BearingRangeFactor(i, 100, Rot2::fromAngle(M_PI/4.0 + M_PI/16.0), 4.5, brNoise)); - newfactors.add(BearingRangeFactor(i, 101, Rot2::fromAngle(-M_PI/4.0 + M_PI/16.0), 4.5, brNoise)); + newfactors += BetweenFactor(i, i+1, Pose2(1.0, 0.0, 0.0), odoNoise); + newfactors += BearingRangeFactor(i, 100, Rot2::fromAngle(M_PI/4.0 + M_PI/16.0), 4.5, brNoise); + newfactors += BearingRangeFactor(i, 101, Rot2::fromAngle(-M_PI/4.0 + M_PI/16.0), 4.5, brNoise); fullgraph.push_back(newfactors); Values init; @@ -133,6 +152,8 @@ ISAM2 createSlamlikeISAM2( ++ i; } +done: + if (full_graph) *full_graph = fullgraph; @@ -142,185 +163,6 @@ ISAM2 createSlamlikeISAM2( return isam; } -/* ************************************************************************* */ -TEST_UNSAFE(ISAM2, ImplAddVariables) { - - // Create initial state - Values theta; - theta.insert(0, Pose2(.1, .2, .3)); - theta.insert(100, Point2(.4, .5)); - Values newTheta; - newTheta.insert(1, Pose2(.6, .7, .8)); - - VectorValues delta; - delta.insert(0, Vector_(3, .1, .2, .3)); - delta.insert(1, Vector_(2, .4, .5)); - - VectorValues deltaNewton; - deltaNewton.insert(0, Vector_(3, .1, .2, .3)); - deltaNewton.insert(1, Vector_(2, .4, .5)); - - VectorValues deltaRg; - deltaRg.insert(0, Vector_(3, .1, .2, .3)); - deltaRg.insert(1, Vector_(2, .4, .5)); - - vector replacedKeys(2, false); - - Ordering ordering; ordering += 100, 0; - - // Verify initial state - LONGS_EQUAL(0, ordering[100]); - LONGS_EQUAL(1, ordering[0]); - EXPECT(assert_equal(delta[0], delta[ordering[100]])); - EXPECT(assert_equal(delta[1], delta[ordering[0]])); - - // Create expected state - Values thetaExpected; - thetaExpected.insert(0, Pose2(.1, .2, .3)); - thetaExpected.insert(100, Point2(.4, .5)); - thetaExpected.insert(1, Pose2(.6, .7, .8)); - - VectorValues deltaExpected; - deltaExpected.insert(0, Vector_(3, .1, .2, .3)); - deltaExpected.insert(1, Vector_(2, .4, .5)); - deltaExpected.insert(2, Vector_(3, 0.0, 0.0, 0.0)); - - VectorValues deltaNewtonExpected; - deltaNewtonExpected.insert(0, Vector_(3, .1, .2, .3)); - deltaNewtonExpected.insert(1, Vector_(2, .4, .5)); - deltaNewtonExpected.insert(2, Vector_(3, 0.0, 0.0, 0.0)); - - VectorValues deltaRgExpected; - deltaRgExpected.insert(0, Vector_(3, .1, .2, .3)); - deltaRgExpected.insert(1, Vector_(2, .4, .5)); - deltaRgExpected.insert(2, Vector_(3, 0.0, 0.0, 0.0)); - - vector replacedKeysExpected(3, false); - - Ordering orderingExpected; orderingExpected += 100, 0, 1; - - // Expand initial state - ISAM2::Impl::AddVariables(newTheta, theta, delta, deltaNewton, deltaRg, replacedKeys, ordering); - - EXPECT(assert_equal(thetaExpected, theta)); - EXPECT(assert_equal(deltaExpected, delta)); - EXPECT(assert_equal(deltaNewtonExpected, deltaNewton)); - EXPECT(assert_equal(deltaRgExpected, deltaRg)); - EXPECT(assert_container_equality(replacedKeysExpected, replacedKeys)); - EXPECT(assert_equal(orderingExpected, ordering)); -} - -/* ************************************************************************* */ -TEST_UNSAFE(ISAM2, ImplRemoveVariables) { - - // Create initial state - Values theta; - theta.insert(0, Pose2(.1, .2, .3)); - theta.insert(1, Pose2(.6, .7, .8)); - theta.insert(100, Point2(.4, .5)); - - SymbolicFactorGraph sfg; - sfg.push_back(boost::make_shared(Index(0), Index(2))); - sfg.push_back(boost::make_shared(Index(0), Index(1))); - VariableIndex variableIndex(sfg); - - VectorValues delta; - delta.insert(0, Vector_(3, .1, .2, .3)); - delta.insert(1, Vector_(3, .4, .5, .6)); - delta.insert(2, Vector_(2, .7, .8)); - - VectorValues deltaNewton; - deltaNewton.insert(0, Vector_(3, .1, .2, .3)); - deltaNewton.insert(1, Vector_(3, .4, .5, .6)); - deltaNewton.insert(2, Vector_(2, .7, .8)); - - VectorValues deltaRg; - deltaRg.insert(0, Vector_(3, .1, .2, .3)); - deltaRg.insert(1, Vector_(3, .4, .5, .6)); - deltaRg.insert(2, Vector_(2, .7, .8)); - - vector replacedKeys(3, false); - replacedKeys[0] = true; - replacedKeys[1] = false; - replacedKeys[2] = true; - - Ordering ordering; ordering += 100, 1, 0; - - ISAM2::Nodes nodes(3); - - // Verify initial state - LONGS_EQUAL(0, ordering[100]); - LONGS_EQUAL(1, ordering[1]); - LONGS_EQUAL(2, ordering[0]); - - // Create expected state - Values thetaExpected; - thetaExpected.insert(0, Pose2(.1, .2, .3)); - thetaExpected.insert(100, Point2(.4, .5)); - - SymbolicFactorGraph sfgRemoved; - sfgRemoved.push_back(boost::make_shared(Index(0), Index(1))); - sfgRemoved.push_back(SymbolicFactorGraph::sharedFactor()); // Add empty factor to keep factor indices consistent - VariableIndex variableIndexExpected(sfgRemoved); - - VectorValues deltaExpected; - deltaExpected.insert(0, Vector_(3, .1, .2, .3)); - deltaExpected.insert(1, Vector_(2, .7, .8)); - - VectorValues deltaNewtonExpected; - deltaNewtonExpected.insert(0, Vector_(3, .1, .2, .3)); - deltaNewtonExpected.insert(1, Vector_(2, .7, .8)); - - VectorValues deltaRgExpected; - deltaRgExpected.insert(0, Vector_(3, .1, .2, .3)); - deltaRgExpected.insert(1, Vector_(2, .7, .8)); - - vector replacedKeysExpected(2, true); - - Ordering orderingExpected; orderingExpected += 100, 0; - - ISAM2::Nodes nodesExpected(2); - - // Reduce initial state - FastSet unusedKeys; - unusedKeys.insert(1); - vector removedFactorsI; removedFactorsI.push_back(1); - SymbolicFactorGraph removedFactors; removedFactors.push_back(sfg[1]); - variableIndex.remove(removedFactorsI, removedFactors); - GaussianFactorGraph linearFactors; - FastSet fixedVariables; - ISAM2::Impl::RemoveVariables(unusedKeys, ISAM2::sharedClique(), theta, variableIndex, delta, deltaNewton, deltaRg, - replacedKeys, ordering, nodes, linearFactors, fixedVariables); - - EXPECT(assert_equal(thetaExpected, theta)); - EXPECT(assert_equal(variableIndexExpected, variableIndex)); - EXPECT(assert_equal(deltaExpected, delta)); - EXPECT(assert_equal(deltaNewtonExpected, deltaNewton)); - EXPECT(assert_equal(deltaRgExpected, deltaRg)); - EXPECT(assert_container_equality(replacedKeysExpected, replacedKeys)); - EXPECT(assert_equal(orderingExpected, ordering)); -} - -/* ************************************************************************* */ -//TEST(ISAM2, IndicesFromFactors) { -// -// using namespace gtsam::planarSLAM; -// typedef GaussianISAM2::Impl Impl; -// -// Ordering ordering; ordering += (0), (0), (1); -// NonlinearFactorGraph graph; -// graph.add(PriorFactor((0), Pose2(), noiseModel::Unit::Create(Pose2::dimension)); -// graph.addRange((0), (0), 1.0, noiseModel::Unit::Create(1)); -// -// FastSet expected; -// expected.insert(0); -// expected.insert(1); -// -// FastSet actual = Impl::IndicesFromFactors(ordering, graph); -// -// EXPECT(assert_equal(expected, actual)); -//} - /* ************************************************************************* */ //TEST(ISAM2, CheckRelinearization) { // @@ -355,37 +197,29 @@ TEST_UNSAFE(ISAM2, ImplRemoveVariables) { //} /* ************************************************************************* */ -TEST(ISAM2, optimize2) { - - // Create initialization - Values theta; - theta.insert((0), Pose2(0.01, 0.01, 0.01)); - - // Create conditional - Vector d(3); d << -0.1, -0.1, -0.31831; - Matrix R(3,3); R << - 10, 0.0, 0.0, - 0.0, 10, 0.0, - 0.0, 0.0, 31.8309886; - GaussianConditional::shared_ptr conditional(new GaussianConditional(0, d, R, Vector::Ones(3))); - - // Create ordering - Ordering ordering; ordering += (0); - - // Expected vector - VectorValues expected(1, 3); - conditional->solveInPlace(expected); - - // Clique - ISAM2::sharedClique clique( - ISAM2::Clique::Create(make_pair(conditional,GaussianFactor::shared_ptr()))); - VectorValues actual(theta.dims(ordering)); - internal::optimizeInPlace(clique, actual); - -// expected.print("expected: "); -// actual.print("actual: "); - EXPECT(assert_equal(expected, actual)); -} +struct ConsistencyVisitor +{ + bool consistent; + const ISAM2& isam; + ConsistencyVisitor(const ISAM2& isam) : + consistent(true), isam(isam) {} + int operator()(const ISAM2::sharedClique& node, int& parentData) + { + if(std::find(isam.roots().begin(), isam.roots().end(), node) == isam.roots().end()) + { + if(node->parent_.expired()) + consistent = false; + if(std::find(node->parent()->children.begin(), node->parent()->children.end(), node) == node->parent()->children.end()) + consistent = false; + } + BOOST_FOREACH(Key j, node->conditional()->frontals()) + { + if(isam.nodes().at(j).get() != node.get()) + consistent = false; + } + return 0; + } +}; /* ************************************************************************* */ bool isam_check(const NonlinearFactorGraph& fullgraph, const Values& fullinit, const ISAM2& isam, Test& test, TestResult& result) { @@ -394,31 +228,38 @@ bool isam_check(const NonlinearFactorGraph& fullgraph, const Values& fullinit, c const std::string name_ = test.getName(); Values actual = isam.calculateEstimate(); - Ordering ordering = isam.getOrdering(); // *fullgraph.orderingCOLAMD(fullinit).first; - GaussianFactorGraph linearized = *fullgraph.linearize(fullinit, ordering); -// linearized.print("Expected linearized: "); - GaussianBayesNet gbn = *GaussianSequentialSolver(linearized).eliminate(); -// gbn.print("Expected bayesnet: "); - VectorValues delta = optimize(gbn); - Values expected = fullinit.retract(delta, ordering); + Values expected = fullinit.retract(fullgraph.linearize(fullinit)->optimize()); bool isamEqual = assert_equal(expected, actual); + // Check information + GaussianFactorGraph isamGraph(isam); + isamGraph += isam.roots().front()->cachedFactor_; + Matrix expectedHessian = fullgraph.linearize(isam.getLinearizationPoint())->augmentedHessian(); + Matrix actualHessian = isamGraph.augmentedHessian(); + expectedHessian.bottomRightCorner(1,1) = actualHessian.bottomRightCorner(1,1); + bool isamTreeEqual = assert_equal(expectedHessian, actualHessian); + + // Check consistency + ConsistencyVisitor visitor(isam); + int data; // Unused + treeTraversal::DepthFirstForest(isam, data, visitor); + bool consistent = visitor.consistent; + // The following two checks make sure that the cached gradients are maintained and used correctly // Check gradient at each node bool nodeGradientsOk = true; typedef ISAM2::sharedClique sharedClique; - BOOST_FOREACH(const sharedClique& clique, isam.nodes()) { + BOOST_FOREACH(const sharedClique& clique, isam.nodes() | br::map_values) { // Compute expected gradient - FactorGraph jfg; - jfg.push_back(JacobianFactor::shared_ptr(new JacobianFactor(*clique->conditional()))); - VectorValues expectedGradient(*allocateVectorValues(isam)); - gradientAtZero(jfg, expectedGradient); + GaussianFactorGraph jfg; + jfg += clique->conditional(); + VectorValues expectedGradient = jfg.gradientAtZero(); // Compare with actual gradients - int variablePosition = 0; + DenseIndex variablePosition = 0; for(GaussianConditional::const_iterator jit = clique->conditional()->begin(); jit != clique->conditional()->end(); ++jit) { - const int dim = clique->conditional()->dim(jit); + const DenseIndex dim = clique->conditional()->getDim(jit); Vector actual = clique->gradientContribution().segment(variablePosition, dim); bool gradOk = assert_equal(expectedGradient[*jit], actual); EXPECT(gradOk); @@ -431,17 +272,30 @@ bool isam_check(const NonlinearFactorGraph& fullgraph, const Values& fullinit, c } // Check gradient - VectorValues expectedGradient(*allocateVectorValues(isam)); - gradientAtZero(FactorGraph(isam), expectedGradient); - VectorValues expectedGradient2(gradient(FactorGraph(isam), VectorValues::Zero(expectedGradient))); - VectorValues actualGradient(*allocateVectorValues(isam)); + VectorValues expectedGradient = GaussianFactorGraph(isam).gradientAtZero(); + VectorValues expectedGradient2 = GaussianFactorGraph(isam).gradient(VectorValues::Zero(expectedGradient)); + VectorValues actualGradient; gradientAtZero(isam, actualGradient); bool expectedGradOk = assert_equal(expectedGradient2, expectedGradient); EXPECT(expectedGradOk); bool totalGradOk = assert_equal(expectedGradient, actualGradient); EXPECT(totalGradOk); - return nodeGradientsOk && expectedGradOk && totalGradOk && isamEqual; + return nodeGradientsOk && expectedGradOk && totalGradOk && isamEqual && isamTreeEqual && consistent; +} + +/* ************************************************************************* */ +TEST(ISAM2, simple) +{ + for(size_t i = 0; i < 10; ++i) { + // These variables will be reused and accumulate factors and values + Values fullinit; + NonlinearFactorGraph fullgraph; + ISAM2 isam = createSlamlikeISAM2(fullinit, fullgraph, ISAM2Params(ISAM2GaussNewtonParams(0.001), 0.0, 0, false), i); + + // Compare solutions + EXPECT(isam_check(fullgraph, fullinit, isam, *this, result_)); + } } /* ************************************************************************* */ @@ -505,8 +359,8 @@ TEST(ISAM2, clone) { // Modify original isam NonlinearFactorGraph factors; - factors.add(BetweenFactor(0, 10, - isam.calculateEstimate(0).between(isam.calculateEstimate(10)), noiseModel::Unit::Create(3))); + factors += BetweenFactor(0, 10, + isam.calculateEstimate(0).between(isam.calculateEstimate(10)), noiseModel::Unit::Create(3)); isam.update(factors); CHECK(assert_equal(createSlamlikeISAM2(), clone2)); @@ -526,66 +380,6 @@ TEST(ISAM2, clone) { CHECK(assert_equal(ISAM2(), clone1)); } -/* ************************************************************************* */ -TEST(ISAM2, permute_cached) { - typedef boost::shared_ptr sharedISAM2Clique; - - // Construct expected permuted BayesTree (variable 2 has been changed to 1) - BayesTree expected; - expected.insert(sharedISAM2Clique(new ISAM2Clique(make_pair( - boost::make_shared(pair_list_of - (3, Matrix_(1,1,1.0)) - (4, Matrix_(1,1,2.0)), - 2, Vector_(1,1.0), Vector_(1,1.0)), // p(3,4) - HessianFactor::shared_ptr())))); // Cached: empty - expected.insert(sharedISAM2Clique(new ISAM2Clique(make_pair( - boost::make_shared(pair_list_of - (2, Matrix_(1,1,1.0)) - (3, Matrix_(1,1,2.0)), - 1, Vector_(1,1.0), Vector_(1,1.0)), // p(2|3) - boost::make_shared(3, Matrix_(1,1,1.0), Vector_(1,1.0), 0.0))))); // Cached: p(3) - expected.insert(sharedISAM2Clique(new ISAM2Clique(make_pair( - boost::make_shared(pair_list_of - (0, Matrix_(1,1,1.0)) - (2, Matrix_(1,1,2.0)), - 1, Vector_(1,1.0), Vector_(1,1.0)), // p(0|2) - boost::make_shared(1, Matrix_(1,1,1.0), Vector_(1,1.0), 0.0))))); // Cached: p(1) - // Change variable 2 to 1 - expected.root()->children().front()->conditional()->keys()[0] = 1; - expected.root()->children().front()->children().front()->conditional()->keys()[1] = 1; - - // Construct unpermuted BayesTree - BayesTree actual; - actual.insert(sharedISAM2Clique(new ISAM2Clique(make_pair( - boost::make_shared(pair_list_of - (3, Matrix_(1,1,1.0)) - (4, Matrix_(1,1,2.0)), - 2, Vector_(1,1.0), Vector_(1,1.0)), // p(3,4) - HessianFactor::shared_ptr())))); // Cached: empty - actual.insert(sharedISAM2Clique(new ISAM2Clique(make_pair( - boost::make_shared(pair_list_of - (2, Matrix_(1,1,1.0)) - (3, Matrix_(1,1,2.0)), - 1, Vector_(1,1.0), Vector_(1,1.0)), // p(2|3) - boost::make_shared(3, Matrix_(1,1,1.0), Vector_(1,1.0), 0.0))))); // Cached: p(3) - actual.insert(sharedISAM2Clique(new ISAM2Clique(make_pair( - boost::make_shared(pair_list_of - (0, Matrix_(1,1,1.0)) - (2, Matrix_(1,1,2.0)), - 1, Vector_(1,1.0), Vector_(1,1.0)), // p(0|2) - boost::make_shared(2, Matrix_(1,1,1.0), Vector_(1,1.0), 0.0))))); // Cached: p(2) - - // Create permutation that changes variable 2 -> 0 - Permutation permutation = Permutation::Identity(5); - permutation[2] = 1; - - // Permute BayesTree - actual.root()->permuteWithInverse(permutation); - - // Check - EXPECT(assert_equal(expected, actual)); -} - /* ************************************************************************* */ TEST(ISAM2, removeFactors) { @@ -610,7 +404,7 @@ TEST(ISAM2, removeFactors) } /* ************************************************************************* */ -TEST_UNSAFE(ISAM2, removeVariables) +TEST(ISAM2, removeVariables) { // These variables will be reused and accumulate factors and values Values fullinit; @@ -633,7 +427,7 @@ TEST_UNSAFE(ISAM2, removeVariables) } /* ************************************************************************* */ -TEST_UNSAFE(ISAM2, swapFactors) +TEST(ISAM2, swapFactors) { // This test builds a graph in the same way as the "slamlike" test above, but // then swaps the 2nd-to-last landmark measurement with a different one @@ -650,8 +444,8 @@ TEST_UNSAFE(ISAM2, swapFactors) fullgraph.remove(swap_idx); NonlinearFactorGraph swapfactors; -// swapfactors.add(BearingRange(10, 100, Rot2::fromAngle(M_PI/4.0 + M_PI/16.0), 4.5, brNoise); // original factor - swapfactors.add(BearingRangeFactor(10, 100, Rot2::fromAngle(M_PI/4.0 + M_PI/16.0), 5.0, brNoise)); +// swapfactors += BearingRange(10, 100, Rot2::fromAngle(M_PI/4.0 + M_PI/16.0), 4.5, brNoise; // original factor + swapfactors += BearingRangeFactor(10, 100, Rot2::fromAngle(M_PI/4.0 + M_PI/16.0), 5.0, brNoise); fullgraph.push_back(swapfactors); isam.update(swapfactors, Values(), toRemove); } @@ -662,28 +456,26 @@ TEST_UNSAFE(ISAM2, swapFactors) // Check gradient at each node typedef ISAM2::sharedClique sharedClique; - BOOST_FOREACH(const sharedClique& clique, isam.nodes()) { + BOOST_FOREACH(const sharedClique& clique, isam.nodes() | br::map_values) { // Compute expected gradient - FactorGraph jfg; - jfg.push_back(JacobianFactor::shared_ptr(new JacobianFactor(*clique->conditional()))); - VectorValues expectedGradient(*allocateVectorValues(isam)); - gradientAtZero(jfg, expectedGradient); + GaussianFactorGraph jfg; + jfg += clique->conditional(); + VectorValues expectedGradient = jfg.gradientAtZero(); // Compare with actual gradients - int variablePosition = 0; + DenseIndex variablePosition = 0; for(GaussianConditional::const_iterator jit = clique->conditional()->begin(); jit != clique->conditional()->end(); ++jit) { - const int dim = clique->conditional()->dim(jit); + const DenseIndex dim = clique->conditional()->getDim(jit); Vector actual = clique->gradientContribution().segment(variablePosition, dim); EXPECT(assert_equal(expectedGradient[*jit], actual)); variablePosition += dim; } - EXPECT_LONGS_EQUAL(clique->gradientContribution().rows(), variablePosition); + EXPECT_LONGS_EQUAL((long)clique->gradientContribution().rows(), (long)variablePosition); } // Check gradient - VectorValues expectedGradient(*allocateVectorValues(isam)); - gradientAtZero(FactorGraph(isam), expectedGradient); - VectorValues expectedGradient2(gradient(FactorGraph(isam), VectorValues::Zero(expectedGradient))); - VectorValues actualGradient(*allocateVectorValues(isam)); + VectorValues expectedGradient = GaussianFactorGraph(isam).gradientAtZero(); + VectorValues expectedGradient2 = GaussianFactorGraph(isam).gradient(VectorValues::Zero(expectedGradient)); + VectorValues actualGradient; gradientAtZero(isam, actualGradient); EXPECT(assert_equal(expectedGradient2, expectedGradient)); EXPECT(assert_equal(expectedGradient, actualGradient)); @@ -708,7 +500,7 @@ TEST(ISAM2, constrained_ordering) // Add a prior at time 0 and update isam { NonlinearFactorGraph newfactors; - newfactors.add(PriorFactor(0, Pose2(0.0, 0.0, 0.0), odoNoise)); + newfactors += PriorFactor(0, Pose2(0.0, 0.0, 0.0), odoNoise); fullgraph.push_back(newfactors); Values init; @@ -723,7 +515,7 @@ TEST(ISAM2, constrained_ordering) // Add odometry from time 0 to time 5 for( ; i<5; ++i) { NonlinearFactorGraph newfactors; - newfactors.add(BetweenFactor(i, i+1, Pose2(1.0, 0.0, 0.0), odoNoise)); + newfactors += BetweenFactor(i, i+1, Pose2(1.0, 0.0, 0.0), odoNoise); fullgraph.push_back(newfactors); Values init; @@ -739,9 +531,9 @@ TEST(ISAM2, constrained_ordering) // Add odometry from time 5 to 6 and landmark measurement at time 5 { NonlinearFactorGraph newfactors; - newfactors.add(BetweenFactor(i, i+1, Pose2(1.0, 0.0, 0.0), odoNoise)); - newfactors.add(BearingRangeFactor(i, 100, Rot2::fromAngle(M_PI/4.0), 5.0, brNoise)); - newfactors.add(BearingRangeFactor(i, 101, Rot2::fromAngle(-M_PI/4.0), 5.0, brNoise)); + newfactors += BetweenFactor(i, i+1, Pose2(1.0, 0.0, 0.0), odoNoise); + newfactors += BearingRangeFactor(i, 100, Rot2::fromAngle(M_PI/4.0), 5.0, brNoise); + newfactors += BearingRangeFactor(i, 101, Rot2::fromAngle(-M_PI/4.0), 5.0, brNoise); fullgraph.push_back(newfactors); Values init; @@ -759,7 +551,7 @@ TEST(ISAM2, constrained_ordering) // Add odometry from time 6 to time 10 for( ; i<10; ++i) { NonlinearFactorGraph newfactors; - newfactors.add(BetweenFactor(i, i+1, Pose2(1.0, 0.0, 0.0), odoNoise)); + newfactors += BetweenFactor(i, i+1, Pose2(1.0, 0.0, 0.0), odoNoise); fullgraph.push_back(newfactors); Values init; @@ -772,9 +564,9 @@ TEST(ISAM2, constrained_ordering) // Add odometry from time 10 to 11 and landmark measurement at time 10 { NonlinearFactorGraph newfactors; - newfactors.add(BetweenFactor(i, i+1, Pose2(1.0, 0.0, 0.0), odoNoise)); - newfactors.add(BearingRangeFactor(i, 100, Rot2::fromAngle(M_PI/4.0 + M_PI/16.0), 4.5, brNoise)); - newfactors.add(BearingRangeFactor(i, 101, Rot2::fromAngle(-M_PI/4.0 + M_PI/16.0), 4.5, brNoise)); + newfactors += BetweenFactor(i, i+1, Pose2(1.0, 0.0, 0.0), odoNoise); + newfactors += BearingRangeFactor(i, 100, Rot2::fromAngle(M_PI/4.0 + M_PI/16.0), 4.5, brNoise); + newfactors += BearingRangeFactor(i, 101, Rot2::fromAngle(-M_PI/4.0 + M_PI/16.0), 4.5, brNoise); fullgraph.push_back(newfactors); Values init; @@ -788,33 +580,28 @@ TEST(ISAM2, constrained_ordering) // Compare solutions EXPECT(isam_check(fullgraph, fullinit, isam, *this, result_)); - // Check that x3 and x4 are last, but either can come before the other - EXPECT(isam.getOrdering()[(3)] == 12 && isam.getOrdering()[(4)] == 13); - // Check gradient at each node typedef ISAM2::sharedClique sharedClique; - BOOST_FOREACH(const sharedClique& clique, isam.nodes()) { + BOOST_FOREACH(const sharedClique& clique, isam.nodes() | br::map_values) { // Compute expected gradient - FactorGraph jfg; - jfg.push_back(JacobianFactor::shared_ptr(new JacobianFactor(*clique->conditional()))); - VectorValues expectedGradient(*allocateVectorValues(isam)); - gradientAtZero(jfg, expectedGradient); + GaussianFactorGraph jfg; + jfg += clique->conditional(); + VectorValues expectedGradient = jfg.gradientAtZero(); // Compare with actual gradients - int variablePosition = 0; + DenseIndex variablePosition = 0; for(GaussianConditional::const_iterator jit = clique->conditional()->begin(); jit != clique->conditional()->end(); ++jit) { - const int dim = clique->conditional()->dim(jit); + const DenseIndex dim = clique->conditional()->getDim(jit); Vector actual = clique->gradientContribution().segment(variablePosition, dim); EXPECT(assert_equal(expectedGradient[*jit], actual)); variablePosition += dim; } - LONGS_EQUAL(clique->gradientContribution().rows(), variablePosition); + LONGS_EQUAL((long)clique->gradientContribution().rows(), (long)variablePosition); } // Check gradient - VectorValues expectedGradient(*allocateVectorValues(isam)); - gradientAtZero(FactorGraph(isam), expectedGradient); - VectorValues expectedGradient2(gradient(FactorGraph(isam), VectorValues::Zero(expectedGradient))); - VectorValues actualGradient(*allocateVectorValues(isam)); + VectorValues expectedGradient = GaussianFactorGraph(isam).gradientAtZero(); + VectorValues expectedGradient2 = GaussianFactorGraph(isam).gradient(VectorValues::Zero(expectedGradient)); + VectorValues actualGradient; gradientAtZero(isam, actualGradient); EXPECT(assert_equal(expectedGradient2, expectedGradient)); EXPECT(assert_equal(expectedGradient, actualGradient)); @@ -838,27 +625,20 @@ namespace { bool checkMarginalizeLeaves(ISAM2& isam, const FastList& leafKeys) { Matrix expectedAugmentedHessian, expected3AugmentedHessian; vector toKeep; - const Index lastVar = isam.getOrdering().size() - 1; - for(Index i=0; i<=lastVar; ++i) - if(find(leafKeys.begin(), leafKeys.end(), isam.getOrdering().key(i)) == leafKeys.end()) - toKeep.push_back(i); + BOOST_FOREACH(Key j, isam.getDelta() | br::map_keys) + if(find(leafKeys.begin(), leafKeys.end(), j) == leafKeys.end()) + toKeep.push_back(j); // Calculate expected marginal from iSAM2 tree - GaussianFactorGraph isamAsGraph(isam); - GaussianSequentialSolver solver(isamAsGraph); - GaussianFactorGraph marginalgfg = *solver.jointFactorGraph(toKeep); - expectedAugmentedHessian = marginalgfg.augmentedHessian(); + expectedAugmentedHessian = GaussianFactorGraph(isam).marginal(toKeep, EliminateQR)->augmentedHessian(); - //// Calculate expected marginal from cached linear factors + // Calculate expected marginal from cached linear factors //assert(isam.params().cacheLinearizedFactors); - //GaussianSequentialSolver solver2(isam.linearFactors_, isam.params().factorization == ISAM2Params::QR); - //expected2AugmentedHessian = solver2.jointFactorGraph(toKeep)->augmentedHessian(); + //Matrix expected2AugmentedHessian = isam.linearFactors_.marginal(toKeep, EliminateQR)->augmentedHessian(); // Calculate expected marginal from original nonlinear factors - GaussianSequentialSolver solver3( - *isam.getFactorsUnsafe().linearize(isam.getLinearizationPoint(), isam.getOrdering()), - isam.params().factorization == ISAM2Params::QR); - expected3AugmentedHessian = solver3.jointFactorGraph(toKeep)->augmentedHessian(); + expected3AugmentedHessian = isam.getFactorsUnsafe().linearize(isam.getLinearizationPoint()) + ->marginal(toKeep, EliminateQR)->augmentedHessian(); // Do marginalization isam.marginalizeLeaves(leafKeys); @@ -868,19 +648,19 @@ namespace { Matrix actualAugmentedHessian = actualMarginalGraph.augmentedHessian(); //Matrix actual2AugmentedHessian = linearFactors_.augmentedHessian(); Matrix actual3AugmentedHessian = isam.getFactorsUnsafe().linearize( - isam.getLinearizationPoint(), isam.getOrdering())->augmentedHessian(); - assert(actualAugmentedHessian.unaryExpr(std::ptr_fun(&std::isfinite)).all()); + isam.getLinearizationPoint())->augmentedHessian(); + assert(actualAugmentedHessian.allFinite()); // Check full marginalization //cout << "treeEqual" << endl; bool treeEqual = assert_equal(expectedAugmentedHessian, actualAugmentedHessian, 1e-6); - //bool linEqual = assert_equal(expected2AugmentedHessian, actualAugmentedHessian, 1e-6); + //actualAugmentedHessian.bottomRightCorner(1,1) = expected2AugmentedHessian.bottomRightCorner(1,1); bool linEqual = assert_equal(expected2AugmentedHessian, actualAugmentedHessian, 1e-6); //cout << "nonlinEqual" << endl; - bool nonlinEqual = assert_equal(expected3AugmentedHessian, actualAugmentedHessian, 1e-6); + actualAugmentedHessian.bottomRightCorner(1,1) = expected3AugmentedHessian.bottomRightCorner(1,1); bool nonlinEqual = assert_equal(expected3AugmentedHessian, actualAugmentedHessian, 1e-6); //bool linCorrect = assert_equal(expected3AugmentedHessian, expected2AugmentedHessian, 1e-6); //actual2AugmentedHessian.bottomRightCorner(1,1) = expected3AugmentedHessian.bottomRightCorner(1,1); bool afterLinCorrect = assert_equal(expected3AugmentedHessian, actual2AugmentedHessian, 1e-6); //cout << "nonlinCorrect" << endl; - actual3AugmentedHessian.bottomRightCorner(1,1) = expected3AugmentedHessian.bottomRightCorner(1,1); bool afterNonlinCorrect = assert_equal(expected3AugmentedHessian, actual3AugmentedHessian, 1e-6); + bool afterNonlinCorrect = assert_equal(expected3AugmentedHessian, actual3AugmentedHessian, 1e-6); bool ok = treeEqual && /*linEqual &&*/ nonlinEqual && /*linCorrect &&*/ /*afterLinCorrect &&*/ afterNonlinCorrect; return ok; @@ -888,16 +668,16 @@ namespace { } /* ************************************************************************* */ -TEST_UNSAFE(ISAM2, marginalizeLeaves1) +TEST(ISAM2, marginalizeLeaves1) { ISAM2 isam; NonlinearFactorGraph factors; - factors.add(PriorFactor(0, LieVector(0.0), noiseModel::Unit::Create(1))); + factors += PriorFactor(0, LieVector(0.0), noiseModel::Unit::Create(1)); - factors.add(BetweenFactor(0, 1, LieVector(0.0), noiseModel::Unit::Create(1))); - factors.add(BetweenFactor(1, 2, LieVector(0.0), noiseModel::Unit::Create(1))); - factors.add(BetweenFactor(0, 2, LieVector(0.0), noiseModel::Unit::Create(1))); + factors += BetweenFactor(0, 1, LieVector(0.0), noiseModel::Unit::Create(1)); + factors += BetweenFactor(1, 2, LieVector(0.0), noiseModel::Unit::Create(1)); + factors += BetweenFactor(0, 2, LieVector(0.0), noiseModel::Unit::Create(1)); Values values; values.insert(0, LieVector(0.0)); @@ -911,23 +691,22 @@ TEST_UNSAFE(ISAM2, marginalizeLeaves1) isam.update(factors, values, FastVector(), constrainedKeys); - FastList leafKeys; - leafKeys.push_back(isam.getOrdering().key(0)); + FastList leafKeys = list_of(0); EXPECT(checkMarginalizeLeaves(isam, leafKeys)); } /* ************************************************************************* */ -TEST_UNSAFE(ISAM2, marginalizeLeaves2) +TEST(ISAM2, marginalizeLeaves2) { ISAM2 isam; NonlinearFactorGraph factors; - factors.add(PriorFactor(0, LieVector(0.0), noiseModel::Unit::Create(1))); + factors += PriorFactor(0, LieVector(0.0), noiseModel::Unit::Create(1)); - factors.add(BetweenFactor(0, 1, LieVector(0.0), noiseModel::Unit::Create(1))); - factors.add(BetweenFactor(1, 2, LieVector(0.0), noiseModel::Unit::Create(1))); - factors.add(BetweenFactor(0, 2, LieVector(0.0), noiseModel::Unit::Create(1))); - factors.add(BetweenFactor(2, 3, LieVector(0.0), noiseModel::Unit::Create(1))); + factors += BetweenFactor(0, 1, LieVector(0.0), noiseModel::Unit::Create(1)); + factors += BetweenFactor(1, 2, LieVector(0.0), noiseModel::Unit::Create(1)); + factors += BetweenFactor(0, 2, LieVector(0.0), noiseModel::Unit::Create(1)); + factors += BetweenFactor(2, 3, LieVector(0.0), noiseModel::Unit::Create(1)); Values values; values.insert(0, LieVector(0.0)); @@ -943,28 +722,27 @@ TEST_UNSAFE(ISAM2, marginalizeLeaves2) isam.update(factors, values, FastVector(), constrainedKeys); - FastList leafKeys; - leafKeys.push_back(isam.getOrdering().key(0)); + FastList leafKeys = list_of(0); EXPECT(checkMarginalizeLeaves(isam, leafKeys)); } /* ************************************************************************* */ -TEST_UNSAFE(ISAM2, marginalizeLeaves3) +TEST(ISAM2, marginalizeLeaves3) { ISAM2 isam; NonlinearFactorGraph factors; - factors.add(PriorFactor(0, LieVector(0.0), noiseModel::Unit::Create(1))); + factors += PriorFactor(0, LieVector(0.0), noiseModel::Unit::Create(1)); - factors.add(BetweenFactor(0, 1, LieVector(0.0), noiseModel::Unit::Create(1))); - factors.add(BetweenFactor(1, 2, LieVector(0.0), noiseModel::Unit::Create(1))); - factors.add(BetweenFactor(0, 2, LieVector(0.0), noiseModel::Unit::Create(1))); + factors += BetweenFactor(0, 1, LieVector(0.0), noiseModel::Unit::Create(1)); + factors += BetweenFactor(1, 2, LieVector(0.0), noiseModel::Unit::Create(1)); + factors += BetweenFactor(0, 2, LieVector(0.0), noiseModel::Unit::Create(1)); - factors.add(BetweenFactor(2, 3, LieVector(0.0), noiseModel::Unit::Create(1))); + factors += BetweenFactor(2, 3, LieVector(0.0), noiseModel::Unit::Create(1)); - factors.add(BetweenFactor(3, 4, LieVector(0.0), noiseModel::Unit::Create(1))); - factors.add(BetweenFactor(4, 5, LieVector(0.0), noiseModel::Unit::Create(1))); - factors.add(BetweenFactor(3, 5, LieVector(0.0), noiseModel::Unit::Create(1))); + factors += BetweenFactor(3, 4, LieVector(0.0), noiseModel::Unit::Create(1)); + factors += BetweenFactor(4, 5, LieVector(0.0), noiseModel::Unit::Create(1)); + factors += BetweenFactor(3, 5, LieVector(0.0), noiseModel::Unit::Create(1)); Values values; values.insert(0, LieVector(0.0)); @@ -984,20 +762,19 @@ TEST_UNSAFE(ISAM2, marginalizeLeaves3) isam.update(factors, values, FastVector(), constrainedKeys); - FastList leafKeys; - leafKeys.push_back(isam.getOrdering().key(0)); + FastList leafKeys = list_of(0); EXPECT(checkMarginalizeLeaves(isam, leafKeys)); } /* ************************************************************************* */ -TEST_UNSAFE(ISAM2, marginalizeLeaves4) +TEST(ISAM2, marginalizeLeaves4) { ISAM2 isam; NonlinearFactorGraph factors; - factors.add(PriorFactor(0, LieVector(0.0), noiseModel::Unit::Create(1))); - factors.add(BetweenFactor(0, 2, LieVector(0.0), noiseModel::Unit::Create(1))); - factors.add(BetweenFactor(1, 2, LieVector(0.0), noiseModel::Unit::Create(1))); + factors += PriorFactor(0, LieVector(0.0), noiseModel::Unit::Create(1)); + factors += BetweenFactor(0, 2, LieVector(0.0), noiseModel::Unit::Create(1)); + factors += BetweenFactor(1, 2, LieVector(0.0), noiseModel::Unit::Create(1)); Values values; values.insert(0, LieVector(0.0)); @@ -1011,20 +788,18 @@ TEST_UNSAFE(ISAM2, marginalizeLeaves4) isam.update(factors, values, FastVector(), constrainedKeys); - FastList leafKeys; - leafKeys.push_back(isam.getOrdering().key(1)); + FastList leafKeys = list_of(1); EXPECT(checkMarginalizeLeaves(isam, leafKeys)); } /* ************************************************************************* */ -TEST_UNSAFE(ISAM2, marginalizeLeaves5) +TEST(ISAM2, marginalizeLeaves5) { // Create isam2 ISAM2 isam = createSlamlikeISAM2(); // Marginalize - FastList marginalizeKeys; - marginalizeKeys.push_back(isam.getOrdering().key(0)); + FastList marginalizeKeys = list_of(0); EXPECT(checkMarginalizeLeaves(isam, marginalizeKeys)); } diff --git a/tests/testGaussianJunctionTreeB.cpp b/tests/testGaussianJunctionTreeB.cpp index b24b2b901..7af67e039 100644 --- a/tests/testGaussianJunctionTreeB.cpp +++ b/tests/testGaussianJunctionTreeB.cpp @@ -15,6 +15,10 @@ * @author nikai */ +#include + +#if 0 + #include #include #include @@ -24,16 +28,12 @@ #include #include #include -#include -#include #include #include #include #include #include -#include - #include #include // for operator += #include // for operator += @@ -232,6 +232,9 @@ TEST(GaussianJunctionTreeB, simpleMarginal) { EXPECT(assert_equal(expected, actual3)); } +#endif + /* ************************************************************************* */ int main() { TestResult tr; return TestRegistry::runAllTests(tr);} /* ************************************************************************* */ + diff --git a/tests/testGradientDescentOptimizer.cpp b/tests/testGradientDescentOptimizer.cpp index 4f556eb0f..cfe81cd2c 100644 --- a/tests/testGradientDescentOptimizer.cpp +++ b/tests/testGradientDescentOptimizer.cpp @@ -31,20 +31,20 @@ boost::tuple generateProblem() { // 2a. Add Gaussian prior Pose2 priorMean(0.0, 0.0, 0.0); // prior at origin SharedDiagonal priorNoise = noiseModel::Diagonal::Sigmas(Vector_(3, 0.3, 0.3, 0.1)); - graph.add(PriorFactor(1, priorMean, priorNoise)); + graph += PriorFactor(1, priorMean, priorNoise); // 2b. Add odometry factors SharedDiagonal odometryNoise = noiseModel::Diagonal::Sigmas(Vector_(3, 0.2, 0.2, 0.1)); - graph.add(BetweenFactor(1, 2, Pose2(2.0, 0.0, 0.0 ), odometryNoise)); - graph.add(BetweenFactor(2, 3, Pose2(2.0, 0.0, M_PI_2), odometryNoise)); - graph.add(BetweenFactor(3, 4, Pose2(2.0, 0.0, M_PI_2), odometryNoise)); - graph.add(BetweenFactor(4, 5, Pose2(2.0, 0.0, M_PI_2), odometryNoise)); + graph += BetweenFactor(1, 2, Pose2(2.0, 0.0, 0.0 ), odometryNoise); + graph += BetweenFactor(2, 3, Pose2(2.0, 0.0, M_PI_2), odometryNoise); + graph += BetweenFactor(3, 4, Pose2(2.0, 0.0, M_PI_2), odometryNoise); + graph += BetweenFactor(4, 5, Pose2(2.0, 0.0, M_PI_2), odometryNoise); // 2c. Add pose constraint SharedDiagonal constraintUncertainty = noiseModel::Diagonal::Sigmas(Vector_(3, 0.2, 0.2, 0.1)); - graph.add(BetweenFactor(5, 2, Pose2(2.0, 0.0, M_PI_2), constraintUncertainty)); + graph += BetweenFactor(5, 2, Pose2(2.0, 0.0, M_PI_2), constraintUncertainty); - // 3. Create the data structure to hold the initialEstimate estinmate to the solution + // 3. Create the data structure to hold the initialEstimate estimate to the solution Values initialEstimate; Pose2 x1(0.5, 0.0, 0.2 ); initialEstimate.insert(1, x1); Pose2 x2(2.3, 0.1,-0.2 ); initialEstimate.insert(2, x2); diff --git a/tests/testGraph.cpp b/tests/testGraph.cpp index dc00b3f4a..842f2bd67 100644 --- a/tests/testGraph.cpp +++ b/tests/testGraph.cpp @@ -48,7 +48,7 @@ TEST ( Ordering, predecessorMap2Keys ) { expected += 4,5,3,2,1; list actual = predecessorMap2Keys(p_map); - LONGS_EQUAL(expected.size(), actual.size()); + LONGS_EQUAL((long)expected.size(), (long)actual.size()); list::const_iterator it1 = expected.begin(); list::const_iterator it2 = actual.begin(); @@ -70,7 +70,7 @@ TEST( Graph, predecessorMap2Graph ) p_map.insert(3, 2); boost::tie(graph, root, key2vertex) = predecessorMap2Graph, SVertex, Key>(p_map); - LONGS_EQUAL(3, boost::num_vertices(graph)); + LONGS_EQUAL(3, (long)boost::num_vertices(graph)); CHECK(root == key2vertex[2]); } @@ -81,9 +81,9 @@ TEST( Graph, composePoses ) SharedNoiseModel cov = noiseModel::Unit::Create(3); Pose2 p1(1.0, 2.0, 0.3), p2(4.0, 5.0, 0.6), p3(7.0, 8.0, 0.9), p4(2.0, 2.0, 2.9); Pose2 p12=p1.between(p2), p23=p2.between(p3), p43=p4.between(p3); - graph.add(BetweenFactor(1,2, p12, cov)); - graph.add(BetweenFactor(2,3, p23, cov)); - graph.add(BetweenFactor(4,3, p43, cov)); + graph += BetweenFactor(1,2, p12, cov); + graph += BetweenFactor(2,3, p23, cov); + graph += BetweenFactor(4,3, p43, cov); PredecessorMap tree; tree.insert(1,2); @@ -101,7 +101,7 @@ TEST( Graph, composePoses ) expected.insert(3, p3); expected.insert(4, p4); - LONGS_EQUAL(4, actual->size()); + LONGS_EQUAL(4, (long)actual->size()); CHECK(assert_equal(expected, *actual)); } @@ -110,12 +110,12 @@ TEST( Graph, composePoses ) // GaussianFactorGraph g; // Matrix I = eye(2); // Vector b = Vector_(0, 0, 0); -// g.add(X(1), I, X(2), I, b, model); -// g.add(X(1), I, X(3), I, b, model); -// g.add(X(1), I, X(4), I, b, model); -// g.add(X(2), I, X(3), I, b, model); -// g.add(X(2), I, X(4), I, b, model); -// g.add(X(3), I, X(4), I, b, model); +// g += X(1), I, X(2), I, b, model; +// g += X(1), I, X(3), I, b, model; +// g += X(1), I, X(4), I, b, model; +// g += X(2), I, X(3), I, b, model; +// g += X(2), I, X(4), I, b, model; +// g += X(3), I, X(4), I, b, model; // // map tree = g.findMinimumSpanningTree(); // EXPECT(tree[X(1)].compare(X(1))==0); @@ -130,11 +130,11 @@ TEST( Graph, composePoses ) // GaussianFactorGraph g; // Matrix I = eye(2); // Vector b = Vector_(0, 0, 0); -// g.add(X(1), I, X(2), I, b, model); -// g.add(X(1), I, X(3), I, b, model); -// g.add(X(1), I, X(4), I, b, model); -// g.add(X(2), I, X(3), I, b, model); -// g.add(X(2), I, X(4), I, b, model); +// g += X(1), I, X(2), I, b, model; +// g += X(1), I, X(3), I, b, model; +// g += X(1), I, X(4), I, b, model; +// g += X(2), I, X(3), I, b, model; +// g += X(2), I, X(4), I, b, model; // // PredecessorMap tree; // tree[X(1)] = X(1); diff --git a/tests/testInferenceB.cpp b/tests/testInferenceB.cpp deleted file mode 100644 index 0df25d082..000000000 --- a/tests/testInferenceB.cpp +++ /dev/null @@ -1,89 +0,0 @@ -/* ---------------------------------------------------------------------------- - - * GTSAM Copyright 2010, Georgia Tech Research Corporation, - * Atlanta, Georgia 30332-0415 - * All Rights Reserved - * Authors: Frank Dellaert, et al. (see THANKS for the full author list) - - * See LICENSE for the license information - - * -------------------------------------------------------------------------- */ - -/** - * @file testInferenceB.cpp - * @brief Unit tests for functionality declared in inference.h - * @author Frank Dellaert - */ - -#include - -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include - -#include - -using namespace std; -using namespace gtsam; - -// Convenience for named keys -using symbol_shorthand::X; -using symbol_shorthand::L; - -/* ************************************************************************* */ -// The tests below test the *generic* inference algorithms. Some of these have -// specialized versions in the derived classes GaussianFactorGraph etc... -/* ************************************************************************* */ - -/* ************************************************************************* */ -TEST( inference, marginals ) -{ - using namespace example; - // create and marginalize a small Bayes net on "x" - GaussianBayesNet cbn = createSmallGaussianBayesNet(); - vector xvar; xvar.push_back(0); - GaussianBayesNet actual = *GaussianSequentialSolver( - *GaussianSequentialSolver(GaussianFactorGraph(cbn)).jointFactorGraph(xvar)).eliminate(); - - // expected is just scalar Gaussian on x - GaussianBayesNet expected = scalarGaussian(0, 4, sqrt(2.0)); - CHECK(assert_equal(expected,actual)); -} - -/* ************************************************************************* */ -TEST( inference, marginals2) -{ - NonlinearFactorGraph fg; - SharedDiagonal poseModel(noiseModel::Isotropic::Sigma(3, 0.1)); - SharedDiagonal pointModel(noiseModel::Isotropic::Sigma(2, 0.1)); - - fg.add(PriorFactor(X(0), Pose2(), poseModel)); - fg.add(BetweenFactor(X(0), X(1), Pose2(1.0,0.0,0.0), poseModel)); - fg.add(BetweenFactor(X(1), X(2), Pose2(1.0,0.0,0.0), poseModel)); - fg.add(BearingRangeFactor(X(0), L(0), Rot2(), 1.0, pointModel)); - fg.add(BearingRangeFactor(X(1), L(0), Rot2(), 1.0, pointModel)); - fg.add(BearingRangeFactor(X(2), L(0), Rot2(), 1.0, pointModel)); - - Values init; - init.insert(X(0), Pose2(0.0,0.0,0.0)); - init.insert(X(1), Pose2(1.0,0.0,0.0)); - init.insert(X(2), Pose2(2.0,0.0,0.0)); - init.insert(L(0), Point2(1.0,1.0)); - - Ordering ordering(*fg.orderingCOLAMD(init)); - FactorGraph::shared_ptr gfg(fg.linearize(init, ordering)); - GaussianMultifrontalSolver solver(*gfg); - solver.marginalFactor(ordering[L(0)]); -} - -/* ************************************************************************* */ -int main() { TestResult tr; return TestRegistry::runAllTests(tr);} -/* ************************************************************************* */ diff --git a/tests/testIterative.cpp b/tests/testIterative.cpp index 1484052e5..bb4193955 100644 --- a/tests/testIterative.cpp +++ b/tests/testIterative.cpp @@ -19,9 +19,7 @@ #include #include #include -#include #include -#include #include #include @@ -41,12 +39,10 @@ static ConjugateGradientParameters parameters; TEST( Iterative, steepestDescent ) { // Create factor graph - Ordering ordering; - ordering += L(1), X(1), X(2); - GaussianFactorGraph fg = createGaussianFactorGraph(ordering); + GaussianFactorGraph fg = createGaussianFactorGraph(); // eliminate and solve - VectorValues expected = *GaussianSequentialSolver(fg).optimize(); + VectorValues expected = fg.optimize(); // Do gradient descent VectorValues zero = VectorValues::Zero(expected); // TODO, how do we do this normally? @@ -58,12 +54,10 @@ TEST( Iterative, steepestDescent ) TEST( Iterative, conjugateGradientDescent ) { // Create factor graph - Ordering ordering; - ordering += L(1), X(1), X(2); - GaussianFactorGraph fg = createGaussianFactorGraph(ordering); + GaussianFactorGraph fg = createGaussianFactorGraph(); // eliminate and solve - VectorValues expected = *GaussianSequentialSolver(fg).optimize(); + VectorValues expected = fg.optimize(); // get matrices Matrix A; @@ -96,14 +90,12 @@ TEST( Iterative, conjugateGradientDescent_hard_constraint ) config.insert(X(2), Pose2(1.5,0.,0.)); NonlinearFactorGraph graph; - graph.add(NonlinearEquality(X(1), pose1)); - graph.add(BetweenFactor(X(1),X(2), Pose2(1.,0.,0.), noiseModel::Isotropic::Sigma(3, 1))); + graph += NonlinearEquality(X(1), pose1); + graph += BetweenFactor(X(1),X(2), Pose2(1.,0.,0.), noiseModel::Isotropic::Sigma(3, 1)); - Ordering ordering; - ordering += X(1), X(2); - boost::shared_ptr fg = graph.linearize(config,ordering); + boost::shared_ptr fg = graph.linearize(config); - VectorValues zeros = VectorValues::Zero(2, 3); + VectorValues zeros = config.zeroVectors(); ConjugateGradientParameters parameters; parameters.setEpsilon_abs(1e-3); @@ -112,8 +104,8 @@ TEST( Iterative, conjugateGradientDescent_hard_constraint ) VectorValues actual = conjugateGradientDescent(*fg, zeros, parameters); VectorValues expected; - expected.insert(0, zero(3)); - expected.insert(1, Vector_(3,-0.5,0.,0.)); + expected.insert(X(1), zero(3)); + expected.insert(X(2), Vector_(3,-0.5,0.,0.)); CHECK(assert_equal(expected, actual)); } @@ -125,14 +117,12 @@ TEST( Iterative, conjugateGradientDescent_soft_constraint ) config.insert(X(2), Pose2(1.5,0.,0.)); NonlinearFactorGraph graph; - graph.add(PriorFactor(X(1), Pose2(0.,0.,0.), noiseModel::Isotropic::Sigma(3, 1e-10))); - graph.add(BetweenFactor(X(1),X(2), Pose2(1.,0.,0.), noiseModel::Isotropic::Sigma(3, 1))); + graph += PriorFactor(X(1), Pose2(0.,0.,0.), noiseModel::Isotropic::Sigma(3, 1e-10)); + graph += BetweenFactor(X(1),X(2), Pose2(1.,0.,0.), noiseModel::Isotropic::Sigma(3, 1)); - Ordering ordering; - ordering += X(1), X(2); - boost::shared_ptr fg = graph.linearize(config,ordering); + boost::shared_ptr fg = graph.linearize(config); - VectorValues zeros = VectorValues::Zero(2, 3); + VectorValues zeros = config.zeroVectors(); ConjugateGradientParameters parameters; parameters.setEpsilon_abs(1e-3); @@ -141,8 +131,8 @@ TEST( Iterative, conjugateGradientDescent_soft_constraint ) VectorValues actual = conjugateGradientDescent(*fg, zeros, parameters); VectorValues expected; - expected.insert(0, zero(3)); - expected.insert(1, Vector_(3,-0.5,0.,0.)); + expected.insert(X(1), zero(3)); + expected.insert(X(2), Vector_(3,-0.5,0.,0.)); CHECK(assert_equal(expected, actual)); } diff --git a/tests/testMarginals.cpp b/tests/testMarginals.cpp index 93f1431a1..9e0e76ec7 100644 --- a/tests/testMarginals.cpp +++ b/tests/testMarginals.cpp @@ -53,15 +53,15 @@ TEST(Marginals, planarSLAMmarginals) { // gaussian for prior SharedDiagonal priorNoise = noiseModel::Diagonal::Sigmas(Vector_(3, 0.3, 0.3, 0.1)); Pose2 priorMean(0.0, 0.0, 0.0); // prior at origin - graph.add(PriorFactor(x1, priorMean, priorNoise)); // add the factor to the graph + graph += PriorFactor(x1, priorMean, priorNoise); // add the factor to the graph /* add odometry */ // general noisemodel for odometry SharedDiagonal odometryNoise = noiseModel::Diagonal::Sigmas(Vector_(3, 0.2, 0.2, 0.1)); Pose2 odometry(2.0, 0.0, 0.0); // create a measurement for both factors (the same in this case) // create between factors to represent odometry - graph.add(BetweenFactor(x1, x2, odometry, odometryNoise)); - graph.add(BetweenFactor(x2, x3, odometry, odometryNoise)); + graph += BetweenFactor(x1, x2, odometry, odometryNoise); + graph += BetweenFactor(x2, x3, odometry, odometryNoise); /* add measurements */ // general noisemodel for measurements @@ -76,9 +76,9 @@ TEST(Marginals, planarSLAMmarginals) { range32 = 2.0; // create bearing/range factors - graph.add(BearingRangeFactor(x1, l1, bearing11, range11, measurementNoise)); - graph.add(BearingRangeFactor(x2, l1, bearing21, range21, measurementNoise)); - graph.add(BearingRangeFactor(x3, l2, bearing32, range32, measurementNoise)); + graph += BearingRangeFactor(x1, l1, bearing11, range11, measurementNoise); + graph += BearingRangeFactor(x2, l1, bearing21, range21, measurementNoise); + graph += BearingRangeFactor(x3, l2, bearing32, range32, measurementNoise); // linearization point for marginals Values soln; @@ -183,10 +183,10 @@ TEST(Marginals, planarSLAMmarginals) { /* ************************************************************************* */ TEST(Marginals, order) { NonlinearFactorGraph fg; - fg.add(PriorFactor(0, Pose2(), noiseModel::Unit::Create(3))); - fg.add(BetweenFactor(0, 1, Pose2(1,0,0), noiseModel::Unit::Create(3))); - fg.add(BetweenFactor(1, 2, Pose2(1,0,0), noiseModel::Unit::Create(3))); - fg.add(BetweenFactor(2, 3, Pose2(1,0,0), noiseModel::Unit::Create(3))); + fg += PriorFactor(0, Pose2(), noiseModel::Unit::Create(3)); + fg += BetweenFactor(0, 1, Pose2(1,0,0), noiseModel::Unit::Create(3)); + fg += BetweenFactor(1, 2, Pose2(1,0,0), noiseModel::Unit::Create(3)); + fg += BetweenFactor(2, 3, Pose2(1,0,0), noiseModel::Unit::Create(3)); Values vals; vals.insert(0, Pose2()); @@ -197,44 +197,44 @@ TEST(Marginals, order) { vals.insert(100, Point2(0,1)); vals.insert(101, Point2(1,1)); - fg.add(BearingRangeFactor(0, 100, + fg += BearingRangeFactor(0, 100, vals.at(0).bearing(vals.at(100)), - vals.at(0).range(vals.at(100)), noiseModel::Unit::Create(2))); - fg.add(BearingRangeFactor(0, 101, + vals.at(0).range(vals.at(100)), noiseModel::Unit::Create(2)); + fg += BearingRangeFactor(0, 101, vals.at(0).bearing(vals.at(101)), - vals.at(0).range(vals.at(101)), noiseModel::Unit::Create(2))); + vals.at(0).range(vals.at(101)), noiseModel::Unit::Create(2)); - fg.add(BearingRangeFactor(1, 100, + fg += BearingRangeFactor(1, 100, vals.at(1).bearing(vals.at(100)), - vals.at(1).range(vals.at(100)), noiseModel::Unit::Create(2))); - fg.add(BearingRangeFactor(1, 101, + vals.at(1).range(vals.at(100)), noiseModel::Unit::Create(2)); + fg += BearingRangeFactor(1, 101, vals.at(1).bearing(vals.at(101)), - vals.at(1).range(vals.at(101)), noiseModel::Unit::Create(2))); + vals.at(1).range(vals.at(101)), noiseModel::Unit::Create(2)); - fg.add(BearingRangeFactor(2, 100, + fg += BearingRangeFactor(2, 100, vals.at(2).bearing(vals.at(100)), - vals.at(2).range(vals.at(100)), noiseModel::Unit::Create(2))); - fg.add(BearingRangeFactor(2, 101, + vals.at(2).range(vals.at(100)), noiseModel::Unit::Create(2)); + fg += BearingRangeFactor(2, 101, vals.at(2).bearing(vals.at(101)), - vals.at(2).range(vals.at(101)), noiseModel::Unit::Create(2))); + vals.at(2).range(vals.at(101)), noiseModel::Unit::Create(2)); - fg.add(BearingRangeFactor(3, 100, + fg += BearingRangeFactor(3, 100, vals.at(3).bearing(vals.at(100)), - vals.at(3).range(vals.at(100)), noiseModel::Unit::Create(2))); - fg.add(BearingRangeFactor(3, 101, + vals.at(3).range(vals.at(100)), noiseModel::Unit::Create(2)); + fg += BearingRangeFactor(3, 101, vals.at(3).bearing(vals.at(101)), - vals.at(3).range(vals.at(101)), noiseModel::Unit::Create(2))); + vals.at(3).range(vals.at(101)), noiseModel::Unit::Create(2)); Marginals marginals(fg, vals); FastVector keys(fg.keys()); JointMarginal joint = marginals.jointMarginalCovariance(keys); - LONGS_EQUAL(3, joint(0,0).rows()); - LONGS_EQUAL(3, joint(1,1).rows()); - LONGS_EQUAL(3, joint(2,2).rows()); - LONGS_EQUAL(3, joint(3,3).rows()); - LONGS_EQUAL(2, joint(100,100).rows()); - LONGS_EQUAL(2, joint(101,101).rows()); + LONGS_EQUAL(3, (long)joint(0,0).rows()); + LONGS_EQUAL(3, (long)joint(1,1).rows()); + LONGS_EQUAL(3, (long)joint(2,2).rows()); + LONGS_EQUAL(3, (long)joint(3,3).rows()); + LONGS_EQUAL(2, (long)joint(100,100).rows()); + LONGS_EQUAL(2, (long)joint(101,101).rows()); } /* ************************************************************************* */ diff --git a/tests/testNonlinearEquality.cpp b/tests/testNonlinearEquality.cpp index f10ce1b35..e31261869 100644 --- a/tests/testNonlinearEquality.cpp +++ b/tests/testNonlinearEquality.cpp @@ -55,9 +55,9 @@ TEST ( NonlinearEquality, linearization ) { // check linearize SharedDiagonal constraintModel = noiseModel::Constrained::All(3); - JacobianFactor expLF(0, eye(3), zero(3), constraintModel); - GaussianFactor::shared_ptr actualLF = nle->linearize(linearize, *linearize.orderingArbitrary()); - EXPECT(assert_equal(*actualLF, (const GaussianFactor&)expLF)); + JacobianFactor expLF(key, eye(3), zero(3), constraintModel); + GaussianFactor::shared_ptr actualLF = nle->linearize(linearize); + EXPECT(assert_equal((const GaussianFactor&)expLF, *actualLF)); } /* ********************************************************************** */ @@ -71,7 +71,7 @@ TEST ( NonlinearEquality, linearization_pose ) { // create a nonlinear equality constraint shared_poseNLE nle(new PoseNLE(key, value)); - GaussianFactor::shared_ptr actualLF = nle->linearize(config, *config.orderingArbitrary()); + GaussianFactor::shared_ptr actualLF = nle->linearize(config); EXPECT(true); } @@ -86,7 +86,7 @@ TEST ( NonlinearEquality, linearization_fail ) { shared_poseNLE nle(new PoseNLE(key, value)); // check linearize to ensure that it fails for bad linearization points - CHECK_EXCEPTION(nle->linearize(bad_linearize, *bad_linearize.orderingArbitrary()), std::invalid_argument); + CHECK_EXCEPTION(nle->linearize(bad_linearize), std::invalid_argument); } /* ********************************************************************** */ @@ -102,7 +102,7 @@ TEST ( NonlinearEquality, linearization_fail_pose ) { shared_poseNLE nle(new PoseNLE(key, value)); // check linearize to ensure that it fails for bad linearization points - CHECK_EXCEPTION(nle->linearize(bad_linearize, *bad_linearize.orderingArbitrary()), std::invalid_argument); + CHECK_EXCEPTION(nle->linearize(bad_linearize), std::invalid_argument); } /* ********************************************************************** */ @@ -118,7 +118,7 @@ TEST ( NonlinearEquality, linearization_fail_pose_origin ) { shared_poseNLE nle(new PoseNLE(key, value)); // check linearize to ensure that it fails for bad linearization points - CHECK_EXCEPTION(nle->linearize(bad_linearize, *bad_linearize.orderingArbitrary()), std::invalid_argument); + CHECK_EXCEPTION(nle->linearize(bad_linearize), std::invalid_argument); } /* ************************************************************************* */ @@ -176,11 +176,11 @@ TEST ( NonlinearEquality, allow_error_pose ) { DOUBLES_EQUAL(500.0, actError, 1e-9); // check linearization - GaussianFactor::shared_ptr actLinFactor = nle.linearize(config, *config.orderingArbitrary()); + GaussianFactor::shared_ptr actLinFactor = nle.linearize(config); Matrix A1 = eye(3,3); Vector b = expVec; SharedDiagonal model = noiseModel::Constrained::All(3); - GaussianFactor::shared_ptr expLinFactor(new JacobianFactor(0, A1, b, model)); + GaussianFactor::shared_ptr expLinFactor(new JacobianFactor(key1, A1, b, model)); EXPECT(assert_equal(*expLinFactor, *actLinFactor, 1e-5)); } @@ -193,7 +193,7 @@ TEST ( NonlinearEquality, allow_error_optimize ) { // add to a graph NonlinearFactorGraph graph; - graph.add(nle); + graph += nle; // initialize away from the ideal Pose2 initPose(0.0, 2.0, 3.0); @@ -231,8 +231,8 @@ TEST ( NonlinearEquality, allow_error_optimize_with_factors ) { // add to a graph NonlinearFactorGraph graph; - graph.add(nle); - graph.add(prior); + graph += nle; + graph += prior; // optimize Ordering ordering; @@ -277,21 +277,19 @@ TEST( testNonlinearEqualityConstraint, unary_linearization ) { Point2 pt(1.0, 2.0); Symbol key1('x',1); double mu = 1000.0; - Ordering ordering; - ordering += key; eq2D::UnaryEqualityConstraint constraint(pt, key, mu); Values config1; config1.insert(key, pt); - GaussianFactor::shared_ptr actual1 = constraint.linearize(config1, ordering); - GaussianFactor::shared_ptr expected1(new JacobianFactor(ordering[key], eye(2,2), zero(2), hard_model)); + GaussianFactor::shared_ptr actual1 = constraint.linearize(config1); + GaussianFactor::shared_ptr expected1(new JacobianFactor(key, eye(2,2), zero(2), hard_model)); EXPECT(assert_equal(*expected1, *actual1, tol)); Values config2; Point2 ptBad(2.0, 2.0); config2.insert(key, ptBad); - GaussianFactor::shared_ptr actual2 = constraint.linearize(config2, ordering); - GaussianFactor::shared_ptr expected2(new JacobianFactor(ordering[key], eye(2,2), Vector_(2,-1.0,0.0), hard_model)); + GaussianFactor::shared_ptr actual2 = constraint.linearize(config2); + GaussianFactor::shared_ptr expected2(new JacobianFactor(key, eye(2,2), Vector_(2,-1.0,0.0), hard_model)); EXPECT(assert_equal(*expected2, *actual2, tol)); } @@ -359,16 +357,14 @@ TEST( testNonlinearEqualityConstraint, odo_linearization ) { Point2 x1(1.0, 2.0), x2(2.0, 3.0), odom(1.0, 1.0); Symbol key1('x',1), key2('x',2); double mu = 1000.0; - Ordering ordering; - ordering += key1, key2; eq2D::OdoEqualityConstraint constraint(odom, key1, key2, mu); Values config1; config1.insert(key1, x1); config1.insert(key2, x2); - GaussianFactor::shared_ptr actual1 = constraint.linearize(config1, ordering); + GaussianFactor::shared_ptr actual1 = constraint.linearize(config1); GaussianFactor::shared_ptr expected1( - new JacobianFactor(ordering[key1], -eye(2,2), ordering[key2], + new JacobianFactor(key1, -eye(2,2), key2, eye(2,2), zero(2), hard_model)); EXPECT(assert_equal(*expected1, *actual1, tol)); @@ -377,9 +373,9 @@ TEST( testNonlinearEqualityConstraint, odo_linearization ) { Point2 x2bad(2.0, 2.0); config2.insert(key1, x1bad); config2.insert(key2, x2bad); - GaussianFactor::shared_ptr actual2 = constraint.linearize(config2, ordering); + GaussianFactor::shared_ptr actual2 = constraint.linearize(config2); GaussianFactor::shared_ptr expected2( - new JacobianFactor(ordering[key1], -eye(2,2), ordering[key2], + new JacobianFactor(key1, -eye(2,2), key2, eye(2,2), Vector_(2, 1.0, 1.0), hard_model)); EXPECT(assert_equal(*expected2, *actual2, tol)); } @@ -436,17 +432,17 @@ TEST (testNonlinearEqualityConstraint, two_pose ) { Symbol l1('l',1), l2('l',2); Point2 pt_x1(1.0, 1.0), pt_x2(5.0, 6.0); - graph.add(eq2D::UnaryEqualityConstraint(pt_x1, x1)); - graph.add(eq2D::UnaryEqualityConstraint(pt_x2, x2)); + graph += eq2D::UnaryEqualityConstraint(pt_x1, x1); + graph += eq2D::UnaryEqualityConstraint(pt_x2, x2); Point2 z1(0.0, 5.0); SharedNoiseModel sigma(noiseModel::Isotropic::Sigma(2, 0.1)); - graph.add(simulated2D::Measurement(z1, sigma, x1,l1)); + graph += simulated2D::Measurement(z1, sigma, x1,l1); Point2 z2(-4.0, 0.0); - graph.add(simulated2D::Measurement(z2, sigma, x2,l2)); + graph += simulated2D::Measurement(z2, sigma, x2,l2); - graph.add(eq2D::PointEqualityConstraint(l1, l2)); + graph += eq2D::PointEqualityConstraint(l1, l2); Values initialEstimate; initialEstimate.insert(x1, pt_x1); @@ -475,20 +471,20 @@ TEST (testNonlinearEqualityConstraint, map_warp ) { // constant constraint on x1 Point2 pose1(1.0, 1.0); - graph.add(eq2D::UnaryEqualityConstraint(pose1, x1)); + graph += eq2D::UnaryEqualityConstraint(pose1, x1); SharedDiagonal sigma = noiseModel::Isotropic::Sigma(2, 0.1); // measurement from x1 to l1 Point2 z1(0.0, 5.0); - graph.add(simulated2D::Measurement(z1, sigma, x1, l1)); + graph += simulated2D::Measurement(z1, sigma, x1, l1); // measurement from x2 to l2 Point2 z2(-4.0, 0.0); - graph.add(simulated2D::Measurement(z2, sigma, x2, l2)); + graph += simulated2D::Measurement(z2, sigma, x2, l2); // equality constraint between l1 and l2 - graph.add(eq2D::PointEqualityConstraint(l1, l2)); + graph += eq2D::PointEqualityConstraint(l1, l2); // create an initial estimate Values initialEstimate; @@ -510,7 +506,7 @@ TEST (testNonlinearEqualityConstraint, map_warp ) { // make a realistic calibration matrix static double fov = 60; // degrees -static size_t w=640,h=480; +static int w=640,h=480; static Cal3_S2 K(fov,w,h); static boost::shared_ptr shK(new Cal3_S2(K)); @@ -542,16 +538,16 @@ TEST (testNonlinearEqualityConstraint, stereo_constrained ) { VGraph graph; // create equality constraints for poses - graph.add(NonlinearEquality(x1, camera1.pose())); - graph.add(NonlinearEquality(x2, camera2.pose())); + graph += NonlinearEquality(x1, camera1.pose()); + graph += NonlinearEquality(x2, camera2.pose()); // create factors SharedDiagonal vmodel = noiseModel::Unit::Create(2); - graph.add(GenericProjectionFactor(camera1.project(landmark), vmodel, x1, l1, shK)); - graph.add(GenericProjectionFactor(camera2.project(landmark), vmodel, x2, l2, shK)); + graph += GenericProjectionFactor(camera1.project(landmark), vmodel, x1, l1, shK); + graph += GenericProjectionFactor(camera2.project(landmark), vmodel, x2, l2, shK); // add equality constraint - graph.add(Point3Equality(l1, l2)); + graph += Point3Equality(l1, l2); // create initial data Point3 landmark1(0.5, 5.0, 0.0); diff --git a/tests/testNonlinearFactor.cpp b/tests/testNonlinearFactor.cpp index c490cbc6d..f7b212fd7 100644 --- a/tests/testNonlinearFactor.cpp +++ b/tests/testNonlinearFactor.cpp @@ -67,10 +67,10 @@ TEST( NonlinearFactor, equals ) TEST( NonlinearFactor, equals2 ) { // create a non linear factor graph - Graph fg = createNonlinearFactorGraph(); + NonlinearFactorGraph fg = createNonlinearFactorGraph(); // get two factors - Graph::sharedFactor f0 = fg[0], f1 = fg[1]; + NonlinearFactorGraph::sharedFactor f0 = fg[0], f1 = fg[1]; CHECK(f0->equals(*f0)); CHECK(!f0->equals(*f1)); @@ -81,13 +81,13 @@ TEST( NonlinearFactor, equals2 ) TEST( NonlinearFactor, NonlinearFactor ) { // create a non linear factor graph - Graph fg = createNonlinearFactorGraph(); + NonlinearFactorGraph fg = createNonlinearFactorGraph(); // create a values structure for the non linear factor graph Values cfg = createNoisyValues(); // get the factor "f1" from the factor graph - Graph::sharedFactor factor = fg[0]; + NonlinearFactorGraph::sharedFactor factor = fg[0]; // calculate the error_vector from the factor "f1" // error_vector = [0.1 0.1] @@ -108,13 +108,13 @@ TEST( NonlinearFactor, linearize_f1 ) Values c = createNoisyValues(); // Grab a non-linear factor - Graph nfg = createNonlinearFactorGraph(); - Graph::sharedFactor nlf = nfg[0]; + NonlinearFactorGraph nfg = createNonlinearFactorGraph(); + NonlinearFactorGraph::sharedFactor nlf = nfg[0]; // We linearize at noisy config from SmallExample - GaussianFactor::shared_ptr actual = nlf->linearize(c, *c.orderingArbitrary()); + GaussianFactor::shared_ptr actual = nlf->linearize(c); - GaussianFactorGraph lfg = createGaussianFactorGraph(*c.orderingArbitrary()); + GaussianFactorGraph lfg = createGaussianFactorGraph(); GaussianFactor::shared_ptr expected = lfg[0]; CHECK(assert_equal(*expected,*actual)); @@ -130,13 +130,13 @@ TEST( NonlinearFactor, linearize_f2 ) Values c = createNoisyValues(); // Grab a non-linear factor - Graph nfg = createNonlinearFactorGraph(); - Graph::sharedFactor nlf = nfg[1]; + NonlinearFactorGraph nfg = createNonlinearFactorGraph(); + NonlinearFactorGraph::sharedFactor nlf = nfg[1]; // We linearize at noisy config from SmallExample - GaussianFactor::shared_ptr actual = nlf->linearize(c, *c.orderingArbitrary()); + GaussianFactor::shared_ptr actual = nlf->linearize(c); - GaussianFactorGraph lfg = createGaussianFactorGraph(*c.orderingArbitrary()); + GaussianFactorGraph lfg = createGaussianFactorGraph(); GaussianFactor::shared_ptr expected = lfg[1]; CHECK(assert_equal(*expected,*actual)); @@ -146,14 +146,14 @@ TEST( NonlinearFactor, linearize_f2 ) TEST( NonlinearFactor, linearize_f3 ) { // Grab a non-linear factor - Graph nfg = createNonlinearFactorGraph(); - Graph::sharedFactor nlf = nfg[2]; + NonlinearFactorGraph nfg = createNonlinearFactorGraph(); + NonlinearFactorGraph::sharedFactor nlf = nfg[2]; // We linearize at noisy config from SmallExample Values c = createNoisyValues(); - GaussianFactor::shared_ptr actual = nlf->linearize(c, *c.orderingArbitrary()); + GaussianFactor::shared_ptr actual = nlf->linearize(c); - GaussianFactorGraph lfg = createGaussianFactorGraph(*c.orderingArbitrary()); + GaussianFactorGraph lfg = createGaussianFactorGraph(); GaussianFactor::shared_ptr expected = lfg[2]; CHECK(assert_equal(*expected,*actual)); @@ -163,14 +163,14 @@ TEST( NonlinearFactor, linearize_f3 ) TEST( NonlinearFactor, linearize_f4 ) { // Grab a non-linear factor - Graph nfg = createNonlinearFactorGraph(); - Graph::sharedFactor nlf = nfg[3]; + NonlinearFactorGraph nfg = createNonlinearFactorGraph(); + NonlinearFactorGraph::sharedFactor nlf = nfg[3]; // We linearize at noisy config from SmallExample Values c = createNoisyValues(); - GaussianFactor::shared_ptr actual = nlf->linearize(c, *c.orderingArbitrary()); + GaussianFactor::shared_ptr actual = nlf->linearize(c); - GaussianFactorGraph lfg = createGaussianFactorGraph(*c.orderingArbitrary()); + GaussianFactorGraph lfg = createGaussianFactorGraph(); GaussianFactor::shared_ptr expected = lfg[3]; CHECK(assert_equal(*expected,*actual)); @@ -180,13 +180,13 @@ TEST( NonlinearFactor, linearize_f4 ) TEST( NonlinearFactor, size ) { // create a non linear factor graph - Graph fg = createNonlinearFactorGraph(); + NonlinearFactorGraph fg = createNonlinearFactorGraph(); // create a values structure for the non linear factor graph Values cfg = createNoisyValues(); // get some factors from the graph - Graph::sharedFactor factor1 = fg[0], factor2 = fg[1], + NonlinearFactorGraph::sharedFactor factor1 = fg[0], factor2 = fg[1], factor3 = fg[2]; CHECK(factor1->size() == 1); @@ -201,16 +201,16 @@ TEST( NonlinearFactor, linearize_constraint1 ) SharedDiagonal constraint = noiseModel::Constrained::MixedSigmas(sigmas); Point2 mu(1., -1.); - Graph::sharedFactor f0(new simulated2D::Prior(mu, constraint, X(1))); + NonlinearFactorGraph::sharedFactor f0(new simulated2D::Prior(mu, constraint, X(1))); Values config; config.insert(X(1), Point2(1.0, 2.0)); - GaussianFactor::shared_ptr actual = f0->linearize(config, *config.orderingArbitrary()); + GaussianFactor::shared_ptr actual = f0->linearize(config); // create expected - Ordering ord(*config.orderingArbitrary()); Vector b = Vector_(2, 0., -3.); - JacobianFactor expected(ord[X(1)], Matrix_(2,2, 5.0, 0.0, 0.0, 1.0), b, constraint); + JacobianFactor expected(X(1), Matrix_(2,2, 5.0, 0.0, 0.0, 1.0), b, + noiseModel::Constrained::MixedSigmas(Vector_(2, 1.0, 0.0))); CHECK(assert_equal((const GaussianFactor&)expected, *actual)); } @@ -226,13 +226,13 @@ TEST( NonlinearFactor, linearize_constraint2 ) Values config; config.insert(X(1), Point2(1.0, 2.0)); config.insert(L(1), Point2(5.0, 4.0)); - GaussianFactor::shared_ptr actual = f0.linearize(config, *config.orderingArbitrary()); + GaussianFactor::shared_ptr actual = f0.linearize(config); // create expected - Ordering ord(*config.orderingArbitrary()); Matrix A = Matrix_(2,2, 5.0, 0.0, 0.0, 1.0); Vector b = Vector_(2, -15., -3.); - JacobianFactor expected(ord[X(1)], -1*A, ord[L(1)], A, b, constraint); + JacobianFactor expected(X(1), -1*A, L(1), A, b, + noiseModel::Constrained::MixedSigmas(Vector_(2, 1.0, 0.0))); CHECK(assert_equal((const GaussianFactor&)expected, *actual)); } @@ -272,12 +272,11 @@ TEST(NonlinearFactor, NoiseModelFactor4) { tv.insert(X(4), LieVector(1, 4.0)); EXPECT(assert_equal(Vector_(1, 10.0), tf.unwhitenedError(tv))); DOUBLES_EQUAL(25.0/2.0, tf.error(tv), 1e-9); - Ordering ordering; ordering += X(1), X(2), X(3), X(4); - JacobianFactor jf(*boost::dynamic_pointer_cast(tf.linearize(tv, ordering))); - LONGS_EQUAL(jf.keys()[0], 0); - LONGS_EQUAL(jf.keys()[1], 1); - LONGS_EQUAL(jf.keys()[2], 2); - LONGS_EQUAL(jf.keys()[3], 3); + JacobianFactor jf(*boost::dynamic_pointer_cast(tf.linearize(tv))); + LONGS_EQUAL((long)X(1), (long)jf.keys()[0]); + LONGS_EQUAL((long)X(2), (long)jf.keys()[1]); + LONGS_EQUAL((long)X(3), (long)jf.keys()[2]); + LONGS_EQUAL((long)X(4), (long)jf.keys()[3]); EXPECT(assert_equal(Matrix_(1,1, 0.5), jf.getA(jf.begin()))); EXPECT(assert_equal(Matrix_(1,1, 1.0), jf.getA(jf.begin()+1))); EXPECT(assert_equal(Matrix_(1,1, 1.5), jf.getA(jf.begin()+2))); @@ -320,13 +319,12 @@ TEST(NonlinearFactor, NoiseModelFactor5) { tv.insert(X(5), LieVector(1, 5.0)); EXPECT(assert_equal(Vector_(1, 15.0), tf.unwhitenedError(tv))); DOUBLES_EQUAL(56.25/2.0, tf.error(tv), 1e-9); - Ordering ordering; ordering += X(1), X(2), X(3), X(4), X(5); - JacobianFactor jf(*boost::dynamic_pointer_cast(tf.linearize(tv, ordering))); - LONGS_EQUAL(jf.keys()[0], 0); - LONGS_EQUAL(jf.keys()[1], 1); - LONGS_EQUAL(jf.keys()[2], 2); - LONGS_EQUAL(jf.keys()[3], 3); - LONGS_EQUAL(jf.keys()[4], 4); + JacobianFactor jf(*boost::dynamic_pointer_cast(tf.linearize(tv))); + LONGS_EQUAL((long)X(1), (long)jf.keys()[0]); + LONGS_EQUAL((long)X(2), (long)jf.keys()[1]); + LONGS_EQUAL((long)X(3), (long)jf.keys()[2]); + LONGS_EQUAL((long)X(4), (long)jf.keys()[3]); + LONGS_EQUAL((long)X(5), (long)jf.keys()[4]); EXPECT(assert_equal(Matrix_(1,1, 0.5), jf.getA(jf.begin()))); EXPECT(assert_equal(Matrix_(1,1, 1.0), jf.getA(jf.begin()+1))); EXPECT(assert_equal(Matrix_(1,1, 1.5), jf.getA(jf.begin()+2))); @@ -374,14 +372,13 @@ TEST(NonlinearFactor, NoiseModelFactor6) { tv.insert(X(6), LieVector(1, 6.0)); EXPECT(assert_equal(Vector_(1, 21.0), tf.unwhitenedError(tv))); DOUBLES_EQUAL(110.25/2.0, tf.error(tv), 1e-9); - Ordering ordering; ordering += X(1), X(2), X(3), X(4), X(5), X(6); - JacobianFactor jf(*boost::dynamic_pointer_cast(tf.linearize(tv, ordering))); - LONGS_EQUAL(jf.keys()[0], 0); - LONGS_EQUAL(jf.keys()[1], 1); - LONGS_EQUAL(jf.keys()[2], 2); - LONGS_EQUAL(jf.keys()[3], 3); - LONGS_EQUAL(jf.keys()[4], 4); - LONGS_EQUAL(jf.keys()[5], 5); + JacobianFactor jf(*boost::dynamic_pointer_cast(tf.linearize(tv))); + LONGS_EQUAL((long)X(1), (long)jf.keys()[0]); + LONGS_EQUAL((long)X(2), (long)jf.keys()[1]); + LONGS_EQUAL((long)X(3), (long)jf.keys()[2]); + LONGS_EQUAL((long)X(4), (long)jf.keys()[3]); + LONGS_EQUAL((long)X(5), (long)jf.keys()[4]); + LONGS_EQUAL((long)X(6), (long)jf.keys()[5]); EXPECT(assert_equal(Matrix_(1,1, 0.5), jf.getA(jf.begin()))); EXPECT(assert_equal(Matrix_(1,1, 1.0), jf.getA(jf.begin()+1))); EXPECT(assert_equal(Matrix_(1,1, 1.5), jf.getA(jf.begin()+2))); @@ -396,10 +393,10 @@ TEST(NonlinearFactor, NoiseModelFactor6) { TEST( NonlinearFactor, clone_rekey ) { shared_nlf init(new TestFactor4()); - EXPECT_LONGS_EQUAL(X(1), init->keys()[0]); - EXPECT_LONGS_EQUAL(X(2), init->keys()[1]); - EXPECT_LONGS_EQUAL(X(3), init->keys()[2]); - EXPECT_LONGS_EQUAL(X(4), init->keys()[3]); + EXPECT_LONGS_EQUAL((long)X(1), (long)init->keys()[0]); + EXPECT_LONGS_EQUAL((long)X(2), (long)init->keys()[1]); + EXPECT_LONGS_EQUAL((long)X(3), (long)init->keys()[2]); + EXPECT_LONGS_EQUAL((long)X(4), (long)init->keys()[3]); // Standard clone shared_nlf actClone = init->clone(); @@ -416,16 +413,16 @@ TEST( NonlinearFactor, clone_rekey ) EXPECT(actRekey.get() != init.get()); // Ensure different pointers // Ensure init is unchanged - EXPECT_LONGS_EQUAL(X(1), init->keys()[0]); - EXPECT_LONGS_EQUAL(X(2), init->keys()[1]); - EXPECT_LONGS_EQUAL(X(3), init->keys()[2]); - EXPECT_LONGS_EQUAL(X(4), init->keys()[3]); + EXPECT_LONGS_EQUAL((long)X(1), (long)init->keys()[0]); + EXPECT_LONGS_EQUAL((long)X(2), (long)init->keys()[1]); + EXPECT_LONGS_EQUAL((long)X(3), (long)init->keys()[2]); + EXPECT_LONGS_EQUAL((long)X(4), (long)init->keys()[3]); // Check new keys - EXPECT_LONGS_EQUAL(X(5), actRekey->keys()[0]); - EXPECT_LONGS_EQUAL(X(6), actRekey->keys()[1]); - EXPECT_LONGS_EQUAL(X(7), actRekey->keys()[2]); - EXPECT_LONGS_EQUAL(X(8), actRekey->keys()[3]); + EXPECT_LONGS_EQUAL((long)X(5), (long)actRekey->keys()[0]); + EXPECT_LONGS_EQUAL((long)X(6), (long)actRekey->keys()[1]); + EXPECT_LONGS_EQUAL((long)X(7), (long)actRekey->keys()[2]); + EXPECT_LONGS_EQUAL((long)X(8), (long)actRekey->keys()[3]); } /* ************************************************************************* */ diff --git a/tests/testNonlinearFactorGraph.cpp b/tests/testNonlinearFactorGraph.cpp index 5a35cbd94..ee62c5893 100644 --- a/tests/testNonlinearFactorGraph.cpp +++ b/tests/testNonlinearFactorGraph.cpp @@ -11,7 +11,7 @@ /** * @file testNonlinearFactorGraph.cpp - * @brief Unit tests for Non-Linear Factor Graph + * @brief Unit tests for Non-Linear Factor NonlinearFactorGraph * @brief testNonlinearFactorGraph * @author Carlos Nieto * @author Christian Potthast @@ -41,17 +41,17 @@ using symbol_shorthand::X; using symbol_shorthand::L; /* ************************************************************************* */ -TEST( Graph, equals ) +TEST( NonlinearFactorGraph, equals ) { - Graph fg = createNonlinearFactorGraph(); - Graph fg2 = createNonlinearFactorGraph(); + NonlinearFactorGraph fg = createNonlinearFactorGraph(); + NonlinearFactorGraph fg2 = createNonlinearFactorGraph(); CHECK( fg.equals(fg2) ); } /* ************************************************************************* */ -TEST( Graph, error ) +TEST( NonlinearFactorGraph, error ) { - Graph fg = createNonlinearFactorGraph(); + NonlinearFactorGraph fg = createNonlinearFactorGraph(); Values c1 = createValues(); double actual1 = fg.error(c1); DOUBLES_EQUAL( 0.0, actual1, 1e-9 ); @@ -62,41 +62,37 @@ TEST( Graph, error ) } /* ************************************************************************* */ -TEST( Graph, keys ) +TEST( NonlinearFactorGraph, keys ) { - Graph fg = createNonlinearFactorGraph(); + NonlinearFactorGraph fg = createNonlinearFactorGraph(); FastSet actual = fg.keys(); - LONGS_EQUAL(3, actual.size()); + LONGS_EQUAL(3, (long)actual.size()); FastSet::const_iterator it = actual.begin(); - LONGS_EQUAL(L(1), *(it++)); - LONGS_EQUAL(X(1), *(it++)); - LONGS_EQUAL(X(2), *(it++)); + LONGS_EQUAL((long)L(1), (long)*(it++)); + LONGS_EQUAL((long)X(1), (long)*(it++)); + LONGS_EQUAL((long)X(2), (long)*(it++)); } /* ************************************************************************* */ -TEST( Graph, GET_ORDERING) +TEST( NonlinearFactorGraph, GET_ORDERING) { -// Ordering expected; expected += "x1","l1","x2"; // For starting with x1,x2,l1 Ordering expected; expected += L(1), X(2), X(1); // For starting with l1,x1,x2 - Graph nlfg = createNonlinearFactorGraph(); - SymbolicFactorGraph::shared_ptr symbolic; - Ordering::shared_ptr ordering; - boost::tie(symbolic, ordering) = nlfg.symbolic(createNoisyValues()); - Ordering actual = *nlfg.orderingCOLAMD(createNoisyValues()); + NonlinearFactorGraph nlfg = createNonlinearFactorGraph(); + Ordering actual = Ordering::COLAMD(nlfg); EXPECT(assert_equal(expected,actual)); // Constrained ordering - put x2 at the end - std::map constraints; - constraints[X(2)] = 1; - Ordering actualConstrained = *nlfg.orderingCOLAMDConstrained(createNoisyValues(), constraints); Ordering expectedConstrained; expectedConstrained += L(1), X(1), X(2); + FastMap constraints; + constraints[X(2)] = 1; + Ordering actualConstrained = Ordering::COLAMDConstrained(nlfg, constraints); EXPECT(assert_equal(expectedConstrained, actualConstrained)); } /* ************************************************************************* */ -TEST( Graph, probPrime ) +TEST( NonlinearFactorGraph, probPrime ) { - Graph fg = createNonlinearFactorGraph(); + NonlinearFactorGraph fg = createNonlinearFactorGraph(); Values cfg = createValues(); // evaluate the probability of the factor graph @@ -106,39 +102,39 @@ TEST( Graph, probPrime ) } /* ************************************************************************* */ -TEST( Graph, linearize ) +TEST( NonlinearFactorGraph, linearize ) { - Graph fg = createNonlinearFactorGraph(); + NonlinearFactorGraph fg = createNonlinearFactorGraph(); Values initial = createNoisyValues(); - boost::shared_ptr > linearized = fg.linearize(initial, *initial.orderingArbitrary()); - FactorGraph expected = createGaussianFactorGraph(*initial.orderingArbitrary()); - CHECK(assert_equal(expected,*linearized)); // Needs correct linearizations + GaussianFactorGraph linearized = *fg.linearize(initial); + GaussianFactorGraph expected = createGaussianFactorGraph(); + CHECK(assert_equal(expected,linearized)); // Needs correct linearizations } /* ************************************************************************* */ -TEST( Graph, clone ) +TEST( NonlinearFactorGraph, clone ) { - Graph fg = createNonlinearFactorGraph(); - Graph actClone = fg.clone(); + NonlinearFactorGraph fg = createNonlinearFactorGraph(); + NonlinearFactorGraph actClone = fg.clone(); EXPECT(assert_equal(fg, actClone)); for (size_t i=0; i rekey_mapping; rekey_mapping.insert(make_pair(L(1), L(4))); - Graph actRekey = init.rekey(rekey_mapping); + NonlinearFactorGraph actRekey = init.rekey(rekey_mapping); // ensure deep clone - LONGS_EQUAL(init.size(), actRekey.size()); + LONGS_EQUAL((long)init.size(), (long)actRekey.size()); for (size_t i=0; i(0, cur_pose)); + start_factors += NonlinearEquality(0, cur_pose); Values init; Values expected; init.insert(0, cur_pose); expected.insert(0, cur_pose); - isam.update(start_factors, init); + isamChol.update(start_factors, init); + isamQR.update(start_factors, init); // loop for a period of time to verify memory usage size_t nrPoses = 21; Pose2 z(1.0, 2.0, 0.1); for (size_t i=1; i<=nrPoses; ++i) { NonlinearFactorGraph new_factors; - new_factors.add(BetweenFactor(i-1, i, z, model)); + new_factors += BetweenFactor(i-1, i, z, model); Values new_init; - // perform a check on changing orderings - if (i == 5) { - Ordering ordering = isam.getOrdering(); - - // swap last two elements - Key last = ordering.pop_back().first; - Key secondLast = ordering.pop_back().first; - ordering.push_back(last); - ordering.push_back(secondLast); - isam.setOrdering(ordering); - - Ordering expected; expected += (0), (1), (2), (4), (3); - EXPECT(assert_equal(expected, isam.getOrdering())); - } - cur_pose = cur_pose.compose(z); new_init.insert(i, cur_pose.retract(sampler.sample())); expected.insert(i, cur_pose); - isam.update(new_factors, new_init); + isamChol.update(new_factors, new_init); + isamQR.update(new_factors, new_init); } // verify values - all but the last one should be very close - Values actual = isam.estimate(); + Values actualChol = isamChol.estimate(); for (size_t i=0; i(i), actual.at(i), tol)); + EXPECT(assert_equal(expected.at(i), actualChol.at(i), tol)); + } + Values actualQR = isamQR.estimate(); + for (size_t i=0; i(i), actualQR.at(i), tol)); } } diff --git a/tests/testNonlinearOptimizer.cpp b/tests/testNonlinearOptimizer.cpp index aff07c15b..bf911dc78 100644 --- a/tests/testNonlinearOptimizer.cpp +++ b/tests/testNonlinearOptimizer.cpp @@ -50,7 +50,7 @@ using symbol_shorthand::L; TEST( NonlinearOptimizer, iterateLM ) { // really non-linear factor graph - example::Graph fg(example::createReallyNonlinearFactorGraph()); + NonlinearFactorGraph fg(example::createReallyNonlinearFactorGraph()); // config far from minimum Point2 x0(3,0); @@ -74,7 +74,7 @@ TEST( NonlinearOptimizer, iterateLM ) /* ************************************************************************* */ TEST( NonlinearOptimizer, optimize ) { - example::Graph fg(example::createReallyNonlinearFactorGraph()); + NonlinearFactorGraph fg(example::createReallyNonlinearFactorGraph()); // test error at minimum Point2 xstar(0,0); @@ -114,7 +114,7 @@ TEST( NonlinearOptimizer, optimize ) /* ************************************************************************* */ TEST( NonlinearOptimizer, SimpleLMOptimizer ) { - example::Graph fg(example::createReallyNonlinearFactorGraph()); + NonlinearFactorGraph fg(example::createReallyNonlinearFactorGraph()); Point2 x0(3,3); Values c0; @@ -127,7 +127,7 @@ TEST( NonlinearOptimizer, SimpleLMOptimizer ) /* ************************************************************************* */ TEST( NonlinearOptimizer, SimpleGNOptimizer ) { - example::Graph fg(example::createReallyNonlinearFactorGraph()); + NonlinearFactorGraph fg(example::createReallyNonlinearFactorGraph()); Point2 x0(3,3); Values c0; @@ -140,7 +140,7 @@ TEST( NonlinearOptimizer, SimpleGNOptimizer ) /* ************************************************************************* */ TEST( NonlinearOptimizer, SimpleDLOptimizer ) { - example::Graph fg(example::createReallyNonlinearFactorGraph()); + NonlinearFactorGraph fg(example::createReallyNonlinearFactorGraph()); Point2 x0(3,3); Values c0; @@ -158,7 +158,7 @@ TEST( NonlinearOptimizer, optimization_method ) LevenbergMarquardtParams paramsChol; paramsChol.linearSolverType = LevenbergMarquardtParams::MULTIFRONTAL_CHOLESKY; - example::Graph fg = example::createReallyNonlinearFactorGraph(); + NonlinearFactorGraph fg = example::createReallyNonlinearFactorGraph(); Point2 x0(3,3); Values c0; @@ -179,8 +179,8 @@ TEST( NonlinearOptimizer, Factorization ) config.insert(X(2), Pose2(1.5,0.,0.)); NonlinearFactorGraph graph; - graph.add(PriorFactor(X(1), Pose2(0.,0.,0.), noiseModel::Isotropic::Sigma(3, 1e-10))); - graph.add(BetweenFactor(X(1),X(2), Pose2(1.,0.,0.), noiseModel::Isotropic::Sigma(3, 1))); + graph += PriorFactor(X(1), Pose2(0.,0.,0.), noiseModel::Isotropic::Sigma(3, 1e-10)); + graph += BetweenFactor(X(1),X(2), Pose2(1.,0.,0.), noiseModel::Isotropic::Sigma(3, 1)); Ordering ordering; ordering.push_back(X(1)); @@ -198,10 +198,10 @@ TEST( NonlinearOptimizer, Factorization ) /* ************************************************************************* */ TEST(NonlinearOptimizer, NullFactor) { - example::Graph fg = example::createReallyNonlinearFactorGraph(); + NonlinearFactorGraph fg = example::createReallyNonlinearFactorGraph(); // Add null factor - fg.push_back(example::Graph::sharedFactor()); + fg.push_back(NonlinearFactorGraph::sharedFactor()); // test error at minimum Point2 xstar(0,0); @@ -236,9 +236,9 @@ TEST(NonlinearOptimizer, NullFactor) { TEST(NonlinearOptimizer, MoreOptimization) { NonlinearFactorGraph fg; - fg.add(PriorFactor(0, Pose2(0,0,0), noiseModel::Isotropic::Sigma(3,1))); - fg.add(BetweenFactor(0, 1, Pose2(1,0,M_PI/2), noiseModel::Isotropic::Sigma(3,1))); - fg.add(BetweenFactor(1, 2, Pose2(1,0,M_PI/2), noiseModel::Isotropic::Sigma(3,1))); + fg += PriorFactor(0, Pose2(0,0,0), noiseModel::Isotropic::Sigma(3,1)); + fg += BetweenFactor(0, 1, Pose2(1,0,M_PI/2), noiseModel::Isotropic::Sigma(3,1)); + fg += BetweenFactor(1, 2, Pose2(1,0,M_PI/2), noiseModel::Isotropic::Sigma(3,1)); Values init; init.insert(0, Pose2(3,4,-M_PI)); @@ -259,13 +259,13 @@ TEST(NonlinearOptimizer, MoreOptimization) { TEST(NonlinearOptimizer, MoreOptimizationWithHuber) { NonlinearFactorGraph fg; - fg.add(PriorFactor(0, Pose2(0,0,0), noiseModel::Isotropic::Sigma(3,1))); - fg.add(BetweenFactor(0, 1, Pose2(1,0,M_PI/2), + fg += PriorFactor(0, Pose2(0,0,0), noiseModel::Isotropic::Sigma(3,1)); + fg += BetweenFactor(0, 1, Pose2(1,0,M_PI/2), noiseModel::Robust::Create(noiseModel::mEstimator::Huber::Create(2.0), - noiseModel::Isotropic::Sigma(3,1)))); - fg.add(BetweenFactor(1, 2, Pose2(1,0,M_PI/2), + noiseModel::Isotropic::Sigma(3,1))); + fg += BetweenFactor(1, 2, Pose2(1,0,M_PI/2), noiseModel::Robust::Create(noiseModel::mEstimator::Huber::Create(3.0), - noiseModel::Isotropic::Sigma(3,1)))); + noiseModel::Isotropic::Sigma(3,1))); Values init; init.insert(0, Pose2(10,10,0)); diff --git a/tests/testOccupancyGrid.cpp b/tests/testOccupancyGrid.cpp index 3f24003c0..d92e5442e 100644 --- a/tests/testOccupancyGrid.cpp +++ b/tests/testOccupancyGrid.cpp @@ -5,10 +5,12 @@ * @author Frank Dellaert */ +#include + +#if 0 #include #include -#include #include //#include // FIXME: does not exist in boost 1.46 #include // Old header - should still exist @@ -282,7 +284,7 @@ public: }; /* ************************************************************************* */ -TEST_UNSAFE( OccupancyGrid, Test1) { +TEST( OccupancyGrid, Test1) { //Build a small grid and test optimization //Build small grid @@ -323,6 +325,8 @@ TEST_UNSAFE( OccupancyGrid, Test1) { } +#endif + /* ************************************************************************* */ int main() { TestResult tr; diff --git a/tests/testRot3Optimization.cpp b/tests/testRot3Optimization.cpp index 89b621ed2..866df9ca3 100644 --- a/tests/testRot3Optimization.cpp +++ b/tests/testRot3Optimization.cpp @@ -41,11 +41,11 @@ TEST(Rot3, optimize) { Values truth; Values initial; Graph fg; - fg.add(Prior(Symbol('r',0), Rot3(), noiseModel::Isotropic::Sigma(3, 0.01))); + fg += Prior(Symbol('r',0), Rot3(), noiseModel::Isotropic::Sigma(3, 0.01)); for(int j=0; j<6; ++j) { truth.insert(Symbol('r',j), Rot3::Rz(M_PI/3.0 * double(j))); initial.insert(Symbol('r',j), Rot3::Rz(M_PI/3.0 * double(j) + 0.1 * double(j%2))); - fg.add(Between(Symbol('r',j), Symbol('r',(j+1)%6), Rot3::Rz(M_PI/3.0), noiseModel::Isotropic::Sigma(3, 0.01))); + fg += Between(Symbol('r',j), Symbol('r',(j+1)%6), Rot3::Rz(M_PI/3.0), noiseModel::Isotropic::Sigma(3, 0.01)); } Values final = GaussNewtonOptimizer(fg, initial).optimize(); diff --git a/tests/testSerializationSLAM.cpp b/tests/testSerializationSLAM.cpp index 57a1e5cb3..8c0254a4e 100644 --- a/tests/testSerializationSLAM.cpp +++ b/tests/testSerializationSLAM.cpp @@ -16,6 +16,10 @@ * @date Feb 7, 2012 */ +#include + +#if 0 + #include //#include #include @@ -31,7 +35,6 @@ #include #include #include -#include #include #include #include @@ -49,7 +52,6 @@ #include #include -#include using namespace std; using namespace gtsam; @@ -235,11 +237,11 @@ TEST (testSerializationSLAM, smallExample_linear) { using namespace example; Ordering ordering; ordering += X(1),X(2),L(1); - GaussianFactorGraph fg = createGaussianFactorGraph(ordering); EXPECT(equalsObj(ordering)); EXPECT(equalsXML(ordering)); EXPECT(equalsBinary(ordering)); + GaussianFactorGraph fg = createGaussianFactorGraph(); EXPECT(equalsObj(fg)); EXPECT(equalsXML(fg)); EXPECT(equalsBinary(fg)); @@ -253,10 +255,8 @@ TEST (testSerializationSLAM, smallExample_linear) { /* ************************************************************************* */ TEST (testSerializationSLAM, gaussianISAM) { using namespace example; - Ordering ordering; - GaussianFactorGraph smoother; - boost::tie(smoother, ordering) = createSmoother(7); - BayesTree bayesTree = *GaussianMultifrontalSolver(smoother).eliminate(); + GaussianFactorGraph smoother = createSmoother(7); + GaussianBayesTree bayesTree = *smoother.eliminateMultifrontal(); GaussianISAM isam(bayesTree); EXPECT(equalsObj(isam)); @@ -408,66 +408,66 @@ TEST (testSerializationSLAM, factors) { NonlinearFactorGraph graph; - graph.add(priorFactorLieVector); - graph.add(priorFactorLieMatrix); - graph.add(priorFactorPoint2); - graph.add(priorFactorStereoPoint2); - graph.add(priorFactorPoint3); - graph.add(priorFactorRot2); - graph.add(priorFactorRot3); - graph.add(priorFactorPose2); - graph.add(priorFactorPose3); - graph.add(priorFactorCal3_S2); - graph.add(priorFactorCal3DS2); - graph.add(priorFactorCalibratedCamera); - graph.add(priorFactorSimpleCamera); - graph.add(priorFactorStereoCamera); + graph += priorFactorLieVector; + graph += priorFactorLieMatrix; + graph += priorFactorPoint2; + graph += priorFactorStereoPoint2; + graph += priorFactorPoint3; + graph += priorFactorRot2; + graph += priorFactorRot3; + graph += priorFactorPose2; + graph += priorFactorPose3; + graph += priorFactorCal3_S2; + graph += priorFactorCal3DS2; + graph += priorFactorCalibratedCamera; + graph += priorFactorSimpleCamera; + graph += priorFactorStereoCamera; - graph.add(betweenFactorLieVector); - graph.add(betweenFactorLieMatrix); - graph.add(betweenFactorPoint2); - graph.add(betweenFactorPoint3); - graph.add(betweenFactorRot2); - graph.add(betweenFactorRot3); - graph.add(betweenFactorPose2); - graph.add(betweenFactorPose3); + graph += betweenFactorLieVector; + graph += betweenFactorLieMatrix; + graph += betweenFactorPoint2; + graph += betweenFactorPoint3; + graph += betweenFactorRot2; + graph += betweenFactorRot3; + graph += betweenFactorPose2; + graph += betweenFactorPose3; - graph.add(nonlinearEqualityLieVector); - graph.add(nonlinearEqualityLieMatrix); - graph.add(nonlinearEqualityPoint2); - graph.add(nonlinearEqualityStereoPoint2); - graph.add(nonlinearEqualityPoint3); - graph.add(nonlinearEqualityRot2); - graph.add(nonlinearEqualityRot3); - graph.add(nonlinearEqualityPose2); - graph.add(nonlinearEqualityPose3); - graph.add(nonlinearEqualityCal3_S2); - graph.add(nonlinearEqualityCal3DS2); - graph.add(nonlinearEqualityCalibratedCamera); - graph.add(nonlinearEqualitySimpleCamera); - graph.add(nonlinearEqualityStereoCamera); + graph += nonlinearEqualityLieVector; + graph += nonlinearEqualityLieMatrix; + graph += nonlinearEqualityPoint2; + graph += nonlinearEqualityStereoPoint2; + graph += nonlinearEqualityPoint3; + graph += nonlinearEqualityRot2; + graph += nonlinearEqualityRot3; + graph += nonlinearEqualityPose2; + graph += nonlinearEqualityPose3; + graph += nonlinearEqualityCal3_S2; + graph += nonlinearEqualityCal3DS2; + graph += nonlinearEqualityCalibratedCamera; + graph += nonlinearEqualitySimpleCamera; + graph += nonlinearEqualityStereoCamera; - graph.add(rangeFactorPosePoint2); - graph.add(rangeFactorPosePoint3); - graph.add(rangeFactorPose2); - graph.add(rangeFactorPose3); - graph.add(rangeFactorCalibratedCameraPoint); - graph.add(rangeFactorSimpleCameraPoint); - graph.add(rangeFactorCalibratedCamera); - graph.add(rangeFactorSimpleCamera); + graph += rangeFactorPosePoint2; + graph += rangeFactorPosePoint3; + graph += rangeFactorPose2; + graph += rangeFactorPose3; + graph += rangeFactorCalibratedCameraPoint; + graph += rangeFactorSimpleCameraPoint; + graph += rangeFactorCalibratedCamera; + graph += rangeFactorSimpleCamera; - graph.add(bearingFactor2D); + graph += bearingFactor2D; - graph.add(bearingRangeFactor2D); + graph += bearingRangeFactor2D; - graph.add(genericProjectionFactorCal3_S2); - graph.add(genericProjectionFactorCal3DS2); + graph += genericProjectionFactorCal3_S2; + graph += genericProjectionFactorCal3DS2; - graph.add(generalSFMFactorCal3_S2); + graph += generalSFMFactorCal3_S2; - graph.add(generalSFMFactor2Cal3_S2); + graph += generalSFMFactor2Cal3_S2; - graph.add(genericStereoFactor3D); + graph += genericStereoFactor3D; // text @@ -674,6 +674,8 @@ TEST (testSerializationSLAM, factors) { EXPECT(equalsBinary(genericStereoFactor3D)); } +#endif + /* ************************************************************************* */ int main() { TestResult tr; return TestRegistry::runAllTests(tr); } /* ************************************************************************* */ diff --git a/tests/testSimulated2DOriented.cpp b/tests/testSimulated2DOriented.cpp index 05d64653d..9e82bd2a7 100644 --- a/tests/testSimulated2DOriented.cpp +++ b/tests/testSimulated2DOriented.cpp @@ -64,7 +64,7 @@ TEST( simulated2DOriented, constructor ) simulated2DOriented::Values config; config.insert(X(1), Pose2(1., 0., 0.2)); config.insert(X(2), Pose2(2., 0., 0.1)); - boost::shared_ptr lf = factor.linearize(config, *config.orderingArbitrary()); + boost::shared_ptr lf = factor.linearize(config); } /* ************************************************************************* */ diff --git a/tests/testSubgraphPreconditioner.cpp b/tests/testSubgraphPreconditioner.cpp index 53ba92331..679bdcf4b 100644 --- a/tests/testSubgraphPreconditioner.cpp +++ b/tests/testSubgraphPreconditioner.cpp @@ -15,17 +15,18 @@ * @author Frank Dellaert **/ +#include + +#if 0 + #include -#include #include #include #include -#include #include +#include #include -#include - #include #include #include @@ -217,6 +218,8 @@ TEST( SubgraphPreconditioner, conjugateGradients ) CHECK(assert_equal(xtrue,actual2,1e-4)); } +#endif + /* ************************************************************************* */ int main() { TestResult tr; return TestRegistry::runAllTests(tr); } /* ************************************************************************* */ diff --git a/tests/testSubgraphSolver.cpp b/tests/testSubgraphSolver.cpp index 700f35261..b8a0346de 100644 --- a/tests/testSubgraphSolver.cpp +++ b/tests/testSubgraphSolver.cpp @@ -15,18 +15,19 @@ * @author Yong-Dian Jian **/ +#include + +#if 0 + #include -#include #include #include #include #include #include -#include +#include #include -#include - #include #include #include @@ -109,6 +110,8 @@ TEST( SubgraphSolver, constructor3 ) DOUBLES_EQUAL(0.0, error(Ab, optimized), 1e-5); } +#endif + /* ************************************************************************* */ int main() { TestResult tr; return TestRegistry::runAllTests(tr); } /* ************************************************************************* */ diff --git a/tests/testSummarization.cpp b/tests/testSummarization.cpp index bf7342202..9db7586ef 100644 --- a/tests/testSummarization.cpp +++ b/tests/testSummarization.cpp @@ -7,15 +7,17 @@ * @author Alex Cunningham */ +#include + #include #include -#include - #include #include +#include + #include #include #include @@ -62,35 +64,35 @@ TEST( testSummarization, example_from_ddf1 ) { // build from nonlinear graph/values NonlinearFactorGraph graph; - graph.add(PosePrior(xA0, Pose2(), model3)); - graph.add(PoseBetween(xA0, xA1, pose0.between(pose1), model3)); - graph.add(PoseBetween(xA1, xA2, pose1.between(pose2), model3)); - graph.add(PosePointBearingRange(xA0, lA3, pose0.bearing(landmark3), pose0.range(landmark3), model2)); - graph.add(PosePointBearingRange(xA1, lA3, pose1.bearing(landmark3), pose1.range(landmark3), model2)); - graph.add(PosePointBearingRange(xA2, lA5, pose2.bearing(landmark5), pose2.range(landmark5), model2)); + graph += PosePrior(xA0, Pose2(), model3); + graph += PoseBetween(xA0, xA1, pose0.between(pose1), model3); + graph += PoseBetween(xA1, xA2, pose1.between(pose2), model3); + graph += PosePointBearingRange(xA0, lA3, pose0.bearing(landmark3), pose0.range(landmark3), model2); + graph += PosePointBearingRange(xA1, lA3, pose1.bearing(landmark3), pose1.range(landmark3), model2); + graph += PosePointBearingRange(xA2, lA5, pose2.bearing(landmark5), pose2.range(landmark5), model2); KeySet saved_keys; saved_keys += lA3, lA5; { // Summarize to a linear system - GaussianFactorGraph actLinGraph; Ordering actOrdering; + GaussianFactorGraph actLinGraph; SummarizationMode mode = PARTIAL_QR; - boost::tie(actLinGraph, actOrdering) = summarize(graph, values, saved_keys, mode); + actLinGraph = summarize(graph, values, saved_keys, mode); - Ordering expSumOrdering; expSumOrdering += xA0, xA1, xA2, lA3, lA5; - EXPECT(assert_equal(expSumOrdering, actOrdering)); +// Ordering expSumOrdering; expSumOrdering += xA0, xA1, xA2, lA3, lA5; +// EXPECT(assert_equal(expSumOrdering, actOrdering)); // Does not split out subfactors where possible GaussianFactorGraph expLinGraph; - expLinGraph.add( - expSumOrdering[lA3], + expLinGraph += JacobianFactor( + lA3, Matrix_(4,2, 0.595867, 0.605092, 0.0, -0.406109, 0.0, 0.0, 0.0, 0.0), - expSumOrdering[lA5], + lA5, Matrix_(4,2, -0.125971, -0.160052, 0.13586, 0.301096, @@ -101,69 +103,62 @@ TEST( testSummarization, example_from_ddf1 ) { // Summarize directly from a nonlinear graph to another nonlinear graph NonlinearFactorGraph actContainerGraph = summarizeAsNonlinearContainer(graph, values, saved_keys, mode); - NonlinearFactorGraph expContainerGraph = LinearContainerFactor::convertLinearGraph(expLinGraph, expSumOrdering); + NonlinearFactorGraph expContainerGraph = LinearContainerFactor::convertLinearGraph(expLinGraph); EXPECT(assert_equal(expContainerGraph, actContainerGraph, tol)); } { // Summarize to a linear system using cholesky - compare to previous version - GaussianFactorGraph actLinGraph; Ordering actOrdering; + GaussianFactorGraph actLinGraph; SummarizationMode mode = PARTIAL_CHOLESKY; - boost::tie(actLinGraph, actOrdering) = summarize(graph, values, saved_keys, mode); - - Ordering expSumOrdering; expSumOrdering += xA0, xA1, xA2, lA3, lA5; - EXPECT(assert_equal(expSumOrdering, actOrdering)); + actLinGraph = summarize(graph, values, saved_keys, mode); // Does not split out subfactors where possible GaussianFactorGraph expLinGraph; - expLinGraph.add(HessianFactor(JacobianFactor( - expSumOrdering[lA3], + expLinGraph += HessianFactor(JacobianFactor( + lA3, Matrix_(4,2, 0.595867, 0.605092, 0.0, -0.406109, 0.0, 0.0, 0.0, 0.0), - expSumOrdering[lA5], + lA5, Matrix_(4,2, -0.125971, -0.160052, 0.13586, 0.301096, 0.268667, 0.31703, 0.0, -0.131698), - zero(4), diagmodel4))); + zero(4), diagmodel4)); EXPECT(assert_equal(expLinGraph, actLinGraph, tol)); // Summarize directly from a nonlinear graph to another nonlinear graph NonlinearFactorGraph actContainerGraph = summarizeAsNonlinearContainer(graph, values, saved_keys, mode); - NonlinearFactorGraph expContainerGraph = LinearContainerFactor::convertLinearGraph(expLinGraph, expSumOrdering); + NonlinearFactorGraph expContainerGraph = LinearContainerFactor::convertLinearGraph(expLinGraph); EXPECT(assert_equal(expContainerGraph, actContainerGraph, tol)); } { // Summarize to a linear system with joint factor graph version - GaussianFactorGraph actLinGraph; Ordering actOrdering; SummarizationMode mode = SEQUENTIAL_QR; - boost::tie(actLinGraph, actOrdering) = summarize(graph, values, saved_keys, mode); - - Ordering expSumOrdering; expSumOrdering += xA0, xA1, xA2, lA3, lA5; - EXPECT(assert_equal(expSumOrdering, actOrdering)); + GaussianFactorGraph actLinGraph = summarize(graph, values, saved_keys, mode); // Does not split out subfactors where possible GaussianFactorGraph expLinGraph; - expLinGraph.add( - expSumOrdering[lA3], + expLinGraph += JacobianFactor( + lA3, Matrix_(2,2, 0.595867, 0.605092, 0.0, 0.406109), - expSumOrdering[lA5], + lA5, Matrix_(2,2, -0.125971, -0.160052, -0.13586, -0.301096), zero(2), diagmodel2); - expLinGraph.add( - expSumOrdering[lA5], + expLinGraph += JacobianFactor( + lA5, Matrix_(2,2, 0.268667, 0.31703, 0.0, 0.131698), @@ -173,35 +168,31 @@ TEST( testSummarization, example_from_ddf1 ) { // Summarize directly from a nonlinear graph to another nonlinear graph NonlinearFactorGraph actContainerGraph = summarizeAsNonlinearContainer(graph, values, saved_keys, mode); - NonlinearFactorGraph expContainerGraph = LinearContainerFactor::convertLinearGraph(expLinGraph, expSumOrdering); + NonlinearFactorGraph expContainerGraph = LinearContainerFactor::convertLinearGraph(expLinGraph); EXPECT(assert_equal(expContainerGraph, actContainerGraph, tol)); } { // Summarize to a linear system with joint factor graph version - GaussianFactorGraph actLinGraph; Ordering actOrdering; SummarizationMode mode = SEQUENTIAL_CHOLESKY; - boost::tie(actLinGraph, actOrdering) = summarize(graph, values, saved_keys, mode); - - Ordering expSumOrdering; expSumOrdering += xA0, xA1, xA2, lA3, lA5; - EXPECT(assert_equal(expSumOrdering, actOrdering)); + GaussianFactorGraph actLinGraph = summarize(graph, values, saved_keys, mode); // Does not split out subfactors where possible GaussianFactorGraph expLinGraph; - expLinGraph.add( - expSumOrdering[lA3], + expLinGraph += JacobianFactor( + lA3, Matrix_(2,2, 0.595867, 0.605092, 0.0, 0.406109), - expSumOrdering[lA5], + lA5, Matrix_(2,2, -0.125971, -0.160052, -0.13586, -0.301096), zero(2), diagmodel2); - expLinGraph.add( - expSumOrdering[lA5], + expLinGraph += JacobianFactor( + lA5, Matrix_(2,2, 0.268667, 0.31703, 0.0, 0.131698), @@ -211,7 +202,7 @@ TEST( testSummarization, example_from_ddf1 ) { // Summarize directly from a nonlinear graph to another nonlinear graph NonlinearFactorGraph actContainerGraph = summarizeAsNonlinearContainer(graph, values, saved_keys, mode); - NonlinearFactorGraph expContainerGraph = LinearContainerFactor::convertLinearGraph(expLinGraph, expSumOrdering); + NonlinearFactorGraph expContainerGraph = LinearContainerFactor::convertLinearGraph(expLinGraph); EXPECT(assert_equal(expContainerGraph, actContainerGraph, tol)); } @@ -223,17 +214,14 @@ TEST( testSummarization, no_summarize_case ) { gtsam::Key key = 7; gtsam::KeySet saved_keys; saved_keys.insert(key); NonlinearFactorGraph graph; - graph.add(PosePrior(key, Pose2(1.0, 2.0, 0.3), model3)); - graph.add(PosePrior(key, Pose2(2.0, 3.0, 0.4), model3)); + graph += PosePrior(key, Pose2(1.0, 2.0, 0.3), model3); + graph += PosePrior(key, Pose2(2.0, 3.0, 0.4), model3); Values values; values.insert(key, Pose2(0.0, 0.0, 0.1)); SummarizationMode mode = SEQUENTIAL_CHOLESKY; - GaussianFactorGraph actLinGraph; Ordering actOrdering; - boost::tie(actLinGraph, actOrdering) = summarize(graph, values, saved_keys, mode); - Ordering expOrdering; expOrdering += key; - GaussianFactorGraph expLinGraph = *graph.linearize(values, expOrdering); - EXPECT(assert_equal(expOrdering, actOrdering)); + GaussianFactorGraph actLinGraph = summarize(graph, values, saved_keys, mode); + GaussianFactorGraph expLinGraph = *graph.linearize(values); EXPECT(assert_equal(expLinGraph, actLinGraph)); } diff --git a/tests/testSymbolicBayesNetB.cpp b/tests/testSymbolicBayesNetB.cpp deleted file mode 100644 index e046d6ba4..000000000 --- a/tests/testSymbolicBayesNetB.cpp +++ /dev/null @@ -1,79 +0,0 @@ -/* ---------------------------------------------------------------------------- - - * GTSAM Copyright 2010, Georgia Tech Research Corporation, - * Atlanta, Georgia 30332-0415 - * All Rights Reserved - * Authors: Frank Dellaert, et al. (see THANKS for the full author list) - - * See LICENSE for the license information - - * -------------------------------------------------------------------------- */ - -/** - * @file testSymbolicBayesNetB.cpp - * @brief Unit tests for a symbolic Bayes chain - * @author Frank Dellaert - */ - -#include // for 'insert()' -#include // for operator += -using namespace boost::assign; - -#include - -#include -#include -#include -#include -#include -#include - -using namespace std; -using namespace gtsam; -using namespace example; - -using symbol_shorthand::X; -using symbol_shorthand::L; - -/* ************************************************************************* */ -TEST( SymbolicBayesNet, constructor ) -{ - Ordering o; o += X(2),L(1),X(1); - // Create manually - IndexConditional::shared_ptr - x2(new IndexConditional(o[X(2)],o[L(1)], o[X(1)])), - l1(new IndexConditional(o[L(1)],o[X(1)])), - x1(new IndexConditional(o[X(1)])); - BayesNet expected; - expected.push_back(x2); - expected.push_back(l1); - expected.push_back(x1); - - // Create from a factor graph - GaussianFactorGraph factorGraph = createGaussianFactorGraph(o); - SymbolicFactorGraph fg(factorGraph); - - // eliminate it - SymbolicBayesNet actual = *SymbolicSequentialSolver(fg).eliminate(); - - CHECK(assert_equal(expected, actual)); -} - -/* ************************************************************************* */ -TEST( SymbolicBayesNet, FromGaussian) { - SymbolicBayesNet expected; - expected.push_back(IndexConditional::shared_ptr(new IndexConditional(0, 1))); - expected.push_back(IndexConditional::shared_ptr(new IndexConditional(1))); - - GaussianBayesNet gbn = createSmallGaussianBayesNet(); - SymbolicBayesNet actual(gbn); - - EXPECT(assert_equal(expected, actual)); -} - -/* ************************************************************************* */ -int main() { - TestResult tr; - return TestRegistry::runAllTests(tr); -} -/* ************************************************************************* */ diff --git a/tests/testSymbolicFactorGraphB.cpp b/tests/testSymbolicFactorGraphB.cpp deleted file mode 100644 index 5d6b478bf..000000000 --- a/tests/testSymbolicFactorGraphB.cpp +++ /dev/null @@ -1,156 +0,0 @@ -/* ---------------------------------------------------------------------------- - - * GTSAM Copyright 2010, Georgia Tech Research Corporation, - * Atlanta, Georgia 30332-0415 - * All Rights Reserved - * Authors: Frank Dellaert, et al. (see THANKS for the full author list) - - * See LICENSE for the license information - - * -------------------------------------------------------------------------- */ - -/** - * @file testSymbolicFactorGraphB.cpp - * @brief Unit tests for a symbolic Factor Graph - * @author Frank Dellaert - */ - -#include -#include -#include -#include -#include -#include - -#include - -#include // for operator += -using namespace boost::assign; - -using namespace std; -using namespace gtsam; - -using symbol_shorthand::X; -using symbol_shorthand::L; - -/* ************************************************************************* */ -TEST( SymbolicFactorGraph, symbolicFactorGraph ) -{ - Ordering o; o += X(1),L(1),X(2); - // construct expected symbolic graph - SymbolicFactorGraph expected; - expected.push_factor(o[X(1)]); - expected.push_factor(o[X(1)],o[X(2)]); - expected.push_factor(o[X(1)],o[L(1)]); - expected.push_factor(o[X(2)],o[L(1)]); - - // construct it from the factor graph - GaussianFactorGraph factorGraph = example::createGaussianFactorGraph(o); - SymbolicFactorGraph actual(factorGraph); - - CHECK(assert_equal(expected, actual)); -} - -///* ************************************************************************* */ -//TEST( SymbolicFactorGraph, findAndRemoveFactors ) -//{ -// // construct it from the factor graph graph -// GaussianFactorGraph factorGraph = example::createGaussianFactorGraph(); -// SymbolicFactorGraph actual(factorGraph); -// SymbolicFactor::shared_ptr f1 = actual[0]; -// SymbolicFactor::shared_ptr f3 = actual[2]; -// actual.findAndRemoveFactors(X(2)); -// -// // construct expected graph after find_factors_and_remove -// SymbolicFactorGraph expected; -// SymbolicFactor::shared_ptr null; -// expected.push_back(f1); -// expected.push_back(null); -// expected.push_back(f3); -// expected.push_back(null); -// -// CHECK(assert_equal(expected, actual)); -//} -///* ************************************************************************* */ -//TEST( SymbolicFactorGraph, factors) -//{ -// // create a test graph -// GaussianFactorGraph factorGraph = example::createGaussianFactorGraph(); -// SymbolicFactorGraph fg(factorGraph); -// -// // ask for all factor indices connected to x1 -// list x1_factors = fg.factors(X(1)); -// int x1_indices[] = { 0, 1, 2 }; -// list x1_expected(x1_indices, x1_indices + 3); -// CHECK(x1_factors==x1_expected); -// -// // ask for all factor indices connected to x2 -// list x2_factors = fg.factors(X(2)); -// int x2_indices[] = { 1, 3 }; -// list x2_expected(x2_indices, x2_indices + 2); -// CHECK(x2_factors==x2_expected); -//} - -///* ************************************************************************* */ -//TEST( SymbolicFactorGraph, removeAndCombineFactors ) -//{ -// // create a test graph -// GaussianFactorGraph factorGraph = example::createGaussianFactorGraph(); -// SymbolicFactorGraph fg(factorGraph); -// -// // combine all factors connected to x1 -// SymbolicFactor::shared_ptr actual = removeAndCombineFactors(fg,X(1)); -// -// // check result -// SymbolicFactor expected(L(1),X(1),X(2)); -// CHECK(assert_equal(expected,*actual)); -//} - -///* ************************************************************************* */ -//TEST( SymbolicFactorGraph, eliminateOne ) -//{ -// Ordering o; o += X(1),L(1),X(2); -// // create a test graph -// GaussianFactorGraph factorGraph = example::createGaussianFactorGraph(o); -// SymbolicFactorGraph fg(factorGraph); -// -// // eliminate -// IndexConditional::shared_ptr actual = GaussianSequentialSolver::EliminateUntil(fg, o[X(1)]+1); -// -// // create expected symbolic IndexConditional -// IndexConditional expected(o[X(1)],o[L(1)],o[X(2)]); -// -// CHECK(assert_equal(expected,*actual)); -//} - -/* ************************************************************************* */ -TEST( SymbolicFactorGraph, eliminate ) -{ - Ordering o; o += X(2),L(1),X(1); - - // create expected Chordal bayes Net - IndexConditional::shared_ptr x2(new IndexConditional(o[X(2)], o[L(1)], o[X(1)])); - IndexConditional::shared_ptr l1(new IndexConditional(o[L(1)], o[X(1)])); - IndexConditional::shared_ptr x1(new IndexConditional(o[X(1)])); - - SymbolicBayesNet expected; - expected.push_back(x2); - expected.push_back(l1); - expected.push_back(x1); - - // create a test graph - GaussianFactorGraph factorGraph = example::createGaussianFactorGraph(o); - SymbolicFactorGraph fg(factorGraph); - - // eliminate it - SymbolicBayesNet actual = *SymbolicSequentialSolver(fg).eliminate(); - - CHECK(assert_equal(expected,actual)); -} - -/* ************************************************************************* */ -int main() { - TestResult tr; - return TestRegistry::runAllTests(tr); -} -/* ************************************************************************* */ diff --git a/tests/timeGaussianFactorGraph.cpp b/tests/timeGaussianFactorGraph.cpp index f7dd8601d..00d24bd13 100644 --- a/tests/timeGaussianFactorGraph.cpp +++ b/tests/timeGaussianFactorGraph.cpp @@ -20,7 +20,6 @@ #include // for operator += in Ordering #include #include -#include using namespace std; using namespace gtsam; diff --git a/wrap/tests/testWrap.cpp b/wrap/tests/testWrap.cpp index 461f00405..8bf2c1412 100644 --- a/wrap/tests/testWrap.cpp +++ b/wrap/tests/testWrap.cpp @@ -58,7 +58,7 @@ TEST( wrap, ArgumentList ) { } /* ************************************************************************* */ -TEST_UNSAFE( wrap, check_exception ) { +TEST( wrap, check_exception ) { THROWS_EXCEPTION(Module("/notarealpath", "geometry",enable_verbose)); CHECK_EXCEPTION(Module("/alsonotarealpath", "geometry",enable_verbose), CantOpenFile);