Revamped timing statements - much easier to use, exception-safe (see email to frankcvs list)
parent 4297d24c96
commit 4876cc7ff7
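The diff below systematically replaces the old numbered calls `tic(1, "label")` / `toc(1, "label")` with label-based `tic(label)` / `toc(label)` macros backed by an RAII `AutoTicToc` helper, which is what makes the new statements exception-safe: the helper's destructor stops the timer even if the timed code throws. The following is a minimal sketch of that pattern, not the actual GTSAM implementation; the helper names (`getTimerId`, `startTimer`, `stopTimer`) are illustrative stand-ins for `gtsam::internal::getTicTocID` / `ticInternal` / `tocInternal` shown later in the diff.

```cpp
#include <cstdio>
#include <map>
#include <stdexcept>
#include <string>

// Hypothetical stand-in for gtsam::internal::getTicTocID: map each label to a stable id.
inline size_t getTimerId(const std::string& label) {
  static std::map<std::string, size_t> ids;
  static size_t next = 0;
  std::map<std::string, size_t>::iterator it = ids.find(label);
  if (it == ids.end()) it = ids.insert(std::make_pair(label, next++)).first;
  return it->second;
}

inline void startTimer(size_t id, const char* label) { std::printf("tic %zu %s\n", id, label); }
inline void stopTimer(size_t id, const char* label)  { std::printf("toc %zu %s\n", id, label); }

// RAII helper in the spirit of AutoTicToc: the destructor guarantees the matching
// "toc" even when an exception unwinds the enclosing scope.
class AutoTicToc {
  size_t id_; const char* label_; bool running_;
 public:
  AutoTicToc(size_t id, const char* label) : id_(id), label_(label), running_(true) { startTimer(id_, label_); }
  void stop() { if (running_) { stopTimer(id_, label_); running_ = false; } }
  ~AutoTicToc() { stop(); }
};

// Label-based macros mirroring the new tic(label)/toc(label) usage in the diff.
#define tic(label) \
  static const size_t label##_id = getTimerId(#label); \
  AutoTicToc label##_obj(label##_id, #label)
#define toc(label) label##_obj.stop()

void work() {
  tic(householder_update);
  throw std::runtime_error("oops");  // the timer is still stopped during unwinding
  toc(householder_update);           // unreachable here, harmless if reached
}

int main() {
  try { work(); } catch (const std::exception&) {}
  return 0;
}
```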
@@ -38,7 +38,7 @@ endif()
# Configurable Options
option(GTSAM_BUILD_TESTS "Enable/Disable building of tests" ON)
#option(GTSAM_BUILD_TIMING "Enable/Disable building of timing scripts" ON) # These do not currently work
option(GTSAM_BUILD_TIMING "Enable/Disable building of timing scripts" ON) # These do not currently work
option(GTSAM_BUILD_EXAMPLES "Enable/Disable building of examples" ON)
if(GTSAM_UNSTABLE_AVAILABLE)
option(GTSAM_BUILD_UNSTABLE "Enable/Disable libgtsam_unstable" ON)

@@ -108,8 +108,8 @@ endif()
if(CYGWIN OR MSVC OR WIN32)
set(Boost_USE_STATIC_LIBS 1)
endif()
find_package(Boost 1.43 COMPONENTS serialization system filesystem thread date_time regex REQUIRED)
set(GTSAM_BOOST_LIBRARIES ${Boost_SERIALIZATION_LIBRARY} ${Boost_SYSTEM_LIBRARY} ${Boost_FILESYSTEM_LIBRARY})
find_package(Boost 1.43 COMPONENTS serialization system filesystem thread date_time regex timer chrono REQUIRED)
set(GTSAM_BOOST_LIBRARIES ${Boost_SERIALIZATION_LIBRARY} ${Boost_SYSTEM_LIBRARY} ${Boost_FILESYSTEM_LIBRARY} ${Boost_TIMER_LIBRARY})

# General build settings
include_directories(

@@ -187,7 +187,7 @@ set(CPACK_DEBIAN_PACKAGE_DEPENDS "libboost-dev (>= 1.43)") #Example: "libc6 (>=
message(STATUS "===============================================================")
message(STATUS "================ Configuration Options ======================")
message(STATUS "Build flags ")
#print_config_flag(${GTSAM_BUILD_TIMING} "Build Timing scripts ")
print_config_flag(${GTSAM_BUILD_TIMING} "Build Timing scripts ")
print_config_flag(${GTSAM_BUILD_EXAMPLES} "Build Examples ")
print_config_flag(${GTSAM_BUILD_TESTS} "Build Tests ")
if (DOXYGEN_FOUND)
@@ -410,17 +410,17 @@ void householder_(Matrix& A, size_t k, bool copy_vectors) {
double beta = houseInPlace(vjm);

// do outer product update A(j:m,:) = (I-beta vv')*A = A - v*(beta*A'*v)' = A - v*w'
tic(1, "householder_update"); // bottleneck for system
tic(householder_update); // bottleneck for system
// don't touch old columns
Vector w = beta * A.block(j,j,m-j,n-j).transpose() * vjm;
A.block(j,j,m-j,n-j) -= vjm * w.transpose();
toc(1, "householder_update");
toc(householder_update);

// the Householder vector is copied in the zeroed out part
if (copy_vectors) {
tic(2, "householder_vector_copy");
tic(householder_vector_copy);
A.col(j).segment(j+1, m-(j+1)) = vjm.segment(1, m-(j+1));
toc(2, "householder_vector_copy");
toc(householder_vector_copy);
}
} // column j
}

@@ -428,14 +428,14 @@ void householder_(Matrix& A, size_t k, bool copy_vectors) {
/* ************************************************************************* */
void householder(Matrix& A, size_t k) {
// version with zeros below diagonal
tic(1, "householder_");
tic(householder_);
householder_(A,k,false);
toc(1, "householder_");
// tic(2, "householder_zero_fill");
toc(householder_);
// tic(householder_zero_fill);
// const size_t m = A.rows(), n = A.cols(), kprime = min(k,min(m,n));
// for(size_t j=0; j < kprime; j++)
// A.col(j).segment(j+1, m-(j+1)).setZero();
// toc(2, "householder_zero_fill");
// toc(householder_zero_fill);
}

/* ************************************************************************* */
@@ -131,30 +131,30 @@ bool choleskyPartial(Matrix& ABC, size_t nFrontal) {
const size_t n = ABC.rows();

// Compute Cholesky factorization of A, overwrites A.
tic(1, "lld");
tic(lld);
Eigen::LLT<Matrix, Eigen::Upper> llt = ABC.block(0,0,nFrontal,nFrontal).selfadjointView<Eigen::Upper>().llt();
ABC.block(0,0,nFrontal,nFrontal).triangularView<Eigen::Upper>() = llt.matrixU();
toc(1, "lld");
toc(lld);

if(debug) cout << "R:\n" << Eigen::MatrixXd(ABC.topLeftCorner(nFrontal,nFrontal).triangularView<Eigen::Upper>()) << endl;

// Compute S = inv(R') * B
tic(2, "compute S");
tic(compute_S);
if(n - nFrontal > 0) {
ABC.topLeftCorner(nFrontal,nFrontal).triangularView<Eigen::Upper>().transpose().solveInPlace(
ABC.topRightCorner(nFrontal, n-nFrontal));
}
if(debug) cout << "S:\n" << ABC.topRightCorner(nFrontal, n-nFrontal) << endl;
toc(2, "compute S");
toc(compute_S);

// Compute L = C - S' * S
tic(3, "compute L");
tic(compute_L);
if(debug) cout << "C:\n" << Eigen::MatrixXd(ABC.bottomRightCorner(n-nFrontal,n-nFrontal).selfadjointView<Eigen::Upper>()) << endl;
if(n - nFrontal > 0)
ABC.bottomRightCorner(n-nFrontal,n-nFrontal).selfadjointView<Eigen::Upper>().rankUpdate(
ABC.topRightCorner(nFrontal, n-nFrontal).transpose(), -1.0);
if(debug) cout << "L:\n" << Eigen::MatrixXd(ABC.bottomRightCorner(n-nFrontal,n-nFrontal).selfadjointView<Eigen::Upper>()) << endl;
toc(3, "compute L");
toc(compute_L);

// Check last diagonal element - Eigen does not check it
bool ok;
@@ -49,10 +49,10 @@ int main(int argc, char *argv[]) {
}

for(size_t i=0; i<1000000; ++i) {
tic(1, "overhead a");
tic(1, "overhead b");
toc(1, "overhead b");
toc(1, "overhead a");
tic(overhead_a);
tic(overhead_b);
toc(overhead_b);
toc(overhead_a);
}

tictoc_print_();
@@ -27,13 +27,13 @@
#include <gtsam/base/debug.h>
#include <gtsam/base/timing.h>

boost::shared_ptr<TimingOutline> timingRoot(new TimingOutline("Total"));
boost::weak_ptr<TimingOutline> timingCurrent(timingRoot);
namespace gtsam {

#ifdef ENABLE_OLD_TIMING
Timing timing;
#endif
std::string timingPrefix;
/* ************************************************************************* */
namespace internal {

boost::shared_ptr<TimingOutline> timingRoot(new TimingOutline("Total", getTicTocID("Total")));
boost::weak_ptr<TimingOutline> timingCurrent(timingRoot);

/* ************************************************************************* */
// Implementation of TimingOutline

@@ -48,18 +48,21 @@ void TimingOutline::add(size_t usecs) {
}

/* ************************************************************************* */
TimingOutline::TimingOutline(const std::string& label) :
t_(0), t2_(0.0), tIt_(0), tMax_(0), tMin_(0), n_(0), label_(label) {}
TimingOutline::TimingOutline(const std::string& label, size_t myId) :
myId_(myId), t_(0), t2_(0.0), tIt_(0), tMax_(0), tMin_(0), n_(0), label_(label)
{
#ifdef GTSAM_USING_NEW_BOOST_TIMERS
timer_.stop();
#endif
}

/* ************************************************************************* */
size_t TimingOutline::time() const {
size_t time = 0;
bool hasChildren = false;
BOOST_FOREACH(const boost::shared_ptr<TimingOutline>& child, children_) {
if(child) {
time += child->time();
hasChildren = true;
}
BOOST_FOREACH(const ChildMap::value_type& child, children_) {
time += child.second->time();
hasChildren = true;
}
if(hasChildren)
return time;

@@ -72,18 +75,10 @@ void TimingOutline::print(const std::string& outline) const {
std::cout << outline << " " << label_ << ": " << double(t_)/1000000.0 << " (" <<
n_ << " times, " << double(time())/1000000.0 << " children, min: " << double(tMin_)/1000000.0 <<
" max: " << double(tMax_)/1000000.0 << ")\n";
for(size_t i=0; i<children_.size(); ++i) {
if(children_[i]) {
std::string childOutline(outline);
#if 0
if(childOutline.size() > 0)
childOutline += ".";
childOutline += (boost::format("%d") % i).str();
#else
childOutline += " ";
#endif
children_[i]->print(childOutline);
}
BOOST_FOREACH(const ChildMap::value_type& child, children_) {
std::string childOutline(outline);
childOutline += " ";
child.second->print(childOutline);
}
}

@@ -114,16 +109,14 @@ void TimingOutline::print2(const std::string& outline, const double parentTotal)
std::cout << std::endl;
}

for(size_t i=0; i<children_.size(); ++i) {
if(children_[i]) {
std::string childOutline(outline);
if ( n_ == 0 ) {
children_[i]->print2(childOutline, childTotal);
}
else {
childOutline += " ";
children_[i]->print2(childOutline, selfTotal);
}
BOOST_FOREACH(const ChildMap::value_type& child, children_) {
std::string childOutline(outline);
if ( n_ == 0 ) {
child.second->print2(childOutline, childTotal);
}
else {
childOutline += " ";
child.second->print2(childOutline, selfTotal);
}
}
}

@@ -131,27 +124,17 @@ void TimingOutline::print2(const std::string& outline, const double parentTotal)
/* ************************************************************************* */
const boost::shared_ptr<TimingOutline>& TimingOutline::child(size_t child, const std::string& label, const boost::weak_ptr<TimingOutline>& thisPtr) {
assert(thisPtr.lock().get() == this);
// Resize if necessary
if(child >= children_.size())
children_.resize(child + 1);
// Create child if necessary
if(children_[child]) {
#ifndef NDEBUG
if(children_[child]->label_ != label) {
timingRoot->print();
std::cerr << "gtsam timing: tic called with id=" << child << ", label=" << label << ", but this id already has the label " << children_[child]->label_ << std::endl;
exit(1);
}
#endif
} else {
children_[child].reset(new TimingOutline(label));
children_[child]->parent_ = thisPtr;
boost::shared_ptr<TimingOutline>& result = children_[child];
if(!result) {
// Create child if necessary
result.reset(new TimingOutline(label, child));
result->parent_ = thisPtr;
}
return children_[child];
return result;
}

/* ************************************************************************* */
void TimingOutline::tic() {
void TimingOutline::ticInternal() {
#ifdef GTSAM_USING_NEW_BOOST_TIMERS
assert(timer_.is_stopped());
timer_.start();

@@ -163,7 +146,7 @@ void TimingOutline::tic() {
}

/* ************************************************************************* */
void TimingOutline::toc() {
void TimingOutline::tocInternal() {
#ifdef GTSAM_USING_NEW_BOOST_TIMERS
assert(!timer_.is_stopped());
timer_.stop();

@@ -183,95 +166,61 @@ void TimingOutline::finishedIteration() {
if(tMin_ == 0 || tIt_ < tMin_)
tMin_ = tIt_;
tIt_ = 0;
for(size_t i=0; i<children_.size(); ++i)
if(children_[i])
children_[i]->finishedIteration();
}

/* ************************************************************************* */
void tic_(size_t id, const std::string& label) {
if(ISDEBUG("timing-verbose"))
std::cout << "tic(" << id << ", " << label << ")" << std::endl;
boost::shared_ptr<TimingOutline> node = timingCurrent.lock()->child(id, label, timingCurrent);
timingCurrent = node;
node->tic();
}

/* ************************************************************************* */
void toc_(size_t id) {
if(ISDEBUG("timing-verbose"))
std::cout << "toc(" << id << ")" << std::endl;
boost::shared_ptr<TimingOutline> current(timingCurrent.lock());
if(!(id < current->parent_.lock()->children_.size() && current->parent_.lock()->children_[id] == current)) {
if(std::find(current->parent_.lock()->children_.begin(), current->parent_.lock()->children_.end(), current)
!= current->parent_.lock()->children_.end())
std::cout << "gtsam timing: Incorrect ID passed to toc, expected "
<< std::find(current->parent_.lock()->children_.begin(), current->parent_.lock()->children_.end(), current) - current->parent_.lock()->children_.begin()
<< " \"" << current->label_ << "\", got " << id << std::endl;
else
std::cout << "gtsam timing: Incorrect ID passed to toc, id " << id << " does not exist" << std::endl;
timingRoot->print();
throw std::invalid_argument("gtsam timing: Incorrect ID passed to toc");
BOOST_FOREACH(ChildMap::value_type& child, children_) {
child.second->finishedIteration();
}
current->toc();
if(!current->parent_.lock()) {
std::cout << "gtsam timing: extra toc, already at the root" << std::endl;
timingRoot->print();
throw std::invalid_argument("gtsam timing: extra toc, already at the root");
}
timingCurrent = current->parent_;
}

/* ************************************************************************* */
void toc_(size_t id, const std::string& label) {
if(ISDEBUG("timing-verbose"))
std::cout << "toc(" << id << ", " << label << ")" << std::endl;
bool check = false;
#ifndef NDEBUG
// If NDEBUG is defined, still do this debug check if the granular debugging
// flag is enabled. If NDEBUG is not defined, always do this check.
check = true;
#endif
if(check || ISDEBUG("timing-debug")) {
if(label != timingCurrent.lock()->label_) {
std::cerr << "gtsam timing: toc called with id=" << id << ", label=\"" << label << "\", but expecting \"" << timingCurrent.lock()->label_ << "\"" << std::endl;
timingRoot->print();
exit(1);
/* ************************************************************************* */
// Generate or retrieve a unique global ID number that will be used to look up tic_/toc statements
size_t getTicTocID(const char *descriptionC) {
const std::string description(descriptionC);
// Global (static) map from strings to ID numbers and current next ID number
static size_t nextId = 0;
static gtsam::FastMap<std::string, size_t> idMap;

// Retrieve or add this string
gtsam::FastMap<std::string, size_t>::const_iterator it = idMap.find(description);
if(it == idMap.end()) {
it = idMap.insert(std::make_pair(description, nextId)).first;
++ nextId;
}

// Return ID
return it->second;
}
toc_(id);
}

#ifdef ENABLE_OLD_TIMING

/* ************************************************************************* */
// Timing class implementation
void Timing::print() {
std::map<std::string, Timing::Stats>::iterator it;
for(it = this->stats.begin(); it!=stats.end(); it++) {
Stats& s = it->second;
printf("%s: %g (%i times, min: %g, max: %g)\n",
it->first.c_str(), s.t, s.n, s.t_min, s.t_max);
/* ************************************************************************* */
void ticInternal(size_t id, const char *labelC) {
const std::string label(labelC);
if(ISDEBUG("timing-verbose"))
std::cout << "tic_(" << id << ", " << label << ")" << std::endl;
boost::shared_ptr<TimingOutline> node = timingCurrent.lock()->child(id, label, timingCurrent);
timingCurrent = node;
node->ticInternal();
}

/* ************************************************************************* */
void tocInternal(size_t id, const char *label) {
if(ISDEBUG("timing-verbose"))
std::cout << "toc(" << id << ", " << label << ")" << std::endl;
boost::shared_ptr<TimingOutline> current(timingCurrent.lock());
if(id != current->myId_) {
timingRoot->print();
throw std::invalid_argument(
(boost::format("gtsam timing: Mismatched tic/toc: toc(\"%s\") called when last tic was \"%s\".") %
label % current->label_).str());
}
if(!current->parent_.lock()) {
timingRoot->print();
throw std::invalid_argument(
(boost::format("gtsam timing: Mismatched tic/toc: extra toc(\"%s\"), already at the root") %
label).str());
}
current->tocInternal();
timingCurrent = current->parent_;
}

}

/* ************************************************************************* */
double _tic_() {
struct timeval t;
gettimeofday(&t, NULL);
return ((double)t.tv_sec + ((double)t.tv_usec)/1000000.);
}

/* ************************************************************************* */
void ticPop_(const std::string& prefix, const std::string& id) {
toc_(id);
if(timingPrefix.size() < prefix.size()) {
fprintf(stderr, "Seems to be a mismatched push/pop in timing, exiting\n");
exit(1);
} else if(timingPrefix.size() == prefix.size())
timingPrefix.resize(0);
else
timingPrefix.resize(timingPrefix.size() - prefix.size() - 1);
}

#endif
@@ -18,20 +18,19 @@
#pragma once

#include <string>
#include <map>
#include <vector>
#include <boost/shared_ptr.hpp>
#include <boost/weak_ptr.hpp>
#include <gtsam/base/types.h>
#include <gtsam/base/FastMap.h>

// Enabling the new Boost timers introduces dependencies on other Boost
// libraries so this is disabled for now until we modify the build scripts
// to link each component or MATLAB wrapper with only the libraries it needs.
#if 0
//#if 0
#if BOOST_VERSION >= 104800
#define GTSAM_USING_NEW_BOOST_TIMERS
#endif
#endif
//#endif

#ifdef GTSAM_USING_NEW_BOOST_TIMERS
#include <boost/timer/timer.hpp>

@@ -39,186 +38,102 @@
#include <boost/timer.hpp>
#endif

class TimingOutline;
extern boost::shared_ptr<TimingOutline> timingRoot;
extern boost::weak_ptr<TimingOutline> timingCurrent;
namespace gtsam {

class TimingOutline {
protected:
size_t t_;
double t2_ ; /* cache the \sum t_i^2 */
size_t tIt_;
size_t tMax_;
size_t tMin_;
size_t n_;
std::string label_;
namespace internal {
size_t getTicTocID(const char *description);
void ticInternal(size_t id, const char *label);
void tocInternal(size_t id, const char *label);

boost::weak_ptr<TimingOutline> parent_;
std::vector<boost::shared_ptr<TimingOutline> > children_;
class TimingOutline {
protected:
size_t myId_;
size_t t_;
double t2_ ; /* cache the \sum t_i^2 */
size_t tIt_;
size_t tMax_;
size_t tMin_;
size_t n_;
std::string label_;
boost::weak_ptr<TimingOutline> parent_;
typedef FastMap<size_t, boost::shared_ptr<TimingOutline> > ChildMap;
ChildMap children_;
#ifdef GTSAM_USING_NEW_BOOST_TIMERS
boost::timer::cpu_timer timer_;
boost::timer::cpu_timer timer_;
#else
boost::timer timer_;
gtsam::ValueWithDefault<bool,false> timerActive_;
boost::timer timer_;
gtsam::ValueWithDefault<bool,false> timerActive_;
#endif
void add(size_t usecs);
public:
TimingOutline(const std::string& label, size_t myId);
size_t time() const;
void print(const std::string& outline = "") const;
void print2(const std::string& outline = "", const double parentTotal = -1.0) const;
const boost::shared_ptr<TimingOutline>& child(size_t child, const std::string& label, const boost::weak_ptr<TimingOutline>& thisPtr);
void ticInternal();
void tocInternal();
void finishedIteration();

void add(size_t usecs);
friend void tocInternal(size_t id);
friend void tocInternal(size_t id, const char *label);
}; // \TimingOutline

public:
class AutoTicToc {
private:
size_t id_;
const char *label_;
bool isSet_;
public:
AutoTicToc(size_t id, const char* label) : id_(id), label_(label), isSet_(true) { ticInternal(id_, label_); }
void stop() { tocInternal(id_, label_); isSet_ = false; }
~AutoTicToc() { if(isSet_) stop(); }
};

TimingOutline(const std::string& label);

size_t time() const;

void print(const std::string& outline = "") const;

void print2(const std::string& outline = "", const double parentTotal = -1.0) const;

const boost::shared_ptr<TimingOutline>& child(size_t child, const std::string& label, const boost::weak_ptr<TimingOutline>& thisPtr);

void tic();

void toc();

void finishedIteration();

friend class AutoTimer;
friend void toc_(size_t id);
friend void toc_(size_t id, const std::string& label);
}; // \TimingOutline

void tic_(size_t id, const std::string& label);

void toc_(size_t id);

void toc_(size_t id, const std::string& label);
extern boost::shared_ptr<TimingOutline> timingRoot;
extern boost::weak_ptr<TimingOutline> timingCurrent;
}

inline void tictoc_finishedIteration_() {
timingRoot->finishedIteration();
internal::timingRoot->finishedIteration();
}

inline void tictoc_print_() {
timingRoot->print();
internal::timingRoot->print();
}

/* print mean and standard deviation */
inline void tictoc_print2_() {
timingRoot->print2();
internal::timingRoot->print2();
}

// Tic and toc functions using a string label
#define tic_(label) \
static const size_t label##_id_tic = ::gtsam::internal::getTicTocID(#label); \
::gtsam::internal::AutoTicToc label##_obj = ::gtsam::internal::AutoTicToc(label##_id_tic, #label)
#define toc_(label) \
label##_obj.stop()
#define longtic_(label) \
static const size_t label##_id_tic = ::gtsam::internal::getTicTocID(#label); \
::gtsam::internal::ticInternal(label##_id_tic, #label)
#define longtoc_(label) \
static const size_t label##_id_toc = ::gtsam::internal::getTicTocID(#label); \
::gtsam::internal::tocInternal(label##_id_toc, #label)

#ifdef ENABLE_TIMING
inline void tic(size_t id, const std::string& label) { tic_(id, label); }
inline void toc(size_t id) { toc_(id); }
inline void toc(size_t id, const std::string& label) { toc_(id, label); }
inline void tictoc_finishedIteration() { tictoc_finishedIteration_(); }
inline void tictoc_print() { tictoc_print_(); }
#define tic(label) tic_(label)
#define toc(label) toc_(label)
#define longtic(label) longtic_(label)
#define longtoc(label) longtoc_(label)
#define tictoc_finishedIteration tictoc_finishedIteration_
#define tictoc_print tictoc_print_
#else
inline void tic(size_t, const char*) {}
inline void toc(size_t) {}
inline void toc(size_t, const char*) {}
#define tic(label) ((void)0)
#define toc(label) ((void)0)
#define longtic(label) ((void)0)
#define longtoc(label) ((void)0)
inline void tictoc_finishedIteration() {}
inline void tictoc_print() {}
#endif


#ifdef ENABLE_OLD_TIMING

// simple class for accumulating execution timing information by name
class Timing;
extern Timing timing;
extern std::string timingPrefix;

double _tic();
double _toc(double t);
double tic(const std::string& id);
double toc(const std::string& id);
void ticPush(const std::string& id);
void ticPop(const std::string& id);
void tictoc_finishedIteration();

/** These underscore versions work evening when ENABLE_TIMING is not defined */
double _tic_();
double _toc_(double t);
double tic_(const std::string& id);
double toc_(const std::string& id);
void ticPush_(const std::string& id);
void ticPop_(const std::string& id);
void tictoc_finishedIteration_();


// simple class for accumulating execution timing information by name
class Timing {
class Stats {
public:
std::string label;
double t0;
double t;
double t_max;
double t_min;
int n;
};
std::map<std::string, Stats> stats;
public:
void add_t0(const std::string& id, double t0) {
stats[id].t0 = t0;
}
double get_t0(const std::string& id) {
return stats[id].t0;
}
void add_dt(const std::string& id, double dt) {
Stats& s = stats[id];
s.t += dt;
s.n++;
if (s.n==1 || s.t_max < dt) s.t_max = dt;
if (s.n==1 || s.t_min > dt) s.t_min = dt;
}
void print();

double time(const std::string& id) {
Stats& s = stats[id];
return s.t;
}
};

double _tic_();
inline double _toc_(double t) {
double s = _tic_();
return (std::max(0., s-t));
}
inline double tic_(const std::string& id) {
double t0 = _tic_();
timing.add_t0(timingPrefix + " " + id, t0);
return t0;
}
inline double toc_(const std::string& id) {
std::string comb(timingPrefix + " " + id);
double dt = _toc_(timing.get_t0(comb));
timing.add_dt(comb, dt);
return dt;
}
inline void ticPush_(const std::string& prefix, const std::string& id) {
if(timingPrefix.size() > 0)
timingPrefix += ".";
timingPrefix += prefix;
tic_(id);
}
void ticPop_(const std::string& prefix, const std::string& id);

#ifdef ENABLE_TIMING
inline double _tic() { return _tic_(); }
inline double _toc(double t) { return _toc_(t); }
inline double tic(const std::string& id) { return tic_(id); }
inline double toc(const std::string& id) { return toc_(id); }
inline void ticPush(const std::string& prefix, const std::string& id) { ticPush_(prefix, id); }
inline void ticPop(const std::string& prefix, const std::string& id) { ticPop_(prefix, id); }
#else
inline double _tic() {return 0.;}
inline double _toc(double) {return 0.;}
inline double tic(const std::string&) {return 0.;}
inline double toc(const std::string&) {return 0.;}
inline void ticPush(const std::string&, const std::string&) {}
inline void ticPop(const std::string&, const std::string&) {}
#endif

#endif
}
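One consequence of the header changes above is that the label-based macros compile away entirely when ENABLE_TIMING is not defined: `tic(label)` and `toc(label)` expand to `((void)0)`, so instrumented code carries no runtime cost in normal builds. A hedged, self-contained illustration of that on/off pattern follows; it is a simplified toy, not the actual gtsam/base/timing.h, and the label `example_block` is purely illustrative.

```cpp
#include <cstdio>

// Toy version of the ENABLE_TIMING switch: define it to get console output,
// leave it undefined and the macros expand to no-ops.
#ifdef ENABLE_TIMING
#define tic(label) std::printf("tic %s\n", #label)
#define toc(label) std::printf("toc %s\n", #label)
#else
#define tic(label) ((void)0)   // compiles to nothing in normal builds
#define toc(label) ((void)0)
#endif

int main() {
  tic(example_block);
  // ... timed work would go here ...
  toc(example_block);
  return 0;
}
```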
@@ -1,79 +1,79 @@
/* ----------------------------------------------------------------------------

* GTSAM Copyright 2010, Georgia Tech Research Corporation,
* Atlanta, Georgia 30332-0415
* All Rights Reserved
* Authors: Frank Dellaert, et al. (see THANKS for the full author list)

* See LICENSE for the license information

* -------------------------------------------------------------------------- */

/**
* @file DiscreteFactorGraph.cpp
* @date Feb 14, 2011
* @author Duy-Nguyen Ta
* @author Frank Dellaert
*/

//#define ENABLE_TIMING
#include <gtsam/discrete/DiscreteFactorGraph.h>
#include <gtsam/discrete/DiscreteConditional.h>
#include <gtsam/inference/EliminationTree-inl.h>
#include <boost/make_shared.hpp>

namespace gtsam {

// Explicitly instantiate so we don't have to include everywhere
template class FactorGraph<DiscreteFactor> ;
template class EliminationTree<DiscreteFactor> ;

/* ************************************************************************* */
DiscreteFactorGraph::DiscreteFactorGraph() {
}

/* ************************************************************************* */
DiscreteFactorGraph::DiscreteFactorGraph(
const BayesNet<DiscreteConditional>& bayesNet) :
FactorGraph<DiscreteFactor>(bayesNet) {
}

/* ************************************************************************* */
FastSet<Index> DiscreteFactorGraph::keys() const {
FastSet<Index> keys;
BOOST_FOREACH(const sharedFactor& factor, *this)
if (factor) keys.insert(factor->begin(), factor->end());
return keys;
}

/* ************************************************************************* */
DecisionTreeFactor DiscreteFactorGraph::product() const {
DecisionTreeFactor result;
BOOST_FOREACH(const sharedFactor& factor, *this)
if (factor) result = (*factor) * result;
return result;
}

/* ************************************************************************* */
double DiscreteFactorGraph::operator()(
const DiscreteFactor::Values &values) const {
double product = 1.0;
BOOST_FOREACH( const sharedFactor& factor, factors_ )
product *= (*factor)(values);
return product;
}

/* ************************************************************************* */
void DiscreteFactorGraph::print(const std::string& s,
const IndexFormatter& formatter) const {
std::cout << s << std::endl;
std::cout << "size: " << size() << std::endl;
for (size_t i = 0; i < factors_.size(); i++) {
std::stringstream ss;
ss << "factor " << i << ": ";
if (factors_[i] != NULL) factors_[i]->print(ss.str(), formatter);
}
}

/* ************************************************************************* */
void DiscreteFactorGraph::permuteWithInverse(
@@ -92,35 +92,34 @@ namespace gtsam {
factor->reduceWithInverse(inverseReduction);
}
}

/* ************************************************************************* */
std::pair<DiscreteConditional::shared_ptr, DecisionTreeFactor::shared_ptr> //
EliminateDiscrete(const FactorGraph<DiscreteFactor>& factors, size_t num) {

// PRODUCT: multiply all factors
tic(1, "product");
DecisionTreeFactor product;
BOOST_FOREACH(const DiscreteFactor::shared_ptr& factor, factors){
product = (*factor) * product;
}

toc(1, "product");

// sum out frontals, this is the factor on the separator
tic(2, "sum");
DecisionTreeFactor::shared_ptr sum = product.sum(num);
toc(2, "sum");

// now divide product/sum to get conditional
tic(3, "divide");
DiscreteConditional::shared_ptr cond(new DiscreteConditional(product, *sum));
toc(3, "divide");
tictoc_finishedIteration();

return std::make_pair(cond, sum);
}


/* ************************************************************************* */
} // namespace


/* ************************************************************************* */
std::pair<DiscreteConditional::shared_ptr, DecisionTreeFactor::shared_ptr> //
EliminateDiscrete(const FactorGraph<DiscreteFactor>& factors, size_t num) {

// PRODUCT: multiply all factors
tic(product);
DecisionTreeFactor product;
BOOST_FOREACH(const DiscreteFactor::shared_ptr& factor, factors){
product = (*factor) * product;
}

toc(product);

// sum out frontals, this is the factor on the separator
tic(sum);
DecisionTreeFactor::shared_ptr sum = product.sum(num);
toc(sum);

// now divide product/sum to get conditional
tic(divide);
DiscreteConditional::shared_ptr cond(new DiscreteConditional(product, *sum));
toc(divide);

return std::make_pair(cond, sum);
}


/* ************************************************************************* */
} // namespace
@@ -35,18 +35,18 @@ namespace gtsam {
"DiscreteSequentialSolver, elimination tree ");

// Eliminate using the elimination tree
tic(1, "eliminate");
tic(eliminate);
DiscreteBayesNet::shared_ptr bayesNet = eliminate();
toc(1, "eliminate");
toc(eliminate);

if (debug) bayesNet->print("DiscreteSequentialSolver, Bayes net ");

// Allocate the solution vector if it is not already allocated

// Back-substitute
tic(2, "optimize");
tic(optimize);
DiscreteFactor::sharedValues solution = gtsam::optimize(*bayesNet);
toc(2, "optimize");
toc(optimize);

if (debug) solution->print("DiscreteSequentialSolver, solution ");
@@ -115,10 +115,10 @@ typename EliminationTree<FACTOR>::shared_ptr EliminationTree<FACTOR>::Create(

static const bool debug = false;

tic(1, "ET ComputeParents");
tic(ET_ComputeParents);
// Compute the tree structure
std::vector<Index> parents(ComputeParents(structure));
toc(1, "ET ComputeParents");
toc(ET_ComputeParents);

// Number of variables
const size_t n = structure.size();

@@ -126,7 +126,7 @@ typename EliminationTree<FACTOR>::shared_ptr EliminationTree<FACTOR>::Create(
static const Index none = std::numeric_limits<Index>::max();

// Create tree structure
tic(2, "assemble tree");
tic(assemble_tree);
std::vector<shared_ptr> trees(n);
for (Index k = 1; k <= n; k++) {
Index j = n - k; // Start at the last variable and loop down to 0

@@ -136,10 +136,10 @@ typename EliminationTree<FACTOR>::shared_ptr EliminationTree<FACTOR>::Create(
else if(!structure[j].empty() && j != n - 1) // If a node other than the last has no parents, this is a forest
throw DisconnectedGraphException();
}
toc(2, "assemble tree");
toc(assemble_tree);

// Hang factors in right places
tic(3, "hang factors");
tic(hang_factors);
BOOST_FOREACH(const typename boost::shared_ptr<DERIVEDFACTOR>& derivedFactor, factorGraph) {
// Here we upwards-cast to the factor type of this EliminationTree. This
// allows performing symbolic elimination on, for example, GaussianFactors.

@@ -150,7 +150,7 @@ typename EliminationTree<FACTOR>::shared_ptr EliminationTree<FACTOR>::Create(
trees[j]->add(factor);
}
}
toc(3, "hang factors");
toc(hang_factors);

if(debug)
trees.back()->print("ETree: ");

@@ -165,9 +165,9 @@ typename EliminationTree<FACTOR>::shared_ptr
EliminationTree<FACTOR>::Create(const FactorGraph<DERIVEDFACTOR>& factorGraph) {

// Build variable index
tic(0, "ET Create, variable index");
tic(ET_Create_variable_index);
const VariableIndex variableIndex(factorGraph);
toc(0, "ET Create, variable index");
toc(ET_Create_variable_index);

// Build elimination tree
return Create(factorGraph, variableIndex);

@@ -205,21 +205,21 @@ typename EliminationTree<FACTOR>::BayesNet::shared_ptr
EliminationTree<FACTOR>::eliminatePartial(typename EliminationTree<FACTOR>::Eliminate function, size_t nrToEliminate) const {

// call recursive routine
tic(1, "ET recursive eliminate");
tic(ET_recursive_eliminate);
if(nrToEliminate > this->key_ + 1)
throw std::invalid_argument("Requested that EliminationTree::eliminatePartial eliminate more variables than exist");
Conditionals conditionals(nrToEliminate); // reserve a vector of conditional shared pointers
(void)eliminate_(function, conditionals); // modify in place
toc(1, "ET recursive eliminate");
toc(ET_recursive_eliminate);

// Add conditionals to BayesNet
tic(2, "assemble BayesNet");
tic(assemble_BayesNet);
typename BayesNet::shared_ptr bayesNet(new BayesNet);
BOOST_FOREACH(const typename BayesNet::sharedConditional& conditional, conditionals) {
if(conditional)
bayesNet->push_back(conditional);
}
toc(2, "assemble BayesNet");
toc(assemble_BayesNet);

return bayesNet;
}
@@ -31,31 +31,29 @@ namespace gtsam {
/* ************************************************************************* */
template <class FG, class BTCLIQUE>
void JunctionTree<FG,BTCLIQUE>::construct(const FG& fg, const VariableIndex& variableIndex) {
tic(1, "JT Constructor");
tic(1, "JT symbolic ET");
tic(JT_symbolic_ET);
const typename EliminationTree<IndexFactor>::shared_ptr symETree =
EliminationTree<IndexFactor>::Create(fg, variableIndex);
toc(1, "JT symbolic ET");
tic(2, "JT symbolic eliminate");
toc(JT_symbolic_ET);
tic(JT_symbolic_eliminate);
SymbolicBayesNet::shared_ptr sbn = symETree->eliminate(&EliminateSymbolic);
toc(2, "JT symbolic eliminate");
tic(3, "symbolic BayesTree");
toc(JT_symbolic_eliminate);
tic(symbolic_BayesTree);
SymbolicBayesTree sbt(*sbn);
toc(3, "symbolic BayesTree");
toc(symbolic_BayesTree);

// distribute factors
tic(4, "distributeFactors");
tic(distributeFactors);
this->root_ = distributeFactors(fg, sbt.root());
toc(4, "distributeFactors");
toc(1, "JT Constructor");
toc(distributeFactors);
}

/* ************************************************************************* */
template <class FG, class BTCLIQUE>
JunctionTree<FG,BTCLIQUE>::JunctionTree(const FG& fg) {
tic(0, "VariableIndex");
tic(VariableIndex);
VariableIndex varIndex(fg);
toc(0, "VariableIndex");
toc(VariableIndex);
construct(fg, varIndex);
}

@@ -164,14 +162,14 @@ namespace gtsam {

// Now that we know which factors and variables, and where variables
// come from and go to, create and eliminate the new joint factor.
tic(2, "CombineAndEliminate");
tic(CombineAndEliminate);
typename FG::EliminationResult eliminated(function(fg,
current->frontal.size()));
toc(2, "CombineAndEliminate");
toc(CombineAndEliminate);

assert(std::equal(eliminated.second->begin(), eliminated.second->end(), current->separator.begin()));

tic(3, "Update tree");
tic(Update_tree);
// create a new clique corresponding the combined factors
typename BTClique::shared_ptr new_clique(BTClique::Create(eliminated));
new_clique->children_ = children;

@@ -179,7 +177,7 @@ namespace gtsam {
BOOST_FOREACH(typename BTClique::shared_ptr& childRoot, children) {
childRoot->parent_ = new_clique;
}
toc(3, "Update tree");
toc(Update_tree);

return std::make_pair(new_clique, eliminated.second);
}

@@ -189,12 +187,12 @@ namespace gtsam {
typename BTCLIQUE::shared_ptr JunctionTree<FG,BTCLIQUE>::eliminate(
typename FG::Eliminate function) const {
if (this->root()) {
tic(2, "JT eliminate");
tic(JT_eliminate);
std::pair<typename BTClique::shared_ptr, typename FG::sharedFactor> ret =
this->eliminateOneClique(function, this->root());
if (ret.second->size() != 0) throw std::runtime_error(
"JuntionTree::eliminate: elimination failed because of factors left over!");
toc(2, "JT eliminate");
toc(JT_eliminate);
return ret.first;
} else
return typename BTClique::shared_ptr();
@@ -142,9 +142,9 @@ VectorValues backSubstituteTranspose(const GaussianBayesNet& bn,

/* ************************************************************************* */
VectorValues optimizeGradientSearch(const GaussianBayesNet& Rd) {
tic(0, "Allocate VectorValues");
tic(Allocate_VectorValues);
VectorValues grad = *allocateVectorValues(Rd);
toc(0, "Allocate VectorValues");
toc(Allocate_VectorValues);

optimizeGradientSearchInPlace(Rd, grad);

@@ -153,27 +153,27 @@ VectorValues optimizeGradientSearch(const GaussianBayesNet& Rd) {

/* ************************************************************************* */
void optimizeGradientSearchInPlace(const GaussianBayesNet& Rd, VectorValues& grad) {
tic(1, "Compute Gradient");
tic(Compute_Gradient);
// Compute gradient (call gradientAtZero function, which is defined for various linear systems)
gradientAtZero(Rd, grad);
double gradientSqNorm = grad.dot(grad);
toc(1, "Compute Gradient");
toc(Compute_Gradient);

tic(2, "Compute R*g");
tic(Compute_Rg);
// Compute R * g
FactorGraph<JacobianFactor> Rd_jfg(Rd);
Errors Rg = Rd_jfg * grad;
toc(2, "Compute R*g");
toc(Compute_Rg);

tic(3, "Compute minimizing step size");
tic(Compute_minimizing_step_size);
// Compute minimizing step size
double step = -gradientSqNorm / dot(Rg, Rg);
toc(3, "Compute minimizing step size");
toc(Compute_minimizing_step_size);

tic(4, "Compute point");
tic(Compute_point);
// Compute steepest descent point
scal(step, grad);
toc(4, "Compute point");
toc(Compute_point);
}

/* ************************************************************************* */
@@ -36,9 +36,9 @@ void optimizeInPlace(const GaussianBayesTree& bayesTree, VectorValues& result) {

/* ************************************************************************* */
VectorValues optimizeGradientSearch(const GaussianBayesTree& bayesTree) {
tic(0, "Allocate VectorValues");
tic(Allocate_VectorValues);
VectorValues grad = *allocateVectorValues(bayesTree);
toc(0, "Allocate VectorValues");
toc(Allocate_VectorValues);

optimizeGradientSearchInPlace(bayesTree, grad);

@@ -47,27 +47,27 @@ VectorValues optimizeGradientSearch(const GaussianBayesTree& bayesTree) {

/* ************************************************************************* */
void optimizeGradientSearchInPlace(const GaussianBayesTree& bayesTree, VectorValues& grad) {
tic(1, "Compute Gradient");
tic(Compute_Gradient);
// Compute gradient (call gradientAtZero function, which is defined for various linear systems)
gradientAtZero(bayesTree, grad);
double gradientSqNorm = grad.dot(grad);
toc(1, "Compute Gradient");
toc(Compute_Gradient);

tic(2, "Compute R*g");
tic(Compute_Rg);
// Compute R * g
FactorGraph<JacobianFactor> Rd_jfg(bayesTree);
Errors Rg = Rd_jfg * grad;
toc(2, "Compute R*g");
toc(Compute_Rg);

tic(3, "Compute minimizing step size");
tic(Compute_minimizing_step_size);
// Compute minimizing step size
double step = -gradientSqNorm / dot(Rg, Rg);
toc(3, "Compute minimizing step size");
toc(Compute_minimizing_step_size);

tic(4, "Compute point");
tic(Compute_point);
// Compute steepest descent point
scal(step, grad);
toc(4, "Compute point");
toc(Compute_point);
}

/* ************************************************************************* */
@@ -253,7 +253,7 @@ break;
if (debug) variableSlots.print();

if (debug) cout << "Determine dimensions" << endl;
tic(1, "countDims");
tic(countDims);
vector<size_t> varDims;
size_t m, n;
boost::tie(varDims, m, n) = countDims(factors, variableSlots);

@@ -262,17 +262,17 @@ break;
BOOST_FOREACH(const size_t dim, varDims) cout << dim << " ";
cout << endl;
}
toc(1, "countDims");
toc(countDims);

if (debug) cout << "Allocate new factor" << endl;
tic(3, "allocate");
tic(allocate);
JacobianFactor::shared_ptr combined(new JacobianFactor());
combined->allocate(variableSlots, varDims, m);
Vector sigmas(m);
toc(3, "allocate");
toc(allocate);

if (debug) cout << "Copy blocks" << endl;
tic(4, "copy blocks");
tic(copy_blocks);
// Loop over slots in combined factor
Index combinedSlot = 0;
BOOST_FOREACH(const VariableSlots::value_type& varslot, variableSlots) {

@@ -293,10 +293,10 @@ break;
}
++combinedSlot;
}
toc(4, "copy blocks");
toc(copy_blocks);

if (debug) cout << "Copy rhs (b) and sigma" << endl;
tic(5, "copy vectors");
tic(copy_vectors);
bool anyConstrained = false;
// Loop over source factors
size_t nextRow = 0;

@@ -307,12 +307,12 @@ break;
if (factors[factorI]->isConstrained()) anyConstrained = true;
nextRow += sourceRows;
}
toc(5, "copy vectors");
toc(copy_vectors);

if (debug) cout << "Create noise model from sigmas" << endl;
tic(6, "noise model");
tic(noise_model);
combined->setModel(anyConstrained, sigmas);
toc(6, "noise model");
toc(noise_model);

if (debug) cout << "Assert Invariants" << endl;
combined->assertInvariants();

@@ -323,13 +323,13 @@ break;
/* ************************************************************************* */
GaussianFactorGraph::EliminationResult EliminateJacobians(const FactorGraph<
JacobianFactor>& factors, size_t nrFrontals) {
tic(1, "Combine");
tic(Combine);
JacobianFactor::shared_ptr jointFactor =
CombineJacobians(factors, VariableSlots(factors));
toc(1, "Combine");
tic(2, "eliminate");
toc(Combine);
tic(eliminate);
GaussianConditional::shared_ptr gbn = jointFactor->eliminate(nrFrontals);
toc(2, "eliminate");
toc(eliminate);
return make_pair(gbn, jointFactor);
}

@@ -397,42 +397,42 @@ break;
const bool debug = ISDEBUG("EliminateCholesky");

// Find the scatter and variable dimensions
tic(1, "find scatter");
tic(find_scatter);
Scatter scatter(findScatterAndDims(factors));
toc(1, "find scatter");
toc(find_scatter);

// Pull out keys and dimensions
tic(2, "keys");
tic(keys);
vector<size_t> dimensions(scatter.size() + 1);
BOOST_FOREACH(const Scatter::value_type& var_slot, scatter) {
dimensions[var_slot.second.slot] = var_slot.second.dimension;
}
// This is for the r.h.s. vector
dimensions.back() = 1;
toc(2, "keys");
toc(keys);

// Form Ab' * Ab
tic(3, "combine");
tic(combine);
HessianFactor::shared_ptr combinedFactor(new HessianFactor(factors, dimensions, scatter));
toc(3, "combine");
toc(combine);

// Do Cholesky, note that after this, the lower triangle still contains
// some untouched non-zeros that should be zero. We zero them while
// extracting submatrices next.
tic(4, "partial Cholesky");
tic(partial_Cholesky);
combinedFactor->partialCholesky(nrFrontals);

toc(4, "partial Cholesky");
toc(partial_Cholesky);

// Extract conditional and fill in details of the remaining factor
tic(5, "split");
tic(split);
GaussianConditional::shared_ptr conditional =
combinedFactor->splitEliminatedFactor(nrFrontals);
if (debug) {
conditional->print("Extracted conditional: ");
combinedFactor->print("Eliminated factor (L piece): ");
}
toc(5, "split");
toc(split);

combinedFactor->assertInvariants();
return make_pair(conditional, combinedFactor);

@@ -482,15 +482,15 @@ break;
// Convert all factors to the appropriate type and call the type-specific EliminateGaussian.
if (debug) cout << "Using QR" << endl;

tic(1, "convert to Jacobian");
tic(convert_to_Jacobian);
FactorGraph<JacobianFactor> jacobians = convertToJacobians(factors);
toc(1, "convert to Jacobian");
toc(convert_to_Jacobian);

tic(2, "Jacobian EliminateGaussian");
tic(Jacobian_EliminateGaussian);
GaussianConditional::shared_ptr conditional;
GaussianFactor::shared_ptr factor;
boost::tie(conditional, factor) = EliminateJacobians(jacobians, nrFrontals);
toc(2, "Jacobian EliminateGaussian");
toc(Jacobian_EliminateGaussian);

return make_pair(conditional, factor);
} // \EliminateQR

@@ -522,9 +522,9 @@ break;
return EliminateQR(factors, nrFrontals);
else {
GaussianFactorGraph::EliminationResult ret;
tic(2, "EliminateCholesky");
tic(EliminateCholesky);
ret = EliminateCholesky(factors, nrFrontals);
toc(2, "EliminateCholesky");
toc(EliminateCholesky);
return ret;
}
@@ -36,22 +36,22 @@ namespace gtsam {

/* ************************************************************************* */
VectorValues GaussianJunctionTree::optimize(Eliminate function) const {
tic(1, "GJT eliminate");
tic(GJT_eliminate);
// eliminate from leaves to the root
BTClique::shared_ptr rootClique(this->eliminate(function));
toc(1, "GJT eliminate");
toc(GJT_eliminate);

// Allocate solution vector and copy RHS
tic(2, "allocate VectorValues");
tic(allocate_VectorValues);
vector<size_t> dims(rootClique->conditional()->back()+1, 0);
countDims(rootClique, dims);
VectorValues result(dims);
toc(2, "allocate VectorValues");
toc(allocate_VectorValues);

// back-substitution
tic(3, "back-substitute");
tic(backsubstitute);
internal::optimizeInPlace<GaussianBayesTree>(rootClique, result);
toc(3, "back-substitute");
toc(backsubstitute);
return result;
}
@@ -51,13 +51,13 @@ GaussianBayesTree::shared_ptr GaussianMultifrontalSolver::eliminate() const {

/* ************************************************************************* */
VectorValues::shared_ptr GaussianMultifrontalSolver::optimize() const {
tic(2,"optimize");
tic(optimize);
VectorValues::shared_ptr values;
if (useQR_)
values = VectorValues::shared_ptr(new VectorValues(junctionTree_->optimize(&EliminateQR)));
else
values= VectorValues::shared_ptr(new VectorValues(junctionTree_->optimize(&EliminatePreferCholesky)));
toc(2,"optimize");
toc(optimize);
return values;
}
@@ -64,21 +64,21 @@ VectorValues::shared_ptr GaussianSequentialSolver::optimize() const {
if(debug) this->factors_->print("GaussianSequentialSolver, eliminating ");
if(debug) this->eliminationTree_->print("GaussianSequentialSolver, elimination tree ");

tic(1,"eliminate");
tic(eliminate);
// Eliminate using the elimination tree
GaussianBayesNet::shared_ptr bayesNet(this->eliminate());
toc(1,"eliminate");
toc(eliminate);

if(debug) bayesNet->print("GaussianSequentialSolver, Bayes net ");

// Allocate the solution vector if it is not already allocated
// VectorValues::shared_ptr solution = allocateVectorValues(*bayesNet);

tic(2,"optimize");
tic(optimize);
// Back-substitute
VectorValues::shared_ptr solution(
new VectorValues(gtsam::optimize(*bayesNet)));
toc(2,"optimize");
toc(optimize);

if(debug) solution->print("GaussianSequentialSolver, solution ");
@@ -251,16 +251,16 @@ HessianFactor::HessianFactor(const FactorGraph<GaussianFactor>& factors,

const bool debug = ISDEBUG("EliminateCholesky");
// Form Ab' * Ab
tic(1, "allocate");
tic(allocate);
info_.resize(dimensions.begin(), dimensions.end(), false);
// Fill in keys
keys_.resize(scatter.size());
std::transform(scatter.begin(), scatter.end(), keys_.begin(), boost::bind(&Scatter::value_type::first, ::_1));
toc(1, "allocate");
tic(2, "zero");
toc(allocate);
tic(zero);
matrix_.noalias() = Matrix::Zero(matrix_.rows(),matrix_.cols());
toc(2, "zero");
tic(3, "update");
toc(zero);
tic(update);
if (debug) cout << "Combining " << factors.size() << " factors" << endl;
BOOST_FOREACH(const GaussianFactor::shared_ptr& factor, factors)
{

@@ -273,7 +273,7 @@ HessianFactor::HessianFactor(const FactorGraph<GaussianFactor>& factors,
throw invalid_argument("GaussianFactor is neither Hessian nor Jacobian");
}
}
toc(3, "update");
toc(update);

if (debug) gtsam::print(matrix_, "Ab' * Ab: ");

@@ -335,14 +335,14 @@ void HessianFactor::updateATA(const HessianFactor& update, const Scatter& scatte
const bool debug = ISDEBUG("updateATA");

// First build an array of slots
tic(1, "slots");
tic(slots);
size_t* slots = (size_t*)alloca(sizeof(size_t)*update.size()); // FIXME: alloca is bad, just ask Google.
size_t slot = 0;
BOOST_FOREACH(Index j, update) {
slots[slot] = scatter.find(j)->second.slot;
++ slot;
}
toc(1, "slots");
toc(slots);

if(debug) {
this->print("Updating this: ");

@@ -350,7 +350,7 @@ void HessianFactor::updateATA(const HessianFactor& update, const Scatter& scatte
}

// Apply updates to the upper triangle
tic(3, "update");
tic(update);
for(size_t j2=0; j2<update.info_.nBlocks(); ++j2) {
size_t slot2 = (j2 == update.size()) ? this->info_.nBlocks()-1 : slots[j2];
for(size_t j1=0; j1<=j2; ++j1) {

@@ -375,7 +375,7 @@ void HessianFactor::updateATA(const HessianFactor& update, const Scatter& scatte
if(debug) this->print();
}
}
toc(3, "update");
toc(update);
}

/* ************************************************************************* */

@@ -388,16 +388,16 @@ void HessianFactor::updateATA(const JacobianFactor& update, const Scatter& scatt
const bool debug = ISDEBUG("updateATA");

// First build an array of slots
tic(1, "slots");
tic(slots);
size_t* slots = (size_t*)alloca(sizeof(size_t)*update.size()); // FIXME: alloca is bad, just ask Google.
size_t slot = 0;
BOOST_FOREACH(Index j, update) {
slots[slot] = scatter.find(j)->second.slot;
++ slot;
}
toc(1, "slots");
toc(slots);

tic(2, "form A^T*A");
tic(form_ATA);
if(update.model_->isConstrained())
throw invalid_argument("Cannot update HessianFactor from JacobianFactor with constrained noise model");

@@ -423,10 +423,10 @@ void HessianFactor::updateATA(const JacobianFactor& update, const Scatter& scatt
throw invalid_argument("In HessianFactor::updateATA, JacobianFactor noise model is neither Unit nor Diagonal");
}
if (debug) cout << "updateInform: \n" << updateInform << endl;
toc(2, "form A^T*A");
toc(form_ATA);

// Apply updates to the upper triangle
tic(3, "update");
tic(update);
for(size_t j2=0; j2<update.Ab_.nBlocks(); ++j2) {
size_t slot2 = (j2 == update.size()) ? this->info_.nBlocks()-1 : slots[j2];
for(size_t j1=0; j1<=j2; ++j1) {

@@ -452,7 +452,7 @@ void HessianFactor::updateATA(const JacobianFactor& update, const Scatter& scatt
if(debug) this->print();
}
}
toc(3, "update");
toc(update);
}

/* ************************************************************************* */

@@ -467,7 +467,7 @@ GaussianConditional::shared_ptr HessianFactor::splitEliminatedFactor(size_t nrFr
static const bool debug = false;

// Extract conditionals
tic(1, "extract conditionals");
tic(extract_conditionals);
GaussianConditional::shared_ptr conditional(new GaussianConditional());
typedef VerticalBlockView<Matrix> BlockAb;
BlockAb Ab(matrix_, info_);

@@ -476,22 +476,22 @@ GaussianConditional::shared_ptr HessianFactor::splitEliminatedFactor(size_t nrFr
Ab.rowEnd() = Ab.rowStart() + varDim;

// Create one big conditionals with many frontal variables.
tic(2, "construct cond");
tic(construct_cond);
Vector sigmas = Vector::Ones(varDim);
conditional = boost::make_shared<ConditionalType>(keys_.begin(), keys_.end(), nrFrontals, Ab, sigmas);
toc(2, "construct cond");
toc(construct_cond);
if(debug) conditional->print("Extracted conditional: ");

toc(1, "extract conditionals");
toc(extract_conditionals);

// Take lower-right block of Ab_ to get the new factor
tic(2, "remaining factor");
tic(remaining_factor);
info_.blockStart() = nrFrontals;
// Assign the keys
vector<Index> remainingKeys(keys_.size() - nrFrontals);
remainingKeys.assign(keys_.begin() + nrFrontals, keys_.end());
keys_.swap(remainingKeys);
toc(2, "remaining factor");
toc(remaining_factor);

return conditional;
}

@@ -388,7 +388,7 @@ namespace gtsam {
throw IndeterminantLinearSystemException(this->keys().front());

// Extract conditional
tic(3, "cond Rd");
tic(cond_Rd);

// Restrict the matrix to be in the first nrFrontals variables
Ab_.rowEnd() = Ab_.rowStart() + frontalDim;

@@ -397,11 +397,11 @@ namespace gtsam {
if(debug) conditional->print("Extracted conditional: ");
Ab_.rowStart() += frontalDim;
Ab_.firstBlock() += nrFrontals;
toc(3, "cond Rd");
toc(cond_Rd);

if(debug) conditional->print("Extracted conditional: ");

tic(4, "remaining factor");
tic(remaining_factor);
// Take lower-right block of Ab to get the new factor
Ab_.rowEnd() = model_->dim();
keys_.erase(begin(), begin() + nrFrontals);

@@ -412,7 +412,7 @@ namespace gtsam {
model_ = noiseModel::Diagonal::Sigmas(sub(model_->sigmas(), frontalDim, model_->dim()));
if(debug) this->print("Eliminated factor: ");
assert(Ab_.rows() <= Ab_.cols()-1);
toc(4, "remaining factor");
toc(remaining_factor);

if(debug) print("Eliminated factor: ");

@@ -439,9 +439,9 @@ namespace gtsam {
if(debug) cout << "frontalDim = " << frontalDim << endl;

// Use in-place QR dense Ab appropriate to NoiseModel
tic(2, "QR");
tic(QR);
SharedDiagonal noiseModel = model_->QR(matrix_);
toc(2, "QR");
toc(QR);

// Zero the lower-left triangle. todo: not all of these entries actually
// need to be zeroed if we are careful to start copying rows after the last

@@ -329,22 +329,22 @@ SharedDiagonal Constrained::QR(Matrix& Ab) const {
Vector a = Ab.col(j);

// Calculate weighted pseudo-inverse and corresponding precision
tic(1, "constrained_QR weightedPseudoinverse");
tic(constrained_QR_weightedPseudoinverse);
double precision = weightedPseudoinverse(a, weights, pseudo);
toc(1, "constrained_QR weightedPseudoinverse");
toc(constrained_QR_weightedPseudoinverse);

// If precision is zero, no information on this column
// This is actually not limited to constraints, could happen in Gaussian::QR
// In that case, we're probably hosed. TODO: make sure Householder is rank-revealing
if (precision < 1e-8) continue;

tic(2, "constrained_QR create rd");
tic(constrained_QR_create_rd);
// create solution [r d], rhs is automatically r(n)
Vector rd(n+1); // uninitialized !
rd(j)=1.0; // put 1 on diagonal
for (size_t j2=j+1; j2<n+1; ++j2) // and fill in remainder with dot-products
rd(j2) = pseudo.dot(Ab.col(j2));
toc(2, "constrained_QR create rd");
toc(constrained_QR_create_rd);

// construct solution (r, d, sigma)
Rd.push_back(boost::make_tuple(j, rd, precision));

@@ -353,15 +353,15 @@ SharedDiagonal Constrained::QR(Matrix& Ab) const {
if (Rd.size()>=maxRank) break;

// update Ab, expensive, using outer product
tic(3, "constrained_QR update Ab");
tic(constrained_QR_update_Ab);
Ab.middleCols(j+1,n-j) -= a * rd.segment(j+1, n-j).transpose();
toc(3, "constrained_QR update Ab");
toc(constrained_QR_update_Ab);
}

// Create storage for precisions
Vector precisions(Rd.size());

tic(4, "constrained_QR write back into Ab");
tic(constrained_QR_write_back_into_Ab);
// Write back result in Ab, imperative as we are
// TODO: test that is correct if a column was skipped !!!!
size_t i = 0; // start with first row

@@ -377,7 +377,7 @@ SharedDiagonal Constrained::QR(Matrix& Ab) const {
Ab(i,j2) = rd(j2);
i+=1;
}
toc(4, "constrained_QR write back into Ab");
toc(constrained_QR_write_back_into_Ab);

// Must include mu, as the defaults might be higher, resulting in non-convergence
return mixed ? Constrained::MixedPrecisions(mu_, precisions) : Diagonal::Precisions(precisions);

@@ -17,7 +17,7 @@
*/

#include <gtsam/linear/GaussianFactorGraph.h>
#include <gtsam/linear/SharedDiagonal.h>
#include <gtsam/linear/NoiseModel.h>
#include <gtsam/inference/EliminationTree-inl.h>

#include <boost/random.hpp>

@@ -28,7 +28,7 @@ using namespace std;
#include <gtsam/linear/JacobianFactor.h>
#include <gtsam/linear/JacobianFactor.h>
#include <gtsam/linear/GaussianConditional.h>
#include <gtsam/linear/SharedDiagonal.h>
#include <gtsam/linear/NoiseModel.h>

using namespace gtsam;
using namespace boost::assign;

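The pattern in every hunk above is the same: numbered, string-labelled calls such as tic(1, "slots") become bare-identifier calls such as tic(slots), so there is no index/label pair to keep in sync by hand. As a rough, self-contained sketch of how macros of that shape can work (the names and storage below are assumptions for illustration, not the actual gtsam/base/timing.h implementation), the bare identifier can be turned into its label by preprocessor stringification:

// Illustrative sketch only; demo::TimingStore and its members are assumed names.
#include <ctime>
#include <iostream>
#include <map>
#include <string>

namespace demo {
struct TimingStore {
  std::map<std::string, double> totals;         // accumulated CPU seconds per label
  std::map<std::string, std::clock_t> started;  // start ticks of currently running timers

  void tic(const std::string& label) { started[label] = std::clock(); }
  void toc(const std::string& label) {
    totals[label] += double(std::clock() - started[label]) / CLOCKS_PER_SEC;
  }
  void print() const {
    for (std::map<std::string, double>::const_iterator it = totals.begin(); it != totals.end(); ++it)
      std::cout << it->first << ": " << it->second << " s\n";
  }
};
inline TimingStore& timingStore() { static TimingStore instance; return instance; }
}  // namespace demo

// The bare identifier becomes the label text via stringification, which is the
// piece a call like tic(householder_update) or toc(QR) depends on syntactically.
#define tic(label) demo::timingStore().tic(#label)
#define toc(label) demo::timingStore().toc(#label)
#define tictoc_print() demo::timingStore().print()

A real timing facility would layer nesting and per-iteration bookkeeping (tictoc_finishedIteration) on top of this, but the stringified-identifier call style is what all of the rewritten call sites in this commit share.
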
@ -149,22 +149,22 @@ typename DoglegOptimizerImpl::IterationResult DoglegOptimizerImpl::Iterate(
|
|||
const F& f, const VALUES& x0, const Ordering& ordering, const double f_error, const bool verbose) {
|
||||
|
||||
// Compute steepest descent and Newton's method points
|
||||
tic(0, "optimizeGradientSearch");
|
||||
tic(0, "allocateVectorValues");
|
||||
tic(optimizeGradientSearch);
|
||||
tic(allocateVectorValues);
|
||||
VectorValues dx_u = *allocateVectorValues(Rd);
|
||||
toc(0, "allocateVectorValues");
|
||||
tic(1, "optimizeGradientSearchInPlace");
|
||||
toc(allocateVectorValues);
|
||||
tic(optimizeGradientSearchInPlace);
|
||||
optimizeGradientSearchInPlace(Rd, dx_u);
|
||||
toc(1, "optimizeGradientSearchInPlace");
|
||||
toc(0, "optimizeGradientSearch");
|
||||
tic(1, "optimizeInPlace");
|
||||
toc(optimizeGradientSearchInPlace);
|
||||
toc(optimizeGradientSearch);
|
||||
tic(optimizeInPlace);
|
||||
VectorValues dx_n(VectorValues::SameStructure(dx_u));
|
||||
optimizeInPlace(Rd, dx_n);
|
||||
toc(1, "optimizeInPlace");
|
||||
tic(2, "jfg error");
|
||||
toc(optimizeInPlace);
|
||||
tic(jfg_error);
|
||||
const GaussianFactorGraph jfg(Rd);
|
||||
const double M_error = jfg.error(VectorValues::Zero(dx_u));
|
||||
toc(2, "jfg error");
|
||||
toc(jfg_error);
|
||||
|
||||
// Result to return
|
||||
IterationResult result;
|
||||
|
@ -172,32 +172,32 @@ typename DoglegOptimizerImpl::IterationResult DoglegOptimizerImpl::Iterate(
|
|||
bool stay = true;
|
||||
enum { NONE, INCREASED_DELTA, DECREASED_DELTA } lastAction = NONE; // Used to prevent alternating between increasing and decreasing in one iteration
|
||||
while(stay) {
|
||||
tic(3, "Dog leg point");
|
||||
tic(Dog_leg_point);
|
||||
// Compute dog leg point
|
||||
result.dx_d = ComputeDoglegPoint(Delta, dx_u, dx_n, verbose);
|
||||
toc(3, "Dog leg point");
|
||||
toc(Dog_leg_point);
|
||||
|
||||
if(verbose) std::cout << "Delta = " << Delta << ", dx_d_norm = " << result.dx_d.vector().norm() << std::endl;
|
||||
|
||||
tic(4, "retract");
|
||||
tic(retract);
|
||||
// Compute expmapped solution
|
||||
const VALUES x_d(x0.retract(result.dx_d, ordering));
|
||||
toc(4, "retract");
|
||||
toc(retract);
|
||||
|
||||
tic(5, "decrease in f");
|
||||
tic(decrease_in_f);
|
||||
// Compute decrease in f
|
||||
result.f_error = f.error(x_d);
|
||||
toc(5, "decrease in f");
|
||||
toc(decrease_in_f);
|
||||
|
||||
tic(6, "decrease in M");
|
||||
tic(decrease_in_M);
|
||||
// Compute decrease in M
|
||||
const double new_M_error = jfg.error(result.dx_d);
|
||||
toc(6, "decrease in M");
|
||||
toc(decrease_in_M);
|
||||
|
||||
if(verbose) std::cout << std::setprecision(15) << "f error: " << f_error << " -> " << result.f_error << std::endl;
|
||||
if(verbose) std::cout << std::setprecision(15) << "M error: " << M_error << " -> " << new_M_error << std::endl;
|
||||
|
||||
tic(7, "adjust Delta");
|
||||
tic(adjust_Delta);
|
||||
// Compute gain ratio. Here we take advantage of the invariant that the
|
||||
// Bayes' net error at zero is equal to the nonlinear error
|
||||
const double rho = fabs(f_error - result.f_error) < 1e-15 || fabs(M_error - new_M_error) < 1e-15 ?
|
||||
|
@ -266,7 +266,7 @@ typename DoglegOptimizerImpl::IterationResult DoglegOptimizerImpl::Iterate(
|
|||
stay = false;
|
||||
}
|
||||
}
|
||||
toc(7, "adjust Delta");
|
||||
toc(adjust_Delta);
|
||||
}
|
||||
|
||||
// dx_d and f_error have already been filled in during the loop
|
||||
|
|
|
@ -302,7 +302,7 @@ ISAM2::Impl::PartialSolve(GaussianFactorGraph& factors,
|
|||
|
||||
PartialSolveResult result;
|
||||
|
||||
tic(1,"select affected variables");
|
||||
tic(select_affected_variables);
|
||||
#ifndef NDEBUG
|
||||
// Debug check that all variables involved in the factors to be re-eliminated
|
||||
// are in affectedKeys, since we will use it to select a subset of variables.
|
||||
|
@ -326,12 +326,12 @@ ISAM2::Impl::PartialSolve(GaussianFactorGraph& factors,
|
|||
if(debug) affectedKeysSelectorInverse.print("affectedKeysSelectorInverse: ");
|
||||
factors.permuteWithInverse(affectedKeysSelectorInverse);
|
||||
if(debug) factors.print("Factors to reorder/re-eliminate: ");
|
||||
toc(1,"select affected variables");
|
||||
tic(2,"variable index");
|
||||
toc(select_affected_variables);
|
||||
tic(variable_index);
|
||||
VariableIndex affectedFactorsIndex(factors); // Create a variable index for the factors to be re-eliminated
|
||||
if(debug) affectedFactorsIndex.print("affectedFactorsIndex: ");
|
||||
toc(2,"variable index");
|
||||
tic(3,"ccolamd");
|
||||
toc(variable_index);
|
||||
tic(ccolamd);
|
||||
vector<int> cmember(affectedKeysSelector.size(), 0);
|
||||
if(reorderingMode.constrain == ReorderingMode::CONSTRAIN_LAST) {
|
||||
assert(reorderingMode.constrainedKeys);
|
||||
|
@ -348,8 +348,8 @@ ISAM2::Impl::PartialSolve(GaussianFactorGraph& factors,
|
|||
}
|
||||
}
|
||||
Permutation::shared_ptr affectedColamd(inference::PermutationCOLAMD_(affectedFactorsIndex, cmember));
|
||||
toc(3,"ccolamd");
|
||||
tic(4,"ccolamd permutations");
|
||||
toc(ccolamd);
|
||||
tic(ccolamd_permutations);
|
||||
Permutation::shared_ptr affectedColamdInverse(affectedColamd->inverse());
|
||||
if(debug) affectedColamd->print("affectedColamd: ");
|
||||
if(debug) affectedColamdInverse->print("affectedColamdInverse: ");
|
||||
|
@ -358,15 +358,15 @@ ISAM2::Impl::PartialSolve(GaussianFactorGraph& factors,
|
|||
result.fullReorderingInverse =
|
||||
*Permutation::Identity(reorderingMode.nFullSystemVars).partialPermutation(affectedKeysSelector, *affectedColamdInverse);
|
||||
if(debug) result.fullReordering.print("partialReordering: ");
|
||||
toc(4,"ccolamd permutations");
|
||||
toc(ccolamd_permutations);
|
||||
|
||||
tic(5,"permute affected variable index");
|
||||
tic(permute_affected_variable_index);
|
||||
affectedFactorsIndex.permuteInPlace(*affectedColamd);
|
||||
toc(5,"permute affected variable index");
|
||||
toc(permute_affected_variable_index);
|
||||
|
||||
tic(6,"permute affected factors");
|
||||
tic(permute_affected_factors);
|
||||
factors.permuteWithInverse(*affectedColamdInverse);
|
||||
toc(6,"permute affected factors");
|
||||
toc(permute_affected_factors);
|
||||
|
||||
if(debug) factors.print("Colamd-ordered affected factors: ");
|
||||
|
||||
|
@ -376,15 +376,15 @@ ISAM2::Impl::PartialSolve(GaussianFactorGraph& factors,
|
|||
#endif
|
||||
|
||||
// eliminate into a Bayes net
|
||||
tic(7,"eliminate");
|
||||
tic(eliminate);
|
||||
JunctionTree<GaussianFactorGraph, ISAM2::Clique> jt(factors, affectedFactorsIndex);
|
||||
if(!useQR)
|
||||
result.bayesTree = jt.eliminate(EliminatePreferCholesky);
|
||||
else
|
||||
result.bayesTree = jt.eliminate(EliminateQR);
|
||||
toc(7,"eliminate");
|
||||
toc(eliminate);
|
||||
|
||||
tic(8,"permute eliminated");
|
||||
tic(permute_eliminated);
|
||||
if(result.bayesTree) result.bayesTree->permuteWithInverse(affectedKeysSelector);
|
||||
if(debug && result.bayesTree) {
|
||||
cout << "Full var-ordered eliminated BT:\n";
|
||||
|
@ -393,7 +393,7 @@ ISAM2::Impl::PartialSolve(GaussianFactorGraph& factors,
|
|||
// Undo permutation on our subset of cached factors, we must later permute *all* of the cached factors
|
||||
factors.permuteWithInverse(*affectedColamd);
|
||||
factors.permuteWithInverse(affectedKeysSelector);
|
||||
toc(8,"permute eliminated");
|
||||
toc(permute_eliminated);
|
||||
|
||||
return result;
|
||||
}
|
||||
|
|
|
@ -171,19 +171,19 @@ FastList<size_t> ISAM2::getAffectedFactors(const FastList<Index>& keys) const {
|
|||
FactorGraph<GaussianFactor>::shared_ptr
|
||||
ISAM2::relinearizeAffectedFactors(const FastList<Index>& affectedKeys, const FastSet<Index>& relinKeys) const {
|
||||
|
||||
tic(1,"getAffectedFactors");
|
||||
tic(getAffectedFactors);
|
||||
FastList<size_t> candidates = getAffectedFactors(affectedKeys);
|
||||
toc(1,"getAffectedFactors");
|
||||
toc(getAffectedFactors);
|
||||
|
||||
NonlinearFactorGraph nonlinearAffectedFactors;
|
||||
|
||||
tic(2,"affectedKeysSet");
|
||||
tic(affectedKeysSet);
|
||||
// for fast lookup below
|
||||
FastSet<Index> affectedKeysSet;
|
||||
affectedKeysSet.insert(affectedKeys.begin(), affectedKeys.end());
|
||||
toc(2,"affectedKeysSet");
|
||||
toc(affectedKeysSet);
|
||||
|
||||
tic(3,"check candidates and linearize");
|
||||
tic(check_candidates_and_linearize);
|
||||
FactorGraph<GaussianFactor>::shared_ptr linearized = boost::make_shared<FactorGraph<GaussianFactor> >();
|
||||
BOOST_FOREACH(size_t idx, candidates) {
|
||||
bool inside = true;
|
||||
|
@ -212,7 +212,7 @@ ISAM2::relinearizeAffectedFactors(const FastList<Index>& affectedKeys, const Fas
|
|||
}
|
||||
}
|
||||
}
|
||||
toc(3,"check candidates and linearize");
|
||||
toc(check_candidates_and_linearize);
|
||||
|
||||
return linearized;
|
||||
}
|
||||
|
@ -283,11 +283,11 @@ boost::shared_ptr<FastSet<Index> > ISAM2::recalculate(const FastSet<Index>& mark
|
|||
// 1. Remove top of Bayes tree and convert to a factor graph:
|
||||
// (a) For each affected variable, remove the corresponding clique and all parents up to the root.
|
||||
// (b) Store orphaned sub-trees \BayesTree_{O} of removed cliques.
|
||||
tic(1, "removetop");
|
||||
tic(removetop);
|
||||
Cliques orphans;
|
||||
BayesNet<GaussianConditional> affectedBayesNet;
|
||||
this->removeTop(markedKeys, affectedBayesNet, orphans);
|
||||
toc(1, "removetop");
|
||||
toc(removetop);
|
||||
|
||||
if(debug) affectedBayesNet.print("Removed top: ");
|
||||
if(debug) orphans.print("Orphans: ");
|
||||
|
@ -304,22 +304,22 @@ boost::shared_ptr<FastSet<Index> > ISAM2::recalculate(const FastSet<Index>& mark
|
|||
// BEGIN OF COPIED CODE
|
||||
|
||||
// ordering provides all keys in conditionals, there cannot be others because path to root included
|
||||
tic(2,"affectedKeys");
|
||||
tic(affectedKeys);
|
||||
FastList<Index> affectedKeys = affectedBayesNet.ordering();
|
||||
toc(2,"affectedKeys");
|
||||
toc(affectedKeys);
|
||||
|
||||
boost::shared_ptr<FastSet<Index> > affectedKeysSet(new FastSet<Index>()); // Will return this result
|
||||
|
||||
if(affectedKeys.size() >= theta_.size() * batchThreshold) {
|
||||
|
||||
tic(3,"batch");
|
||||
tic(batch);
|
||||
|
||||
tic(0,"add keys");
|
||||
tic(add_keys);
|
||||
BOOST_FOREACH(const Ordering::value_type& key_index, ordering_) { affectedKeysSet->insert(key_index.second); }
|
||||
toc(0,"add keys");
|
||||
toc(add_keys);
|
||||
|
||||
tic(1,"reorder");
|
||||
tic(1,"CCOLAMD");
|
||||
tic(reorder);
|
||||
tic(CCOLAMD);
|
||||
// Do a batch step - reorder and relinearize all variables
|
||||
vector<int> cmember(theta_.size(), 0);
|
||||
if(constrainKeys) {
|
||||
|
@ -341,29 +341,29 @@ boost::shared_ptr<FastSet<Index> > ISAM2::recalculate(const FastSet<Index>& mark
|
|||
}
|
||||
Permutation::shared_ptr colamd(inference::PermutationCOLAMD_(variableIndex_, cmember));
|
||||
Permutation::shared_ptr colamdInverse(colamd->inverse());
|
||||
toc(1,"CCOLAMD");
|
||||
toc(CCOLAMD);
|
||||
|
||||
// Reorder
|
||||
tic(2,"permute global variable index");
|
||||
tic(permute_global_variable_index);
|
||||
variableIndex_.permuteInPlace(*colamd);
|
||||
toc(2,"permute global variable index");
|
||||
tic(3,"permute delta");
|
||||
toc(permute_global_variable_index);
|
||||
tic(permute_delta);
|
||||
delta_ = delta_.permute(*colamd);
|
||||
deltaNewton_ = deltaNewton_.permute(*colamd);
|
||||
RgProd_ = RgProd_.permute(*colamd);
|
||||
toc(3,"permute delta");
|
||||
tic(4,"permute ordering");
|
||||
toc(permute_delta);
|
||||
tic(permute_ordering);
|
||||
ordering_.permuteWithInverse(*colamdInverse);
|
||||
toc(4,"permute ordering");
|
||||
toc(1,"reorder");
|
||||
toc(permute_ordering);
|
||||
toc(reorder);
|
||||
|
||||
tic(2,"linearize");
|
||||
tic(linearize);
|
||||
GaussianFactorGraph linearized = *nonlinearFactors_.linearize(theta_, ordering_);
|
||||
if(params_.cacheLinearizedFactors)
|
||||
linearFactors_ = linearized;
|
||||
toc(2,"linearize");
|
||||
toc(linearize);
|
||||
|
||||
tic(5,"eliminate");
|
||||
tic(eliminate);
|
||||
JunctionTree<GaussianFactorGraph, Base::Clique> jt(linearized, variableIndex_);
|
||||
sharedClique newRoot;
|
||||
if(params_.factorization == ISAM2Params::CHOLESKY)
|
||||
|
@ -372,12 +372,12 @@ boost::shared_ptr<FastSet<Index> > ISAM2::recalculate(const FastSet<Index>& mark
|
|||
newRoot = jt.eliminate(EliminateQR);
|
||||
else assert(false);
|
||||
if(debug) newRoot->print("Eliminated: ");
|
||||
toc(5,"eliminate");
|
||||
toc(eliminate);
|
||||
|
||||
tic(6,"insert");
|
||||
tic(insert);
|
||||
this->clear();
|
||||
this->insert(newRoot);
|
||||
toc(6,"insert");
|
||||
toc(insert);
|
||||
|
||||
result.variablesReeliminated = affectedKeysSet->size();
|
||||
|
||||
|
@ -392,20 +392,20 @@ boost::shared_ptr<FastSet<Index> > ISAM2::recalculate(const FastSet<Index>& mark
|
|||
}
|
||||
}
|
||||
|
||||
toc(3,"batch");
|
||||
toc(batch);
|
||||
|
||||
} else {
|
||||
|
||||
tic(4,"incremental");
|
||||
tic(incremental);
|
||||
|
||||
// 2. Add the new factors \Factors' into the resulting factor graph
|
||||
FastList<Index> affectedAndNewKeys;
|
||||
affectedAndNewKeys.insert(affectedAndNewKeys.end(), affectedKeys.begin(), affectedKeys.end());
|
||||
affectedAndNewKeys.insert(affectedAndNewKeys.end(), observedKeys.begin(), observedKeys.end());
|
||||
tic(1,"relinearizeAffected");
|
||||
tic(relinearizeAffected);
|
||||
GaussianFactorGraph factors(*relinearizeAffectedFactors(affectedAndNewKeys, relinKeys));
|
||||
if(debug) factors.print("Relinearized factors: ");
|
||||
toc(1,"relinearizeAffected");
|
||||
toc(relinearizeAffected);
|
||||
|
||||
if(debug) { cout << "Affected keys: "; BOOST_FOREACH(const Index key, affectedKeys) { cout << key << " "; } cout << endl; }
|
||||
|
||||
|
@ -428,27 +428,27 @@ boost::shared_ptr<FastSet<Index> > ISAM2::recalculate(const FastSet<Index>& mark
|
|||
<< " avgCliqueSize: " << avgClique << " #Cliques: " << numCliques << " nnzR: " << nnzR << endl;
|
||||
#endif
|
||||
|
||||
tic(2,"cached");
|
||||
tic(cached);
|
||||
// add the cached intermediate results from the boundary of the orphans ...
|
||||
GaussianFactorGraph cachedBoundary = getCachedBoundaryFactors(orphans);
|
||||
if(debug) cachedBoundary.print("Boundary factors: ");
|
||||
factors.push_back(cachedBoundary);
|
||||
toc(2,"cached");
|
||||
toc(cached);
|
||||
|
||||
// END OF COPIED CODE
|
||||
|
||||
// 3. Re-order and eliminate the factor graph into a Bayes net (Algorithm [alg:eliminate]), and re-assemble into a new Bayes tree (Algorithm [alg:BayesTree])
|
||||
|
||||
tic(4,"reorder and eliminate");
|
||||
tic(reorder_and_eliminate);
|
||||
|
||||
tic(1,"list to set");
|
||||
tic(list_to_set);
|
||||
// create a partial reordering for the new and contaminated factors
|
||||
// markedKeys are passed in: those variables will be forced to the end in the ordering
|
||||
affectedKeysSet->insert(markedKeys.begin(), markedKeys.end());
|
||||
affectedKeysSet->insert(affectedKeys.begin(), affectedKeys.end());
|
||||
toc(1,"list to set");
|
||||
toc(list_to_set);
|
||||
|
||||
tic(2,"PartialSolve");
|
||||
tic(PartialSolve);
|
||||
Impl::ReorderingMode reorderingMode;
|
||||
reorderingMode.nFullSystemVars = ordering_.nVars();
|
||||
reorderingMode.algorithm = Impl::ReorderingMode::COLAMD;
|
||||
|
@ -465,50 +465,50 @@ boost::shared_ptr<FastSet<Index> > ISAM2::recalculate(const FastSet<Index>& mark
|
|||
}
|
||||
Impl::PartialSolveResult partialSolveResult =
|
||||
Impl::PartialSolve(factors, affectedUsedKeys, reorderingMode, (params_.factorization == ISAM2Params::QR));
|
||||
toc(2,"PartialSolve");
|
||||
toc(PartialSolve);
|
||||
|
||||
// We now need to permute everything according this partial reordering: the
|
||||
// delta vector, the global ordering, and the factors we're about to
|
||||
// re-eliminate. The reordered variables are also mentioned in the
|
||||
// orphans and the leftover cached factors.
|
||||
tic(3,"permute global variable index");
|
||||
tic(permute_global_variable_index);
|
||||
variableIndex_.permuteInPlace(partialSolveResult.fullReordering);
|
||||
toc(3,"permute global variable index");
|
||||
tic(4,"permute delta");
|
||||
toc(permute_global_variable_index);
|
||||
tic(permute_delta);
|
||||
delta_ = delta_.permute(partialSolveResult.fullReordering);
|
||||
deltaNewton_ = deltaNewton_.permute(partialSolveResult.fullReordering);
|
||||
RgProd_ = RgProd_.permute(partialSolveResult.fullReordering);
|
||||
toc(4,"permute delta");
|
||||
tic(5,"permute ordering");
|
||||
toc(permute_delta);
|
||||
tic(permute_ordering);
|
||||
ordering_.permuteWithInverse(partialSolveResult.fullReorderingInverse);
|
||||
toc(5,"permute ordering");
|
||||
toc(permute_ordering);
|
||||
if(params_.cacheLinearizedFactors) {
|
||||
tic(6,"permute cached linear");
|
||||
tic(permute_cached_linear);
|
||||
//linearFactors_.permuteWithInverse(partialSolveResult.fullReorderingInverse);
|
||||
FastList<size_t> permuteLinearIndices = getAffectedFactors(affectedAndNewKeys);
|
||||
BOOST_FOREACH(size_t idx, permuteLinearIndices) {
|
||||
linearFactors_[idx]->permuteWithInverse(partialSolveResult.fullReorderingInverse);
|
||||
}
|
||||
toc(6,"permute cached linear");
|
||||
toc(permute_cached_linear);
|
||||
}
|
||||
|
||||
toc(4,"reorder and eliminate");
|
||||
toc(reorder_and_eliminate);
|
||||
|
||||
tic(6,"re-assemble");
|
||||
tic(reassemble);
|
||||
if(partialSolveResult.bayesTree) {
|
||||
assert(!this->root_);
|
||||
this->insert(partialSolveResult.bayesTree);
|
||||
}
|
||||
toc(6,"re-assemble");
|
||||
toc(reassemble);
|
||||
|
||||
// 4. Insert the orphans back into the new Bayes tree.
|
||||
tic(7,"orphans");
|
||||
tic(1,"permute");
|
||||
tic(orphans);
|
||||
tic(permute);
|
||||
BOOST_FOREACH(sharedClique orphan, orphans) {
|
||||
(void)orphan->permuteSeparatorWithInverse(partialSolveResult.fullReorderingInverse);
|
||||
}
|
||||
toc(1,"permute");
|
||||
tic(2,"insert");
|
||||
toc(permute);
|
||||
tic(insert);
|
||||
// add orphans to the bottom of the new tree
|
||||
BOOST_FOREACH(sharedClique orphan, orphans) {
|
||||
// Because the affectedKeysSelector is sorted, the orphan separator keys
|
||||
|
@ -520,10 +520,10 @@ boost::shared_ptr<FastSet<Index> > ISAM2::recalculate(const FastSet<Index>& mark
|
|||
parent->children_ += orphan;
|
||||
orphan->parent_ = parent; // set new parent!
|
||||
}
|
||||
toc(2,"insert");
|
||||
toc(7,"orphans");
|
||||
toc(insert);
|
||||
toc(orphans);
|
||||
|
||||
toc(4,"incremental");
|
||||
toc(incremental);
|
||||
}
|
||||
|
||||
// Root clique variables for detailed results
|
||||
|
@ -565,12 +565,12 @@ ISAM2Result ISAM2::update(
|
|||
|
||||
// Update delta if we need it to check relinearization later
|
||||
if(relinearizeThisStep) {
|
||||
tic(0, "updateDelta");
|
||||
tic(updateDelta);
|
||||
updateDelta(disableReordering);
|
||||
toc(0, "updateDelta");
|
||||
toc(updateDelta);
|
||||
}
|
||||
|
||||
tic(1,"push_back factors");
|
||||
tic(push_back_factors);
|
||||
// Add the new factor indices to the result struct
|
||||
result.newFactorsIndices.resize(newFactors.size());
|
||||
for(size_t i=0; i<newFactors.size(); ++i)
|
||||
|
@ -612,23 +612,23 @@ ISAM2Result ISAM2::update(
|
|||
unusedIndices.insert(unusedIndices.end(), ordering_[key]);
|
||||
}
|
||||
}
|
||||
toc(1,"push_back factors");
|
||||
toc(push_back_factors);
|
||||
|
||||
tic(2,"add new variables");
|
||||
tic(add_new_variables);
|
||||
// 2. Initialize any new variables \Theta_{new} and add \Theta:=\Theta\cup\Theta_{new}.
|
||||
Impl::AddVariables(newTheta, theta_, delta_, deltaNewton_, RgProd_, deltaReplacedMask_, ordering_);
|
||||
// New keys for detailed results
|
||||
if(params_.enableDetailedResults) {
|
||||
inverseOrdering_ = ordering_.invert();
|
||||
BOOST_FOREACH(Key key, newTheta.keys()) { result.detail->variableStatus[key].isNew = true; } }
|
||||
toc(2,"add new variables");
|
||||
toc(add_new_variables);
|
||||
|
||||
tic(3,"evaluate error before");
|
||||
tic(evaluate_error_before);
|
||||
if(params_.evaluateNonlinearError)
|
||||
result.errorBefore.reset(nonlinearFactors_.error(calculateEstimate()));
|
||||
toc(3,"evaluate error before");
|
||||
toc(evaluate_error_before);
|
||||
|
||||
tic(4,"gather involved keys");
|
||||
tic(gather_involved_keys);
|
||||
// 3. Mark linear update
|
||||
FastSet<Index> markedKeys = Impl::IndicesFromFactors(ordering_, newFactors); // Get keys from new factors
|
||||
// Also mark keys involved in removed factors
|
||||
|
@ -651,12 +651,12 @@ ISAM2Result ISAM2::update(
|
|||
if(unusedIndices.find(index) == unusedIndices.end()) // Only add if not unused
|
||||
observedKeys.push_back(index); // Make a copy of these, as we'll soon add to them
|
||||
}
|
||||
toc(4,"gather involved keys");
|
||||
toc(gather_involved_keys);
|
||||
|
||||
// Check relinearization if we're at the nth step, or we are using a looser loop relin threshold
|
||||
FastSet<Index> relinKeys;
|
||||
if (relinearizeThisStep) {
|
||||
tic(5,"gather relinearize keys");
|
||||
tic(gather_relinearize_keys);
|
||||
vector<bool> markedRelinMask(ordering_.nVars(), false);
|
||||
// 4. Mark keys in \Delta above threshold \beta: J=\{\Delta_{j}\in\Delta|\Delta_{j}\geq\beta\}.
|
||||
if(params_.enablePartialRelinearizationCheck)
|
||||
|
@ -674,9 +674,9 @@ ISAM2Result ISAM2::update(
|
|||
// Add the variables being relinearized to the marked keys
|
||||
BOOST_FOREACH(const Index j, relinKeys) { markedRelinMask[j] = true; }
|
||||
markedKeys.insert(relinKeys.begin(), relinKeys.end());
|
||||
toc(5,"gather relinearize keys");
|
||||
toc(gather_relinearize_keys);
|
||||
|
||||
tic(6,"fluid find_all");
|
||||
tic(fluid_find_all);
|
||||
// 5. Mark all cliques that involve marked variables \Theta_{J} and all their ancestors.
|
||||
if (!relinKeys.empty() && this->root()) {
|
||||
// add other cliques that have the marked ones in the separator
|
||||
|
@ -692,38 +692,38 @@ ISAM2Result ISAM2::update(
|
|||
result.detail->variableStatus[inverseOrdering_->at(index)].isRelinearized = true; } }
|
||||
}
|
||||
}
|
||||
toc(6,"fluid find_all");
|
||||
toc(fluid_find_all);
|
||||
|
||||
tic(7,"expmap");
|
||||
tic(expmap);
|
||||
// 6. Update linearization point for marked variables: \Theta_{J}:=\Theta_{J}+\Delta_{J}.
|
||||
if (!relinKeys.empty())
|
||||
Impl::ExpmapMasked(theta_, delta_, ordering_, markedRelinMask, delta_);
|
||||
toc(7,"expmap");
|
||||
toc(expmap);
|
||||
|
||||
result.variablesRelinearized = markedKeys.size();
|
||||
} else {
|
||||
result.variablesRelinearized = 0;
|
||||
}
|
||||
|
||||
tic(8,"linearize new");
|
||||
tic(linearize_new);
|
||||
// 7. Linearize new factors
|
||||
if(params_.cacheLinearizedFactors) {
|
||||
tic(1,"linearize");
|
||||
tic(linearize);
|
||||
FactorGraph<GaussianFactor>::shared_ptr linearFactors = newFactors.linearize(theta_, ordering_);
|
||||
linearFactors_.push_back(*linearFactors);
|
||||
assert(nonlinearFactors_.size() == linearFactors_.size());
|
||||
toc(1,"linearize");
|
||||
toc(linearize);
|
||||
|
||||
tic(2,"augment VI");
|
||||
tic(augment_VI);
|
||||
// Augment the variable index with the new factors
|
||||
variableIndex_.augment(*linearFactors);
|
||||
toc(2,"augment VI");
|
||||
toc(augment_VI);
|
||||
} else {
|
||||
variableIndex_.augment(*newFactors.symbolic(ordering_));
|
||||
}
|
||||
toc(8,"linearize new");
|
||||
toc(linearize_new);
|
||||
|
||||
tic(9,"recalculate");
|
||||
tic(recalculate);
|
||||
// 8. Redo top of Bayes tree
|
||||
// Convert constrained symbols to indices
|
||||
boost::optional<FastMap<Index,int> > constrainedIndices;
|
||||
|
@ -742,25 +742,25 @@ ISAM2Result ISAM2::update(
|
|||
if(replacedKeys) {
|
||||
BOOST_FOREACH(const Index var, *replacedKeys) {
|
||||
deltaReplacedMask_[var] = true; } }
|
||||
toc(9,"recalculate");
|
||||
toc(recalculate);
|
||||
|
||||
// After the top of the tree has been redone and may have index gaps from
|
||||
// unused keys, condense the indices to remove gaps by rearranging indices
|
||||
// in all data structures.
|
||||
if(!unusedKeys.empty()) {
|
||||
tic(10,"remove variables");
|
||||
tic(remove_variables);
|
||||
Impl::RemoveVariables(unusedKeys, root_, theta_, variableIndex_, delta_, deltaNewton_, RgProd_,
|
||||
deltaReplacedMask_, ordering_, Base::nodes_, linearFactors_);
|
||||
toc(10,"remove variables");
|
||||
toc(remove_variables);
|
||||
}
|
||||
result.cliques = this->nodes().size();
|
||||
deltaDoglegUptodate_ = false;
|
||||
deltaUptodate_ = false;
|
||||
|
||||
tic(11,"evaluate error after");
|
||||
tic(evaluate_error_after);
|
||||
if(params_.evaluateNonlinearError)
|
||||
result.errorAfter.reset(nonlinearFactors_.error(calculateEstimate()));
|
||||
toc(11,"evaluate error after");
|
||||
toc(evaluate_error_after);
|
||||
|
||||
return result;
|
||||
}
|
||||
|
@ -773,9 +773,9 @@ void ISAM2::updateDelta(bool forceFullSolve) const {
|
|||
const ISAM2GaussNewtonParams& gaussNewtonParams =
|
||||
boost::get<ISAM2GaussNewtonParams>(params_.optimizationParams);
|
||||
const double effectiveWildfireThreshold = forceFullSolve ? 0.0 : gaussNewtonParams.wildfireThreshold;
|
||||
tic(0, "Wildfire update");
|
||||
tic(Wildfire_update);
|
||||
lastBacksubVariableCount = Impl::UpdateDelta(this->root(), deltaReplacedMask_, delta_, effectiveWildfireThreshold);
|
||||
toc(0, "Wildfire update");
|
||||
toc(Wildfire_update);
|
||||
|
||||
} else if(params_.optimizationParams.type() == typeid(ISAM2DoglegParams)) {
|
||||
// If using Dogleg, do a Dogleg step
|
||||
|
@ -783,16 +783,16 @@ void ISAM2::updateDelta(bool forceFullSolve) const {
|
|||
boost::get<ISAM2DoglegParams>(params_.optimizationParams);
|
||||
|
||||
// Do one Dogleg iteration
|
||||
tic(1, "Dogleg Iterate");
|
||||
tic(Dogleg_Iterate);
|
||||
DoglegOptimizerImpl::IterationResult doglegResult(DoglegOptimizerImpl::Iterate(
|
||||
*doglegDelta_, doglegParams.adaptationMode, *this, nonlinearFactors_, theta_, ordering_, nonlinearFactors_.error(theta_), doglegParams.verbose));
|
||||
toc(1, "Dogleg Iterate");
|
||||
toc(Dogleg_Iterate);
|
||||
|
||||
tic(2, "Copy dx_d");
|
||||
tic(Copy_dx_d);
|
||||
// Update Delta and linear step
|
||||
doglegDelta_ = doglegResult.Delta;
|
||||
delta_ = doglegResult.dx_d; // Copy the VectorValues containing with the linear solution
|
||||
toc(2, "Copy dx_d");
|
||||
toc(Copy_dx_d);
|
||||
}
|
||||
|
||||
deltaUptodate_ = true;
|
||||
|
@ -802,16 +802,16 @@ void ISAM2::updateDelta(bool forceFullSolve) const {
|
|||
Values ISAM2::calculateEstimate() const {
|
||||
// We use ExpmapMasked here instead of regular expmap because the former
|
||||
// handles Permuted<VectorValues>
|
||||
tic(1, "Copy Values");
|
||||
tic(Copy_Values);
|
||||
Values ret(theta_);
|
||||
toc(1, "Copy Values");
|
||||
tic(2, "getDelta");
|
||||
toc(Copy_Values);
|
||||
tic(getDelta);
|
||||
const VectorValues& delta(getDelta());
|
||||
toc(2, "getDelta");
|
||||
tic(3, "Expmap");
|
||||
toc(getDelta);
|
||||
tic(Expmap);
|
||||
vector<bool> mask(ordering_.nVars(), true);
|
||||
Impl::ExpmapMasked(ret, delta, ordering_, mask);
|
||||
toc(3, "Expmap");
|
||||
toc(Expmap);
|
||||
return ret;
|
||||
}
|
||||
|
||||
|
@ -831,9 +831,9 @@ const VectorValues& ISAM2::getDelta() const {
|
|||
|
||||
/* ************************************************************************* */
|
||||
VectorValues optimize(const ISAM2& isam) {
|
||||
tic(0, "allocateVectorValues");
|
||||
tic(allocateVectorValues);
|
||||
VectorValues delta = *allocateVectorValues(isam);
|
||||
toc(0, "allocateVectorValues");
|
||||
toc(allocateVectorValues);
|
||||
optimizeInPlace(isam, delta);
|
||||
return delta;
|
||||
}
|
||||
|
@ -842,7 +842,7 @@ VectorValues optimize(const ISAM2& isam) {
|
|||
void optimizeInPlace(const ISAM2& isam, VectorValues& delta) {
|
||||
// We may need to update the solution calcaulations
|
||||
if(!isam.deltaDoglegUptodate_) {
|
||||
tic(1, "UpdateDoglegDeltas");
|
||||
tic(UpdateDoglegDeltas);
|
||||
double wildfireThreshold = 0.0;
|
||||
if(isam.params().optimizationParams.type() == typeid(ISAM2GaussNewtonParams))
|
||||
wildfireThreshold = boost::get<ISAM2GaussNewtonParams>(isam.params().optimizationParams).wildfireThreshold;
|
||||
|
@ -852,19 +852,19 @@ void optimizeInPlace(const ISAM2& isam, VectorValues& delta) {
|
|||
assert(false);
|
||||
ISAM2::Impl::UpdateDoglegDeltas(isam, wildfireThreshold, isam.deltaReplacedMask_, isam.deltaNewton_, isam.RgProd_);
|
||||
isam.deltaDoglegUptodate_ = true;
|
||||
toc(1, "UpdateDoglegDeltas");
|
||||
toc(UpdateDoglegDeltas);
|
||||
}
|
||||
|
||||
tic(2, "copy delta");
|
||||
tic(copy_delta);
|
||||
delta = isam.deltaNewton_;
|
||||
toc(2, "copy delta");
|
||||
toc(copy_delta);
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
VectorValues optimizeGradientSearch(const ISAM2& isam) {
|
||||
tic(0, "Allocate VectorValues");
|
||||
tic(Allocate_VectorValues);
|
||||
VectorValues grad = *allocateVectorValues(isam);
|
||||
toc(0, "Allocate VectorValues");
|
||||
toc(Allocate_VectorValues);
|
||||
|
||||
optimizeGradientSearchInPlace(isam, grad);
|
||||
|
||||
|
@ -875,7 +875,7 @@ VectorValues optimizeGradientSearch(const ISAM2& isam) {
|
|||
void optimizeGradientSearchInPlace(const ISAM2& isam, VectorValues& grad) {
|
||||
// We may need to update the solution calcaulations
|
||||
if(!isam.deltaDoglegUptodate_) {
|
||||
tic(1, "UpdateDoglegDeltas");
|
||||
tic(UpdateDoglegDeltas);
|
||||
double wildfireThreshold = 0.0;
|
||||
if(isam.params().optimizationParams.type() == typeid(ISAM2GaussNewtonParams))
|
||||
wildfireThreshold = boost::get<ISAM2GaussNewtonParams>(isam.params().optimizationParams).wildfireThreshold;
|
||||
|
@ -885,25 +885,25 @@ void optimizeGradientSearchInPlace(const ISAM2& isam, VectorValues& grad) {
|
|||
assert(false);
|
||||
ISAM2::Impl::UpdateDoglegDeltas(isam, wildfireThreshold, isam.deltaReplacedMask_, isam.deltaNewton_, isam.RgProd_);
|
||||
isam.deltaDoglegUptodate_ = true;
|
||||
toc(1, "UpdateDoglegDeltas");
|
||||
toc(UpdateDoglegDeltas);
|
||||
}
|
||||
|
||||
tic(2, "Compute Gradient");
|
||||
tic(Compute_Gradient);
|
||||
// Compute gradient (call gradientAtZero function, which is defined for various linear systems)
|
||||
gradientAtZero(isam, grad);
|
||||
double gradientSqNorm = grad.dot(grad);
|
||||
toc(2, "Compute Gradient");
|
||||
toc(Compute_Gradient);
|
||||
|
||||
tic(3, "Compute minimizing step size");
|
||||
tic(Compute_minimizing_step_size);
|
||||
// Compute minimizing step size
|
||||
double RgNormSq = isam.RgProd_.vector().squaredNorm();
|
||||
double step = -gradientSqNorm / RgNormSq;
|
||||
toc(3, "Compute minimizing step size");
|
||||
toc(Compute_minimizing_step_size);
|
||||
|
||||
tic(4, "Compute point");
|
||||
tic(Compute_point);
|
||||
// Compute steepest descent point
|
||||
grad.vector() *= step;
|
||||
toc(4, "Compute point");
|
||||
toc(Compute_point);
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
|
|
|
@@ -71,6 +71,8 @@ void LevenbergMarquardtParams::print(const std::string& str) const {
/* ************************************************************************* */
void LevenbergMarquardtOptimizer::iterate() {

tic(LM_iterate);

// Linearize graph
GaussianFactorGraph::shared_ptr linear = graph_.linearize(state_.values, *params_.ordering);

@@ -85,6 +87,7 @@ void LevenbergMarquardtOptimizer::iterate() {

// Add prior-factors
// TODO: replace this dampening with a backsubstitution approach
tic(damp);
GaussianFactorGraph dampedSystem(*linear);
{
double sigma = 1.0 / std::sqrt(state_.lambda);

@@ -99,6 +102,7 @@ void LevenbergMarquardtOptimizer::iterate() {
dampedSystem.push_back(prior);
}
}
toc(damp);
if (lmVerbosity >= LevenbergMarquardtParams::DAMPED) dampedSystem.print("damped");

// Try solving

@@ -110,10 +114,14 @@ void LevenbergMarquardtOptimizer::iterate() {
if (lmVerbosity >= LevenbergMarquardtParams::TRYDELTA) delta.print("delta");

// update values
tic(retract);
Values newValues = state_.values.retract(delta, *params_.ordering);
toc(retract);

// create new optimization state with more adventurous lambda
tic(compute_error);
double error = graph_.error(newValues);
toc(compute_error);

if (lmVerbosity >= LevenbergMarquardtParams::TRYLAMBDA) cout << "next error = " << error << endl;

@@ -44,6 +44,7 @@ void NonlinearFactorGraph::print(const std::string& str, const KeyFormatter& key

/* ************************************************************************* */
double NonlinearFactorGraph::error(const Values& c) const {
tic(NonlinearFactorGraph_error);
double total_error = 0.;
// iterate over all the factors_ to accumulate the log probabilities
BOOST_FOREACH(const sharedFactor& factor, this->factors_) {

@@ -65,7 +66,9 @@ FastSet<Key> NonlinearFactorGraph::keys() const {

/* ************************************************************************* */
Ordering::shared_ptr NonlinearFactorGraph::orderingCOLAMD(
const Values& config) const {
const Values& config) const
{
tic(NonlinearFactorGraph_orderingCOLAMD);

// Create symbolic graph and initial (iterator) ordering
SymbolicFactorGraph::shared_ptr symbolic;

@@ -88,7 +91,10 @@ Ordering::shared_ptr NonlinearFactorGraph::orderingCOLAMD(

/* ************************************************************************* */
Ordering::shared_ptr NonlinearFactorGraph::orderingCOLAMDConstrained(const Values& config,
const std::map<Key, int>& constraints) const {
const std::map<Key, int>& constraints) const
{
tic(NonlinearFactorGraph_orderingCOLAMDConstrained);

// Create symbolic graph and initial (iterator) ordering
SymbolicFactorGraph::shared_ptr symbolic;
Ordering::shared_ptr ordering;

@@ -116,6 +122,8 @@ Ordering::shared_ptr NonlinearFactorGraph::orderingCOLAMDConstrained(const Value

/* ************************************************************************* */
SymbolicFactorGraph::shared_ptr NonlinearFactorGraph::symbolic(const Ordering& ordering) const {
tic(NonlinearFactorGraph_symbolic_from_Ordering);

// Generate the symbolic factor graph
SymbolicFactorGraph::shared_ptr symbolicfg(new SymbolicFactorGraph);
symbolicfg->reserve(this->size());

@@ -132,7 +140,10 @@ SymbolicFactorGraph::shared_ptr NonlinearFactorGraph::symbolic(const Ordering& o

/* ************************************************************************* */
pair<SymbolicFactorGraph::shared_ptr, Ordering::shared_ptr> NonlinearFactorGraph::symbolic(
const Values& config) const {
const Values& config) const
{
tic(NonlinearFactorGraph_symbolic_from_Values);

// Generate an initial key ordering in iterator order
Ordering::shared_ptr ordering(config.orderingArbitrary());
return make_pair(symbolic(*ordering), ordering);

@@ -140,7 +151,9 @@ pair<SymbolicFactorGraph::shared_ptr, Ordering::shared_ptr> NonlinearFactorGraph

/* ************************************************************************* */
GaussianFactorGraph::shared_ptr NonlinearFactorGraph::linearize(
const Values& config, const Ordering& ordering) const {
const Values& config, const Ordering& ordering) const
{
tic(NonlinearFactorGraph_linearize);

// create an empty linear FG
GaussianFactorGraph::shared_ptr linearFG(new GaussianFactorGraph);

@@ -54,12 +54,14 @@ void SuccessiveLinearizationParams::print(const std::string& str) const {
}

VectorValues solveGaussianFactorGraph(const GaussianFactorGraph &gfg, const SuccessiveLinearizationParams &params) {
tic(solveGaussianFactorGraph);
VectorValues delta;
if ( params.isMultifrontal() ) {
if (params.isMultifrontal()) {
delta = GaussianJunctionTree(gfg).optimize(params.getEliminationFunction());
}
else if ( params.isSequential() ) {
delta = gtsam::optimize(*EliminationTree<GaussianFactor>::Create(gfg)->eliminate(params.getEliminationFunction()));
} else if(params.isSequential()) {
const boost::shared_ptr<GaussianBayesNet> gbn =
EliminationTree<GaussianFactor>::Create(gfg)->eliminate(params.getEliminationFunction());
delta = gtsam::optimize(*gbn);
}
else if ( params.isCG() ) {
if ( !params.iterativeParams ) throw std::runtime_error("solveGaussianFactorGraph: cg parameter has to be assigned ...");

@@ -253,12 +253,12 @@ namespace gtsam {

/** Eliminate, return a Bayes net */
DiscreteBayesNet::shared_ptr Scheduler::eliminate() const {
tic(1, "my_solver");
tic(my_solver);
DiscreteSequentialSolver solver(*this);
toc(1, "my_solver");
tic(2, "my_eliminate");
toc(my_solver);
tic(my_eliminate);
DiscreteBayesNet::shared_ptr chordal = solver.eliminate();
toc(2, "my_eliminate");
toc(my_eliminate);
return chordal;
}

@@ -273,9 +273,9 @@ namespace gtsam {
(*it)->print(student.name_);
}

tic(3, "my_optimize");
tic(my_optimize);
sharedValues mpe = optimize(*chordal);
toc(3, "my_optimize");
toc(my_optimize);
return mpe;
}

@@ -117,9 +117,9 @@ void runLargeExample() {
// Do exact inference
// SETDEBUG("timing-verbose", true);
SETDEBUG("DiscreteConditional::DiscreteConditional", true);
tic(2, "large");
tic(large);
DiscreteFactor::sharedValues MPE = scheduler.optimalAssignment();
toc(2, "large");
toc(large);
tictoc_finishedIteration();
tictoc_print();
scheduler.printAssignment(MPE);

@@ -124,9 +124,9 @@ void runLargeExample() {
SETDEBUG("DiscreteConditional::DiscreteConditional", true);
#define SAMPLE
#ifdef SAMPLE
tic(2, "large");
tic(large);
DiscreteBayesNet::shared_ptr chordal = scheduler.eliminate();
toc(2, "large");
toc(large);
tictoc_finishedIteration();
tictoc_print();
for (size_t i=0;i<100;i++) {

@@ -143,9 +143,9 @@ void runLargeExample() {
}
}
#else
tic(2, "large");
tic(large);
DiscreteFactor::sharedValues MPE = scheduler.optimalAssignment();
toc(2, "large");
toc(large);
tictoc_finishedIteration();
tictoc_print();
scheduler.printAssignment(MPE);

@@ -125,9 +125,9 @@ TEST( schedulingExample, test)
//product.dot("scheduling", false);

// Do exact inference
tic(1, "small");
tic(small);
DiscreteFactor::sharedValues MPE = s.optimalAssignment();
toc(1, "small");
toc(small);

// print MPE, commented out as unit tests don't print
// s.printAssignment(MPE);

@@ -34,15 +34,15 @@ int main(int argc, char *argv[]) {

cout << "Optimizing..." << endl;

tic_(1, "Create optimizer");
tic_(Create_optimizer);
LevenbergMarquardtOptimizer optimizer(graph, initial);
toc_(1, "Create optimizer");
toc_(Create_optimizer);
tictoc_print_();
double lastError = optimizer.error();
do {
tic_(2, "Iterate optimizer");
tic_(Iterate_optimizer);
optimizer.iterate();
toc_(2, "Iterate optimizer");
toc_(Iterate_optimizer);
tictoc_finishedIteration_();
tictoc_print_();
cout << "Error: " << optimizer.error() << ", lambda: " << optimizer.lambda() << endl;

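A minimal usage sketch of the new call style seen in the optimizer loop above. Only tic_/toc_, tic/toc, tictoc_finishedIteration_ and tictoc_print_ are taken from the code in this commit; the nesting arrangement and the busy-work being timed are assumptions for illustration.

#include <gtsam/base/timing.h>
#include <cstddef>
#include <vector>

int main() {
  for (int iter = 0; iter < 3; ++iter) {
    tic_(whole_iteration);            // outer timer for the whole pass
    tic(fill);
    std::vector<double> v(1000000, 1.0);   // assumed stand-in workload
    toc(fill);
    tic(sum);
    double total = 0.0;
    for (std::size_t i = 0; i < v.size(); ++i) total += v[i];
    (void)total;
    toc(sum);
    toc_(whole_iteration);
    tictoc_finishedIteration_();      // close out this pass, as in the loop above
  }
  tictoc_print_();                    // print the accumulated timings
  return 0;
}
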
@ -1,81 +0,0 @@
|
|||
/* ----------------------------------------------------------------------------
|
||||
|
||||
* GTSAM Copyright 2010, Georgia Tech Research Corporation,
|
||||
* Atlanta, Georgia 30332-0415
|
||||
* All Rights Reserved
|
||||
* Authors: Frank Dellaert, et al. (see THANKS for the full author list)
|
||||
|
||||
* See LICENSE for the license information
|
||||
|
||||
* -------------------------------------------------------------------------- */
|
||||
|
||||
/**
|
||||
* @file timeSequentialOnDataset.cpp
|
||||
* @author Richard Roberts
|
||||
* @date Oct 7, 2010
|
||||
*/
|
||||
|
||||
#include <gtsam/base/timing.h>
|
||||
#include <gtsam/slam/dataset.h>
|
||||
#include <gtsam/linear/GaussianFactorGraph.h>
|
||||
#include <gtsam/linear/GaussianMultifrontalSolver.h>
|
||||
|
||||
using namespace std;
|
||||
using namespace gtsam;
|
||||
using namespace boost;
|
||||
|
||||
int main(int argc, char *argv[]) {
|
||||
|
||||
string datasetname;
|
||||
bool soft_prior = true;
|
||||
if(argc > 1)
|
||||
datasetname = argv[1];
|
||||
else
|
||||
datasetname = "intel";
|
||||
|
||||
// check if there should be a constraint
|
||||
if (argc == 3 && string(argv[2]).compare("-c") == 0)
|
||||
soft_prior = false;
|
||||
|
||||
// find the number of trials - default is 10
|
||||
size_t nrTrials = 10;
|
||||
if (argc == 3 && string(argv[2]).compare("-c") != 0)
|
||||
nrTrials = strtoul(argv[2], NULL, 10);
|
||||
else if (argc == 4)
|
||||
nrTrials = strtoul(argv[3], NULL, 10);
|
||||
|
||||
pair<boost::shared_ptr<pose2SLAM::Graph>, boost::shared_ptr<Values> > data = load2D(dataset(datasetname));
|
||||
|
||||
// Add a prior on the first pose
|
||||
if (soft_prior)
|
||||
data.first->addPosePrior(0, Pose2(), noiseModel::Isotropic::Sigma(Pose2::Dim(), 0.0005));
|
||||
else
|
||||
data.first->addPoseConstraint(0, Pose2());
|
||||
|
||||
tic_(1, "order");
|
||||
Ordering::shared_ptr ordering(data.first->orderingCOLAMD(*data.second));
|
||||
toc_(1, "order");
|
||||
tictoc_print_();
|
||||
|
||||
tic_(2, "linearize");
|
||||
GaussianFactorGraph::shared_ptr gfg(data.first->linearize(*data.second, *ordering)->dynamicCastFactors<GaussianFactorGraph>());
|
||||
toc_(2, "linearize");
|
||||
tictoc_print_();
|
||||
|
||||
for(size_t trial = 0; trial < nrTrials; ++trial) {
|
||||
|
||||
tic_(3, "solve");
|
||||
tic(1, "construct solver");
|
||||
GaussianMultifrontalSolver solver(*gfg);
|
||||
toc(1, "construct solver");
|
||||
tic(2, "optimize");
|
||||
VectorValues soln(*solver.optimize());
|
||||
toc(2, "optimize");
|
||||
toc_(3, "solve");
|
||||
|
||||
tictoc_print_();
|
||||
}
|
||||
|
||||
return 0;
|
||||
|
||||
}
|
|
@ -1,81 +0,0 @@
|
|||
/* ----------------------------------------------------------------------------
|
||||
|
||||
* GTSAM Copyright 2010, Georgia Tech Research Corporation,
|
||||
* Atlanta, Georgia 30332-0415
|
||||
* All Rights Reserved
|
||||
* Authors: Frank Dellaert, et al. (see THANKS for the full author list)
|
||||
|
||||
* See LICENSE for the license information
|
||||
|
||||
* -------------------------------------------------------------------------- */
|
||||
|
||||
/**
|
||||
* @file timeSequentialOnDataset.cpp
|
||||
* @author Richard Roberts
|
||||
* @date Oct 7, 2010
|
||||
*/
|
||||
|
||||
#include <gtsam/base/timing.h>
|
||||
#include <gtsam/slam/dataset.h>
|
||||
#include <gtsam/linear/GaussianFactorGraph.h>
|
||||
#include <gtsam/linear/GaussianSequentialSolver.h>
|
||||
|
||||
using namespace std;
|
||||
using namespace gtsam;
|
||||
using namespace boost;
|
||||
|
||||
int main(int argc, char *argv[]) {
|
||||
|
||||
string datasetname;
|
||||
bool soft_prior = true;
|
||||
if(argc > 1)
|
||||
datasetname = argv[1];
|
||||
else
|
||||
datasetname = "intel";
|
||||
|
||||
// check if there should be a constraint
|
||||
if (argc == 3 && string(argv[2]).compare("-c") == 0)
|
||||
soft_prior = false;
|
||||
|
||||
// find the number of trials - default is 10
|
||||
size_t nrTrials = 10;
|
||||
if (argc == 3 && string(argv[2]).compare("-c") != 0)
|
||||
nrTrials = strtoul(argv[2], NULL, 10);
|
||||
else if (argc == 4)
|
||||
nrTrials = strtoul(argv[3], NULL, 10);
|
||||
|
||||
pair<boost::shared_ptr<pose2SLAM::Graph>, boost::shared_ptr<Values> > data = load2D(dataset(datasetname));
|
||||
|
||||
// Add a prior on the first pose
|
||||
if (soft_prior)
|
||||
data.first->addPosePrior(0, Pose2(), noiseModel::Isotropic::Sigma(Pose2::Dim(), 0.0005));
|
||||
else
|
||||
data.first->addPoseConstraint(0, Pose2());
|
||||
|
||||
tic_(1, "order");
|
||||
Ordering::shared_ptr ordering(data.first->orderingCOLAMD(*data.second));
|
||||
toc_(1, "order");
|
||||
tictoc_print_();
|
||||
|
||||
tic_(2, "linearize");
|
||||
GaussianFactorGraph::shared_ptr gfg(data.first->linearize(*data.second, *ordering)->dynamicCastFactors<GaussianFactorGraph>());
|
||||
toc_(2, "linearize");
|
||||
tictoc_print_();
|
||||
|
||||
for(size_t trial = 0; trial < nrTrials; ++trial) {
|
||||
|
||||
tic_(3, "solve");
|
||||
tic(1, "construct solver");
|
||||
GaussianSequentialSolver solver(*gfg);
|
||||
toc(1, "construct solver");
|
||||
tic(2, "optimize");
|
||||
VectorValues soln(*solver.optimize());
|
||||
toc(2, "optimize");
|
||||
toc_(3, "solve");
|
||||
|
||||
tictoc_print_();
|
||||
}
|
||||
|
||||
return 0;
|
||||
|
||||
}