diff --git a/gtsam/inference/EliminationTree-inl.h b/gtsam/inference/EliminationTree-inl.h
index a4b0b9294..7464b5a8a 100644
--- a/gtsam/inference/EliminationTree-inl.h
+++ b/gtsam/inference/EliminationTree-inl.h
@@ -157,9 +157,11 @@ EliminationTree<FACTOR>::Create(const FactorGraph<DERIVEDFACTOR>& factorGraph, c
   BOOST_FOREACH(const typename DERIVEDFACTOR::shared_ptr& derivedFactor, factorGraph) {
     // Here we upwards-cast to the factor type of this EliminationTree.  This
     // allows performing symbolic elimination on, for example, GaussianFactors.
-    sharedFactor factor(derivedFactor);
-    Index j = *std::min_element(factor->begin(), factor->end());
-    trees[j]->add(factor);
+    if(derivedFactor) {
+      sharedFactor factor(derivedFactor);
+      Index j = *std::min_element(factor->begin(), factor->end());
+      trees[j]->add(factor);
+    }
   }
   toc(3, "hang factors");
diff --git a/gtsam/inference/JunctionTree-inl.h b/gtsam/inference/JunctionTree-inl.h
index e155862db..38ac7d47e 100644
--- a/gtsam/inference/JunctionTree-inl.h
+++ b/gtsam/inference/JunctionTree-inl.h
@@ -85,14 +85,12 @@ namespace gtsam {
   // Two stages - first build an array of the lowest-ordered variable in each
   // factor and find the last variable to be eliminated.
-  vector<Index> lowestOrdered(fg.size());
+  vector<Index> lowestOrdered(fg.size(), numeric_limits<Index>::max());
   Index maxVar = 0;
   for(size_t i=0; i<fg.size(); ++i) {
     typename FG::FactorType::const_iterator min = std::min_element(fg[i]->begin(), fg[i]->end());
-    if(min == fg[i]->end())
-      lowestOrdered[i] = numeric_limits<Index>::max();
-    else {
+    if(min != fg[i]->end()) {
       lowestOrdered[i] = *min;
       maxVar = std::max(maxVar, *min);
     }
diff --git a/gtsam/nonlinear/NonlinearFactorGraph-inl.h b/gtsam/nonlinear/NonlinearFactorGraph-inl.h
index a968e1327..254343401 100644
--- a/gtsam/nonlinear/NonlinearFactorGraph-inl.h
+++ b/gtsam/nonlinear/NonlinearFactorGraph-inl.h
@@ -50,8 +50,10 @@ namespace gtsam {
   template<class VALUES>
   Vector NonlinearFactorGraph<VALUES>::unwhitenedError(const VALUES& c) const {
     list<Vector> errors;
-    BOOST_FOREACH(const sharedFactor& factor, this->factors_)
-      errors.push_back(factor->unwhitenedError(c));
+    BOOST_FOREACH(const sharedFactor& factor, this->factors_) {
+      if(factor)
+        errors.push_back(factor->unwhitenedError(c));
+    }
     return concatVectors(errors);
   }
@@ -60,8 +62,10 @@ namespace gtsam {
   double NonlinearFactorGraph<VALUES>::error(const VALUES& c) const {
     double total_error = 0.;
     // iterate over all the factors_ to accumulate the log probabilities
-    BOOST_FOREACH(const sharedFactor& factor, this->factors_)
-      total_error += factor->error(c);
+    BOOST_FOREACH(const sharedFactor& factor, this->factors_) {
+      if(factor)
+        total_error += factor->error(c);
+    }
     return total_error;
   }
@@ -70,8 +74,10 @@ namespace gtsam {
   template<class VALUES>
   std::set<Symbol> NonlinearFactorGraph<VALUES>::keys() const {
     std::set<Symbol> keys;
-    BOOST_FOREACH(const sharedFactor& factor, this->factors_)
-      keys.insert(factor->begin(), factor->end());
+    BOOST_FOREACH(const sharedFactor& factor, this->factors_) {
+      if(factor)
+        keys.insert(factor->begin(), factor->end());
+    }
     return keys;
   }
@@ -112,8 +118,12 @@ namespace gtsam {
     SymbolicFactorGraph::shared_ptr symbolicfg(new SymbolicFactorGraph);
     symbolicfg->reserve(this->size());
-    BOOST_FOREACH(const sharedFactor& factor, this->factors_)
-      symbolicfg->push_back(factor->symbolic(ordering));
+    BOOST_FOREACH(const sharedFactor& factor, this->factors_) {
+      if(factor)
+        symbolicfg->push_back(factor->symbolic(ordering));
+      else
+        symbolicfg->push_back(SymbolicFactorGraph::sharedFactor());
+    }
     return symbolicfg;
   }
@@ -138,11 +148,13 @@ namespace gtsam {
     linearFG->reserve(this->size());

     // linearize all factors
-    BOOST_FOREACH(const sharedFactor& factor, this->factors_)
-    {
-      JacobianFactor::shared_ptr lf = factor->linearize(config, ordering);
-      if (lf) linearFG->push_back(lf);
-    }
+    BOOST_FOREACH(const sharedFactor& factor, this->factors_) {
+      if(factor) {
+        JacobianFactor::shared_ptr lf = factor->linearize(config, ordering);
+        if (lf) linearFG->push_back(lf);
+      } else
+        linearFG->push_back(FactorGraph<JacobianFactor>::sharedFactor());
+    }
     return linearFG;
   }
diff --git a/gtsam/nonlinear/NonlinearOptimizer-inl.h b/gtsam/nonlinear/NonlinearOptimizer-inl.h
index e59a8596e..7ce56ab21 100644
--- a/gtsam/nonlinear/NonlinearOptimizer-inl.h
+++ b/gtsam/nonlinear/NonlinearOptimizer-inl.h
@@ -216,7 +216,7 @@ namespace gtsam {
     if (verbosity >= Parameters::LAMBDA) cout << "lambda = " << lambda << endl;

     // linearize all factors once
-    boost::shared_ptr<L> linear = graph_->linearize(*values_, *ordering_)->template dynamicCastFactors<L>();
+    boost::shared_ptr<L> linear(new L(*graph_->linearize(*values_, *ordering_)));

     if (verbosity >= Parameters::LINEAR) linear->print("linear");
diff --git a/gtsam/nonlinear/NonlinearOptimizer.h b/gtsam/nonlinear/NonlinearOptimizer.h
index 30ef4f91e..b8a367a48 100644
--- a/gtsam/nonlinear/NonlinearOptimizer.h
+++ b/gtsam/nonlinear/NonlinearOptimizer.h
@@ -208,7 +208,7 @@ public:
    * Return a linearized graph at the current graph/values/ordering
    */
   shared_linear linearize() const {
-    return graph_->linearize(*values_, *ordering_)->template dynamicCastFactors<L>();
+    return shared_linear(new L(*graph_->linearize(*values_, *ordering_)));
   }

   /**
diff --git a/tests/testNonlinearOptimizer.cpp b/tests/testNonlinearOptimizer.cpp
index 4eeac4965..265b070dc 100644
--- a/tests/testNonlinearOptimizer.cpp
+++ b/tests/testNonlinearOptimizer.cpp
@@ -257,6 +257,46 @@ TEST( NonlinearOptimizer, Factorization )
   CHECK(assert_equal(expected, *optimized.values(), 1e-5));
 }

+/* ************************************************************************* */
+TEST_UNSAFE(NonlinearOptimizer, NullFactor) {
+
+  shared_ptr<example::Graph> fg(new example::Graph(
+      example::createReallyNonlinearFactorGraph()));
+
+  // Add null factor
+  fg->push_back(example::Graph::sharedFactor());
+
+  // test error at minimum
+  Point2 xstar(0,0);
+  example::Values cstar;
+  cstar.insert(simulated2D::PoseKey(1), xstar);
+  DOUBLES_EQUAL(0.0,fg->error(cstar),0.0);
+
+  // test error at initial = [(1-cos(3))^2 + (sin(3))^2]*50 =
+  Point2 x0(3,3);
+  boost::shared_ptr<example::Values> c0(new example::Values);
+  c0->insert(simulated2D::PoseKey(1), x0);
+  DOUBLES_EQUAL(199.0,fg->error(*c0),1e-3);
+
+  // optimize parameters
+  shared_ptr<Ordering> ord(new Ordering());
+  ord->push_back("x1");
+
+  // initial optimization state is the same in both cases tested
+  boost::shared_ptr<NonlinearOptimizationParameters> params = boost::make_shared<NonlinearOptimizationParameters>();
+  params->relDecrease_ = 1e-5;
+  params->absDecrease_ = 1e-5;
+  Optimizer optimizer(fg, c0, ord, params);
+
+  // Gauss-Newton
+  Optimizer actual1 = optimizer.gaussNewton();
+  DOUBLES_EQUAL(0,fg->error(*(actual1.values())),tol);
+
+  // Levenberg-Marquardt
+  Optimizer actual2 = optimizer.levenbergMarquardt();
+  DOUBLES_EQUAL(0,fg->error(*(actual2.values())),tol);
+}
+
 ///* ************************************************************************* */
 // SL-FIX TEST( NonlinearOptimizer, SubgraphSolver )
 //{
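
The patch above follows one convention throughout: a factor graph may hold null shared pointers, accumulation passes (error, unwhitenedError, keys) skip them, and index-preserving conversions (symbolic, linearize) emit an equally-null placeholder so factor slots stay aligned. The standalone C++ sketch below illustrates only that convention with plain std::shared_ptr containers; the Factor and SymbolicFactor types and the function names are illustrative stand-ins, not GTSAM classes.

// Minimal illustration (not GTSAM code) of the null-factor convention adopted in
// the patch: containers of shared_ptr may hold empty pointers, accumulation-style
// passes skip them, and index-preserving conversions push back a null placeholder
// so factor indices stay aligned between the two graphs.
#include <iostream>
#include <memory>
#include <vector>

struct Factor {
  double residual;                               // stand-in for a real error term
  double error() const { return residual * residual; }
};

struct SymbolicFactor {};                        // stand-in for a symbolic counterpart

using SharedFactor = std::shared_ptr<Factor>;
using SharedSymbolic = std::shared_ptr<SymbolicFactor>;

// Skip null slots when accumulating, mirroring the error()/keys() changes.
double totalError(const std::vector<SharedFactor>& factors) {
  double total = 0.0;
  for (const SharedFactor& f : factors)
    if (f) total += f->error();
  return total;
}

// Preserve slot indices when converting, mirroring the symbolic()/linearize()
// changes: a null input factor becomes a null output factor, not a dropped one.
std::vector<SharedSymbolic> toSymbolic(const std::vector<SharedFactor>& factors) {
  std::vector<SharedSymbolic> out;
  out.reserve(factors.size());
  for (const SharedFactor& f : factors)
    out.push_back(f ? std::make_shared<SymbolicFactor>() : SharedSymbolic());
  return out;
}

int main() {
  std::vector<SharedFactor> graph;
  graph.push_back(std::make_shared<Factor>(Factor{3.0}));
  graph.push_back(SharedFactor());               // null factor, as in the new test
  graph.push_back(std::make_shared<Factor>(Factor{4.0}));

  std::cout << "error = " << totalError(graph) << "\n";                 // prints 25
  std::cout << "symbolic size = " << toSymbolic(graph).size() << "\n";  // prints 3; slot 1 stays null
  return 0;
}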