diff --git a/cpp/SQPOptimizer-inl.h b/cpp/SQPOptimizer-inl.h
index b245442c8..c222cda4c 100644
--- a/cpp/SQPOptimizer-inl.h
+++ b/cpp/SQPOptimizer-inl.h
@@ -6,6 +6,8 @@
 
 #pragma once
 
+#include <boost/tuple/tuple.hpp>
+#include "GaussianFactorGraph.h"
 #include "SQPOptimizer.h"
 
 using namespace std;
@@ -30,4 +32,50 @@ SQPOptimizer<G, C>::SQPOptimizer(const G& graph, const Ordering& ordering,
 }
 
+/* **************************************************************** */
+template <class G, class C>
+SQPOptimizer<G, C> SQPOptimizer<G, C>::iterate(Verbosity v) const {
+  bool verbose = v == SQPOptimizer<G, C>::FULL;
+
+  // local typedefs
+  typedef typename G::const_iterator const_iterator;
+  typedef NonlinearConstraint<C> NLConstraint;
+  typedef boost::shared_ptr<NLConstraint> shared_c;
+
+  // linear graph assembled from the linearized factors and constraints
+  GaussianFactorGraph fg;
+
+  // iterate over all factors and linearize
+  for (const_iterator factor = graph_->begin(); factor != graph_->end(); factor++) {
+    const shared_c constraint = boost::shared_dynamic_cast<NLConstraint>(*factor);
+    if (!constraint) {
+      // a regular factor: linearize using the default linearization
+      GaussianFactor::shared_ptr f = (*factor)->linearize(*config_);
+      if (verbose) f->print("Regular Factor");
+      fg.push_back(f);
+    } else {
+      // a constraint: linearize using the constraint method (two configs)
+      GaussianFactor::shared_ptr f, c;
+      boost::tie(f, c) = constraint->linearize(*config_, *lagrange_config_);
+      if (verbose) f->print("Constrained Factor");
+      if (verbose) c->print("Constraint");
+      fg.push_back(f);
+      fg.push_back(c);
+    }
+  }
+  if (verbose) fg.print("Before Optimization");
+
+  // optimize the linear graph to get the full delta config
+  VectorConfig delta = fg.optimize(*ordering_).scale(-1.0);
+
+  if (verbose) delta.print("Delta Config");
+
+  // update both state variables
+  shared_config newConfig(new C(config_->exmap(delta)));
+  shared_vconfig newLamConfig(new VectorConfig(lagrange_config_->exmap(delta)));
+
+  // construct a new optimizer with the updated configs
+  return SQPOptimizer<G, C>(*graph_, *ordering_, newConfig, newLamConfig);
+}
+
 }
diff --git a/cpp/SQPOptimizer.h b/cpp/SQPOptimizer.h
index 31f929831..cd1f81cda 100644
--- a/cpp/SQPOptimizer.h
+++ b/cpp/SQPOptimizer.h
@@ -19,6 +19,12 @@ template <class G, class C>
 class SQPOptimizer {
 
 public:
+  // verbosity level for iterate()
+  typedef enum {
+    SILENT,
+    FULL
+  } Verbosity;
+
   // useful for storing configurations
   typedef boost::shared_ptr<C> shared_config;
   typedef boost::shared_ptr<VectorConfig> shared_vconfig;
@@ -32,6 +38,7 @@ private:
   shared_config config_;
   shared_vconfig lagrange_config_;
  double error_;
+  double constraint_error_;
 
 public:
   /**
@@ -61,6 +68,12 @@ public:
   const Ordering* ordering() const { return ordering_; }
   shared_config config() const { return config_; }
 
+  /**
+   * Primary optimization iteration, which updates the configs
+   * @return a new optimizer object with the updated values
+   */
+  SQPOptimizer<G, C> iterate(Verbosity verbosity = SILENT) const;
+
 };
 
 }
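For orientation (an interpretation of the code above, not text from the patch): each call to iterate() performs one sequential-quadratic-programming step. Regular factors linearize to Gaussian terms A_i dx - b_i, while each constraint g_j(x) = 0 contributes both a multiplier-weighted cost term and its linearized constraint row, so the assembled GaussianFactorGraph approximately solves the equality-constrained linear least-squares subproblem

    \min_{\delta x} \; \tfrac{1}{2} \sum_i \lVert A_i\,\delta x - b_i \rVert^2 \quad \text{s.t.} \quad G_j\,\delta x = -g_j(x),

with the multiplier updates (the "L_*" variables in the ordering) recovered from the same solve. The solution is negated by scale(-1.0) and applied to both the state config and the Lagrange config via exmap.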
diff --git a/cpp/testSQPOptimizer.cpp b/cpp/testSQPOptimizer.cpp
index dfecae066..ad9059415 100644
--- a/cpp/testSQPOptimizer.cpp
+++ b/cpp/testSQPOptimizer.cpp
@@ -5,6 +5,10 @@
  */
 
 #include <CppUnitLite/TestHarness.h>
+#include <boost/assign/std/list.hpp> // for operator +=
+#include <boost/assign/std/map.hpp> // for insert
+#include <boost/shared_ptr.hpp>
+#include <boost/tuple/tuple.hpp>
 #include "NonlinearFactorGraph.h"
 #include "NonlinearConstraint.h"
 #include "VectorConfig.h"
@@ -17,10 +21,13 @@
 
 using namespace std;
 using namespace gtsam;
+using namespace boost::assign;
 
 // typedefs
-typedef NonlinearFactorGraph<VectorConfig> NLGraph;
 typedef boost::shared_ptr<VectorConfig> shared_config;
+typedef NonlinearFactorGraph<VectorConfig> NLGraph;
+typedef boost::shared_ptr<NonlinearFactor<VectorConfig> > shared;
+typedef boost::shared_ptr<NonlinearConstraint<VectorConfig> > shared_c;
 
 TEST ( SQPOptimizer, basic ) {
   // create a basic optimizer
@@ -36,6 +43,109 @@ TEST ( SQPOptimizer, basic ) {
   CHECK(assert_equal(*config, *(optimizer.config())));
 }
 
+namespace sqpOptimizer_test1 {
+// binary constraint between landmarks
+/** g(x) = x-y = 0 */
+Vector g_func(const VectorConfig& config, const std::string& key1, const std::string& key2) {
+  return config[key1]-config[key2];
+}
+
+/** gradient at l1 */
+Matrix grad_g1(const VectorConfig& config, const std::string& key) {
+  return eye(2);
+}
+
+/** gradient at l2 */
+Matrix grad_g2(const VectorConfig& config, const std::string& key) {
+  return -1*eye(2);
+}
+} // \namespace sqpOptimizer_test1
+
+namespace sqpOptimizer_test2 {
+// unary constraint on x1
+/** g(x) = x - [1;1] = 0 */
+Vector g_func(const VectorConfig& config, const std::string& key) {
+  return config[key]-Vector_(2, 1.0, 1.0);
+}
+
+/** gradient at x1 */
+Matrix grad_g(const VectorConfig& config, const std::string& key) {
+  return eye(2);
+}
+} // \namespace sqpOptimizer_test2
+
+typedef SQPOptimizer<NLGraph, VectorConfig> Optimizer;
+
+/**
+ * Test pulled from testSQP, with unary and binary equality constraints
+ */
+TEST ( SQPOptimizer, simple_case ) {
+  bool verbose = false;
+  // position (1, 1) constraint for x1
+  VectorConfig feas;
+  feas.insert("x1", Vector_(2, 1.0, 1.0));
+
+  // constant constraint on x1
+  boost::shared_ptr<NonlinearConstraint1<VectorConfig> > c1(
+      new NonlinearConstraint1<VectorConfig>(
+          "x1", *sqpOptimizer_test2::grad_g,
+          *sqpOptimizer_test2::g_func, 2, "L_x1"));
+
+  // measurement from x1 to l1
+  Vector z1 = Vector_(2, 0.0, 5.0);
+  double sigma1 = 0.1;
+  shared f1(new Simulated2DMeasurement(z1, sigma1, "x1", "l1"));
+
+  // measurement from x2 to l2
+  Vector z2 = Vector_(2, -4.0, 0.0);
+  double sigma2 = 0.1;
+  shared f2(new Simulated2DMeasurement(z2, sigma2, "x2", "l2"));
+
+  // equality constraint between l1 and l2
+  boost::shared_ptr<NonlinearConstraint2<VectorConfig> > c2(
+      new NonlinearConstraint2<VectorConfig>(
+          "l1", *sqpOptimizer_test1::grad_g1,
+          "l2", *sqpOptimizer_test1::grad_g2,
+          *sqpOptimizer_test1::g_func, 2, "L_l1l2"));
+
+  // construct the graph
+  NLGraph graph;
+  graph.push_back(c1);
+  graph.push_back(c2);
+  graph.push_back(f1);
+  graph.push_back(f2);
+
+  // create an initial estimate
+  shared_config initialEstimate(new VectorConfig(feas)); // must start with a feasible set
+  initialEstimate->insert("l1", Vector_(2, 1.0, 6.0));   // ground truth
+  initialEstimate->insert("l2", Vector_(2, -4.0, 0.0));  // starts in a separate reference frame
+  initialEstimate->insert("x2", Vector_(2, 0.0, 0.0));   // other pose starts at the origin
+
+  // create an initial estimate for the Lagrange multipliers
+  shared_config initLagrange(new VectorConfig);
+  initLagrange->insert("L_l1l2", Vector_(2, 1.0, 1.0));
+  initLagrange->insert("L_x1", Vector_(2, 1.0, 1.0));
+
+  // create an ordering
+  Ordering ordering;
+  ordering += "x1", "x2", "l1", "l2", "L_l1l2", "L_x1";
+
+  // create an optimizer
+  Optimizer optimizer(graph, ordering, initialEstimate, initLagrange);
+
+  // perform one iteration of optimization
+  Optimizer oneIteration = optimizer.iterate(verbose ? Optimizer::FULL : Optimizer::SILENT);
+
+  // get the config back out and verify
+  VectorConfig actual = *(oneIteration.config());
+  VectorConfig expected;
+  expected.insert("x1", Vector_(2, 1.0, 1.0));
+  expected.insert("l1", Vector_(2, 1.0, 6.0));
+  expected.insert("l2", Vector_(2, 1.0, 6.0));
+  expected.insert("x2", Vector_(2, 5.0, 6.0));
+  CHECK(assert_equal(expected, actual));
+}
+
 /* ************************************************************************* */
 int main() { TestResult tr; return TestRegistry::runAllTests(tr); }
 /* ************************************************************************* */
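As a usage note, here is a minimal sketch of driving the functional-style iterate() API for several steps. The setup names (graph, ordering, initialEstimate, initLagrange) are borrowed from the test above; the fixed step count and the reliance on the compiler-generated copy assignment are assumptions for illustration, not part of this change:

// Hypothetical driver loop: iterate() is const and returns a fresh
// optimizer holding the updated configs, so we rebind the local
// variable rather than mutating in place.
Optimizer optimizer(graph, ordering, initialEstimate, initLagrange);
for (size_t i = 0; i < 5; ++i)  // step count chosen arbitrarily
  optimizer = optimizer.iterate(Optimizer::SILENT);
optimizer.config()->print("config after 5 SQP steps");

A convergence check (e.g. on the new constraint_error_ field) would be the natural replacement for the fixed step count once that value is exposed.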