SQPOptimizer can now perform optimization, and has a test using the linear map-warping example.
parent ac315082aa
commit a5515d9d57
@@ -6,6 +6,8 @@
 #pragma once
 
+#include <boost/foreach.hpp>
+#include "GaussianFactorGraph.h"
 #include "SQPOptimizer.h"
 
 using namespace std;
 
@@ -30,4 +32,50 @@ SQPOptimizer<G,C>::SQPOptimizer(const G& graph, const Ordering& ordering,
 
 }
 
+/* **************************************************************** */
+template<class G, class C>
+SQPOptimizer<G, C> SQPOptimizer<G, C>::iterate(Verbosity v) const {
+    bool verbose = v == SQPOptimizer<G, C>::FULL;
+
+    // local typedefs
+    typedef typename G::const_iterator const_iterator;
+    typedef NonlinearConstraint<C> NLConstraint;
+    typedef boost::shared_ptr<NLConstraint > shared_c;
+
+    // linearize the graph
+    GaussianFactorGraph fg;
+
+    // iterate over all factors and linearize
+    for (const_iterator factor = graph_->begin(); factor < graph_->end(); factor++) {
+        const shared_c constraint = boost::shared_dynamic_cast<NLConstraint >(*factor);
+        if (constraint == NULL) {
+            // if a regular factor, linearize using the default linearization
+            GaussianFactor::shared_ptr f = (*factor)->linearize(*config_);
+            if (verbose) f->print("Regular Factor");
+            fg.push_back(f);
+        } else {
+            // if a constraint, linearize using the constraint method (2 configs)
+            GaussianFactor::shared_ptr f, c;
+            boost::tie(f,c) = constraint->linearize(*config_, *lagrange_config_);
+            if (verbose) f->print("Constrained Factor");
+            if (verbose) c->print("Constraint");
+            fg.push_back(f);
+            fg.push_back(c);
+        }
+    }
+    if (verbose) fg.print("Before Optimization");
+
+    // optimize linear graph to get full delta config
+    VectorConfig delta = fg.optimize(*ordering_).scale(-1.0);
+
+    if (verbose) delta.print("Delta Config");
+
+    // update both state variables
+    shared_config newConfig(new C(config_->exmap(delta)));
+    shared_vconfig newLamConfig(new VectorConfig(lagrange_config_->exmap(delta)));
+
+    // construct a new optimizer
+    return SQPOptimizer<G, C>(*graph_, *ordering_, newConfig, newLamConfig);
+}
+
 }
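Background on the new iterate(): it reads as one step of equality-constrained SQP. Regular factors are linearized into ordinary Gauss-Newton factors, each NonlinearConstraint is linearized with the current Lagrange multipliers into a factor/constraint pair, and the combined GaussianFactorGraph is solved for a joint update of config_ and lagrange_config_. A sketch of the textbook step this appears to correspond to, where f, g, H, A and lambda are the standard SQP symbols rather than names from the codebase:

    \min_x f(x) \quad \text{s.t.} \quad g(x) = 0, \qquad L(x,\lambda) = f(x) + \lambda^{\top} g(x)

    \begin{bmatrix} H & A^{\top} \\ A & 0 \end{bmatrix}
    \begin{bmatrix} \delta x \\ \delta\lambda \end{bmatrix}
    = -\begin{bmatrix} \nabla f(x) + A^{\top}\lambda \\ g(x) \end{bmatrix}

Here H is the Gauss-Newton approximation of the Hessian of f and A = dg/dx, both evaluated at the current config; the solved delta is then applied to both the state and the multipliers through exmap, as in the last lines of iterate().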
@@ -19,6 +19,12 @@ template<class FactorGraph, class Config>
 class SQPOptimizer {
 
 public:
+    // verbosity level
+    typedef enum {
+        SILENT,
+        FULL
+    } Verbosity;
+
     // useful for storing configurations
     typedef boost::shared_ptr<const Config> shared_config;
     typedef boost::shared_ptr<const VectorConfig> shared_vconfig;
@@ -32,6 +38,7 @@ private:
     shared_config config_;
     shared_vconfig lagrange_config_;
     double error_;
+    double constraint_error_;
 
 public:
     /**
@@ -61,6 +68,12 @@ public:
     const Ordering* ordering() const { return ordering_; }
     shared_config config() const { return config_; }
 
+    /**
+     * Primary optimization iteration, updates the configs
+     * @return a new optimization object with updated values
+     */
+    SQPOptimizer<FactorGraph, Config> iterate(Verbosity verbosity=SILENT) const;
+
 };
 
 }
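Since iterate() is const and returns a new SQPOptimizer rather than mutating in place, a caller chains the returned objects. A minimal usage sketch, assuming the graph, ordering, initialEstimate and initLagrange objects are built as in the test below; the two chained steps are illustrative, not prescribed by the API:

    // Config is VectorConfig here; NLGraph is the test's nonlinear graph typedef
    typedef SQPOptimizer<NLGraph, VectorConfig> Optimizer;

    // construct with a nonlinear graph, an elimination ordering, and
    // initial estimates for the state and the Lagrange multipliers
    Optimizer optimizer(graph, ordering, initialEstimate, initLagrange);

    // each call performs one SQP step and returns a fresh optimizer
    Optimizer step1 = optimizer.iterate(Optimizer::SILENT);
    Optimizer step2 = step1.iterate(Optimizer::FULL); // FULL prints the linearized factors and deltas

    VectorConfig result = *(step2.config());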
@@ -5,6 +5,10 @@
 */
 
 #include <CppUnitLite/TestHarness.h>
+#include <boost/assign/std/list.hpp> // for operator +=
+#include <boost/assign/std/map.hpp> // for insert
+#include <Simulated2DMeasurement.h>
+#include <simulated2D.h>
 #include "NonlinearFactorGraph.h"
 #include "NonlinearConstraint.h"
 #include "VectorConfig.h"
@@ -17,10 +21,13 @@
 
 using namespace std;
 using namespace gtsam;
+using namespace boost::assign;
 
 // typedefs
-typedef NonlinearFactorGraph<VectorConfig> NLGraph;
 typedef boost::shared_ptr<VectorConfig> shared_config;
+typedef NonlinearFactorGraph<VectorConfig> NLGraph;
+typedef boost::shared_ptr<NonlinearFactor<VectorConfig> > shared;
+typedef boost::shared_ptr<NonlinearConstraint<VectorConfig> > shared_c;
 
 TEST ( SQPOptimizer, basic ) {
     // create a basic optimizer
@@ -36,6 +43,109 @@ TEST ( SQPOptimizer, basic ) {
     CHECK(assert_equal(*config, *(optimizer.config())));
 }
+
+namespace sqpOptimizer_test1 {
+// binary constraint between landmarks
+/** g(x) = x-y = 0 */
+Vector g_func(const VectorConfig& config, const std::string& key1, const std::string& key2) {
+    return config[key1]-config[key2];
+}
+
+/** gradient at l1 */
+Matrix grad_g1(const VectorConfig& config, const std::string& key) {
+    return eye(2);
+}
+
+/** gradient at l2 */
+Matrix grad_g2(const VectorConfig& config, const std::string& key) {
+    return -1*eye(2);
+}
+} // \namespace sqpOptimizer_test1
+
+namespace sqpOptimizer_test2 {
+// Unary Constraint on x1
+/** g(x) = x -[1;1] = 0 */
+Vector g_func(const VectorConfig& config, const std::string& key) {
+    return config[key]-Vector_(2, 1.0, 1.0);
+}
+
+/** gradient at x1 */
+Matrix grad_g(const VectorConfig& config, const std::string& key) {
+    return eye(2);
+}
+} // \namespace sqpOptimizer_test2
+
+typedef SQPOptimizer<NLGraph, VectorConfig> Optimizer;
+
+/**
+ * Test pulled from testSQP, with
+ */
+TEST ( SQPOptimizer, simple_case ) {
+    bool verbose = false;
+    // position (1, 1) constraint for x1
+    VectorConfig feas;
+    feas.insert("x1", Vector_(2, 1.0, 1.0));
+
+    // constant constraint on x1
+    boost::shared_ptr<NonlinearConstraint1<VectorConfig> > c1(
+            new NonlinearConstraint1<VectorConfig>(
+                    "x1", *sqpOptimizer_test2::grad_g,
+                    *sqpOptimizer_test2::g_func, 2, "L_x1"));
+
+    // measurement from x1 to l1
+    Vector z1 = Vector_(2, 0.0, 5.0);
+    double sigma1 = 0.1;
+    shared f1(new Simulated2DMeasurement(z1, sigma1, "x1", "l1"));
+
+    // measurement from x2 to l2
+    Vector z2 = Vector_(2, -4.0, 0.0);
+    double sigma2 = 0.1;
+    shared f2(new Simulated2DMeasurement(z2, sigma2, "x2", "l2"));
+
+    // equality constraint between l1 and l2
+    boost::shared_ptr<NonlinearConstraint2<VectorConfig> > c2(
+            new NonlinearConstraint2<VectorConfig>(
+                    "l1", *sqpOptimizer_test1::grad_g1,
+                    "l2", *sqpOptimizer_test1::grad_g2,
+                    *sqpOptimizer_test1::g_func, 2, "L_l1l2"));
+
+    // construct the graph
+    NLGraph graph;
+    graph.push_back(c1);
+    graph.push_back(c2);
+    graph.push_back(f1);
+    graph.push_back(f2);
+
+    // create an initial estimate
+    shared_config initialEstimate(new VectorConfig(feas)); // must start with feasible set
+    initialEstimate->insert("l1", Vector_(2, 1.0, 6.0)); // ground truth
+    initialEstimate->insert("l2", Vector_(2, -4.0, 0.0)); // starting with a separate reference frame
+    initialEstimate->insert("x2", Vector_(2, 0.0, 0.0)); // other pose starts at origin
+
+    // create an initial estimate for the lagrange multiplier
+    shared_config initLagrange(new VectorConfig);
+    initLagrange->insert("L_l1l2", Vector_(2, 1.0, 1.0));
+    initLagrange->insert("L_x1", Vector_(2, 1.0, 1.0));
+
+    // create an ordering
+    Ordering ordering;
+    ordering += "x1", "x2", "l1", "l2", "L_l1l2", "L_x1";
+
+    // create an optimizer
+    Optimizer optimizer(graph, ordering, initialEstimate, initLagrange);
+
+    // perform an iteration of optimization
+    Optimizer oneIteration = optimizer.iterate(Optimizer::SILENT);
+
+    // get the config back out and verify
+    VectorConfig actual = *(oneIteration.config());
+    VectorConfig expected;
+    expected.insert("x1", Vector_(2, 1.0, 1.0));
+    expected.insert("l1", Vector_(2, 1.0, 6.0));
+    expected.insert("l2", Vector_(2, 1.0, 6.0));
+    expected.insert("x2", Vector_(2, 5.0, 6.0));
+    CHECK(assert_equal(actual, expected));
+}
 
 /* ************************************************************************* */
 int main() { TestResult tr; return TestRegistry::runAllTests(tr); }
 /* ************************************************************************* */
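One way to read the expected values in simple_case, as a quick check on the map-warping constraint: initially g(l1, l2) = l1 - l2 = (1,6) - (-4,0) = (5,6), so the constraint is violated. With grad_g1 = I and grad_g2 = -I, the linearized constraint asks for delta_l1 - delta_l2 = -(5,6); since x1 is pinned at (1,1) and measurement z1 holds l1 at (1,6), the correction lands entirely on the second reference frame: l2 moves to (-4,0) + (5,6) = (1,6), and the z2 measurement drags x2 along to (0,0) + (5,6) = (5,6), matching the expected config while x1 and l1 stay put. This is an interpretation of the test numbers; the test itself only checks the final config.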