Remove support for nonlinear constraints. Refactor SQPSimple to LCNLPSolver.

release/4.3a0
krunalchande 2014-12-23 15:12:53 -05:00 committed by thduynguyen
parent 0fdd49ca4e
commit 276959e39a
7 changed files with 114 additions and 314 deletions
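For call sites migrating to the new names, here is a minimal usage sketch assembled from the types and calls that appear in this diff (NLP/SQPSimple/SQPSimpleState become LCNLP/LCNLPSolver/LCNLPState). The wrapper function solveWithLCNLP and its signature are illustrative, not part of the commit:

#include <gtsam_unstable/nonlinear/LCNLPSolver.h>

using namespace gtsam;

// Illustrative helper, assuming the usual gtsam factor-graph push_back overloads.
Values solveWithLCNLP(const NonlinearFactor::shared_ptr& costFactor,
                      const NonlinearConstraint::shared_ptr& linearEquality,
                      const Values& initialValues) {
  LCNLP lcnlp;                                      // was: NLP nlp;
  lcnlp.cost.push_back(costFactor);                 // unconstrained objective terms
  lcnlp.linearEqualities.push_back(linearEquality); // linear constraints only;
                                                    // lcnlp.nonlinearEqualities no longer exists
  LCNLPSolver solver(lcnlp);                        // was: SQPSimple sqpSimple(nlp);
  return solver.optimize(initialValues).first;      // .second holds the dual values
}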

View File

@@ -384,29 +384,6 @@ namespace gtsam {
return false;
}
/* ************************************************************************* */
boost::tuple<GaussianFactorGraph, GaussianFactorGraph, GaussianFactorGraph> GaussianFactorGraph::splitConstraints() const {
typedef HessianFactor H;
typedef JacobianFactor J;
GaussianFactorGraph hessians, jacobians, constraints;
BOOST_FOREACH(const GaussianFactor::shared_ptr& factor, *this) {
H::shared_ptr hessian(boost::dynamic_pointer_cast<H>(factor));
if (hessian)
hessians.push_back(factor);
else {
J::shared_ptr jacobian(boost::dynamic_pointer_cast<J>(factor));
if (jacobian && jacobian->get_model() && jacobian->get_model()->isConstrained()) {
constraints.push_back(jacobian);
}
else {
jacobians.push_back(factor);
}
}
}
return boost::make_tuple(hessians, jacobians, constraints);
}
/* ************************************************************************* */
// x += alpha*A'*e
void GaussianFactorGraph::transposeMultiplyAdd(double alpha, const Errors& e,

View File

@@ -316,12 +316,6 @@ namespace gtsam {
/** In-place version e <- A*x that takes an iterator. */
void multiplyInPlace(const VectorValues& x, const Errors::iterator& e) const;
/**
* Split the factors into Hessian factors, unconstrained Jacobian factors, and constrained factors
* @return a tuple of <hessians, unconstrained jacobians, constraints> graphs
*/
boost::tuple<GaussianFactorGraph, GaussianFactorGraph, GaussianFactorGraph> splitConstraints() const;
/// @}
private:

View File

@@ -616,110 +616,10 @@ EliminatePreferCholesky(const GaussianFactorGraph& factors, const Ordering& keys
// all factors to JacobianFactors. Otherwise, we can convert all factors
// to HessianFactors. This is because QR can handle constrained noise
// models but Cholesky cannot.
/* Currently, when eliminating a constrained variable, EliminatePreferCholesky
* converts every other factor to a JacobianFactor before doing the special QR
* factorization for constrained variables. Unfortunately, after a constrained
* nonlinear graph is linearized, the new Hessian factors coming from the constraints,
* multiplied by the dual variables (the -lambda*\hessian{c} terms in the Lagrangian
* objective), might become negative definite and thus cannot be converted to JacobianFactors.
*
* Following EliminateCholesky, this version of EliminatePreferCholesky for
* constrained variables gathers all unconstrained factors into one big joint HessianFactor
* before converting it into a JacobianFactor, which is then eliminated by QR together with
* the other constrained factors.
*
* Of course, this might not solve the non-positive-definite problem entirely,
* because (1) the original Hessian factors might be non-positive-definite,
* and (2) large or unusual values of lambda might make the joint factor
* non-positive-definite [is this true?]. But at least this helps in typical cases.
*/
GaussianFactorGraph hessians, jacobians, constraints;
// factors.print("factors: ");
boost::tie(hessians, jacobians, constraints) = factors.splitConstraints();
// keys.print("Frontal variables to eliminate: ");
// hessians.print("Hessians: ");
// jacobians.print("Jacobians: ");
// constraints.print("Constraints: ");
bool hasHessians = hessians.size() > 0;
// Add all jacobians to gather as much info as we can
hessians.push_back(jacobians);
if (constraints.size()>0) {
// // Build joint factor
// HessianFactor::shared_ptr jointFactor;
// try {
// jointFactor = boost::make_shared<HessianFactor>(hessians, Scatter(factors, keys));
// } catch(std::invalid_argument&) {
// throw InvalidDenseElimination(
// "EliminateCholesky was called with a request to eliminate variables that are not\n"
// "involved in the provided factors.");
// }
// constraints.push_back(jointFactor);
// return EliminateQR(constraints, keys);
// If there are hessian factors, turn them into conditional
// by doing partial elimination, then use those jacobians when eliminating the constraints
GaussianFactor::shared_ptr unconstrainedNewFactor;
if (hessians.size() > 0) {
bool hasSeparator = false;
GaussianFactorGraph::Keys unconstrainedKeys = hessians.keys();
BOOST_FOREACH(Key key, unconstrainedKeys) {
if (find(keys.begin(), keys.end(), key) == keys.end()) {
hasSeparator = true;
break;
}
}
if (hasSeparator) {
// find frontal keys in the unconstrained factor to eliminate
Ordering subkeys;
BOOST_FOREACH(Key key, keys) {
if (unconstrainedKeys.exists(key))
subkeys.push_back(key);
}
GaussianConditional::shared_ptr unconstrainedConditional;
boost::tie(unconstrainedConditional, unconstrainedNewFactor)
= EliminateCholesky(hessians, subkeys);
constraints.push_back(unconstrainedConditional);
}
else {
if (hasHessians) {
HessianFactor::shared_ptr jointFactor = boost::make_shared<
HessianFactor>(hessians, Scatter(factors, keys));
constraints.push_back(jointFactor);
}
else {
constraints.push_back(hessians);
}
}
}
// Now eliminate the constraints
GaussianConditional::shared_ptr constrainedConditional;
GaussianFactor::shared_ptr constrainedNewFactor;
boost::tie(constrainedConditional, constrainedNewFactor) = EliminateQR(
constraints, keys);
// constraints.print("constraints: ");
// constrainedConditional->print("constrainedConditional: ");
// constrainedNewFactor->print("constrainedNewFactor: ");
if (unconstrainedNewFactor) {
GaussianFactorGraph newFactors;
newFactors.push_back(unconstrainedNewFactor);
newFactors.push_back(constrainedNewFactor);
// newFactors.print("newFactors: ");
HessianFactor::shared_ptr newFactor(new HessianFactor(newFactors));
return make_pair(constrainedConditional, newFactor);
} else {
return make_pair(constrainedConditional, constrainedNewFactor);
}
}
else {
if (hasConstraints(factors))
return EliminateQR(factors, keys);
else
return EliminateCholesky(factors, keys);
}
}
} // gtsam
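As a side note on the negative-definiteness issue described in the comment removed by the hunk above, a one-dimensional illustration (a restatement, not commit text): with objective f(x) = x^2 and constraint c(x) = x^2 - 1, the Lagrangian Hessian is

\nabla^2_{xx} L(x, \lambda) = \nabla^2 f(x) - \lambda \nabla^2 c(x) = 2 - 2\lambda,

which is negative for any dual value \lambda > 1. Such a HessianFactor cannot be converted into a JacobianFactor, which is exactly the case the removed splitConstraints/joint-factor machinery tried to handle.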

View File

@@ -10,33 +10,32 @@
* -------------------------------------------------------------------------- */
/**
* @file SQPSimple.cpp
* @file LCNLPSolver.cpp
* @author Duy-Nguyen Ta
* @author Krunal Chande
* @author Luca Carlone
* @date Dec 15, 2014
*/
#include <gtsam_unstable/nonlinear/SQPSimple.h>
#include <gtsam_unstable/nonlinear/LCNLPSolver.h>
#include <gtsam_unstable/linear/QPSolver.h>
namespace gtsam {
/* ************************************************************************* */
bool SQPSimple::isStationary(const VectorValues& delta) const {
bool LCNLPSolver::isStationary(const VectorValues& delta) const {
return delta.vector().lpNorm<Eigen::Infinity>() < errorTol;
}
/* ************************************************************************* */
bool SQPSimple::isPrimalFeasible(const SQPSimpleState& state) const {
return nlp_.linearEqualities.checkFeasibility(state.values, errorTol)
&& nlp_.nonlinearEqualities.checkFeasibility(state.values, errorTol);
bool LCNLPSolver::isPrimalFeasible(const LCNLPState& state) const {
return lcnlp_.linearEqualities.checkFeasibility(state.values, errorTol);
}
/* ************************************************************************* */
bool SQPSimple::isDualFeasible(const VectorValues& duals) const {
BOOST_FOREACH(const NonlinearFactor::shared_ptr& factor, nlp_.linearInequalities) {
bool LCNLPSolver::isDualFeasible(const VectorValues& duals) const {
BOOST_FOREACH(const NonlinearFactor::shared_ptr& factor, lcnlp_.linearInequalities) {
NonlinearConstraint::shared_ptr inequality = boost::dynamic_pointer_cast<NonlinearConstraint>(factor);
Key dualKey = inequality->dualKey();
if (!duals.exists(dualKey)) continue; // should be inactive constraint!
@@ -48,33 +47,29 @@ bool SQPSimple::isDualFeasible(const VectorValues& duals) const {
}
/* ************************************************************************* */
bool SQPSimple::isComplementary(const SQPSimpleState& state) const {
return nlp_.linearInequalities.checkFeasibilityAndComplimentary(state.values, state.duals, errorTol);
bool LCNLPSolver::isComplementary(const LCNLPState& state) const {
return lcnlp_.linearInequalities.checkFeasibilityAndComplimentary(state.values, state.duals, errorTol);
}
/* ************************************************************************* */
bool SQPSimple::checkConvergence(const SQPSimpleState& state, const VectorValues& delta) const {
bool LCNLPSolver::checkConvergence(const LCNLPState& state, const VectorValues& delta) const {
return isStationary(delta) && isPrimalFeasible(state) && isDualFeasible(state.duals) && isComplementary(state);
}
/* ************************************************************************* */
VectorValues SQPSimple::initializeDuals() const {
VectorValues LCNLPSolver::initializeDuals() const {
VectorValues duals;
BOOST_FOREACH(const NonlinearFactor::shared_ptr& factor, nlp_.linearEqualities) {
BOOST_FOREACH(const NonlinearFactor::shared_ptr& factor, lcnlp_.linearEqualities) {
NonlinearConstraint::shared_ptr constraint = boost::dynamic_pointer_cast<NonlinearConstraint>(factor);
duals.insert(constraint->dualKey(), zero(factor->dim()));
}
BOOST_FOREACH(const NonlinearFactor::shared_ptr& factor, nlp_.nonlinearEqualities) {
NonlinearConstraint::shared_ptr constraint = boost::dynamic_pointer_cast<NonlinearConstraint>(factor);
duals.insert(constraint->dualKey(), zero(factor->dim()));
}
return duals;
}
/* ************************************************************************* */
std::pair<Values, VectorValues> SQPSimple::optimize(const Values& initialValues) const {
SQPSimpleState state(initialValues);
std::pair<Values, VectorValues> LCNLPSolver::optimize(const Values& initialValues) const {
LCNLPState state(initialValues);
state.duals = initializeDuals();
while (!state.converged && state.iterations < 100) {
state = iterate(state);
@@ -83,19 +78,14 @@ std::pair<Values, VectorValues> SQPSimple::optimize(const Values& initialValues)
}
/* ************************************************************************* */
SQPSimpleState SQPSimple::iterate(const SQPSimpleState& state) const {
LCNLPState LCNLPSolver::iterate(const LCNLPState& state) const {
static const bool debug = false;
// construct the qp subproblem
QP qp;
qp.cost = *nlp_.cost.linearize(state.values);
GaussianFactorGraph::shared_ptr multipliedHessians = nlp_.nonlinearEqualities.multipliedHessians(state.values, state.duals);
qp.cost.push_back(*multipliedHessians);
qp.equalities.add(*nlp_.linearEqualities.linearize(state.values));
qp.equalities.add(*nlp_.nonlinearEqualities.linearize(state.values));
qp.inequalities.add(*nlp_.linearInequalities.linearize(state.values));
qp.cost = *lcnlp_.cost.linearize(state.values);
qp.equalities.add(*lcnlp_.linearEqualities.linearize(state.values));
qp.inequalities.add(*lcnlp_.linearInequalities.linearize(state.values));
if (debug)
qp.print("QP subproblem:");
@@ -112,7 +102,7 @@ SQPSimpleState SQPSimple::iterate(const SQPSimpleState& state) const {
duals.print("duals = ");
// update new state
SQPSimpleState newState;
LCNLPState newState;
newState.values = state.values.retract(delta);
newState.duals = duals;
newState.converged = checkConvergence(newState, delta);
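For reference, the four checks combined in checkConvergence above correspond to the standard KKT conditions, written here in the notation of the comments in LCNLPSolver.h (a restatement, not text from the commit):

\nabla f(x) - \sum_i \lambda_i \nabla c_i(x) = 0    (isStationary, via a zero QP step)
c_E(x) = 0                                          (isPrimalFeasible)
\lambda_i \ge 0 for all inequality constraints      (isDualFeasible)
\lambda_i \, c_{I,i}(x) = 0                         (isComplementary)

Each iteration obtains the step delta and the duals by solving the QP assembled above from the linearized cost and the linearized (already linear) constraints.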

View File

@@ -10,7 +10,7 @@
* -------------------------------------------------------------------------- */
/**
* @file SQPSimple.h
* @file LCNLPSolver.h
* @author Duy-Nguyen Ta
* @author Krunal Chande
* @author Luca Carlone
@@ -24,46 +24,51 @@
namespace gtsam {
struct NLP {
/**
* Nonlinear Programming problem with
* only linear constraints, encoded in factor graphs
*/
struct LCNLP {
NonlinearFactorGraph cost;
NonlinearEqualityFactorGraph linearEqualities;
NonlinearEqualityFactorGraph nonlinearEqualities;
NonlinearInequalityFactorGraph linearInequalities;
};
struct SQPSimpleState {
/**
* State of LCNLPSolver before/after each iteration
*/
struct LCNLPState {
Values values;
VectorValues duals;
bool converged;
size_t iterations;
/// Default constructor
SQPSimpleState() : values(), duals(), converged(false), iterations(0) {}
LCNLPState() : values(), duals(), converged(false), iterations(0) {}
/// Constructor with initial values
SQPSimpleState(const Values& initialValues) :
LCNLPState(const Values& initialValues) :
values(initialValues), duals(VectorValues()), converged(false), iterations(0) {
}
};
/**
* Simple SQP optimizer to solve nonlinear constrained problems.
* This simple version won't care about nonconvexity, which needs
* more advanced techniques to solve, e.g., merit function, line search, second-order correction etc.
* Simple SQP optimizer to solve nonlinear constrained problems.
* ONLY linear constraints are supported.
*/
class SQPSimple {
NLP nlp_;
class LCNLPSolver {
LCNLP lcnlp_;
static const double errorTol = 1e-5;
public:
SQPSimple(const NLP& nlp) :
nlp_(nlp) {
LCNLPSolver(const LCNLP& lcnlp) :
lcnlp_(lcnlp) {
}
/// Check if \nabla f(x) - \lambda * \nabla c(x) == 0
bool isStationary(const VectorValues& delta) const;
/// Check if c_E(x) == 0
bool isPrimalFeasible(const SQPSimpleState& state) const;
bool isPrimalFeasible(const LCNLPState& state) const;
/**
* Dual variables of inequality constraints need to be >=0
@@ -81,15 +86,15 @@ public:
* If it is inactive, the dual should be 0, regardless of the error. However, we don't compute
* dual variables for inactive constraints in the QP subproblem, so we don't care.
*/
bool isComplementary(const SQPSimpleState& state) const;
bool isComplementary(const LCNLPState& state) const;
/// Check convergence
bool checkConvergence(const SQPSimpleState& state, const VectorValues& delta) const;
bool checkConvergence(const LCNLPState& state, const VectorValues& delta) const;
/**
* Single iteration of SQP
*/
SQPSimpleState iterate(const SQPSimpleState& state) const;
LCNLPState iterate(const LCNLPState& state) const;
VectorValues initializeDuals() const;
/**

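For context, the linearEqualities and linearInequalities graphs above are populated with NonlinearConstraint subclasses such as ConstraintProblem1, LineConstraintX, and AxisUpperBound in the test file below. A minimal sketch of such a linear equality constraint, modeled on those test classes (the class name SumEqualsOne, the constraint x + y = 1, and the assumed header are illustrative, not part of the commit):

// Linear equality constraint x + y - 1 = 0 with constant Jacobians,
// written against the NonlinearConstraint2 interface used by the tests.
#include <gtsam_unstable/nonlinear/NonlinearConstraint.h>  // header name assumed

class SumEqualsOne : public gtsam::NonlinearConstraint2<double, double> {
  typedef gtsam::NonlinearConstraint2<double, double> Base;
public:
  SumEqualsOne(gtsam::Key xK, gtsam::Key yK, gtsam::Key dualKey)
      : Base(xK, yK, dualKey, 1) {}

  gtsam::Vector evaluateError(const double& x, const double& y,
      boost::optional<gtsam::Matrix&> H1 = boost::none,
      boost::optional<gtsam::Matrix&> H2 = boost::none) const {
    if (H1) *H1 = (gtsam::Matrix(1, 1) << 1.0).finished();  // d(error)/dx
    if (H2) *H2 = (gtsam::Matrix(1, 1) << 1.0).finished();  // d(error)/dy
    return (gtsam::Vector(1) << x + y - 1.0).finished();
  }
};

It would then be added with lcnlp.linearEqualities.add(SumEqualsOne(X(1), Y(1), dualKey)), mirroring the tests.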
View File

@@ -58,17 +58,17 @@ public:
return true;
}
/**
* Additional cost for -lambda*ConstraintHessian for SQP
*/
GaussianFactorGraph::shared_ptr multipliedHessians(const Values& values, const VectorValues& duals) const {
GaussianFactorGraph::shared_ptr constrainedHessians(new GaussianFactorGraph());
BOOST_FOREACH(const NonlinearFactor::shared_ptr& factor, *this) {
NonlinearConstraint::shared_ptr constraint = boost::dynamic_pointer_cast<NonlinearConstraint>(factor);
constrainedHessians->push_back(constraint->multipliedHessian(values, duals));
}
return constrainedHessians;
}
// /**
// * Additional cost for -lambda*ConstraintHessian for SQP
// */
// GaussianFactorGraph::shared_ptr multipliedHessians(const Values& values, const VectorValues& duals) const {
// GaussianFactorGraph::shared_ptr constrainedHessians(new GaussianFactorGraph());
// BOOST_FOREACH(const NonlinearFactor::shared_ptr& factor, *this) {
// NonlinearConstraint::shared_ptr constraint = boost::dynamic_pointer_cast<NonlinearConstraint>(factor);
// constrainedHessians->push_back(constraint->multipliedHessian(values, duals));
// }
// return constrainedHessians;
// }
};
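A short note on why multipliedHessians can be retired (a restatement of the math implied by its comment, not commit text): it assembled the constraint-curvature term of the Lagrangian Hessian,

\nabla^2_{xx} L(x, \lambda) = \nabla^2 f(x) - \sum_i \lambda_i \nabla^2 c_i(x),

and for linear constraints \nabla^2 c_i(x) = 0, so the term vanishes and LCNLPSolver's QP subproblem only needs the linearized cost.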

View File

@@ -26,7 +26,7 @@
#include <gtsam/slam/BetweenFactor.h>
#include <gtsam_unstable/linear/QPSolver.h>
#include <gtsam_unstable/nonlinear/SQPSimple.h>
#include <gtsam_unstable/nonlinear/LCNLPSolver.h>
#include <gtsam_unstable/nonlinear/NonlinearInequality.h>
#include <CppUnitLite/TestHarness.h>
#include <iostream>
@@ -54,7 +54,7 @@ public:
}
};
TEST(testSQPSimple, QPProblem) {
TEST(testlcnlpSolver, QPProblem) {
const Key dualKey = 0;
// Simple quadratic cost: x1^2 + x2^2
@@ -81,85 +81,27 @@ TEST(testSQPSimple, QPProblem) {
initialVectorValues.insert(Y(1), ones(1));
VectorValues expectedSolution = qpSolver.optimize(initialVectorValues).first;
//Instantiate NLP
NLP nlp;
//Instantiate LCNLP
LCNLP lcnlp;
Values linPoint;
linPoint.insert<Vector1>(X(1), zero(1));
linPoint.insert<Vector1>(Y(1), zero(1));
nlp.cost.add(LinearContainerFactor(hf, linPoint)); // wrap it using LinearContainerFactor
nlp.linearEqualities.add(ConstraintProblem1(X(1), Y(1), dualKey));
lcnlp.cost.add(LinearContainerFactor(hf, linPoint)); // wrap it using LinearContainerFactor
lcnlp.linearEqualities.add(ConstraintProblem1(X(1), Y(1), dualKey));
Values initialValues;
initialValues.insert(X(1), 0.0);
initialValues.insert(Y(1), 0.0);
// Instantiate SQP
SQPSimple sqpSimple(nlp);
Values actualValues = sqpSimple.optimize(initialValues).first;
// Instantiate LCNLPSolver
LCNLPSolver lcnlpSolver(lcnlp);
Values actualValues = lcnlpSolver.optimize(initialValues).first;
DOUBLES_EQUAL(expectedSolution.at(X(1))[0], actualValues.at<double>(X(1)), 1e-100);
DOUBLES_EQUAL(expectedSolution.at(Y(1))[0], actualValues.at<double>(Y(1)), 1e-100);
}
//******************************************************************************
class CircleConstraint : public NonlinearConstraint2<double, double> {
typedef NonlinearConstraint2<double, double> Base;
public:
CircleConstraint(Key xK, Key yK, Key dualKey) : Base(xK, yK, dualKey, 1) {}
Vector evaluateError(const double& x, const double& y,
boost::optional<Matrix&> H1 = boost::none, boost::optional<Matrix&> H2 =
boost::none) const {
if (H1) *H1 = (Matrix(1,1) << 2*(x-1)).finished();
if (H2) *H2 = (Matrix(1,1) << 2*y).finished();
return (Vector(1) << (x-1)*(x-1) + y*y - 0.25).finished();
}
void evaluateHessians(const double& x, const double& y,
std::vector<Matrix>& G11, std::vector<Matrix>& G12,
std::vector<Matrix>& G22) const {
G11.push_back((Matrix(1,1) << 2).finished());
G12.push_back((Matrix(1,1) << 0).finished());
G22.push_back((Matrix(1,1) << 2).finished());
}
};
// Nonlinear constraints are not currently supported
TEST_DISABLED(testSQPSimple, quadraticCostNonlinearConstraint) {
const Key dualKey = 0;
//Instantiate NLP
NLP nlp;
// Simple quadratic cost: x1^2 + x2^2 +1000
// Note the Hessian encodes:
// 0.5*x1'*G11*x1 + x1'*G12*x2 + 0.5*x2'*G22*x2 - x1'*g1 - x2'*g2 + 0.5*f
// Hence here we have G11 = 2, G12 = 0, G22 = 2, g1 = 0, g2 = 0, f = 0
Values linPoint;
linPoint.insert<Vector1>(X(1), zero(1));
linPoint.insert<Vector1>(Y(1), zero(1));
HessianFactor hf(X(1), Y(1), 2.0 * ones(1,1), zero(1), zero(1),
2*ones(1,1), zero(1) , 1000);
nlp.cost.add(LinearContainerFactor(hf, linPoint)); // wrap it using LinearContainerFactor
nlp.nonlinearEqualities.add(CircleConstraint(X(1), Y(1), dualKey));
Values initialValues;
initialValues.insert<double>(X(1), 4.0);
initialValues.insert<double>(Y(1), 10.0);
Values expectedSolution;
expectedSolution.insert<double>(X(1), 0.5);
expectedSolution.insert<double>(Y(1), 0.0);
// Instantiate SQP
SQPSimple sqpSimple(nlp);
Values actualSolution = sqpSimple.optimize(initialValues).first;
CHECK(assert_equal(expectedSolution, actualSolution, 1e-10));
}
//******************************************************************************
class LineConstraintX : public NonlinearConstraint1<Pose3> {
typedef NonlinearConstraint1<Pose3> Base;
@@ -171,13 +113,6 @@ public:
return pose.x();
}
void evaluateHessians(const Pose3& pose, std::vector<Matrix>& G11) const {
Matrix G11all = Z_6x6;
Vector rT1 = pose.rotation().matrix().row(0);
G11all.block<3,3>(3,0) = skewSymmetric(rT1);
G11.push_back(G11all);
}
Vector evaluateError(const Pose3& pose, boost::optional<Matrix&> H = boost::none) const {
if (H)
*H = (Matrix(1,6) << zeros(1,3), pose.rotation().matrix().row(0)).finished();
@@ -185,15 +120,15 @@ public:
}
};
TEST(testSQPSimple, poseOnALine) {
TEST(testlcnlpSolver, poseOnALine) {
const Key dualKey = 0;
//Instantiate NLP
NLP nlp;
nlp.cost.add(PriorFactor<Pose3>(X(1), Pose3(Rot3::ypr(0.1, 0.2, 0.3), Point3(1, 0, 0)), noiseModel::Unit::Create(6)));
//Instantiate LCNLP
LCNLP lcnlp;
lcnlp.cost.add(PriorFactor<Pose3>(X(1), Pose3(Rot3::ypr(0.1, 0.2, 0.3), Point3(1, 0, 0)), noiseModel::Unit::Create(6)));
LineConstraintX constraint(X(1), dualKey);
nlp.nonlinearEqualities.add(constraint);
lcnlp.linearEqualities.add(constraint);
Values initialValues;
initialValues.insert(X(1), Pose3(Rot3::ypr(0.3, 0.2, 0.3), Point3(1,0,0)));
@@ -201,9 +136,9 @@ TEST(testSQPSimple, poseOnALine) {
Values expectedSolution;
expectedSolution.insert(X(1), Pose3(Rot3::ypr(0.1, 0.2, 0.3), Point3()));
// Instantiate SQP
SQPSimple sqpSimple(nlp);
Values actualSolution = sqpSimple.optimize(initialValues).first;
// Instantiate LCNLPSolver
LCNLPSolver lcnlpSolver(lcnlp);
Values actualSolution = lcnlpSolver.optimize(initialValues).first;
CHECK(assert_equal(expectedSolution, actualSolution, 1e-10));
Pose3 pose(Rot3::ypr(0.1, 0.2, 0.3), Point3());
@@ -226,7 +161,7 @@ public:
}
};
TEST(testSQPSimple, inequalityConstraint) {
TEST(testlcnlpSolver, inequalityConstraint) {
const Key dualKey = 0;
// Simple quadratic cost: x^2 + y^2
@@ -253,27 +188,26 @@ TEST(testSQPSimple, inequalityConstraint) {
initialVectorValues.insert(Y(1), zero(1));
VectorValues expectedSolution = qpSolver.optimize(initialVectorValues).first;
//Instantiate NLP
NLP nlp;
//Instantiate LCNLP
LCNLP lcnlp;
Values linPoint;
linPoint.insert<Vector1>(X(1), zero(1));
linPoint.insert<Vector1>(Y(1), zero(1));
nlp.cost.add(LinearContainerFactor(hf, linPoint)); // wrap it using LinearContainerFactor
nlp.linearInequalities.add(InequalityProblem1(X(1), Y(1), dualKey));
lcnlp.cost.add(LinearContainerFactor(hf, linPoint)); // wrap it using LinearContainerFactor
lcnlp.linearInequalities.add(InequalityProblem1(X(1), Y(1), dualKey));
Values initialValues;
initialValues.insert(X(1), 1.0);
initialValues.insert(Y(1), -10.0);
// Instantiate SQP
SQPSimple sqpSimple(nlp);
Values actualValues = sqpSimple.optimize(initialValues).first;
// Instantiate LCNLPSolver
LCNLPSolver lcnlpSolver(lcnlp);
Values actualValues = lcnlpSolver.optimize(initialValues).first;
DOUBLES_EQUAL(expectedSolution.at(X(1))[0], actualValues.at<double>(X(1)), 1e-10);
DOUBLES_EQUAL(expectedSolution.at(Y(1))[0], actualValues.at<double>(Y(1)), 1e-10);
}
//******************************************************************************
const size_t X_AXIS = 0;
const size_t Y_AXIS = 1;
@@ -319,14 +253,14 @@ public:
}
};
TEST(testSQPSimple, poseWithABoundary) {
TEST(testlcnlpSolver, poseWithABoundary) {
const Key dualKey = 0;
//Instantiate NLP
NLP nlp;
nlp.cost.add(PriorFactor<Pose3>(X(1), Pose3(Rot3::ypr(0.1, 0.2, 0.3), Point3(1, 0, 0)), noiseModel::Unit::Create(6)));
//Instantiate LCNLP
LCNLP lcnlp;
lcnlp.cost.add(PriorFactor<Pose3>(X(1), Pose3(Rot3::ypr(0.1, 0.2, 0.3), Point3(1, 0, 0)), noiseModel::Unit::Create(6)));
AxisUpperBound constraint(X(1), X_AXIS, 0, dualKey);
nlp.linearInequalities.add(constraint);
lcnlp.linearInequalities.add(constraint);
Values initialValues;
initialValues.insert(X(1), Pose3(Rot3::ypr(0.3, 0.2, 0.3), Point3(1, 0, 0)));
@@ -334,23 +268,23 @@ TEST(testSQPSimple, poseWithABoundary) {
Values expectedSolution;
expectedSolution.insert(X(1), Pose3(Rot3::ypr(0.1, 0.2, 0.3), Point3(0, 0, 0)));
// Instantiate SQP
SQPSimple sqpSimple(nlp);
Values actualSolution = sqpSimple.optimize(initialValues).first;
// Instantiate LCNLPSolver
LCNLPSolver lcnlpSolver(lcnlp);
Values actualSolution = lcnlpSolver.optimize(initialValues).first;
CHECK(assert_equal(expectedSolution, actualSolution, 1e-10));
}
TEST(testSQPSimple, poseWithinA2DBox) {
TEST(testlcnlpSolver, poseWithinA2DBox) {
const Key dualKey = 0;
//Instantiate NLP
NLP nlp;
nlp.cost.add(PriorFactor<Pose3>(X(1), Pose3(Rot3::ypr(0.1, 0.2, 0.3), Point3(10, 0.5, 0)), noiseModel::Unit::Create(6)));
nlp.linearInequalities.add(AxisLowerBound(X(1), X_AXIS, -1, dualKey)); // -1 <= x
nlp.linearInequalities.add(AxisUpperBound(X(1), X_AXIS, 1, dualKey+1)); // x <= 1
nlp.linearInequalities.add(AxisLowerBound(X(1), Y_AXIS, -1, dualKey+2)); // -1 <= y
nlp.linearInequalities.add(AxisUpperBound(X(1), Y_AXIS, 1, dualKey+3));// y <= 1
//Instantiate LCNLP
LCNLP lcnlp;
lcnlp.cost.add(PriorFactor<Pose3>(X(1), Pose3(Rot3::ypr(0.1, 0.2, 0.3), Point3(10, 0.5, 0)), noiseModel::Unit::Create(6)));
lcnlp.linearInequalities.add(AxisLowerBound(X(1), X_AXIS, -1, dualKey)); // -1 <= x
lcnlp.linearInequalities.add(AxisUpperBound(X(1), X_AXIS, 1, dualKey+1)); // x <= 1
lcnlp.linearInequalities.add(AxisLowerBound(X(1), Y_AXIS, -1, dualKey+2)); // -1 <= y
lcnlp.linearInequalities.add(AxisUpperBound(X(1), Y_AXIS, 1, dualKey+3));// y <= 1
Values initialValues;
initialValues.insert(X(1), Pose3(Rot3::ypr(1, -1, 2), Point3(3, -5, 0)));
@@ -358,14 +292,14 @@ TEST(testSQPSimple, poseWithinA2DBox) {
Values expectedSolution;
expectedSolution.insert(X(1), Pose3(Rot3::ypr(0.1, 0.2, 0.3), Point3(1, 0.5, 0)));
// Instantiate SQP
SQPSimple sqpSimple(nlp);
Values actualSolution = sqpSimple.optimize(initialValues).first;
// Instantiate LCNLPSolver
LCNLPSolver lcnlpSolver(lcnlp);
Values actualSolution = lcnlpSolver.optimize(initialValues).first;
CHECK(assert_equal(expectedSolution, actualSolution, 1e-10));
}
TEST_DISABLED(testSQPSimple, posesInA2DBox) {
TEST_DISABLED(testlcnlpSolver, posesInA2DBox) {
const double xLowerBound = -3.0,
xUpperBound = 5.0,
yLowerBound = -1.0,
@@ -373,32 +307,32 @@ TEST_DISABLED(testSQPSimple, posesInA2DBox) {
zLowerBound = 0.0,
zUpperBound = 2.0;
//Instantiate NLP
NLP nlp;
//Instantiate LCNLP
LCNLP lcnlp;
// prior on the first pose
SharedDiagonal priorNoise = noiseModel::Diagonal::Sigmas(
(Vector(6) << 0.001, 0.001, 0.001, 0.001, 0.001, 0.001).finished());
nlp.cost.add(PriorFactor<Pose3>(X(1), Pose3(), priorNoise));
lcnlp.cost.add(PriorFactor<Pose3>(X(1), Pose3(), priorNoise));
// odometry between factor for subsequent poses
SharedDiagonal odoNoise = noiseModel::Diagonal::Sigmas(
(Vector(6) << 0.01, 0.01, 0.01, 0.1, 0.1, 0.1).finished());
Pose3 odo12(Rot3::ypr(M_PI/2.0, 0, 0), Point3(10, 0, 0));
nlp.cost.add(BetweenFactor<Pose3>(X(1), X(2), odo12, odoNoise));
lcnlp.cost.add(BetweenFactor<Pose3>(X(1), X(2), odo12, odoNoise));
Pose3 odo23(Rot3::ypr(M_PI/2.0, 0, 0), Point3(2, 0, 2));
nlp.cost.add(BetweenFactor<Pose3>(X(2), X(3), odo23, odoNoise));
lcnlp.cost.add(BetweenFactor<Pose3>(X(2), X(3), odo23, odoNoise));
// Box constraints
Key dualKey = 0;
for (size_t i=1; i<=3; ++i) {
nlp.linearInequalities.add(AxisLowerBound(X(i), X_AXIS, xLowerBound, dualKey++));
nlp.linearInequalities.add(AxisUpperBound(X(i), X_AXIS, xUpperBound, dualKey++));
nlp.linearInequalities.add(AxisLowerBound(X(i), Y_AXIS, yLowerBound, dualKey++));
nlp.linearInequalities.add(AxisUpperBound(X(i), Y_AXIS, yUpperBound, dualKey++));
nlp.linearInequalities.add(AxisLowerBound(X(i), Z_AXIS, zLowerBound, dualKey++));
nlp.linearInequalities.add(AxisUpperBound(X(i), Z_AXIS, zUpperBound, dualKey++));
lcnlp.linearInequalities.add(AxisLowerBound(X(i), X_AXIS, xLowerBound, dualKey++));
lcnlp.linearInequalities.add(AxisUpperBound(X(i), X_AXIS, xUpperBound, dualKey++));
lcnlp.linearInequalities.add(AxisLowerBound(X(i), Y_AXIS, yLowerBound, dualKey++));
lcnlp.linearInequalities.add(AxisUpperBound(X(i), Y_AXIS, yUpperBound, dualKey++));
lcnlp.linearInequalities.add(AxisLowerBound(X(i), Z_AXIS, zLowerBound, dualKey++));
lcnlp.linearInequalities.add(AxisUpperBound(X(i), Z_AXIS, zUpperBound, dualKey++));
}
Values initialValues;
@@ -412,9 +346,9 @@ TEST_DISABLED(testSQPSimple, posesInA2DBox) {
expectedSolution.insert(X(2), Pose3());
expectedSolution.insert(X(3), Pose3());
// Instantiate SQP
SQPSimple sqpSimple(nlp);
Values actualSolution = sqpSimple.optimize(initialValues).first;
// Instantiate LCNLPSolver
LCNLPSolver lcnlpSolver(lcnlp);
Values actualSolution = lcnlpSolver.optimize(initialValues).first;
actualSolution.print("actual solution: ");