SQP now works with single configs using the TupleConfigs, without needing a separate optimizer.

release/4.3a0
Alex Cunningham 2010-02-06 05:14:52 +00:00
parent d8a8575d26
commit 219dfd262d
4 changed files with 816 additions and 873 deletions

@@ -49,7 +49,7 @@ namespace gtsam {
virtual ~LieConfig() {}
/** print */
void print(const std::string &s) const;
void print(const std::string &s="") const;
/** Test whether configs are identical in keys and values */
bool equals(const LieConfig& expected, double tol=1e-9) const;
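The hunk above only adds a default argument to print(), so existing call sites keep working while new code can print a config without passing a label. A minimal standalone sketch of the effect of the new signature (the Config struct here is a hypothetical stand-in, not GTSAM's LieConfig):

#include <iostream>
#include <string>

// Hypothetical stand-in showing only the changed signature.
struct Config {
  // Default argument as introduced above: the label may now be omitted.
  void print(const std::string& s = "") const {
    std::cout << s << " (config contents would go here)" << std::endl;
  }
};

int main() {
  Config c;
  c.print("labeled");  // old-style call still compiles
  c.print();           // now also legal: no label required
  return 0;
}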

@@ -197,6 +197,8 @@ bool NonlinearConstraint2<Config, Key1, X1, Key2, X2>::equals(const Factor<Confi
template<class Config, class Key1, class X1, class Key2, class X2>
GaussianFactor::shared_ptr
NonlinearConstraint2<Config, Key1, X1, Key2, X2>::linearize(const Config& config) const {
const size_t p = this->p_;
// extract lagrange multiplier
Vector lambda = config[this->lagrange_key_];
@@ -207,17 +209,40 @@ NonlinearConstraint2<Config, Key1, X1, Key2, X2>::linearize(const Config& config
Matrix grad1 = G1_(config);
Matrix grad2 = G2_(config);
// construct probabilistic factor
Matrix A1 = vector_scale(lambda, grad1);
Matrix A2 = vector_scale(lambda, grad2);
SharedDiagonal probModel = sharedSigma(this->p_,1.0);
GaussianFactor::shared_ptr factor(new GaussianFactor(key1_, A1, key2_, A2,
this->lagrange_key_, eye(this->p_), zero(this->p_), probModel));
// create matrices
Matrix Ax1 = zeros(grad1.size1()*2, grad1.size2()),
Ax2 = zeros(grad2.size1()*2, grad2.size2()),
AL = eye(p*2, p);
// construct the constraint
SharedDiagonal constraintModel = noiseModel::Constrained::All(this->p_);
GaussianFactor::shared_ptr constraint(new GaussianFactor(key1_, grad1,
key2_, grad2, -1.0 * g, constraintModel));
// insert matrix components
insertSub(Ax1, vector_scale(lambda, grad1), 0, 0);
insertSub(Ax1, grad1, grad1.size1(), 0);
insertSub(Ax2, vector_scale(lambda, grad2), 0, 0);
insertSub(Ax2, grad2, grad2.size1(), 0);
Vector rhs = zero(p*2);
subInsert(rhs, -1*g, p);
// construct a mixed constraint model
Vector sigmas = zero(p*2);
subInsert(sigmas, ones(p), 0);
SharedDiagonal mixedConstraint = noiseModel::Constrained::MixedSigmas(sigmas);
GaussianFactor::shared_ptr factor(new
GaussianFactor(key1_, Ax1, key2_, Ax2, this->lagrange_key_, AL, rhs, mixedConstraint));
// // construct probabilistic factor
// Matrix A1 = vector_scale(lambda, grad1);
// Matrix A2 = vector_scale(lambda, grad2);
// SharedDiagonal probModel = sharedSigma(this->p_,1.0);
// GaussianFactor::shared_ptr factor(new GaussianFactor(key1_, A1, key2_, A2,
// this->lagrange_key_, eye(this->p_), zero(this->p_), probModel));
//
// // construct the constraint
// SharedDiagonal constraintModel = noiseModel::Constrained::All(this->p_);
// GaussianFactor::shared_ptr constraint(new GaussianFactor(key1_, grad1,
// key2_, grad2, -1.0 * g, constraintModel));
return factor;
}
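The rewritten linearize() above folds what used to be two factors (a soft "probabilistic" factor scaled by the Lagrange multiplier, plus a hard constraint factor) into a single GaussianFactor: the lambda-scaled gradients are stacked on top of the raw gradients, the multiplier block AL is an identity padded with zeros, the right-hand side carries -g(x) only in the constraint rows, and a MixedSigmas noise model marks the bottom rows as hard constraints (sigma 0) and the top rows as unit-sigma Gaussian rows. The sketch below reproduces that block layout with plain std::vector matrices rather than GTSAM types; the helpers scaleRows and stack are made up for illustration, and the numeric values are chosen to be consistent with the binary_scalar_linearize test further down (p = 1, lambda = 3, gradients 2 and -1, g(x) = -6).

#include <cassert>
#include <iostream>
#include <vector>

typedef std::vector<std::vector<double> > Matrix;  // row-major stand-in
typedef std::vector<double> Vector;

// Scale each row of G by the matching entry of lambda (cf. vector_scale above).
Matrix scaleRows(const Vector& lambda, const Matrix& G) {
  Matrix out = G;
  for (size_t i = 0; i < G.size(); ++i)
    for (size_t j = 0; j < G[i].size(); ++j)
      out[i][j] = lambda[i] * G[i][j];
  return out;
}

// Stack A on top of B (what the two insertSub calls per block achieve above).
Matrix stack(const Matrix& A, const Matrix& B) {
  Matrix out = A;
  out.insert(out.end(), B.begin(), B.end());
  return out;
}

int main() {
  const size_t p = 1;                                   // constraint dimension
  Vector lambda(1, 3.0), g(1, -6.0);                    // multiplier and constraint value
  Matrix grad1(1, Vector(1, 2.0)), grad2(1, Vector(1, -1.0));

  // Blocks on the variables: scaled gradient on top, raw gradient below.
  Matrix Ax1 = stack(scaleRows(lambda, grad1), grad1);  // [[6],[2]]
  Matrix Ax2 = stack(scaleRows(lambda, grad2), grad2);  // [[-3],[-1]]

  // Block on the multiplier: eye(2p, p) = identity over zeros.
  Matrix AL(2 * p, Vector(p, 0.0));
  for (size_t i = 0; i < p; ++i) AL[i][i] = 1.0;        // [[1],[0]]

  // Right-hand side: zero in the top rows, -g(x) in the constraint rows.
  Vector rhs(2 * p, 0.0);
  for (size_t i = 0; i < p; ++i) rhs[p + i] = -g[i];    // [0, 6]

  // Sigmas: 1 for the Gaussian rows, 0 for the hard-constraint rows.
  Vector sigmas(2 * p, 0.0);
  for (size_t i = 0; i < p; ++i) sigmas[i] = 1.0;       // [1, 0]

  assert(Ax1[0][0] == 6.0 && Ax1[1][0] == 2.0);
  assert(rhs[1] == 6.0 && sigmas[1] == 0.0);
  std::cout << "stacked SQP factor blocks built" << std::endl;
  return 0;
}

A zero sigma in GTSAM's noiseModel::Constrained models denotes an exact constraint, so the single mixed factor carries both the soft Lagrangian term and the hard constraint through elimination, which appears to be what the commit message means by not needing a separate optimizer.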

@@ -186,24 +186,15 @@ TEST( NonlinearConstraint2, binary_scalar_linearize ) {
// linearize the system
GaussianFactor::shared_ptr actualFactor = c1.linearize(realconfig);
// verify
// verify - probabilistic component goes on top
Matrix Ax0 = Matrix_(2,1, 6.0, 2.0),
Ax1 = Matrix_(2,1,-3.0,-1.0),
AL = Matrix_(2,1, 1.0, 0.0);
Vector rhs = Vector_(2, 0, 6.0),
Vector rhs = Vector_(2, 0.0, 6.0),
sigmas = Vector_(2, 1.0, 0.0);
SharedDiagonal expModel = noiseModel::Constrained::MixedSigmas(sigmas);
// SharedDiagonal probModel = sharedSigma(p,1.0);
// GaussianFactor expectedFactor(x0, Matrix_(1,1, 6.0),
// x1, Matrix_(1,1, -3.0),
// L1, eye(1), zero(1), probModel);
// SharedDiagonal constraintModel = noiseModel::Constrained::All(p);
// GaussianFactor expectedConstraint(x0, Matrix_(1,1, 2.0),
// x1, Matrix_(1,1, -1.0),
// Vector_(1, 6.0), constraintModel);
// CHECK(assert_equal(*actualFactor, expectedFactor));
// CHECK(assert_equal(*actualConstraint, expectedConstraint));
GaussianFactor expFactor(x0,Ax0, x1, Ax1,L1, AL, rhs, expModel);
CHECK(assert_equal(expFactor, *actualFactor));
}
/* ************************************************************************* */
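One detail worth noting in the test update above: the expected right-hand side changes from Vector_(2, 0, 6.0) to Vector_(2, 0.0, 6.0). Assuming Vector_ is the usual varargs-style constructor that reads each argument as a double, the integer literal 0 is not promoted to double when passed through an ellipsis, so it would be misread. A minimal sketch of that pitfall, using a hypothetical readDoubles stand-in:

#include <cstdarg>
#include <cstdio>

// Stand-in for a Vector_-style varargs helper: reads n doubles.
void readDoubles(int n, ...) {
  va_list args;
  va_start(args, n);
  for (int i = 0; i < n; ++i) {
    double v = va_arg(args, double);  // expects a double-sized argument
    std::printf("%g ", v);
  }
  va_end(args);
  std::printf("\n");
}

int main() {
  readDoubles(2, 0.0, 6.0);   // correct: both arguments really are doubles
  // readDoubles(2, 0, 6.0);  // hazardous: the int literal 0 stays an int in
  //                          // the varargs call, so va_arg misreads it
  return 0;
}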

File diff suppressed because it is too large