Updates to dogleg to work with newer Lie/Manifold/Group interface
parent 1ec7d7e86e
commit 4284f07a61
@@ -109,7 +109,7 @@ namespace gtsam {
 
   /* ************************************************************************* */
   Pose3 retract(const Vector& d) {
-    return expmap(d);
+    return retract(d);
   }
 
   /* ************************************************************************* */
@@ -159,7 +159,7 @@ typename DoglegOptimizerImpl::IterationResult DoglegOptimizerImpl::Iterate(
     cout << "Delta = " << Delta << ", dx_d_norm = " << result.dx_d.vector().norm() << endl;
 
   // Compute expmapped solution
-  const VALUES x_d(x0.expmap(result.dx_d, ordering));
+  const VALUES x_d(x0.retract(result.dx_d, ordering));
 
   // Compute decrease in f
   result.f_error = f.error(x_d);
@@ -306,7 +306,7 @@ namespace gtsam {
     DoglegOptimizerImpl::IterationResult result = DoglegOptimizerImpl::Iterate(
         parameters_->lambda_, DoglegOptimizerImpl::ONE_STEP_PER_ITERATION, *solver.eliminate(),
         *graph_, *values_, *ordering_, error_);
-    shared_values newValues(new T(values_->expmap(result.dx_d, *ordering_)));
+    shared_values newValues(new T(values_->retract(result.dx_d, *ordering_)));
     cout << "newValues: " << newValues.get() << endl;
     return newValuesErrorLambda_(newValues, result.f_error, result.Delta);
   }
@@ -405,7 +405,7 @@ TEST(DoglegOptimizer, Iterate) {
   DoglegOptimizerImpl::IterationResult result = DoglegOptimizerImpl::Iterate(Delta, DoglegOptimizerImpl::SEARCH_EACH_ITERATION, gbn, *fg, *config, *ord, fg->error(*config));
   Delta = result.Delta;
   EXPECT(result.f_error < fg->error(*config)); // Check that error decreases
-  simulated2D::Values newConfig(config->expmap(result.dx_d, *ord));
+  simulated2D::Values newConfig(config->retract(result.dx_d, *ord));
   (*config) = newConfig;
   DOUBLES_EQUAL(fg->error(*config), result.f_error, 1e-5); // Check that error is correctly filled in
}
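
The change throughout is the rename from expmap to retract: in the newer Lie/Manifold/Group interface, retract is the general operation that applies a tangent-space increment to a manifold element, and for Lie groups such as Pose3 it is typically backed by the exponential map. Below is a minimal sketch of the two styles of update, assuming a recent GTSAM release where Pose3 provides a static Expmap and a member retract; the signatures in this commit's era differ (e.g. Values::retract here still takes an Ordering).

#include <gtsam/geometry/Pose3.h>
#include <iostream>

int main() {
  using namespace gtsam;

  Pose3 x0;  // start at the identity pose
  Vector6 d = (Vector6() << 0.05, 0.0, 0.0, 0.1, 0.0, 0.0).finished();  // small tangent-space step

  // Group-style update: compose with the exponential map of the step
  Pose3 x_exp = x0 * Pose3::Expmap(d);

  // Manifold-style update: retract the tangent vector at x0
  // (with GTSAM's default configuration this is also expmap-based)
  Pose3 x_ret = x0.retract(d);

  std::cout << "updates agree: " << x_exp.equals(x_ret, 1e-9) << std::endl;
  return 0;
}

The Values::retract calls in the hunks above do the same thing for a whole set of variables at once, applying the per-variable dogleg step stored in result.dx_d.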