added flag to enable optimality certification, some formatting
parent 553f569038
commit de5adb495f
ShonanAveraging.cpp
@@ -54,7 +54,8 @@ ShonanAveragingParameters<d>::ShonanAveragingParameters(
       alpha(alpha),
       beta(beta),
       gamma(gamma),
-      useHuber(false) {
+      useHuber(false),
+      certifyOptimality(true) {
   // By default, we will do conjugate gradient
   lm.linearSolverType = LevenbergMarquardtParams::Iterative;
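For reference, a minimal sketch (not part of this commit) of what the new default means for callers, assuming the ShonanAveragingParameters3 alias, the gtsam/sfm/ShonanAveraging.h header location, and the getters added further down in this commit:

#include <cassert>
#include <gtsam/sfm/ShonanAveraging.h>

int main() {
  // Default-constructed parameters use the Ceres-style LM defaults from the
  // constructor above; the new flag starts out enabled.
  gtsam::ShonanAveragingParameters3 params;
  assert(params.getUseHuber() == false);          // Huber loss stays opt-in
  assert(params.getCertifyOptimality() == true);  // certification on by default
  return 0;
}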
@@ -343,13 +344,15 @@ static double Kappa(const BinaryMeasurement<T> &measurement,
   } else {
     const auto &robust = boost::dynamic_pointer_cast<noiseModel::Robust>(
         measurement.noiseModel());
     // Check if noise model is robust
     if (robust) {
-      if (parameters.getUseHuber()) {
-        // Cannot verify optimality, setting arbitrary value
-        sigma = 1;
-      } else {
+      // If robust, check if optimality certificate is expected
+      if (parameters.getCertifyOptimality()) {
         throw std::invalid_argument(
-            "Robust cost function is invalid unless useHuber is set.");
+            "Verification of optimality does not work with robust cost.");
+      } else {
+        // Optimality certificate not required, so setting default sigma
+        sigma = 1;
       }
     } else {
       throw std::invalid_argument(
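A hedged sketch of the two kinds of noise model this branch distinguishes, assuming the standard GTSAM noise-model factories: Kappa turns an isotropic sigma into the concentration weight 1/sigma^2, while a Huber-wrapped model has no single sigma, so it only passes once certification is switched off (falling back to sigma = 1).

#include <gtsam/linear/NoiseModel.h>
#include <gtsam/sfm/ShonanAveraging.h>

int main() {
  using namespace gtsam;
  // Isotropic rotation noise: Kappa reads sigma directly, weight = 1 / (0.1 * 0.1).
  auto isotropic = noiseModel::Isotropic::Sigma(3, 0.1);
  // The same base model wrapped in a Huber loss: no single sigma exists, so
  // Kappa either throws (certification requested) or falls back to sigma = 1.
  auto huber = noiseModel::Robust::Create(
      noiseModel::mEstimator::Huber::Create(1.345), isotropic);

  ShonanAveragingParameters3 params;
  params.setUseHuber(true);            // use the robust loss in the optimization
  params.setCertifyOptimality(false);  // required so that Kappa does not throw
  return 0;
}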
@@ -807,30 +810,30 @@ std::pair<Values, double> ShonanAveraging<d>::run(const Values &initialEstimate,
   for (size_t p = pMin; p <= pMax; p++) {
     // Optimize until convergence at this level
     Qstar = tryOptimizingAt(p, initialSOp);
-    if (parameters_
-            .useHuber) { // in this case, there is no optimality verification
+    if (parameters_.getUseHuber() || !parameters_.getCertifyOptimality()) {
+      // in this case, there is no optimality verification
       if (pMin != pMax) {
         throw std::runtime_error(
             "When using robust norm, Shonan only tests a single rank. Set pMin = pMax");
       }
       const Values SO3Values = roundSolution(Qstar);
       return std::make_pair(SO3Values, 0);
-    } else {
-      // Check certificate of global optimality
-      Vector minEigenVector;
-      double minEigenValue = computeMinEigenValue(Qstar, &minEigenVector);
-      if (minEigenValue > parameters_.optimalityThreshold) {
-        // If at global optimum, round and return solution
-        const Values SO3Values = roundSolution(Qstar);
-        return std::make_pair(SO3Values, minEigenValue);
-      }
+    }
 
-      // Not at global optimum yet, so check whether we will go to next level
-      if (p != pMax) {
-        // Calculate initial estimate for next level by following minEigenVector
-        initialSOp =
-            initializeWithDescent(p + 1, Qstar, minEigenVector, minEigenValue);
-      }
+    // Check certificate of global optimality
+    Vector minEigenVector;
+    double minEigenValue = computeMinEigenValue(Qstar, &minEigenVector);
+    if (minEigenValue > parameters_.optimalityThreshold) {
+      // If at global optimum, round and return solution
+      const Values SO3Values = roundSolution(Qstar);
+      return std::make_pair(SO3Values, minEigenValue);
+    }
+
+    // Not at global optimum yet, so check whether we will go to next level
+    if (p != pMax) {
+      // Calculate initial estimate for next level by following minEigenVector
+      initialSOp =
+          initializeWithDescent(p + 1, Qstar, minEigenVector, minEigenValue);
     }
   }
   throw std::runtime_error("Shonan::run did not converge for given pMax");
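For context, a usage sketch of the certified path through run(), assuming GTSAM's ShonanAveraging3 wrapper and a g2o pose graph (the file name is a placeholder): with default parameters the loop climbs ranks p until the minimum-eigenvalue certificate clears optimalityThreshold, and that eigenvalue is returned alongside the rounded SO(3) solution.

#include <gtsam/sfm/ShonanAveraging.h>
#include <iostream>

int main() {
  using namespace gtsam;
  // Placeholder input: any 3D pose graph in g2o format with rotation measurements.
  ShonanAveraging3 shonan("pose3_graph.g2o");
  Values initial = shonan.initializeRandomly();
  // Default parameters certify optimality, so run() may try several ranks p.
  auto result = shonan.run(initial);
  std::cout << "certified min eigenvalue: " << result.second << std::endl;
  return 0;
}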
ShonanAveraging.h
@@ -47,14 +47,16 @@ struct GTSAM_EXPORT ShonanAveragingParameters {
   using Anchor = std::pair<size_t, Rot>;
 
   // Parameters themselves:
-  LevenbergMarquardtParams lm;  // LM parameters
-  double optimalityThreshold;   // threshold used in checkOptimality
-  Anchor anchor;                // pose to use as anchor if not Karcher
-  double alpha;                 // weight of anchor-based prior (default 0)
-  double beta;                  // weight of Karcher-based prior (default 1)
-  double gamma;                 // weight of gauge-fixing factors (default 0)
-  bool useHuber;                // if enabled, the Huber loss is used in the optimization
-                                // (default is false)
+  LevenbergMarquardtParams lm;  ///< LM parameters
+  double optimalityThreshold;   ///< threshold used in checkOptimality
+  Anchor anchor;                ///< pose to use as anchor if not Karcher
+  double alpha;                 ///< weight of anchor-based prior (default 0)
+  double beta;                  ///< weight of Karcher-based prior (default 1)
+  double gamma;                 ///< weight of gauge-fixing factors (default 0)
+  ///< if enabled, the Huber loss is used (default false)
+  bool useHuber;
+  ///< if enabled solution optimality is certified (default true)
+  bool certifyOptimality;
 
   ShonanAveragingParameters(const LevenbergMarquardtParams &lm =
                                 LevenbergMarquardtParams::CeresDefaults(),
@@ -83,6 +85,9 @@ struct GTSAM_EXPORT ShonanAveragingParameters {
   void setUseHuber(bool value) { useHuber = value; }
   bool getUseHuber() const { return useHuber; }
 
+  void setCertifyOptimality(bool value) { certifyOptimality = value; }
+  bool getCertifyOptimality() const { return certifyOptimality; }
+
   /// Print the parameters and flags used for rotation averaging.
   void print() const {
     std::cout << " ShonanAveragingParameters: " << std::endl;
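A hedged configuration sketch using the new accessors (same placeholder g2o file as above): with the Huber loss enabled and certification disabled, run() must be pinned to a single rank, i.e. pMin == pMax, and the returned eigenvalue is 0 since no certificate is computed.

#include <gtsam/sfm/ShonanAveraging.h>

int main() {
  using namespace gtsam;
  ShonanAveragingParameters3 params;
  params.setUseHuber(true);            // robust Huber loss on the measurements
  params.setCertifyOptimality(false);  // no optimality certificate in this mode

  ShonanAveraging3 shonan("pose3_graph.g2o", params);
  // With certification off, only a single rank is tested: pMin must equal pMax.
  auto result = shonan.run(shonan.initializeRandomly(), 5, 5);
  return 0;
}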
@@ -166,7 +171,7 @@ class GTSAM_EXPORT ShonanAveraging {
 
   /**
    * Update factors to use robust Huber loss.
-   * 
+   *
    * @param measurements Vector of BinaryMeasurements.
    * @param k Huber noise model threshold.
    */
@@ -174,7 +179,6 @@ class GTSAM_EXPORT ShonanAveraging {
       double k = 1.345) const {
     Measurements robustMeasurements;
     for (auto &measurement : measurements) {
-
       auto model = measurement.noiseModel();
       const auto &robust =
           boost::dynamic_pointer_cast<noiseModel::Robust>(model);
@@ -189,7 +193,7 @@ class GTSAM_EXPORT ShonanAveraging {
             noiseModel::mEstimator::Huber::Create(k), model);
       }
       BinaryMeasurement<Rot> meas(measurement.key1(), measurement.key2(),
-                                 measurement.measured(), robust_model);
+                                  measurement.measured(), robust_model);
       robustMeasurements.push_back(meas);
     }
     return robustMeasurements;
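From the caller's side, a sketch of what this helper produces, assuming parseMeasurements<Rot3> from gtsam/slam/dataset.h and the placeholder file used above: every measurement's noise model is rewrapped in a Huber m-estimator with threshold k (1.345 by default).

#include <gtsam/sfm/ShonanAveraging.h>
#include <gtsam/slam/dataset.h>

int main() {
  using namespace gtsam;
  // Placeholder file; parseMeasurements extracts the relative rotations.
  auto measurements = parseMeasurements<Rot3>("pose3_graph.g2o");

  ShonanAveraging3 shonan(measurements);
  // Wrap every measurement's noise model in a Huber loss with k = 1.345.
  auto robustMeasurements = shonan.makeNoiseModelRobust(measurements);
  return 0;
}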
|