Merge pull request #920 from borglab/fix/minor-stuff

commit f454bcbdf8
@@ -36,17 +36,17 @@ namespace gtsam {
       // no Ordering is provided. When removing optional from VariableIndex, create VariableIndex
       // before creating ordering.
       VariableIndex computedVariableIndex(asDerived());
-      return eliminateSequential(function, computedVariableIndex, orderingType);
+      return eliminateSequential(orderingType, function, computedVariableIndex);
     }
     else {
       // Compute an ordering and call this function again. We are guaranteed to have a
       // VariableIndex already here because we computed one if needed in the previous 'if' block.
       if (orderingType == Ordering::METIS) {
         Ordering computedOrdering = Ordering::Metis(asDerived());
-        return eliminateSequential(computedOrdering, function, variableIndex, orderingType);
+        return eliminateSequential(computedOrdering, function, variableIndex);
       } else {
         Ordering computedOrdering = Ordering::Colamd(*variableIndex);
-        return eliminateSequential(computedOrdering, function, variableIndex, orderingType);
+        return eliminateSequential(computedOrdering, function, variableIndex);
       }
     }
   }
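
The fix here is overload selection: `eliminateSequential` has one overload that takes an ordering *type* as its first parameter and computes the ordering itself, and another that takes a precomputed `Ordering`; neither takes a trailing `orderingType`. A minimal sketch of the two corrected call shapes from the caller's side, assuming GTSAM 4.x headers (the toy graph is illustrative only, not from this PR):

    #include <gtsam/inference/Ordering.h>
    #include <gtsam/linear/GaussianBayesNet.h>
    #include <gtsam/linear/GaussianFactorGraph.h>

    using namespace gtsam;

    int main() {
      // Toy 2-variable chain, just to have something to eliminate.
      GaussianFactorGraph graph;
      graph.add(0, I_1x1, Vector1(1.0), noiseModel::Unit::Create(1));
      graph.add(0, -I_1x1, 1, I_1x1, Vector1(0.0), noiseModel::Unit::Create(1));

      // Overload 1: pass the ordering *type*; GTSAM computes the ordering.
      GaussianBayesNet::shared_ptr bn1 = graph.eliminateSequential(Ordering::COLAMD);

      // Overload 2: pass a precomputed Ordering (no trailing orderingType).
      const Ordering ordering = Ordering::Colamd(graph);
      GaussianBayesNet::shared_ptr bn2 = graph.eliminateSequential(ordering);
      return 0;
    }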

@@ -142,8 +142,9 @@ class GTSAM_EXPORT GncOptimizer {
    * provides an extra interface for the user to initialize the weightst
    * */
   void setWeights(const Vector w) {
-    if(w.size() != nfg_.size()){
-      throw std::runtime_error("GncOptimizer::setWeights: the number of specified weights"
+    if (size_t(w.size()) != nfg_.size()) {
+      throw std::runtime_error(
+          "GncOptimizer::setWeights: the number of specified weights"
           " does not match the size of the factor graph.");
     }
     weights_ = w;
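
The reworked condition fixes a signed/unsigned comparison: Eigen's `size()` returns a signed `Eigen::Index`, while the factor graph's `size()` returns `size_t`, so the bare comparison draws `-Wsign-compare` warnings. A standalone sketch of the same pattern outside GTSAM (names here are illustrative):

    #include <Eigen/Core>
    #include <stdexcept>
    #include <vector>

    // Mirrors the check above: w has a signed Eigen::Index size, while the
    // factor container reports an unsigned size_t.
    void checkWeightCount(const Eigen::VectorXd& w, const std::vector<int>& factors) {
      // Without the size_t(...) cast, `w.size() != factors.size()` compares
      // signed with unsigned, which most compilers warn about.
      if (size_t(w.size()) != factors.size()) {
        throw std::runtime_error("weight count does not match factor count");
      }
    }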

@@ -35,8 +35,7 @@ namespace gtsam {
  * zero errors anyway. However, it means that below will only be exact for the correct measurement.
  */
 JacobianFactor linearizeNumerically(const NoiseModelFactor& factor,
     const Values& values, double delta = 1e-5) {
-
   // We will fill a vector of key/Jacobians pairs (a map would sort)
   std::vector<std::pair<Key, Matrix> > jacobians;
 
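
The typical caller of this helper is a factor unit test: linearize numerically and compare against the factor's analytic `linearize`. A hedged sketch of such a test (the factor, keys, values, and function name are illustrative, not from this PR):

    #include <gtsam/geometry/Pose2.h>
    #include <gtsam/linear/JacobianFactor.h>
    #include <gtsam/nonlinear/PriorFactor.h>
    #include <gtsam/nonlinear/Values.h>

    using namespace gtsam;

    void checkPriorFactorJacobian() {
      PriorFactor<Pose2> factor(1, Pose2(1.0, 2.0, 0.3),
                                noiseModel::Isotropic::Sigma(3, 0.1));
      Values values;
      values.insert(1, Pose2(0.9, 2.1, 0.25));

      // Numeric Jacobians via the helper above...
      JacobianFactor numeric = linearizeNumerically(factor, values);
      // ...versus the factor's analytic linearization; a unit test would
      // compare the two, e.g. with gtsam::assert_equal.
      GaussianFactor::shared_ptr analytic = factor.linearize(values);
    }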

@@ -46,24 +45,24 @@ JacobianFactor linearizeNumerically(const NoiseModelFactor& factor,
 
   // Loop over all variables
   const double one_over_2delta = 1.0 / (2.0 * delta);
-  for(Key key: factor) {
+  for (Key key : factor) {
     // Compute central differences using the values struct.
     VectorValues dX = values.zeroVectors();
     const size_t cols = dX.dim(key);
     Matrix J = Matrix::Zero(rows, cols);
     for (size_t col = 0; col < cols; ++col) {
       Eigen::VectorXd dx = Eigen::VectorXd::Zero(cols);
-      dx[col] = delta;
+      dx(col) = delta;
       dX[key] = dx;
       Values eval_values = values.retract(dX);
       const Eigen::VectorXd left = factor.whitenedError(eval_values);
-      dx[col] = -delta;
+      dx(col) = -delta;
       dX[key] = dx;
       eval_values = values.retract(dX);
       const Eigen::VectorXd right = factor.whitenedError(eval_values);
       J.col(col) = (left - right) * one_over_2delta;
     }
-    jacobians.push_back(std::make_pair(key,J));
+    jacobians.push_back(std::make_pair(key, J));
   }
 
   // Next step...return JacobianFactor
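
Each column of `J` is a central difference: column `col` approximates (h(x + delta*e_col) - h(x - delta*e_col)) / (2*delta), with `retract` handling manifold-valued variables and `whitenedError` folding in the noise model. (`dx(col)` rather than `dx[col]` is Eigen's conventional coefficient accessor; the two are equivalent for vectors.) The same scheme for a plain Euclidean function, stripped of retraction and whitening, looks like this sketch (not GTSAM API):

    #include <Eigen/Core>
    #include <functional>

    // Central-difference Jacobian of f: R^n -> R^m at x.
    Eigen::MatrixXd numericalJacobian(
        const std::function<Eigen::VectorXd(const Eigen::VectorXd&)>& f,
        const Eigen::VectorXd& x, double delta = 1e-5) {
      const Eigen::VectorXd fx = f(x);
      Eigen::MatrixXd J(fx.size(), x.size());
      const double one_over_2delta = 1.0 / (2.0 * delta);
      for (int col = 0; col < x.size(); ++col) {
        Eigen::VectorXd dx = Eigen::VectorXd::Zero(x.size());
        dx(col) = delta;  // perturb one coordinate at a time
        // Column col: (f(x + delta*e_col) - f(x - delta*e_col)) / (2*delta).
        J.col(col) = (f(x + dx) - f(x - dx)) * one_over_2delta;
      }
      return J;
    }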

@@ -115,6 +115,10 @@ class Ordering {
   Ordering();
   Ordering(const gtsam::Ordering& other);
 
+  template <FACTOR_GRAPH = {gtsam::NonlinearFactorGraph,
+                            gtsam::GaussianFactorGraph}>
+  static gtsam::Ordering Colamd(const FACTOR_GRAPH& graph);
+
   // Testable
   void print(string s = "", const gtsam::KeyFormatter& keyFormatter =
       gtsam::DefaultKeyFormatter) const;
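
The `template <FACTOR_GRAPH = {...}>` line is wrap-tool syntax: it asks the GTSAM interface generator to instantiate the templated static method once per listed graph type, so the wrapped languages gain `Ordering.Colamd` for both nonlinear and Gaussian graphs. On the C++ side the underlying template already resolves for either graph; a short sketch (function name is illustrative):

    #include <gtsam/inference/Ordering.h>
    #include <gtsam/linear/GaussianFactorGraph.h>
    #include <gtsam/nonlinear/NonlinearFactorGraph.h>

    // One template, two instantiations, matching the wrapper's
    // FACTOR_GRAPH = {NonlinearFactorGraph, GaussianFactorGraph} list.
    void computeOrderings(const gtsam::NonlinearFactorGraph& nfg,
                          const gtsam::GaussianFactorGraph& gfg) {
      const gtsam::Ordering o1 = gtsam::Ordering::Colamd(nfg);
      const gtsam::Ordering o2 = gtsam::Ordering::Colamd(gfg);
    }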

@@ -7,73 +7,79 @@
 See LICENSE for the license information
 
 Solve a structure-from-motion problem from a "Bundle Adjustment in the Large" file
-Author: Frank Dellaert (Python: Akshay Krishnan, John Lambert)
+Author: Frank Dellaert (Python: Akshay Krishnan, John Lambert, Varun Agrawal)
 """
 
 import argparse
 import logging
 import sys
 
-import matplotlib.pyplot as plt
-import numpy as np
-
 import gtsam
-from gtsam import (
-    GeneralSFMFactorCal3Bundler,
-    PinholeCameraCal3Bundler,
-    PriorFactorPinholeCameraCal3Bundler,
-    readBal,
-    symbol_shorthand
-)
-
-C = symbol_shorthand.C
-P = symbol_shorthand.P
-
-logging.basicConfig(stream=sys.stdout, level=logging.DEBUG)
-
-
-def run(args):
+from gtsam import (GeneralSFMFactorCal3Bundler,
+                   PriorFactorPinholeCameraCal3Bundler, PriorFactorPoint3)
+from gtsam.symbol_shorthand import C, P  # type: ignore
+from gtsam.utils import plot  # type: ignore
+from matplotlib import pyplot as plt
+
+logging.basicConfig(stream=sys.stdout, level=logging.INFO)
+
+DEFAULT_BAL_DATASET = "dubrovnik-3-7-pre"
+
+
+def plot_scene(scene_data: gtsam.SfmData, result: gtsam.Values) -> None:
+    """Plot the SFM results."""
+    plot_vals = gtsam.Values()
+    for cam_idx in range(scene_data.number_cameras()):
+        plot_vals.insert(C(cam_idx),
+                         result.atPinholeCameraCal3Bundler(C(cam_idx)).pose())
+    for j in range(scene_data.number_tracks()):
+        plot_vals.insert(P(j), result.atPoint3(P(j)))
+
+    plot.plot_3d_points(0, plot_vals, linespec="g.")
+    plot.plot_trajectory(0, plot_vals, title="SFM results")
+
+    plt.show()
+
+
+def run(args: argparse.Namespace) -> None:
     """ Run LM optimization with BAL input data and report resulting error """
-    input_file = gtsam.findExampleDataFile(args.input_file)
+    input_file = args.input_file
 
     # Load the SfM data from file
-    scene_data = readBal(input_file)
-    logging.info(f"read {scene_data.number_tracks()} tracks on {scene_data.number_cameras()} cameras\n")
+    scene_data = gtsam.readBal(input_file)
+    logging.info("read %d tracks on %d cameras\n", scene_data.number_tracks(),
+                 scene_data.number_cameras())
 
     # Create a factor graph
     graph = gtsam.NonlinearFactorGraph()
 
     # We share *one* noiseModel between all projection factors
     noise = gtsam.noiseModel.Isotropic.Sigma(2, 1.0)  # one pixel in u and v
 
     # Add measurements to the factor graph
-    j = 0
-    for t_idx in range(scene_data.number_tracks()):
-        track = scene_data.track(t_idx)  # SfmTrack
+    for j in range(scene_data.number_tracks()):
+        track = scene_data.track(j)  # SfmTrack
         # retrieve the SfmMeasurement objects
         for m_idx in range(track.number_measurements()):
            # i represents the camera index, and uv is the 2d measurement
            i, uv = track.measurement(m_idx)
            # note use of shorthand symbols C and P
            graph.add(GeneralSFMFactorCal3Bundler(uv, noise, C(i), P(j)))
-        j += 1
 
     # Add a prior on pose x1. This indirectly specifies where the origin is.
     graph.push_back(
-        gtsam.PriorFactorPinholeCameraCal3Bundler(
-            C(0), scene_data.camera(0), gtsam.noiseModel.Isotropic.Sigma(9, 0.1)
-        )
-    )
+        PriorFactorPinholeCameraCal3Bundler(
+            C(0), scene_data.camera(0),
+            gtsam.noiseModel.Isotropic.Sigma(9, 0.1)))
     # Also add a prior on the position of the first landmark to fix the scale
     graph.push_back(
-        gtsam.PriorFactorPoint3(
-            P(0), scene_data.track(0).point3(), gtsam.noiseModel.Isotropic.Sigma(3, 0.1)
-        )
-    )
+        PriorFactorPoint3(P(0),
+                          scene_data.track(0).point3(),
+                          gtsam.noiseModel.Isotropic.Sigma(3, 0.1)))
 
     # Create initial estimate
     initial = gtsam.Values()
 
     i = 0
     # add each PinholeCameraCal3Bundler
     for cam_idx in range(scene_data.number_cameras()):

@@ -81,12 +87,10 @@ def run(args):
         initial.insert(C(i), camera)
         i += 1
 
-    j = 0
     # add each SfmTrack
-    for t_idx in range(scene_data.number_tracks()):
-        track = scene_data.track(t_idx)
+    for j in range(scene_data.number_tracks()):
+        track = scene_data.track(j)
         initial.insert(P(j), track.point3())
-        j += 1
 
     # Optimize the graph and print results
     try:

@@ -94,25 +98,31 @@ def run(args):
         params.setVerbosityLM("ERROR")
         lm = gtsam.LevenbergMarquardtOptimizer(graph, initial, params)
         result = lm.optimize()
-    except Exception as e:
+    except RuntimeError:
         logging.exception("LM Optimization failed")
         return
 
     # Error drops from ~2764.22 to ~0.046
-    logging.info(f"final error: {graph.error(result)}")
+    logging.info("initial error: %f", graph.error(initial))
+    logging.info("final error: %f", graph.error(result))
+
+    plot_scene(scene_data, result)
+
+
+def main() -> None:
+    """Main runner."""
+    parser = argparse.ArgumentParser()
+    parser.add_argument('-i',
+                        '--input_file',
+                        type=str,
+                        default=gtsam.findExampleDataFile(DEFAULT_BAL_DATASET),
+                        help="""Read SFM data from the specified BAL file.
+        The data format is described here: https://grail.cs.washington.edu/projects/bal/.
+        BAL files contain (nrPoses, nrPoints, nrObservations), followed by (i,j,u,v) tuples,
+        then (wx,wy,wz,tx,ty,tz,f,k1,k1) as Bundler camera calibrations w/ Rodrigues vector
+        and (x,y,z) 3d point initializations.""")
+    run(parser.parse_args())
 
 
 if __name__ == "__main__":
-    parser = argparse.ArgumentParser()
-    parser.add_argument(
-        '-i',
-        '--input_file',
-        type=str,
-        default="dubrovnik-3-7-pre",
-        help='Read SFM data from the specified BAL file'
-        'The data format is described here: https://grail.cs.washington.edu/projects/bal/.'
-        'BAL files contain (nrPoses, nrPoints, nrObservations), followed by (i,j,u,v) tuples, '
-        'then (wx,wy,wz,tx,ty,tz,f,k1,k1) as Bundler camera calibrations w/ Rodrigues vector'
-        'and (x,y,z) 3d point initializations.'
-    )
-    run(parser.parse_args())
+    main()