Merge remote-tracking branch 'origin/develop' into feature/sam_sfm_directories

commit da813d8171

.cproject (26 lines changed)
|
@ -1309,6 +1309,7 @@
|
|||
</target>
|
||||
<target name="testSimulated2DOriented.run" path="slam" targetID="org.eclipse.cdt.build.MakeTargetBuilder">
|
||||
<buildCommand>make</buildCommand>
|
||||
<buildArguments/>
|
||||
<buildTarget>testSimulated2DOriented.run</buildTarget>
|
||||
<stopOnError>true</stopOnError>
|
||||
<useDefaultCommand>false</useDefaultCommand>
|
||||
|
@ -1348,6 +1349,7 @@
|
|||
</target>
|
||||
<target name="testSimulated2D.run" path="slam" targetID="org.eclipse.cdt.build.MakeTargetBuilder">
|
||||
<buildCommand>make</buildCommand>
|
||||
<buildArguments/>
|
||||
<buildTarget>testSimulated2D.run</buildTarget>
|
||||
<stopOnError>true</stopOnError>
|
||||
<useDefaultCommand>false</useDefaultCommand>
|
||||
|
@ -1355,6 +1357,7 @@
|
|||
</target>
|
||||
<target name="testSimulated3D.run" path="slam" targetID="org.eclipse.cdt.build.MakeTargetBuilder">
|
||||
<buildCommand>make</buildCommand>
|
||||
<buildArguments/>
|
||||
<buildTarget>testSimulated3D.run</buildTarget>
|
||||
<stopOnError>true</stopOnError>
|
||||
<useDefaultCommand>false</useDefaultCommand>
|
||||
|
@ -1458,7 +1461,6 @@
|
|||
</target>
|
||||
<target name="testErrors.run" path="linear" targetID="org.eclipse.cdt.build.MakeTargetBuilder">
|
||||
<buildCommand>make</buildCommand>
|
||||
<buildArguments/>
|
||||
<buildTarget>testErrors.run</buildTarget>
|
||||
<stopOnError>true</stopOnError>
|
||||
<useDefaultCommand>false</useDefaultCommand>
|
||||
|
@ -1769,7 +1771,6 @@
|
|||
</target>
|
||||
<target name="Generate DEB Package" path="" targetID="org.eclipse.cdt.build.MakeTargetBuilder">
|
||||
<buildCommand>cpack</buildCommand>
|
||||
<buildArguments/>
|
||||
<buildTarget>-G DEB</buildTarget>
|
||||
<stopOnError>true</stopOnError>
|
||||
<useDefaultCommand>false</useDefaultCommand>
|
||||
|
@ -1777,7 +1778,6 @@
|
|||
</target>
|
||||
<target name="Generate RPM Package" path="" targetID="org.eclipse.cdt.build.MakeTargetBuilder">
|
||||
<buildCommand>cpack</buildCommand>
|
||||
<buildArguments/>
|
||||
<buildTarget>-G RPM</buildTarget>
|
||||
<stopOnError>true</stopOnError>
|
||||
<useDefaultCommand>false</useDefaultCommand>
|
||||
|
@ -1785,7 +1785,6 @@
|
|||
</target>
|
||||
<target name="Generate TGZ Package" path="" targetID="org.eclipse.cdt.build.MakeTargetBuilder">
|
||||
<buildCommand>cpack</buildCommand>
|
||||
<buildArguments/>
|
||||
<buildTarget>-G TGZ</buildTarget>
|
||||
<stopOnError>true</stopOnError>
|
||||
<useDefaultCommand>false</useDefaultCommand>
|
||||
|
@ -1793,7 +1792,6 @@
|
|||
</target>
|
||||
<target name="Generate TGZ Source Package" path="" targetID="org.eclipse.cdt.build.MakeTargetBuilder">
|
||||
<buildCommand>cpack</buildCommand>
|
||||
<buildArguments/>
|
||||
<buildTarget>--config CPackSourceConfig.cmake</buildTarget>
|
||||
<stopOnError>true</stopOnError>
|
||||
<useDefaultCommand>false</useDefaultCommand>
|
||||
|
@ -1985,6 +1983,7 @@
|
|||
</target>
|
||||
<target name="tests/testGaussianISAM2" path="build/slam" targetID="org.eclipse.cdt.build.MakeTargetBuilder">
|
||||
<buildCommand>make</buildCommand>
|
||||
<buildArguments/>
|
||||
<buildTarget>tests/testGaussianISAM2</buildTarget>
|
||||
<stopOnError>true</stopOnError>
|
||||
<useDefaultCommand>false</useDefaultCommand>
|
||||
|
@ -2120,7 +2119,6 @@
|
|||
</target>
|
||||
<target name="tests/testBayesTree.run" path="inference" targetID="org.eclipse.cdt.build.MakeTargetBuilder">
|
||||
<buildCommand>make</buildCommand>
|
||||
<buildArguments/>
|
||||
<buildTarget>tests/testBayesTree.run</buildTarget>
|
||||
<stopOnError>true</stopOnError>
|
||||
<useDefaultCommand>false</useDefaultCommand>
|
||||
|
@ -2128,7 +2126,6 @@
|
|||
</target>
|
||||
<target name="testBinaryBayesNet.run" path="inference" targetID="org.eclipse.cdt.build.MakeTargetBuilder">
|
||||
<buildCommand>make</buildCommand>
|
||||
<buildArguments/>
|
||||
<buildTarget>testBinaryBayesNet.run</buildTarget>
|
||||
<stopOnError>true</stopOnError>
|
||||
<useDefaultCommand>false</useDefaultCommand>
|
||||
|
@ -2176,7 +2173,6 @@
|
|||
</target>
|
||||
<target name="testSymbolicBayesNet.run" path="inference" targetID="org.eclipse.cdt.build.MakeTargetBuilder">
|
||||
<buildCommand>make</buildCommand>
|
||||
<buildArguments/>
|
||||
<buildTarget>testSymbolicBayesNet.run</buildTarget>
|
||||
<stopOnError>true</stopOnError>
|
||||
<useDefaultCommand>false</useDefaultCommand>
|
||||
|
@ -2184,7 +2180,6 @@
|
|||
</target>
|
||||
<target name="tests/testSymbolicFactor.run" path="inference" targetID="org.eclipse.cdt.build.MakeTargetBuilder">
|
||||
<buildCommand>make</buildCommand>
|
||||
<buildArguments/>
|
||||
<buildTarget>tests/testSymbolicFactor.run</buildTarget>
|
||||
<stopOnError>true</stopOnError>
|
||||
<useDefaultCommand>false</useDefaultCommand>
|
||||
|
@ -2192,7 +2187,6 @@
|
|||
</target>
|
||||
<target name="testSymbolicFactorGraph.run" path="inference" targetID="org.eclipse.cdt.build.MakeTargetBuilder">
|
||||
<buildCommand>make</buildCommand>
|
||||
<buildArguments/>
|
||||
<buildTarget>testSymbolicFactorGraph.run</buildTarget>
|
||||
<stopOnError>true</stopOnError>
|
||||
<useDefaultCommand>false</useDefaultCommand>
|
||||
|
@ -2208,7 +2202,6 @@
|
|||
</target>
|
||||
<target name="tests/testBayesTree" path="inference" targetID="org.eclipse.cdt.build.MakeTargetBuilder">
|
||||
<buildCommand>make</buildCommand>
|
||||
<buildArguments/>
|
||||
<buildTarget>tests/testBayesTree</buildTarget>
|
||||
<stopOnError>true</stopOnError>
|
||||
<useDefaultCommand>false</useDefaultCommand>
|
||||
|
@ -2902,6 +2895,14 @@
|
|||
<useDefaultCommand>true</useDefaultCommand>
|
||||
<runAllBuilders>true</runAllBuilders>
|
||||
</target>
|
||||
<target name="timeSchurFactors.run" path="build/timing" targetID="org.eclipse.cdt.build.MakeTargetBuilder">
|
||||
<buildCommand>make</buildCommand>
|
||||
<buildArguments>-j4</buildArguments>
|
||||
<buildTarget>timeSchurFactors.run</buildTarget>
|
||||
<stopOnError>true</stopOnError>
|
||||
<useDefaultCommand>true</useDefaultCommand>
|
||||
<runAllBuilders>true</runAllBuilders>
|
||||
</target>
|
||||
<target name="testBTree.run" path="build/gtsam_unstable/base/tests" targetID="org.eclipse.cdt.build.MakeTargetBuilder">
|
||||
<buildCommand>make</buildCommand>
|
||||
<buildArguments>-j5</buildArguments>
|
||||
|
@ -3344,7 +3345,6 @@
|
|||
</target>
|
||||
<target name="testGraph.run" path="build/tests" targetID="org.eclipse.cdt.build.MakeTargetBuilder">
|
||||
<buildCommand>make</buildCommand>
|
||||
<buildArguments/>
|
||||
<buildTarget>testGraph.run</buildTarget>
|
||||
<stopOnError>true</stopOnError>
|
||||
<useDefaultCommand>false</useDefaultCommand>
|
||||
|
@ -3352,7 +3352,6 @@
|
|||
</target>
|
||||
<target name="testJunctionTree.run" path="build/tests" targetID="org.eclipse.cdt.build.MakeTargetBuilder">
|
||||
<buildCommand>make</buildCommand>
|
||||
<buildArguments/>
|
||||
<buildTarget>testJunctionTree.run</buildTarget>
|
||||
<stopOnError>true</stopOnError>
|
||||
<useDefaultCommand>false</useDefaultCommand>
|
||||
|
@ -3360,7 +3359,6 @@
|
|||
</target>
|
||||
<target name="testSymbolicBayesNetB.run" path="build/tests" targetID="org.eclipse.cdt.build.MakeTargetBuilder">
|
||||
<buildCommand>make</buildCommand>
|
||||
<buildArguments/>
|
||||
<buildTarget>testSymbolicBayesNetB.run</buildTarget>
|
||||
<stopOnError>true</stopOnError>
|
||||
<useDefaultCommand>false</useDefaultCommand>

@@ -158,6 +158,12 @@ else()
  set(GTSAM_USE_TBB 0) # This will go into config.h
endif()

###############################################################################
# Prohibit Timing build mode in combination with TBB
if(GTSAM_USE_TBB AND (CMAKE_BUILD_TYPE STREQUAL "Timing"))
  message(FATAL_ERROR "Timing build mode cannot be used together with TBB. Use a sampling profiler such as Instruments or Intel VTune Amplifier instead.")
endif()

###############################################################################
# Find Google perftools

@@ -173,6 +179,11 @@ if(MKL_FOUND AND GTSAM_WITH_EIGEN_MKL)
  set(EIGEN_USE_MKL_ALL 1) # This will go into config.h - it makes Eigen use MKL
  include_directories(${MKL_INCLUDE_DIR})
  list(APPEND GTSAM_ADDITIONAL_LIBRARIES ${MKL_LIBRARIES})

  # --no-as-needed is required with gcc according to the MKL link advisor
  if(CMAKE_CXX_COMPILER_ID STREQUAL "GNU")
    set(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -Wl,--no-as-needed")
  endif()
else()
  set(GTSAM_USE_EIGEN_MKL 0)
  set(EIGEN_USE_MKL_ALL 0)
|
|
@@ -195,7 +195,9 @@ macro(gtsamAddTestsGlob_impl groupName globPatterns excludedFiles linkLibraries)
add_test(NAME ${target_name} COMMAND ${target_name})
add_dependencies(check.${groupName} ${target_name})
add_dependencies(check ${target_name})
add_dependencies(all.tests ${script_name})
if(NOT XCODE_VERSION)
  add_dependencies(all.tests ${script_name})
endif()

# Add TOPSRCDIR
set_property(SOURCE ${script_srcs} APPEND PROPERTY COMPILE_DEFINITIONS "TOPSRCDIR=\"${PROJECT_SOURCE_DIR}\"")
|
|
@@ -0,0 +1 @@
*.txt
|
|
@@ -97,7 +97,7 @@ int main(int argc, char* argv[]) {
// Intentionally initialize the variables off from the ground truth
Values initialEstimate;
for (size_t i = 0; i < poses.size(); ++i)
  initialEstimate.insert(Symbol('x', i), poses[i].compose(Pose3(Rot3::rodriguez(-0.1, 0.2, 0.25), Point3(0.05, -0.10, 0.20))));
  initialEstimate.insert(Symbol('x', i), poses[i].compose(Pose3(Rot3::Rodrigues(-0.1, 0.2, 0.25), Point3(0.05, -0.10, 0.20))));
for (size_t j = 0; j < points.size(); ++j)
  initialEstimate.insert(Symbol('l', j), points[j].compose(Point3(-0.25, 0.20, 0.15)));
initialEstimate.print("Initial Estimates:\n");
|
|
@@ -84,7 +84,7 @@ int main(int argc, char* argv[]) {

// Create perturbed initial
Values initial;
Pose3 delta(Rot3::rodriguez(-0.1, 0.2, 0.25), Point3(0.05, -0.10, 0.20));
Pose3 delta(Rot3::Rodrigues(-0.1, 0.2, 0.25), Point3(0.05, -0.10, 0.20));
for (size_t i = 0; i < poses.size(); ++i)
  initial.insert(Symbol('x', i), poses[i].compose(delta));
for (size_t j = 0; j < points.size(); ++j)
|
|
@@ -34,6 +34,9 @@ using namespace gtsam;
// Make the typename short so it looks much cleaner
typedef SmartProjectionPoseFactor<Cal3_S2> SmartFactor;

// create a typedef to the camera type
typedef PinholePose<Cal3_S2> Camera;

/* ************************************************************************* */
int main(int argc, char* argv[]) {

@@ -55,12 +58,12 @@ int main(int argc, char* argv[]) {
for (size_t j = 0; j < points.size(); ++j) {

  // every landmark represent a single landmark, we use shared pointer to init the factor, and then insert measurements.
  SmartFactor::shared_ptr smartfactor(new SmartFactor());
  SmartFactor::shared_ptr smartfactor(new SmartFactor(K));

  for (size_t i = 0; i < poses.size(); ++i) {

    // generate the 2D measurement
    SimpleCamera camera(poses[i], *K);
    Camera camera(poses[i], K);
    Point2 measurement = camera.project(points[j]);

    // call add() function to add measurement into a single factor, here we need to add:

@@ -68,7 +71,7 @@ int main(int argc, char* argv[]) {
    // 2. the corresponding camera's key
    // 3. camera noise model
    // 4. camera calibration
    smartfactor->add(measurement, i, measurementNoise, K);
    smartfactor->add(measurement, i, measurementNoise);
  }

  // insert the smart factor in the graph

@@ -77,24 +80,24 @@ int main(int argc, char* argv[]) {

// Add a prior on pose x0. This indirectly specifies where the origin is.
// 30cm std on x,y,z 0.1 rad on roll,pitch,yaw
noiseModel::Diagonal::shared_ptr poseNoise = noiseModel::Diagonal::Sigmas(
noiseModel::Diagonal::shared_ptr noise = noiseModel::Diagonal::Sigmas(
  (Vector(6) << Vector3::Constant(0.3), Vector3::Constant(0.1)).finished());
graph.push_back(PriorFactor<Pose3>(0, poses[0], poseNoise));
graph.push_back(PriorFactor<Camera>(0, Camera(poses[0],K), noise));

// Because the structure-from-motion problem has a scale ambiguity, the problem is
// still under-constrained. Here we add a prior on the second pose x1, so this will
// fix the scale by indicating the distance between x0 and x1.
// Because these two are fixed, the rest of the poses will be also be fixed.
graph.push_back(PriorFactor<Pose3>(1, poses[1], poseNoise)); // add directly to graph
graph.push_back(PriorFactor<Camera>(1, Camera(poses[1],K), noise)); // add directly to graph

graph.print("Factor Graph:\n");

// Create the initial estimate to the solution
// Intentionally initialize the variables off from the ground truth
Values initialEstimate;
Pose3 delta(Rot3::rodriguez(-0.1, 0.2, 0.25), Point3(0.05, -0.10, 0.20));
Pose3 delta(Rot3::Rodrigues(-0.1, 0.2, 0.25), Point3(0.05, -0.10, 0.20));
for (size_t i = 0; i < poses.size(); ++i)
  initialEstimate.insert(i, poses[i].compose(delta));
  initialEstimate.insert(i, Camera(poses[i].compose(delta), K));
initialEstimate.print("Initial Estimates:\n");

// Optimize the graph and print results
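For quick reference, the API change exercised above reduces to a short sketch; SmartFactor, K, measurement, i, and measurementNoise are the objects from the surrounding example, so treat this as an illustrative fragment rather than the factor's full interface:

// Before this change: a default-constructed smart factor, calibration passed with every measurement
SmartFactor::shared_ptr oldStyle(new SmartFactor());
oldStyle->add(measurement, i, measurementNoise, K);

// After this change: the shared calibration K is bound to the factor once,
// and add() takes only the measurement, the pose key, and the noise model
SmartFactor::shared_ptr newStyle(new SmartFactor(K));
newStyle->add(measurement, i, measurementNoise);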
|
|
@ -30,6 +30,9 @@ using namespace gtsam;
|
|||
// Make the typename short so it looks much cleaner
|
||||
typedef SmartProjectionPoseFactor<Cal3_S2> SmartFactor;
|
||||
|
||||
// create a typedef to the camera type
|
||||
typedef PinholePose<Cal3_S2> Camera;
|
||||
|
||||
/* ************************************************************************* */
|
||||
int main(int argc, char* argv[]) {
|
||||
|
||||
|
@ -51,16 +54,16 @@ int main(int argc, char* argv[]) {
|
|||
for (size_t j = 0; j < points.size(); ++j) {
|
||||
|
||||
// every landmark represent a single landmark, we use shared pointer to init the factor, and then insert measurements.
|
||||
SmartFactor::shared_ptr smartfactor(new SmartFactor());
|
||||
SmartFactor::shared_ptr smartfactor(new SmartFactor(K));
|
||||
|
||||
for (size_t i = 0; i < poses.size(); ++i) {
|
||||
|
||||
// generate the 2D measurement
|
||||
SimpleCamera camera(poses[i], *K);
|
||||
Camera camera(poses[i], K);
|
||||
Point2 measurement = camera.project(points[j]);
|
||||
|
||||
// call add() function to add measurement into a single factor, here we need to add:
|
||||
smartfactor->add(measurement, i, measurementNoise, K);
|
||||
smartfactor->add(measurement, i, measurementNoise);
|
||||
}
|
||||
|
||||
// insert the smart factor in the graph
|
||||
|
@ -69,18 +72,18 @@ int main(int argc, char* argv[]) {
|
|||
|
||||
// Add a prior on pose x0. This indirectly specifies where the origin is.
|
||||
// 30cm std on x,y,z 0.1 rad on roll,pitch,yaw
|
||||
noiseModel::Diagonal::shared_ptr poseNoise = noiseModel::Diagonal::Sigmas(
|
||||
noiseModel::Diagonal::shared_ptr noise = noiseModel::Diagonal::Sigmas(
|
||||
(Vector(6) << Vector3::Constant(0.3), Vector3::Constant(0.1)).finished());
|
||||
graph.push_back(PriorFactor<Pose3>(0, poses[0], poseNoise));
|
||||
graph.push_back(PriorFactor<Camera>(0, Camera(poses[0],K), noise));
|
||||
|
||||
// Fix the scale ambiguity by adding a prior
|
||||
graph.push_back(PriorFactor<Pose3>(1, poses[1], poseNoise));
|
||||
graph.push_back(PriorFactor<Camera>(1, Camera(poses[0],K), noise));
|
||||
|
||||
// Create the initial estimate to the solution
|
||||
Values initialEstimate;
|
||||
Pose3 delta(Rot3::rodriguez(-0.1, 0.2, 0.25), Point3(0.05, -0.10, 0.20));
|
||||
Pose3 delta(Rot3::Rodrigues(-0.1, 0.2, 0.25), Point3(0.05, -0.10, 0.20));
|
||||
for (size_t i = 0; i < poses.size(); ++i)
|
||||
initialEstimate.insert(i, poses[i].compose(delta));
|
||||
initialEstimate.insert(i, Camera(poses[i].compose(delta),K));
|
||||
|
||||
// We will use LM in the outer optimization loop, but by specifying "Iterative" below
|
||||
// We indicate that an iterative linear solver should be used.
|
||||
|
|
|
@ -0,0 +1,144 @@
|
|||
/* ----------------------------------------------------------------------------
|
||||
|
||||
* GTSAM Copyright 2010, Georgia Tech Research Corporation,
|
||||
* Atlanta, Georgia 30332-0415
|
||||
* All Rights Reserved
|
||||
* Authors: Frank Dellaert, et al. (see THANKS for the full author list)
|
||||
|
||||
* See LICENSE for the license information
|
||||
|
||||
* -------------------------------------------------------------------------- */
|
||||
|
||||
/**
|
||||
* @file SFMExample.cpp
|
||||
* @brief This file is to compare the ordering performance for COLAMD vs METIS.
|
||||
* Example problem is to solve a structure-from-motion problem from a "Bundle Adjustment in the Large" file.
|
||||
* @author Frank Dellaert, Zhaoyang Lv
|
||||
*/
|
||||
|
||||
// For an explanation of headers, see SFMExample.cpp
|
||||
#include <gtsam/inference/Symbol.h>
|
||||
#include <gtsam/inference/Ordering.h>
|
||||
#include <gtsam/nonlinear/NonlinearFactorGraph.h>
|
||||
#include <gtsam/nonlinear/LevenbergMarquardtOptimizer.h>
|
||||
#include <gtsam/slam/PriorFactor.h>
|
||||
#include <gtsam/slam/GeneralSFMFactor.h>
|
||||
#include <gtsam/slam/dataset.h> // for loading BAL datasets !
|
||||
|
||||
#include <gtsam/base/timing.h>
|
||||
|
||||
#include <vector>
|
||||
|
||||
using namespace std;
|
||||
using namespace gtsam;
|
||||
using symbol_shorthand::C;
|
||||
using symbol_shorthand::P;
|
||||
|
||||
// We will be using a projection factor that ties a SFM_Camera to a 3D point.
|
||||
// An SFM_Camera is defined in datase.h as a camera with unknown Cal3Bundler calibration
|
||||
// and has a total of 9 free parameters
|
||||
typedef GeneralSFMFactor<SfM_Camera,Point3> MyFactor;
|
||||
|
||||
/* ************************************************************************* */
|
||||
int main (int argc, char* argv[]) {
|
||||
|
||||
// Find default file, but if an argument is given, try loading a file
|
||||
string filename = findExampleDataFile("dubrovnik-3-7-pre");
|
||||
if (argc>1) filename = string(argv[1]);
|
||||
|
||||
// Load the SfM data from file
|
||||
SfM_data mydata;
|
||||
readBAL(filename, mydata);
|
||||
cout << boost::format("read %1% tracks on %2% cameras\n") % mydata.number_tracks() % mydata.number_cameras();
|
||||
|
||||
// Create a factor graph
|
||||
NonlinearFactorGraph graph;
|
||||
|
||||
// We share *one* noiseModel between all projection factors
|
||||
noiseModel::Isotropic::shared_ptr noise =
|
||||
noiseModel::Isotropic::Sigma(2, 1.0); // one pixel in u and v
|
||||
|
||||
// Add measurements to the factor graph
|
||||
size_t j = 0;
|
||||
BOOST_FOREACH(const SfM_Track& track, mydata.tracks) {
|
||||
BOOST_FOREACH(const SfM_Measurement& m, track.measurements) {
|
||||
size_t i = m.first;
|
||||
Point2 uv = m.second;
|
||||
graph.push_back(MyFactor(uv, noise, C(i), P(j))); // note use of shorthand symbols C and P
|
||||
}
|
||||
j += 1;
|
||||
}
|
||||
|
||||
// Add a prior on pose x1. This indirectly specifies where the origin is.
|
||||
// and a prior on the position of the first landmark to fix the scale
|
||||
graph.push_back(PriorFactor<SfM_Camera>(C(0), mydata.cameras[0], noiseModel::Isotropic::Sigma(9, 0.1)));
|
||||
graph.push_back(PriorFactor<Point3> (P(0), mydata.tracks[0].p, noiseModel::Isotropic::Sigma(3, 0.1)));
|
||||
|
||||
// Create initial estimate
|
||||
Values initial;
|
||||
size_t i = 0; j = 0;
|
||||
BOOST_FOREACH(const SfM_Camera& camera, mydata.cameras) initial.insert(C(i++), camera);
|
||||
BOOST_FOREACH(const SfM_Track& track, mydata.tracks) initial.insert(P(j++), track.p);
|
||||
|
||||
/** --------------- COMPARISON -----------------------**/
|
||||
/** ----------------------------------------------------**/
|
||||
|
||||
LevenbergMarquardtParams params_using_COLAMD, params_using_METIS;
|
||||
try {
|
||||
params_using_METIS.setVerbosity("ERROR");
|
||||
gttic_(METIS_ORDERING);
|
||||
params_using_METIS.ordering = Ordering::Create(Ordering::METIS, graph);
|
||||
gttoc_(METIS_ORDERING);
|
||||
|
||||
params_using_COLAMD.setVerbosity("ERROR");
|
||||
gttic_(COLAMD_ORDERING);
|
||||
params_using_COLAMD.ordering = Ordering::Create(Ordering::COLAMD, graph);
|
||||
gttoc_(COLAMD_ORDERING);
|
||||
} catch (exception& e) {
|
||||
cout << e.what();
|
||||
}
|
||||
|
||||
// expect they have different ordering results
|
||||
if(params_using_COLAMD.ordering == params_using_METIS.ordering) {
|
||||
cout << "COLAMD and METIS produce the same ordering. "
|
||||
<< "Problem here!!!" << endl;
|
||||
}
|
||||
|
||||
/* Optimize the graph with METIS and COLAMD and time the results */
|
||||
|
||||
Values result_METIS, result_COLAMD;
|
||||
try {
|
||||
gttic_(OPTIMIZE_WITH_METIS);
|
||||
LevenbergMarquardtOptimizer lm_METIS(graph, initial, params_using_METIS);
|
||||
result_METIS = lm_METIS.optimize();
|
||||
gttoc_(OPTIMIZE_WITH_METIS);
|
||||
|
||||
gttic_(OPTIMIZE_WITH_COLAMD);
|
||||
LevenbergMarquardtOptimizer lm_COLAMD(graph, initial, params_using_COLAMD);
|
||||
result_COLAMD = lm_COLAMD.optimize();
|
||||
gttoc_(OPTIMIZE_WITH_COLAMD);
|
||||
} catch (exception& e) {
|
||||
cout << e.what();
|
||||
}
|
||||
|
||||
|
||||
{ // printing the result
|
||||
|
||||
cout << "COLAMD final error: " << graph.error(result_COLAMD) << endl;
|
||||
cout << "METIS final error: " << graph.error(result_METIS) << endl;
|
||||
|
||||
cout << endl << endl;
|
||||
|
||||
cout << "Time comparison by solving " << filename << " results:" << endl;
|
||||
cout << boost::format("%1% point tracks and %2% cameras\n") \
|
||||
% mydata.number_tracks() % mydata.number_cameras() \
|
||||
<< endl;
|
||||
|
||||
tictoc_print_();
|
||||
}
|
||||
|
||||
|
||||
return 0;
|
||||
}
|
||||
/* ************************************************************************* */
|
||||
|
|
@ -82,7 +82,7 @@ int main(int argc, char* argv[]) {
|
|||
Values initialEstimate;
|
||||
initialEstimate.insert(Symbol('K', 0), Cal3_S2(60.0, 60.0, 0.0, 45.0, 45.0));
|
||||
for (size_t i = 0; i < poses.size(); ++i)
|
||||
initialEstimate.insert(Symbol('x', i), poses[i].compose(Pose3(Rot3::rodriguez(-0.1, 0.2, 0.25), Point3(0.05, -0.10, 0.20))));
|
||||
initialEstimate.insert(Symbol('x', i), poses[i].compose(Pose3(Rot3::Rodrigues(-0.1, 0.2, 0.25), Point3(0.05, -0.10, 0.20))));
|
||||
for (size_t j = 0; j < points.size(); ++j)
|
||||
initialEstimate.insert(Symbol('l', j), points[j].compose(Point3(-0.25, 0.20, 0.15)));
|
||||
|
||||
|
|
|
@ -44,6 +44,7 @@
|
|||
#include <gtsam/inference/Symbol.h>
|
||||
#include <gtsam/base/timing.h>
|
||||
#include <gtsam/base/treeTraversal-inst.h>
|
||||
#include <gtsam/config.h> // for GTSAM_USE_TBB
|
||||
|
||||
#include <boost/archive/binary_iarchive.hpp>
|
||||
#include <boost/archive/binary_oarchive.hpp>
|
||||
|
@ -575,7 +576,7 @@ void runStats()
|
|||
{
|
||||
cout << "Gathering statistics..." << endl;
|
||||
GaussianFactorGraph linear = *datasetMeasurements.linearize(initial);
|
||||
GaussianJunctionTree jt(GaussianEliminationTree(linear, Ordering::colamd(linear)));
|
||||
GaussianJunctionTree jt(GaussianEliminationTree(linear, Ordering::Colamd(linear)));
|
||||
treeTraversal::ForestStatistics statistics = treeTraversal::GatherStatistics(jt);
|
||||
|
||||
ofstream file;
|
||||
|
|
|
@ -17,6 +17,7 @@
|
|||
|
||||
#include <gtsam/global_includes.h>
|
||||
#include <gtsam/base/Matrix.h>
|
||||
|
||||
#include <boost/assign/list_of.hpp>
|
||||
#include <boost/foreach.hpp>
|
||||
#include <map>
|
||||
|
|
|
@ -95,7 +95,7 @@ int main(int argc, char* argv[]) {
|
|||
|
||||
// Add an initial guess for the current pose
|
||||
// Intentionally initialize the variables off from the ground truth
|
||||
initialEstimate.insert(Symbol('x', i), poses[i].compose(Pose3(Rot3::rodriguez(-0.1, 0.2, 0.25), Point3(0.05, -0.10, 0.20))));
|
||||
initialEstimate.insert(Symbol('x', i), poses[i].compose(Pose3(Rot3::Rodrigues(-0.1, 0.2, 0.25), Point3(0.05, -0.10, 0.20))));
|
||||
|
||||
// If this is the first iteration, add a prior on the first pose to set the coordinate frame
|
||||
// and a prior on the first landmark to set the scale
|
||||
|
|
|
@ -95,7 +95,7 @@ int main(int argc, char* argv[]) {
|
|||
}
|
||||
|
||||
// Intentionally initialize the variables off from the ground truth
|
||||
Pose3 noise(Rot3::rodriguez(-0.1, 0.2, 0.25), Point3(0.05, -0.10, 0.20));
|
||||
Pose3 noise(Rot3::Rodrigues(-0.1, 0.2, 0.25), Point3(0.05, -0.10, 0.20));
|
||||
Pose3 initial_xi = poses[i].compose(noise);
|
||||
|
||||
// Add an initial guess for the current pose
|
||||
|
|
gtsam.h (51 lines changed)

@@ -438,7 +438,7 @@ class Rot3 {
static gtsam::Rot3 roll(double t); // positive roll is to right (increasing yaw in aircraft)
static gtsam::Rot3 ypr(double y, double p, double r);
static gtsam::Rot3 quaternion(double w, double x, double y, double z);
static gtsam::Rot3 rodriguez(Vector v);
static gtsam::Rot3 Rodrigues(Vector v);

// Testable
void print(string s) const;
||||
|
@ -812,7 +812,7 @@ class CalibratedCamera {
|
|||
|
||||
// Action on Point3
|
||||
gtsam::Point2 project(const gtsam::Point3& point) const;
|
||||
static gtsam::Point2 project_to_camera(const gtsam::Point3& cameraPoint);
|
||||
static gtsam::Point2 Project(const gtsam::Point3& cameraPoint);
|
||||
|
||||
// Standard Interface
|
||||
gtsam::Pose3 pose() const;
|
||||
|
@ -848,7 +848,7 @@ class PinholeCamera {
|
|||
static size_t Dim();
|
||||
|
||||
// Transformations and measurement functions
|
||||
static gtsam::Point2 project_to_camera(const gtsam::Point3& cameraPoint);
|
||||
static gtsam::Point2 Project(const gtsam::Point3& cameraPoint);
|
||||
pair<gtsam::Point2,bool> projectSafe(const gtsam::Point3& pw) const;
|
||||
gtsam::Point2 project(const gtsam::Point3& point);
|
||||
gtsam::Point3 backproject(const gtsam::Point2& p, double depth) const;
|
||||
|
@ -884,7 +884,7 @@ virtual class SimpleCamera {
|
|||
static size_t Dim();
|
||||
|
||||
// Transformations and measurement functions
|
||||
static gtsam::Point2 project_to_camera(const gtsam::Point3& cameraPoint);
|
||||
static gtsam::Point2 Project(const gtsam::Point3& cameraPoint);
|
||||
pair<gtsam::Point2,bool> projectSafe(const gtsam::Point3& pw) const;
|
||||
gtsam::Point2 project(const gtsam::Point3& point);
|
||||
gtsam::Point3 backproject(const gtsam::Point2& p, double depth) const;
|
||||
|
@ -1654,12 +1654,12 @@ char symbolChr(size_t key);
|
|||
size_t symbolIndex(size_t key);
|
||||
|
||||
// Default keyformatter
|
||||
void printKeyList (const gtsam::KeyList& keys);
|
||||
void printKeyList (const gtsam::KeyList& keys, string s);
|
||||
void printKeyVector(const gtsam::KeyVector& keys);
|
||||
void printKeyVector(const gtsam::KeyVector& keys, string s);
|
||||
void printKeySet (const gtsam::KeySet& keys);
|
||||
void printKeySet (const gtsam::KeySet& keys, string s);
|
||||
void PrintKeyList (const gtsam::KeyList& keys);
|
||||
void PrintKeyList (const gtsam::KeyList& keys, string s);
|
||||
void PrintKeyVector(const gtsam::KeyVector& keys);
|
||||
void PrintKeyVector(const gtsam::KeyVector& keys, string s);
|
||||
void PrintKeySet (const gtsam::KeySet& keys);
|
||||
void PrintKeySet (const gtsam::KeySet& keys, string s);
|
||||
|
||||
#include <gtsam/inference/LabeledSymbol.h>
|
||||
class LabeledSymbol {
|
||||
|
@ -1776,7 +1776,7 @@ class Values {
|
|||
void swap(gtsam::Values& values);
|
||||
|
||||
bool exists(size_t j) const;
|
||||
gtsam::KeyList keys() const;
|
||||
gtsam::KeyVector keys() const;
|
||||
|
||||
gtsam::VectorValues zeroVectors() const;
|
||||
|
||||
|
@ -1893,8 +1893,6 @@ class KeySet {
|
|||
class KeyVector {
|
||||
KeyVector();
|
||||
KeyVector(const gtsam::KeyVector& other);
|
||||
KeyVector(const gtsam::KeySet& other);
|
||||
KeyVector(const gtsam::KeyList& other);
|
||||
|
||||
// Note: no print function
|
||||
|
||||
|
@@ -2354,18 +2352,31 @@ virtual class GeneralSFMFactor2 : gtsam::NoiseModelFactor {
void serialize() const;
};

#include <gtsam/slam/SmartProjectionFactor.h>
class SmartProjectionParams {
  SmartProjectionParams();
  // TODO(frank): make these work:
  // void setLinearizationMode(LinearizationMode linMode);
  // void setDegeneracyMode(DegeneracyMode degMode);
  void setRankTolerance(double rankTol);
  void setEnableEPI(bool enableEPI);
  void setLandmarkDistanceThreshold(bool landmarkDistanceThreshold);
  void setDynamicOutlierRejectionThreshold(bool dynOutRejectionThreshold);
};

#include <gtsam/slam/SmartProjectionPoseFactor.h>
template<CALIBRATION>
virtual class SmartProjectionPoseFactor : gtsam::NonlinearFactor {
virtual class SmartProjectionPoseFactor: gtsam::NonlinearFactor {

  SmartProjectionPoseFactor(double rankTol, double linThreshold,
      bool manageDegeneracy, bool enableEPI, const gtsam::Pose3& body_P_sensor);

  SmartProjectionPoseFactor(double rankTol);
  SmartProjectionPoseFactor();
  SmartProjectionPoseFactor(const CALIBRATION* K);
  SmartProjectionPoseFactor(const CALIBRATION* K,
      const gtsam::Pose3& body_P_sensor);
  SmartProjectionPoseFactor(const CALIBRATION* K,
      const gtsam::Pose3& body_P_sensor,
      const gtsam::SmartProjectionParams& params);

  void add(const gtsam::Point2& measured_i, size_t poseKey_i,
      const gtsam::noiseModel::Base* noise_i, const CALIBRATION* K_i);
      const gtsam::noiseModel::Base* noise_i);

  // enabling serialization functionality
  //void serialize() const;
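A rough sketch of how the wrapped pieces above fit together, following the usage in the SFM examples earlier in this diff. The intrinsics and thresholds are made-up placeholders, and in C++ the constructor takes a shared_ptr calibration rather than the raw pointer shown in the wrapper signature, so treat this as a sketch only:

#include <gtsam/geometry/Cal3_S2.h>
#include <gtsam/slam/SmartProjectionPoseFactor.h>

using namespace gtsam;

void sketch() {
  // Shared calibration, bound to the factor once instead of per measurement
  Cal3_S2::shared_ptr K(new Cal3_S2(500.0, 500.0, 0.0, 320.0, 240.0)); // placeholder intrinsics

  // Optional settings; these can be handed to the (K, body_P_sensor, params) constructor listed above
  SmartProjectionParams params;
  params.setRankTolerance(1.0);   // placeholder threshold
  params.setEnableEPI(false);

  typedef SmartProjectionPoseFactor<Cal3_S2> SmartFactor;
  SmartFactor::shared_ptr factor(new SmartFactor(K));

  // add() now takes only the measurement, the pose key, and the noise model
  noiseModel::Isotropic::shared_ptr measurementNoise = noiseModel::Isotropic::Sigma(2, 1.0);
  factor->add(Point2(320.0, 240.0), 0, measurementNoise);
}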
|
|
@ -17,8 +17,7 @@
|
|||
*/
|
||||
|
||||
#pragma once
|
||||
|
||||
#include <gtsam/global_includes.h>
|
||||
#include <gtsam/config.h> // Configuration from CMake
|
||||
|
||||
#if !defined GTSAM_ALLOCATOR_BOOSTPOOL && !defined GTSAM_ALLOCATOR_TBB && !defined GTSAM_ALLOCATOR_STL
|
||||
# ifdef GTSAM_USE_TBB
|
||||
|
@ -85,4 +84,4 @@ namespace gtsam
|
|||
};
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
|
|
|
@ -19,9 +19,9 @@
|
|||
#pragma once
|
||||
|
||||
#include <gtsam/base/FastDefaultAllocator.h>
|
||||
#include <map>
|
||||
#include <boost/serialization/nvp.hpp>
|
||||
#include <boost/serialization/map.hpp>
|
||||
#include <map>
|
||||
|
||||
namespace gtsam {
|
||||
|
||||
|
|
|
@ -18,26 +18,22 @@
|
|||
|
||||
#pragma once
|
||||
|
||||
#include <gtsam/base/FastDefaultAllocator.h>
|
||||
#include <boost/mpl/has_xxx.hpp>
|
||||
#include <boost/utility/enable_if.hpp>
|
||||
#include <boost/serialization/serialization.hpp>
|
||||
#include <boost/serialization/nvp.hpp>
|
||||
#include <boost/serialization/set.hpp>
|
||||
#include <set>
|
||||
#include <iostream>
|
||||
#include <string>
|
||||
#include <cmath>
|
||||
#include <gtsam/base/FastDefaultAllocator.h>
|
||||
#include <gtsam/base/Testable.h>
|
||||
|
||||
BOOST_MPL_HAS_XXX_TRAIT_DEF(print)
|
||||
#include <functional>
|
||||
#include <set>
|
||||
|
||||
namespace boost {
|
||||
namespace serialization {
|
||||
class access;
|
||||
} /* namespace serialization */
|
||||
} /* namespace boost */
|
||||
|
||||
namespace gtsam {
|
||||
|
||||
// This is used internally to allow this container to be Testable even when it
|
||||
// contains non-testable elements.
|
||||
template<typename VALUE, class ENABLE = void>
|
||||
struct FastSetTestableHelper;
|
||||
|
||||
/**
|
||||
* FastSet is a thin wrapper around std::set that uses the boost
|
||||
* fast_pool_allocator instead of the default STL allocator. This is just a
|
||||
|
@ -45,12 +41,16 @@ struct FastSetTestableHelper;
|
|||
* we've seen that the fast_pool_allocator can lead to speedups of several %.
|
||||
* @addtogroup base
|
||||
*/
|
||||
template<typename VALUE, class ENABLE = void>
|
||||
class FastSet: public std::set<VALUE, std::less<VALUE>, typename internal::FastDefaultAllocator<VALUE>::type> {
|
||||
template<typename VALUE>
|
||||
class FastSet: public std::set<VALUE, std::less<VALUE>,
|
||||
typename internal::FastDefaultAllocator<VALUE>::type> {
|
||||
|
||||
BOOST_CONCEPT_ASSERT ((IsTestable<VALUE> ));
|
||||
|
||||
public:
|
||||
|
||||
typedef std::set<VALUE, std::less<VALUE>, typename internal::FastDefaultAllocator<VALUE>::type> Base;
|
||||
typedef std::set<VALUE, std::less<VALUE>,
|
||||
typename internal::FastDefaultAllocator<VALUE>::type> Base;
|
||||
|
||||
/** Default constructor */
|
||||
FastSet() {
|
||||
|
@ -59,23 +59,23 @@ public:
|
|||
/** Constructor from a range, passes through to base class */
|
||||
template<typename INPUTITERATOR>
|
||||
explicit FastSet(INPUTITERATOR first, INPUTITERATOR last) :
|
||||
Base(first, last) {
|
||||
Base(first, last) {
|
||||
}
|
||||
|
||||
/** Constructor from a iterable container, passes through to base class */
|
||||
template<typename INPUTCONTAINER>
|
||||
explicit FastSet(const INPUTCONTAINER& container) :
|
||||
Base(container.begin(), container.end()) {
|
||||
Base(container.begin(), container.end()) {
|
||||
}
|
||||
|
||||
/** Copy constructor from another FastSet */
|
||||
FastSet(const FastSet<VALUE>& x) :
|
||||
Base(x) {
|
||||
Base(x) {
|
||||
}
|
||||
|
||||
/** Copy constructor from the base set class */
|
||||
FastSet(const Base& x) :
|
||||
Base(x) {
|
||||
Base(x) {
|
||||
}
|
||||
|
||||
#ifdef GTSAM_ALLOCATOR_BOOSTPOOL
|
||||
|
@ -85,7 +85,7 @@ public:
|
|||
// STL vector where if the size is zero, the pool allocator will allocate
|
||||
// huge amounts of memory.
|
||||
if(x.size() > 0)
|
||||
Base::insert(x.begin(), x.end());
|
||||
Base::insert(x.begin(), x.end());
|
||||
}
|
||||
#endif
|
||||
|
||||
|
@ -95,17 +95,31 @@ public:
|
|||
}
|
||||
|
||||
/** Handy 'exists' function */
|
||||
bool exists(const VALUE& e) const { return this->find(e) != this->end(); }
|
||||
bool exists(const VALUE& e) const {
|
||||
return this->find(e) != this->end();
|
||||
}
|
||||
|
||||
/** Print to implement Testable */
|
||||
void print(const std::string& str = "") const { FastSetTestableHelper<VALUE>::print(*this, str); }
|
||||
/** Print to implement Testable: pretty basic */
|
||||
void print(const std::string& str = "") const {
|
||||
for (typename Base::const_iterator it = this->begin(); it != this->end(); ++it)
|
||||
traits<VALUE>::Print(*it, str);
|
||||
}
|
||||
|
||||
/** Check for equality within tolerance to implement Testable */
|
||||
bool equals(const FastSet<VALUE>& other, double tol = 1e-9) const { return FastSetTestableHelper<VALUE>::equals(*this, other, tol); }
|
||||
bool equals(const FastSet<VALUE>& other, double tol = 1e-9) const {
|
||||
typename Base::const_iterator it1 = this->begin(), it2 = other.begin();
|
||||
while (it1 != this->end()) {
|
||||
if (it2 == other.end() || !traits<VALUE>::Equals(*it2, *it2, tol))
|
||||
return false;
|
||||
++it1;
|
||||
++it2;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
/** insert another set: handy for MATLAB access */
|
||||
void merge(const FastSet& other) {
|
||||
Base::insert(other.begin(),other.end());
|
||||
Base::insert(other.begin(), other.end());
|
||||
}
|
||||
|
||||
private:
|
||||
|
@ -117,59 +131,4 @@ private:
|
|||
}
|
||||
};
|
||||
|
||||
// This is the default Testable interface for *non*Testable elements, which
|
||||
// uses stream operators.
|
||||
template<typename VALUE, class ENABLE>
|
||||
struct FastSetTestableHelper {
|
||||
|
||||
typedef FastSet<VALUE> Set;
|
||||
|
||||
static void print(const Set& set, const std::string& str) {
|
||||
std::cout << str << "\n";
|
||||
for (typename Set::const_iterator it = set.begin(); it != set.end(); ++it)
|
||||
std::cout << " " << *it << "\n";
|
||||
std::cout.flush();
|
||||
}
|
||||
|
||||
static bool equals(const Set& set1, const Set& set2, double tol) {
|
||||
typename Set::const_iterator it1 = set1.begin();
|
||||
typename Set::const_iterator it2 = set2.begin();
|
||||
while (it1 != set1.end()) {
|
||||
if (it2 == set2.end() ||
|
||||
fabs((double)(*it1) - (double)(*it2)) > tol)
|
||||
return false;
|
||||
++it1;
|
||||
++it2;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
};
|
||||
|
||||
// This is the Testable interface for Testable elements
|
||||
template<typename VALUE>
|
||||
struct FastSetTestableHelper<VALUE, typename boost::enable_if<has_print<VALUE> >::type> {
|
||||
|
||||
typedef FastSet<VALUE> Set;
|
||||
|
||||
static void print(const Set& set, const std::string& str) {
|
||||
std::cout << str << "\n";
|
||||
for (typename Set::const_iterator it = set.begin(); it != set.end(); ++it)
|
||||
it->print(" ");
|
||||
std::cout.flush();
|
||||
}
|
||||
|
||||
static bool equals(const Set& set1, const Set& set2, double tol) {
|
||||
typename Set::const_iterator it1 = set1.begin();
|
||||
typename Set::const_iterator it2 = set2.begin();
|
||||
while (it1 != set1.end()) {
|
||||
if (it2 == set2.end() ||
|
||||
!it1->equals(*it2, tol))
|
||||
return false;
|
||||
++it1;
|
||||
++it2;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
};
|
||||
|
||||
}
|
||||
|
|
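Note that FastSet now asserts its element type is Testable (the BOOST_CONCEPT_ASSERT above), which lines up with the serialization test later in this diff switching from FastSet<double> to KeySet. A small sketch using KeySet, assuming it is the FastSet-based typedef from gtsam/inference/Key.h as that test change suggests:

#include <gtsam/inference/Key.h>

using namespace gtsam;

void keySetExample() {
  KeySet keys;                   // a FastSet of Key values
  keys.insert(1);
  keys.insert(2);

  KeySet more;
  more.insert(3);
  keys.merge(more);              // "insert another set: handy for MATLAB access"

  const bool hasTwo = keys.exists(2);  // the exists() convenience query shown above
  (void)hasTwo;
}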
|
@ -18,12 +18,10 @@
|
|||
|
||||
#pragma once
|
||||
|
||||
#include <vector>
|
||||
#include <gtsam/base/FastDefaultAllocator.h>
|
||||
#include <boost/serialization/nvp.hpp>
|
||||
#include <boost/serialization/vector.hpp>
|
||||
|
||||
#include <gtsam/base/FastList.h>
|
||||
#include <gtsam/base/FastSet.h>
|
||||
#include <vector>
|
||||
|
||||
namespace gtsam {
|
||||
|
||||
|
@ -35,20 +33,27 @@ namespace gtsam {
|
|||
* @addtogroup base
|
||||
*/
|
||||
template<typename VALUE>
|
||||
class FastVector: public std::vector<VALUE, typename internal::FastDefaultVectorAllocator<VALUE>::type> {
|
||||
class FastVector: public std::vector<VALUE,
|
||||
typename internal::FastDefaultVectorAllocator<VALUE>::type> {
|
||||
|
||||
public:
|
||||
|
||||
typedef std::vector<VALUE, typename internal::FastDefaultVectorAllocator<VALUE>::type> Base;
|
||||
typedef std::vector<VALUE,
|
||||
typename internal::FastDefaultVectorAllocator<VALUE>::type> Base;
|
||||
|
||||
/** Default constructor */
|
||||
FastVector() {}
|
||||
FastVector() {
|
||||
}
|
||||
|
||||
/** Constructor from size */
|
||||
explicit FastVector(size_t size) : Base(size) {}
|
||||
explicit FastVector(size_t size) :
|
||||
Base(size) {
|
||||
}
|
||||
|
||||
/** Constructor from size and initial values */
|
||||
explicit FastVector(size_t size, const VALUE& initial) : Base(size, initial) {}
|
||||
explicit FastVector(size_t size, const VALUE& initial) :
|
||||
Base(size, initial) {
|
||||
}
|
||||
|
||||
/** Constructor from a range, passes through to base class */
|
||||
template<typename INPUTITERATOR>
|
||||
|
@ -56,33 +61,22 @@ public:
|
|||
// This if statement works around a bug in boost pool allocator and/or
|
||||
// STL vector where if the size is zero, the pool allocator will allocate
|
||||
// huge amounts of memory.
|
||||
if(first != last)
|
||||
if (first != last)
|
||||
Base::assign(first, last);
|
||||
}
|
||||
|
||||
/** Copy constructor from a FastList */
|
||||
FastVector(const FastList<VALUE>& x) {
|
||||
if(x.size() > 0)
|
||||
Base::assign(x.begin(), x.end());
|
||||
}
|
||||
|
||||
/** Copy constructor from a FastSet */
|
||||
FastVector(const FastSet<VALUE>& x) {
|
||||
if(x.size() > 0)
|
||||
Base::assign(x.begin(), x.end());
|
||||
}
|
||||
|
||||
/** Copy constructor from the base class */
|
||||
FastVector(const Base& x) : Base(x) {}
|
||||
FastVector(const Base& x) :
|
||||
Base(x) {
|
||||
}
|
||||
|
||||
/** Copy constructor from a standard STL container */
|
||||
template<typename ALLOCATOR>
|
||||
FastVector(const std::vector<VALUE, ALLOCATOR>& x)
|
||||
{
|
||||
FastVector(const std::vector<VALUE, ALLOCATOR>& x) {
|
||||
// This if statement works around a bug in boost pool allocator and/or
|
||||
// STL vector where if the size is zero, the pool allocator will allocate
|
||||
// huge amounts of memory.
|
||||
if(x.size() > 0)
|
||||
if (x.size() > 0)
|
||||
Base::assign(x.begin(), x.end());
|
||||
}
|
||||
|
||||
|
|
|
@ -22,6 +22,8 @@
|
|||
|
||||
#pragma once
|
||||
#include <gtsam/base/Vector.h>
|
||||
#include <gtsam/config.h> // Configuration from CMake
|
||||
|
||||
#include <boost/math/special_functions/fpclassify.hpp>
|
||||
#include <Eigen/Core>
|
||||
#include <Eigen/Cholesky>
|
||||
|
|
|
@ -18,7 +18,7 @@
|
|||
*/
|
||||
|
||||
#pragma once
|
||||
|
||||
#include <gtsam/config.h> // Configuration from CMake
|
||||
#include <Eigen/Dense>
|
||||
|
||||
#ifndef OPTIONALJACOBIAN_NOBOOST
|
||||
|
|
|
@ -20,6 +20,7 @@
|
|||
#include <gtsam/base/VerticalBlockMatrix.h>
|
||||
#include <gtsam/base/cholesky.h>
|
||||
#include <gtsam/base/timing.h>
|
||||
#include <gtsam/base/ThreadsafeException.h>
|
||||
|
||||
namespace gtsam {
|
||||
|
||||
|
@ -61,13 +62,13 @@ VerticalBlockMatrix SymmetricBlockMatrix::choleskyPartial(
|
|||
|
||||
// Split conditional
|
||||
|
||||
// Create one big conditionals with many frontal variables.
|
||||
gttic(Construct_conditional);
|
||||
const size_t varDim = offset(nFrontals);
|
||||
VerticalBlockMatrix Ab = VerticalBlockMatrix::LikeActiveViewOf(*this, varDim);
|
||||
Ab.full() = matrix_.topRows(varDim);
|
||||
Ab.full().triangularView<Eigen::StrictlyLower>().setZero();
|
||||
gttoc(Construct_conditional);
|
||||
// Create one big conditionals with many frontal variables.
|
||||
gttic(Construct_conditional);
|
||||
const size_t varDim = offset(nFrontals);
|
||||
VerticalBlockMatrix Ab = VerticalBlockMatrix::LikeActiveViewOf(*this, varDim);
|
||||
Ab.full() = matrix_.topRows(varDim);
|
||||
Ab.full().triangularView<Eigen::StrictlyLower>().setZero();
|
||||
gttoc(Construct_conditional);
|
||||
|
||||
gttic(Remaining_factor);
|
||||
// Take lower-right block of Ab_ to get the remaining factor
|
||||
|
|
|
@ -17,9 +17,21 @@
|
|||
*/
|
||||
#pragma once
|
||||
|
||||
#include <gtsam/base/Matrix.h>
|
||||
#include <gtsam/base/FastVector.h>
|
||||
#include <gtsam/base/Matrix.h>
|
||||
#include <gtsam/base/SymmetricBlockMatrixBlockExpr.h>
|
||||
#include <gtsam/base/types.h>
|
||||
#include <gtsam/dllexport.h>
|
||||
#include <boost/serialization/nvp.hpp>
|
||||
#include <cassert>
|
||||
#include <stdexcept>
|
||||
#include <vector>
|
||||
|
||||
namespace boost {
|
||||
namespace serialization {
|
||||
class access;
|
||||
} /* namespace serialization */
|
||||
} /* namespace boost */
|
||||
|
||||
namespace gtsam {
|
||||
|
||||
|
@ -247,13 +259,8 @@ namespace gtsam {
|
|||
}
|
||||
};
|
||||
|
||||
/* ************************************************************************* */
|
||||
class CholeskyFailed : public gtsam::ThreadsafeException<CholeskyFailed>
|
||||
{
|
||||
public:
|
||||
CholeskyFailed() throw() {}
|
||||
virtual ~CholeskyFailed() throw() {}
|
||||
};
|
||||
/// Foward declare exception class
|
||||
class CholeskyFailed;
|
||||
|
||||
}
|
||||
|
||||
|
|
|
@ -17,13 +17,14 @@
|
|||
|
||||
#pragma once
|
||||
|
||||
#include <vector>
|
||||
#include <map>
|
||||
#include <iostream>
|
||||
#include <gtsam/base/Testable.h>
|
||||
#include <gtsam/global_includes.h>
|
||||
|
||||
#include <boost/foreach.hpp>
|
||||
#include <boost/optional.hpp>
|
||||
#include <gtsam/global_includes.h>
|
||||
#include <gtsam/base/Testable.h>
|
||||
#include <map>
|
||||
#include <iostream>
|
||||
#include <vector>
|
||||
|
||||
namespace gtsam {
|
||||
|
||||
|
|
|
@ -0,0 +1,155 @@
|
|||
/* ----------------------------------------------------------------------------
|
||||
|
||||
* GTSAM Copyright 2010, Georgia Tech Research Corporation,
|
||||
* Atlanta, Georgia 30332-0415
|
||||
* All Rights Reserved
|
||||
* Authors: Frank Dellaert, et al. (see THANKS for the full author list)
|
||||
|
||||
* See LICENSE for the license information
|
||||
|
||||
* -------------------------------------------------------------------------- */
|
||||
|
||||
/**
|
||||
* @file ThreadSafeException.h
|
||||
* @brief Base exception type that uses tbb_exception if GTSAM is compiled with TBB
|
||||
* @author Richard Roberts
|
||||
* @date Aug 21, 2010
|
||||
* @addtogroup base
|
||||
*/
|
||||
|
||||
#pragma once
|
||||
|
||||
#include <gtsam/config.h> // for GTSAM_USE_TBB
|
||||
|
||||
#include <boost/optional/optional.hpp>
|
||||
#include <string>
|
||||
#include <typeinfo>
|
||||
|
||||
#ifdef GTSAM_USE_TBB
|
||||
#include <tbb/tbb_allocator.h>
|
||||
#include <tbb/tbb_exception.h>
|
||||
#include <tbb/scalable_allocator.h>
|
||||
#include <iostream>
|
||||
#endif
|
||||
|
||||
namespace gtsam {
|
||||
|
||||
/// Base exception type that uses tbb_exception if GTSAM is compiled with TBB.
|
||||
template<class DERIVED>
|
||||
class ThreadsafeException:
|
||||
#ifdef GTSAM_USE_TBB
|
||||
public tbb::tbb_exception
|
||||
#else
|
||||
public std::exception
|
||||
#endif
|
||||
{
|
||||
#ifdef GTSAM_USE_TBB
|
||||
private:
|
||||
typedef tbb::tbb_exception Base;
|
||||
protected:
|
||||
typedef std::basic_string<char, std::char_traits<char>,
|
||||
tbb::tbb_allocator<char> > String;
|
||||
#else
|
||||
private:
|
||||
typedef std::exception Base;
|
||||
protected:
|
||||
typedef std::string String;
|
||||
#endif
|
||||
|
||||
protected:
|
||||
bool dynamic_; ///< Whether this object was moved
|
||||
mutable boost::optional<String> description_; ///< Optional description
|
||||
|
||||
/// Default constructor is protected - may only be created from derived classes
|
||||
ThreadsafeException() :
|
||||
dynamic_(false) {
|
||||
}
|
||||
|
||||
/// Copy constructor is protected - may only be created from derived classes
|
||||
ThreadsafeException(const ThreadsafeException& other) :
|
||||
Base(other), dynamic_(false) {
|
||||
}
|
||||
|
||||
/// Construct with description string
|
||||
ThreadsafeException(const std::string& description) :
|
||||
dynamic_(false), description_(
|
||||
String(description.begin(), description.end())) {
|
||||
}
|
||||
|
||||
/// Default destructor doesn't have the throw()
|
||||
virtual ~ThreadsafeException() throw () {
|
||||
}
|
||||
|
||||
public:
|
||||
// Implement functions for tbb_exception
|
||||
#ifdef GTSAM_USE_TBB
|
||||
virtual tbb::tbb_exception* move() throw () {
|
||||
void* cloneMemory = scalable_malloc(sizeof(DERIVED));
|
||||
if (!cloneMemory) {
|
||||
std::cerr << "Failed allocating memory to copy thrown exception, exiting now." << std::endl;
|
||||
exit(-1);
|
||||
}
|
||||
DERIVED* clone = ::new(cloneMemory) DERIVED(static_cast<DERIVED&>(*this));
|
||||
clone->dynamic_ = true;
|
||||
return clone;
|
||||
}
|
||||
|
||||
virtual void destroy() throw () {
|
||||
if (dynamic_) {
|
||||
DERIVED* derivedPtr = static_cast<DERIVED*>(this);
|
||||
derivedPtr->~DERIVED();
|
||||
scalable_free(derivedPtr);
|
||||
}
|
||||
}
|
||||
|
||||
virtual void throw_self() {
|
||||
throw *static_cast<DERIVED*>(this);
|
||||
}
|
||||
|
||||
virtual const char* name() const throw () {
|
||||
return typeid(DERIVED).name();
|
||||
}
|
||||
#endif
|
||||
|
||||
virtual const char* what() const throw () {
|
||||
return description_ ? description_->c_str() : "";
|
||||
}
|
||||
};
|
||||
|
||||
/// Thread-safe runtime error exception
|
||||
class RuntimeErrorThreadsafe: public ThreadsafeException<RuntimeErrorThreadsafe> {
|
||||
public:
|
||||
/// Construct with a string describing the exception
|
||||
RuntimeErrorThreadsafe(const std::string& description) :
|
||||
ThreadsafeException<RuntimeErrorThreadsafe>(description) {
|
||||
}
|
||||
};
|
||||
|
||||
/// Thread-safe out of range exception
|
||||
class OutOfRangeThreadsafe: public ThreadsafeException<OutOfRangeThreadsafe> {
|
||||
public:
|
||||
/// Construct with a string describing the exception
|
||||
OutOfRangeThreadsafe(const std::string& description) :
|
||||
ThreadsafeException<OutOfRangeThreadsafe>(description) {
|
||||
}
|
||||
};
|
||||
|
||||
/// Thread-safe invalid argument exception
|
||||
class InvalidArgumentThreadsafe: public ThreadsafeException<
|
||||
InvalidArgumentThreadsafe> {
|
||||
public:
|
||||
/// Construct with a string describing the exception
|
||||
InvalidArgumentThreadsafe(const std::string& description) :
|
||||
ThreadsafeException<InvalidArgumentThreadsafe>(description) {
|
||||
}
|
||||
};
|
||||
|
||||
/// Indicate Cholesky factorization failure
|
||||
class CholeskyFailed : public gtsam::ThreadsafeException<CholeskyFailed>
|
||||
{
|
||||
public:
|
||||
CholeskyFailed() throw() {}
|
||||
virtual ~CholeskyFailed() throw() {}
|
||||
};
|
||||
|
||||
} // namespace gtsam
|
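A minimal sketch of how these exception types are used; the message string and the failing condition are arbitrary, and the header path is the one already included elsewhere in this diff:

#include <gtsam/base/ThreadsafeException.h>
#include <iostream>

void mightFail(bool ok) {
  if (!ok)
    throw gtsam::RuntimeErrorThreadsafe("factorization failed");  // safe to rethrow across TBB tasks
}

int main() {
  try {
    mightFail(false);
  } catch (const std::exception& e) {  // both base classes (tbb_exception, std::exception) land here
    std::cout << e.what() << std::endl;
  }
  return 0;
}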
|
@ -17,6 +17,8 @@
|
|||
*/
|
||||
|
||||
#include <gtsam/base/debug.h>
|
||||
#include <gtsam/config.h> // for GTSAM_USE_TBB
|
||||
|
||||
#ifdef GTSAM_USE_TBB
|
||||
#include <tbb/mutex.h>
|
||||
#endif
|
||||
|
|
|
@ -7,13 +7,14 @@
|
|||
* @author Alex Cunningham
|
||||
*/
|
||||
|
||||
#include <CppUnitLite/TestHarness.h>
|
||||
#include <gtsam/inference/Key.h>
|
||||
#include <gtsam/base/FastSet.h>
|
||||
#include <gtsam/base/FastVector.h>
|
||||
|
||||
#include <boost/assign/std/vector.hpp>
|
||||
#include <boost/assign/std/set.hpp>
|
||||
|
||||
#include <gtsam/base/FastSet.h>
|
||||
#include <gtsam/base/FastVector.h>
|
||||
#include <CppUnitLite/TestHarness.h>
|
||||
|
||||
using namespace boost::assign;
|
||||
using namespace gtsam;
|
||||
|
@ -21,7 +22,7 @@ using namespace gtsam;
|
|||
/* ************************************************************************* */
|
||||
TEST( testFastContainers, KeySet ) {
|
||||
|
||||
FastVector<size_t> init_vector;
|
||||
FastVector<Key> init_vector;
|
||||
init_vector += 2, 3, 4, 5;
|
||||
|
||||
FastSet<size_t> actSet(init_vector);
|
||||
|
|
|
@ -16,6 +16,8 @@
|
|||
* @date Feb 7, 2012
|
||||
*/
|
||||
|
||||
#include <gtsam/inference/Key.h>
|
||||
|
||||
#include <gtsam/base/Matrix.h>
|
||||
#include <gtsam/base/Vector.h>
|
||||
#include <gtsam/base/FastList.h>
|
||||
|
@ -60,10 +62,10 @@ TEST (Serialization, FastMap) {
|
|||
|
||||
/* ************************************************************************* */
|
||||
TEST (Serialization, FastSet) {
|
||||
FastSet<double> set;
|
||||
set.insert(1.0);
|
||||
set.insert(2.0);
|
||||
set.insert(3.0);
|
||||
KeySet set;
|
||||
set.insert(1);
|
||||
set.insert(2);
|
||||
set.insert(3);
|
||||
|
||||
EXPECT(equality(set));
|
||||
EXPECT(equalityXML(set));
|
||||
|
|
|
@ -16,24 +16,28 @@
|
|||
* @date Oct 5, 2010
|
||||
*/
|
||||
|
||||
#include <math.h>
|
||||
#include <stdio.h>
|
||||
#include <iostream>
|
||||
#include <iomanip>
|
||||
#include <stdlib.h>
|
||||
#include <boost/foreach.hpp>
|
||||
#include <boost/format.hpp>
|
||||
#include <boost/algorithm/string.hpp>
|
||||
|
||||
#include <gtsam/base/debug.h>
|
||||
#include <gtsam/base/timing.h>
|
||||
|
||||
#include <boost/algorithm/string/replace.hpp>
|
||||
#include <boost/foreach.hpp>
|
||||
#include <boost/format.hpp>
|
||||
|
||||
#include <cmath>
|
||||
#include <cstddef>
|
||||
#include <cassert>
|
||||
#include <iomanip>
|
||||
#include <iostream>
|
||||
#include <map>
|
||||
#include <stdexcept>
|
||||
#include <utility>
|
||||
|
||||
namespace gtsam {
|
||||
namespace internal {
|
||||
|
||||
GTSAM_EXPORT boost::shared_ptr<TimingOutline> timingRoot(
|
||||
GTSAM_EXPORT boost::shared_ptr<TimingOutline> gTimingRoot(
|
||||
new TimingOutline("Total", getTicTocID("Total")));
|
||||
GTSAM_EXPORT boost::weak_ptr<TimingOutline> timingCurrent(timingRoot);
|
||||
GTSAM_EXPORT boost::weak_ptr<TimingOutline> gCurrentTimer(gTimingRoot);
|
||||
|
||||
/* ************************************************************************* */
|
||||
// Implementation of TimingOutline
|
||||
|
@ -50,8 +54,8 @@ void TimingOutline::add(size_t usecs, size_t usecsWall) {
|
|||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
TimingOutline::TimingOutline(const std::string& label, size_t myId) :
|
||||
myId_(myId), t_(0), tWall_(0), t2_(0.0), tIt_(0), tMax_(0), tMin_(0), n_(0), myOrder_(
|
||||
TimingOutline::TimingOutline(const std::string& label, size_t id) :
|
||||
id_(id), t_(0), tWall_(0), t2_(0.0), tIt_(0), tMax_(0), tMin_(0), n_(0), myOrder_(
|
||||
0), lastChildOrder_(0), label_(label) {
|
||||
#ifdef GTSAM_USING_NEW_BOOST_TIMERS
|
||||
timer_.stop();
|
||||
|
@ -153,7 +157,7 @@ const boost::shared_ptr<TimingOutline>& TimingOutline::child(size_t child,
|
|||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
void TimingOutline::ticInternal() {
|
||||
void TimingOutline::tic() {
|
||||
#ifdef GTSAM_USING_NEW_BOOST_TIMERS
|
||||
assert(timer_.is_stopped());
|
||||
timer_.start();
|
||||
|
@ -169,7 +173,7 @@ void TimingOutline::ticInternal() {
|
|||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
void TimingOutline::tocInternal() {
|
||||
void TimingOutline::toc() {
|
||||
#ifdef GTSAM_USING_NEW_BOOST_TIMERS
|
||||
|
||||
assert(!timer_.is_stopped());
|
||||
|
@ -212,7 +216,6 @@ void TimingOutline::finishedIteration() {
|
|||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
// Generate or retrieve a unique global ID number that will be used to look up tic_/toc statements
|
||||
size_t getTicTocID(const char *descriptionC) {
|
||||
const std::string description(descriptionC);
|
||||
// Global (static) map from strings to ID numbers and current next ID number
|
||||
|
@ -232,37 +235,33 @@ size_t getTicTocID(const char *descriptionC) {
|
|||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
void ticInternal(size_t id, const char *labelC) {
|
||||
void tic(size_t id, const char *labelC) {
|
||||
const std::string label(labelC);
|
||||
if (ISDEBUG("timing-verbose"))
|
||||
std::cout << "gttic_(" << id << ", " << label << ")" << std::endl;
|
||||
boost::shared_ptr<TimingOutline> node = //
|
||||
timingCurrent.lock()->child(id, label, timingCurrent);
|
||||
timingCurrent = node;
|
||||
node->ticInternal();
|
||||
gCurrentTimer.lock()->child(id, label, gCurrentTimer);
|
||||
gCurrentTimer = node;
|
||||
node->tic();
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
void tocInternal(size_t id, const char *label) {
|
||||
if (ISDEBUG("timing-verbose"))
|
||||
std::cout << "gttoc(" << id << ", " << label << ")" << std::endl;
|
||||
boost::shared_ptr<TimingOutline> current(timingCurrent.lock());
|
||||
if (id != current->myId_) {
|
||||
timingRoot->print();
|
||||
void toc(size_t id, const char *label) {
|
||||
boost::shared_ptr<TimingOutline> current(gCurrentTimer.lock());
|
||||
if (id != current->id_) {
|
||||
gTimingRoot->print();
|
||||
throw std::invalid_argument(
|
||||
(boost::format(
|
||||
"gtsam timing: Mismatched tic/toc: gttoc(\"%s\") called when last tic was \"%s\".")
|
||||
% label % current->label_).str());
|
||||
}
|
||||
if (!current->parent_.lock()) {
|
||||
timingRoot->print();
|
||||
gTimingRoot->print();
|
||||
throw std::invalid_argument(
|
||||
(boost::format(
|
||||
"gtsam timing: Mismatched tic/toc: extra gttoc(\"%s\"), already at the root")
|
||||
% label).str());
|
||||
}
|
||||
current->tocInternal();
|
||||
timingCurrent = current->parent_;
|
||||
current->toc();
|
||||
gCurrentTimer = current->parent_;
|
||||
}
|
||||
|
||||
} // namespace internal
|
||||
|
|
|
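The renamed internals above (tic/toc, gTimingRoot, gCurrentTimer) are normally reached through the gttic_/gttoc_ macros and the tictoc_* helpers declared in the header that follows; a minimal usage sketch, with an arbitrary label and arbitrary timed work:

#include <gtsam/base/timing.h>

using namespace gtsam;

void timedWork() {
  gttic_(expensive_step);   // starts a timer via an AutoTicToc object for this scope
  // ... work being measured ...
  gttoc_(expensive_step);   // stops it explicitly (otherwise it stops when the scope exits)
}

int main() {
  timedWork();
  tictoc_finishedIteration_();  // mark the end of one iteration
  tictoc_print_();              // print the timing tree rooted at gTimingRoot
  return 0;
}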
@ -17,12 +17,16 @@
|
|||
*/
|
||||
#pragma once
|
||||
|
||||
#include <string>
|
||||
#include <boost/shared_ptr.hpp>
|
||||
#include <boost/weak_ptr.hpp>
|
||||
#include <boost/version.hpp>
|
||||
#include <gtsam/global_includes.h>
|
||||
#include <gtsam/base/FastMap.h>
|
||||
#include <gtsam/dllexport.h>
|
||||
#include <gtsam/config.h> // for GTSAM_USE_TBB
|
||||
|
||||
#include <boost/smart_ptr/shared_ptr.hpp>
|
||||
#include <boost/smart_ptr/weak_ptr.hpp>
|
||||
#include <boost/version.hpp>
|
||||
|
||||
#include <cstddef>
|
||||
#include <string>
|
||||
|
||||
// This file contains the GTSAM timing instrumentation library, a low-overhead method for
|
||||
// learning at a medium-fine level how much time various components of an algorithm take
|
||||
|
@ -125,16 +129,21 @@
|
|||
namespace gtsam {
|
||||
|
||||
namespace internal {
|
||||
// Generate/retrieve a unique global ID number that will be used to look up tic/toc statements
|
||||
GTSAM_EXPORT size_t getTicTocID(const char *description);
|
||||
GTSAM_EXPORT void ticInternal(size_t id, const char *label);
|
||||
GTSAM_EXPORT void tocInternal(size_t id, const char *label);
|
||||
|
||||
// Create new TimingOutline child for gCurrentTimer, make it gCurrentTimer, and call tic method
|
||||
GTSAM_EXPORT void tic(size_t id, const char *label);
|
||||
|
||||
// Call toc on gCurrentTimer and then set gCurrentTimer to the parent of gCurrentTimer
|
||||
GTSAM_EXPORT void toc(size_t id, const char *label);
|
||||
|
||||
/**
|
||||
* Timing Entry, arranged in a tree
|
||||
*/
|
||||
class GTSAM_EXPORT TimingOutline {
|
||||
protected:
|
||||
size_t myId_;
|
||||
size_t id_;
|
||||
size_t t_;
|
||||
size_t tWall_;
|
||||
double t2_ ; ///< cache the \sum t_i^2
|
||||
|
@ -176,29 +185,38 @@ namespace gtsam {
|
|||
void print2(const std::string& outline = "", const double parentTotal = -1.0) const;
|
||||
const boost::shared_ptr<TimingOutline>&
|
||||
child(size_t child, const std::string& label, const boost::weak_ptr<TimingOutline>& thisPtr);
|
||||
void ticInternal();
|
||||
void tocInternal();
|
||||
void tic();
|
||||
void toc();
|
||||
void finishedIteration();
|
||||
|
||||
GTSAM_EXPORT friend void tocInternal(size_t id, const char *label);
|
||||
GTSAM_EXPORT friend void toc(size_t id, const char *label);
|
||||
}; // \TimingOutline
|
||||
|
||||
/**
|
||||
* No documentation
* Small class that calls internal::tic at construction, and internal::toc when destroyed
*/
class AutoTicToc {
private:
private:
size_t id_;
const char *label_;
const char* label_;
bool isSet_;
public:
AutoTicToc(size_t id, const char* label) : id_(id), label_(label), isSet_(true) { ticInternal(id_, label_); }
void stop() { tocInternal(id_, label_); isSet_ = false; }
~AutoTicToc() { if(isSet_) stop(); }

public:
AutoTicToc(size_t id, const char* label)
: id_(id), label_(label), isSet_(true) {
tic(id_, label_);
}
void stop() {
toc(id_, label_);
isSet_ = false;
}
~AutoTicToc() {
if (isSet_) stop();
}
};

GTSAM_EXTERN_EXPORT boost::shared_ptr<TimingOutline> timingRoot;
GTSAM_EXTERN_EXPORT boost::weak_ptr<TimingOutline> timingCurrent;
GTSAM_EXTERN_EXPORT boost::shared_ptr<TimingOutline> gTimingRoot;
GTSAM_EXTERN_EXPORT boost::weak_ptr<TimingOutline> gCurrentTimer;
}

// Tic and toc functions that are always active (whether or not ENABLE_TIMING is defined)

@ -210,7 +228,7 @@ namespace gtsam {
// tic
#define gttic_(label) \
static const size_t label##_id_tic = ::gtsam::internal::getTicTocID(#label); \
::gtsam::internal::AutoTicToc label##_obj = ::gtsam::internal::AutoTicToc(label##_id_tic, #label)
::gtsam::internal::AutoTicToc label##_obj(label##_id_tic, #label)

// toc
#define gttoc_(label) \

@ -228,26 +246,26 @@ namespace gtsam {

// indicate iteration is finished
inline void tictoc_finishedIteration_() {
::gtsam::internal::timingRoot->finishedIteration(); }
::gtsam::internal::gTimingRoot->finishedIteration(); }

// print
inline void tictoc_print_() {
::gtsam::internal::timingRoot->print(); }
::gtsam::internal::gTimingRoot->print(); }

// print mean and standard deviation
inline void tictoc_print2_() {
::gtsam::internal::timingRoot->print2(); }
::gtsam::internal::gTimingRoot->print2(); }

// get a node by label and assign it to variable
#define tictoc_getNode(variable, label) \
static const size_t label##_id_getnode = ::gtsam::internal::getTicTocID(#label); \
const boost::shared_ptr<const ::gtsam::internal::TimingOutline> variable = \
::gtsam::internal::timingCurrent.lock()->child(label##_id_getnode, #label, ::gtsam::internal::timingCurrent);
::gtsam::internal::gCurrentTimer.lock()->child(label##_id_getnode, #label, ::gtsam::internal::gCurrentTimer);

// reset
inline void tictoc_reset_() {
::gtsam::internal::timingRoot.reset(new ::gtsam::internal::TimingOutline("Total", ::gtsam::internal::getTicTocID("Total")));
::gtsam::internal::timingCurrent = ::gtsam::internal::timingRoot; }
::gtsam::internal::gTimingRoot.reset(new ::gtsam::internal::TimingOutline("Total", ::gtsam::internal::getTicTocID("Total")));
::gtsam::internal::gCurrentTimer = ::gtsam::internal::gTimingRoot; }

#ifdef ENABLE_TIMING
#define gttic(label) gttic_(label)
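
Reviewer note, a minimal usage sketch (not part of this diff) of the always-active timing macros and helpers renamed above; it assumes only gttic_/gttoc_, tictoc_finishedIteration_ and tictoc_print_ as declared in this header.

#include <gtsam/base/timing.h>

void timedComputation() {
  gttic_(computation);           // creates an AutoTicToc that calls internal::tic now
  // ... work to be timed ...
  gttoc_(computation);           // explicit stop; otherwise the destructor calls toc at scope exit
}

int main() {
  for (int i = 0; i < 10; ++i) {
    timedComputation();
    tictoc_finishedIteration_(); // close one iteration for the mean/stddev statistics
  }
  tictoc_print_();               // print the timing tree rooted at gTimingRoot
  return 0;
}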

@ -1,19 +1,19 @@
/* ----------------------------------------------------------------------------

* GTSAM Copyright 2010, Georgia Tech Research Corporation,
* Atlanta, Georgia 30332-0415
* All Rights Reserved
* Authors: Frank Dellaert, et al. (see THANKS for the full author list)
* GTSAM Copyright 2010, Georgia Tech Research Corporation,
* Atlanta, Georgia 30332-0415
* All Rights Reserved
* Authors: Frank Dellaert, et al. (see THANKS for the full author list)

* See LICENSE for the license information
* See LICENSE for the license information

* -------------------------------------------------------------------------- */
* -------------------------------------------------------------------------- */

/**
* @file treeTraversal-inst.h
* @author Richard Roberts
* @date April 9, 2013
*/
* @file treeTraversal-inst.h
* @author Richard Roberts
* @date April 9, 2013
*/
#pragma once

#include <gtsam/base/treeTraversal/parallelTraversalTasks.h>

@ -22,6 +22,7 @@
#include <gtsam/base/FastList.h>
#include <gtsam/base/FastVector.h>
#include <gtsam/inference/Key.h>
#include <gtsam/config.h> // for GTSAM_USE_TBB

#include <stack>
#include <vector>

@ -33,192 +34,197 @@

namespace gtsam {

/** Internal functions used for traversing trees */
namespace treeTraversal {
/** Internal functions used for traversing trees */
namespace treeTraversal {

/* ************************************************************************* */
namespace {
// Internal node used in DFS preorder stack
template<typename NODE, typename DATA>
struct TraversalNode {
bool expanded;
const boost::shared_ptr<NODE>& treeNode;
DATA& parentData;
typename FastList<DATA>::iterator dataPointer;
TraversalNode(const boost::shared_ptr<NODE>& _treeNode, DATA& _parentData) :
expanded(false), treeNode(_treeNode), parentData(_parentData) {}
};

// Do nothing - default argument for post-visitor for tree traversal
struct no_op {
template<typename NODE, typename DATA>
void operator()(const boost::shared_ptr<NODE>& node, const DATA& data) {}
};

}

/** Traverse a forest depth-first with pre-order and post-order visits.
* @param forest The forest of trees to traverse. The method \c forest.roots() should exist
* and return a collection of (shared) pointers to \c FOREST::Node.
* @param visitorPre \c visitorPre(node, parentData) will be called at every node, before
* visiting its children, and will be passed, by reference, the \c DATA object returned
* by the visit to its parent. Likewise, \c visitorPre should return the \c DATA object
* to pass to the children. The returned \c DATA object will be copy-constructed only
* upon returning to store internally, thus may be modified by visiting the children.
* Regarding efficiency, this copy-on-return is usually optimized out by the compiler.
* @param visitorPost \c visitorPost(node, data) will be called at every node, after visiting
* its children, and will be passed, by reference, the \c DATA object returned by the
* call to \c visitorPre (the \c DATA object may be modified by visiting the children).
* @param rootData The data to pass by reference to \c visitorPre when it is called on each
* root node. */
template<class FOREST, typename DATA, typename VISITOR_PRE, typename VISITOR_POST>
void DepthFirstForest(FOREST& forest, DATA& rootData, VISITOR_PRE& visitorPre, VISITOR_POST& visitorPost)
{
// Typedefs
typedef typename FOREST::Node Node;
typedef boost::shared_ptr<Node> sharedNode;

// Depth first traversal stack
typedef TraversalNode<typename FOREST::Node, DATA> TraversalNode;
typedef FastList<TraversalNode> Stack;
Stack stack;
FastList<DATA> dataList; // List to store node data as it is returned from the pre-order visitor

// Add roots to stack (insert such that they are visited and processed in order)
{
typename Stack::iterator insertLocation = stack.begin();
BOOST_FOREACH(const sharedNode& root, forest.roots())
stack.insert(insertLocation, TraversalNode(root, rootData));
}

// Traverse
while(!stack.empty())
{
// Get next node
TraversalNode& node = stack.front();

if(node.expanded) {
// If already expanded, then the data stored in the node is no longer needed, so visit
// then delete it.
(void) visitorPost(node.treeNode, *node.dataPointer);
dataList.erase(node.dataPointer);
stack.pop_front();
} else {
// If not already visited, visit the node and add its children (use reverse iterators so
// children are processed in the order they appear)
node.dataPointer = dataList.insert(dataList.end(), visitorPre(node.treeNode, node.parentData));
typename Stack::iterator insertLocation = stack.begin();
BOOST_FOREACH(const sharedNode& child, node.treeNode->children)
stack.insert(insertLocation, TraversalNode(child, *node.dataPointer));
node.expanded = true;
}
}
assert(dataList.empty());
}

/** Traverse a forest depth-first, with a pre-order visit but no post-order visit.
* @param forest The forest of trees to traverse. The method \c forest.roots() should exist
* and return a collection of (shared) pointers to \c FOREST::Node.
* @param visitorPre \c visitorPre(node, parentData) will be called at every node, before
* visiting its children, and will be passed, by reference, the \c DATA object returned
* by the visit to its parent. Likewise, \c visitorPre should return the \c DATA object
* to pass to the children. The returned \c DATA object will be copy-constructed only
* upon returning to store internally, thus may be modified by visiting the children.
* Regarding efficiency, this copy-on-return is usually optimized out by the compiler.
* @param rootData The data to pass by reference to \c visitorPre when it is called on each
* root node. */
template<class FOREST, typename DATA, typename VISITOR_PRE>
void DepthFirstForest(FOREST& forest, DATA& rootData, VISITOR_PRE& visitorPre)
{
no_op visitorPost;
DepthFirstForest(forest, rootData, visitorPre, visitorPost);
}

/** Traverse a forest depth-first with pre-order and post-order visits.
* @param forest The forest of trees to traverse. The method \c forest.roots() should exist
* and return a collection of (shared) pointers to \c FOREST::Node.
* @param visitorPre \c visitorPre(node, parentData) will be called at every node, before
* visiting its children, and will be passed, by reference, the \c DATA object returned
* by the visit to its parent. Likewise, \c visitorPre should return the \c DATA object
* to pass to the children. The returned \c DATA object will be copy-constructed only
* upon returning to store internally, thus may be modified by visiting the children.
* Regarding efficiency, this copy-on-return is usually optimized out by the compiler.
* @param visitorPost \c visitorPost(node, data) will be called at every node, after visiting
* its children, and will be passed, by reference, the \c DATA object returned by the
* call to \c visitorPre (the \c DATA object may be modified by visiting the children).
* @param rootData The data to pass by reference to \c visitorPre when it is called on each
* root node. */
template<class FOREST, typename DATA, typename VISITOR_PRE, typename VISITOR_POST>
void DepthFirstForestParallel(FOREST& forest, DATA& rootData, VISITOR_PRE& visitorPre, VISITOR_POST& visitorPost,
int problemSizeThreshold = 10)
{
#ifdef GTSAM_USE_TBB
// Typedefs
typedef typename FOREST::Node Node;
typedef boost::shared_ptr<Node> sharedNode;

tbb::task::spawn_root_and_wait(internal::CreateRootTask<Node>(
forest.roots(), rootData, visitorPre, visitorPost, problemSizeThreshold));
#else
DepthFirstForest(forest, rootData, visitorPre, visitorPost);
#endif
}

/* ************************************************************************* */
/** Traversal function for CloneForest */
namespace {
template<typename NODE>
boost::shared_ptr<NODE>
CloneForestVisitorPre(const boost::shared_ptr<NODE>& node, const boost::shared_ptr<NODE>& parentPointer)
{
// Clone the current node and add it to its cloned parent
boost::shared_ptr<NODE> clone = boost::make_shared<NODE>(*node);
clone->children.clear();
parentPointer->children.push_back(clone);
return clone;
}
}

/** Clone a tree, copy-constructing new nodes (calling boost::make_shared) and setting up child
* pointers for a clone of the original tree.
* @param forest The forest of trees to clone. The method \c forest.roots() should exist and
* return a collection of shared pointers to \c FOREST::Node.
* @return The new collection of roots. */
template<class FOREST>
FastVector<boost::shared_ptr<typename FOREST::Node> > CloneForest(const FOREST& forest)
{
typedef typename FOREST::Node Node;
boost::shared_ptr<Node> rootContainer = boost::make_shared<Node>();
DepthFirstForest(forest, rootContainer, CloneForestVisitorPre<Node>);
return FastVector<boost::shared_ptr<Node> >(rootContainer->children.begin(), rootContainer->children.end());
}

/* ************************************************************************* */
/** Traversal function for PrintForest */
namespace {
struct PrintForestVisitorPre
{
const KeyFormatter& formatter;
PrintForestVisitorPre(const KeyFormatter& formatter) : formatter(formatter) {}
template<typename NODE> std::string operator()(const boost::shared_ptr<NODE>& node, const std::string& parentString)
{
// Print the current node
node->print(parentString + "-", formatter);
// Increment the indentation
return parentString + "| ";
}
};
}

/** Print a tree, prefixing each line with \c str, and formatting keys using \c keyFormatter.
* To print each node, this function calls the \c print function of the tree nodes. */
template<class FOREST>
void PrintForest(const FOREST& forest, std::string str, const KeyFormatter& keyFormatter) {
PrintForestVisitorPre visitor(keyFormatter);
DepthFirstForest(forest, str, visitor);
}

/* ************************************************************************* */
namespace {
// Internal node used in DFS preorder stack
template<typename NODE, typename DATA>
struct TraversalNode {
bool expanded;
const boost::shared_ptr<NODE>& treeNode;
DATA& parentData;
typename FastList<DATA>::iterator dataPointer;
TraversalNode(const boost::shared_ptr<NODE>& _treeNode, DATA& _parentData) :
expanded(false), treeNode(_treeNode), parentData(_parentData) {
}
};

// Do nothing - default argument for post-visitor for tree traversal
struct no_op {
template<typename NODE, typename DATA>
void operator()(const boost::shared_ptr<NODE>& node, const DATA& data) {
}
};

}

/** Traverse a forest depth-first with pre-order and post-order visits.
* @param forest The forest of trees to traverse. The method \c forest.roots() should exist
* and return a collection of (shared) pointers to \c FOREST::Node.
* @param visitorPre \c visitorPre(node, parentData) will be called at every node, before
* visiting its children, and will be passed, by reference, the \c DATA object returned
* by the visit to its parent. Likewise, \c visitorPre should return the \c DATA object
* to pass to the children. The returned \c DATA object will be copy-constructed only
* upon returning to store internally, thus may be modified by visiting the children.
* Regarding efficiency, this copy-on-return is usually optimized out by the compiler.
* @param visitorPost \c visitorPost(node, data) will be called at every node, after visiting
* its children, and will be passed, by reference, the \c DATA object returned by the
* call to \c visitorPre (the \c DATA object may be modified by visiting the children).
* @param rootData The data to pass by reference to \c visitorPre when it is called on each
* root node. */
template<class FOREST, typename DATA, typename VISITOR_PRE,
typename VISITOR_POST>
void DepthFirstForest(FOREST& forest, DATA& rootData, VISITOR_PRE& visitorPre,
VISITOR_POST& visitorPost) {
// Typedefs
typedef typename FOREST::Node Node;
typedef boost::shared_ptr<Node> sharedNode;

// Depth first traversal stack
typedef TraversalNode<typename FOREST::Node, DATA> TraversalNode;
typedef FastList<TraversalNode> Stack;
Stack stack;
FastList<DATA> dataList; // List to store node data as it is returned from the pre-order visitor

// Add roots to stack (insert such that they are visited and processed in order)
{
typename Stack::iterator insertLocation = stack.begin();
BOOST_FOREACH(const sharedNode& root, forest.roots())
stack.insert(insertLocation, TraversalNode(root, rootData));
}

// Traverse
while (!stack.empty()) {
// Get next node
TraversalNode& node = stack.front();

if (node.expanded) {
// If already expanded, then the data stored in the node is no longer needed, so visit
// then delete it.
(void) visitorPost(node.treeNode, *node.dataPointer);
dataList.erase(node.dataPointer);
stack.pop_front();
} else {
// If not already visited, visit the node and add its children (use reverse iterators so
// children are processed in the order they appear)
node.dataPointer = dataList.insert(dataList.end(),
visitorPre(node.treeNode, node.parentData));
typename Stack::iterator insertLocation = stack.begin();
BOOST_FOREACH(const sharedNode& child, node.treeNode->children)
stack.insert(insertLocation, TraversalNode(child, *node.dataPointer));
node.expanded = true;
}
}
assert(dataList.empty());
}
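
Reviewer note, a hypothetical usage sketch (not part of this diff) of DepthFirstForest driving a toy forest. The ToyNode/ToyForest types and the visitor names are illustrative assumptions; any type exposing roots() and a 'children' container of shared pointers fits the FOREST concept documented above.

#include <gtsam/base/treeTraversal-inst.h>
#include <boost/make_shared.hpp>
#include <boost/shared_ptr.hpp>
#include <iostream>
#include <string>
#include <vector>

struct ToyNode {
  int value;
  std::vector<boost::shared_ptr<ToyNode> > children;
  explicit ToyNode(int v) : value(v) {}
};

struct ToyForest {
  typedef ToyNode Node;                  // required by DepthFirstForest
  std::vector<boost::shared_ptr<ToyNode> > roots_;
  const std::vector<boost::shared_ptr<ToyNode> >& roots() const { return roots_; }
};

// Pre-visitor: DATA is the indentation depth handed from parent to children.
int printAndDescend(const boost::shared_ptr<ToyNode>& node, int parentDepth) {
  std::cout << std::string(2 * parentDepth, ' ') << node->value << std::endl;
  return parentDepth + 1;
}

// Post-visitor: called after all children of the node have been visited.
void afterChildren(const boost::shared_ptr<ToyNode>& node, const int& depth) {}

void traverse(ToyForest& forest) {
  int rootDepth = 0;
  gtsam::treeTraversal::DepthFirstForest(forest, rootDepth, printAndDescend, afterChildren);
}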

/** Traverse a forest depth-first, with a pre-order visit but no post-order visit.
* @param forest The forest of trees to traverse. The method \c forest.roots() should exist
* and return a collection of (shared) pointers to \c FOREST::Node.
* @param visitorPre \c visitorPre(node, parentData) will be called at every node, before
* visiting its children, and will be passed, by reference, the \c DATA object returned
* by the visit to its parent. Likewise, \c visitorPre should return the \c DATA object
* to pass to the children. The returned \c DATA object will be copy-constructed only
* upon returning to store internally, thus may be modified by visiting the children.
* Regarding efficiency, this copy-on-return is usually optimized out by the compiler.
* @param rootData The data to pass by reference to \c visitorPre when it is called on each
* root node. */
template<class FOREST, typename DATA, typename VISITOR_PRE>
void DepthFirstForest(FOREST& forest, DATA& rootData, VISITOR_PRE& visitorPre) {
no_op visitorPost;
DepthFirstForest(forest, rootData, visitorPre, visitorPost);
}

/** Traverse a forest depth-first with pre-order and post-order visits.
* @param forest The forest of trees to traverse. The method \c forest.roots() should exist
* and return a collection of (shared) pointers to \c FOREST::Node.
* @param visitorPre \c visitorPre(node, parentData) will be called at every node, before
* visiting its children, and will be passed, by reference, the \c DATA object returned
* by the visit to its parent. Likewise, \c visitorPre should return the \c DATA object
* to pass to the children. The returned \c DATA object will be copy-constructed only
* upon returning to store internally, thus may be modified by visiting the children.
* Regarding efficiency, this copy-on-return is usually optimized out by the compiler.
* @param visitorPost \c visitorPost(node, data) will be called at every node, after visiting
* its children, and will be passed, by reference, the \c DATA object returned by the
* call to \c visitorPre (the \c DATA object may be modified by visiting the children).
* @param rootData The data to pass by reference to \c visitorPre when it is called on each
* root node. */
template<class FOREST, typename DATA, typename VISITOR_PRE,
typename VISITOR_POST>
void DepthFirstForestParallel(FOREST& forest, DATA& rootData,
VISITOR_PRE& visitorPre, VISITOR_POST& visitorPost,
int problemSizeThreshold = 10) {
#ifdef GTSAM_USE_TBB
// Typedefs
typedef typename FOREST::Node Node;
typedef boost::shared_ptr<Node> sharedNode;

tbb::task::spawn_root_and_wait(
internal::CreateRootTask<Node>(forest.roots(), rootData, visitorPre,
visitorPost, problemSizeThreshold));
#else
DepthFirstForest(forest, rootData, visitorPre, visitorPost);
#endif
}

/* ************************************************************************* */
/** Traversal function for CloneForest */
namespace {
template<typename NODE>
boost::shared_ptr<NODE> CloneForestVisitorPre(
const boost::shared_ptr<NODE>& node,
const boost::shared_ptr<NODE>& parentPointer) {
// Clone the current node and add it to its cloned parent
boost::shared_ptr<NODE> clone = boost::make_shared<NODE>(*node);
clone->children.clear();
parentPointer->children.push_back(clone);
return clone;
}
}

/** Clone a tree, copy-constructing new nodes (calling boost::make_shared) and setting up child
* pointers for a clone of the original tree.
* @param forest The forest of trees to clone. The method \c forest.roots() should exist and
* return a collection of shared pointers to \c FOREST::Node.
* @return The new collection of roots. */
template<class FOREST>
FastVector<boost::shared_ptr<typename FOREST::Node> > CloneForest(
const FOREST& forest) {
typedef typename FOREST::Node Node;
boost::shared_ptr<Node> rootContainer = boost::make_shared<Node>();
DepthFirstForest(forest, rootContainer, CloneForestVisitorPre<Node>);
return FastVector<boost::shared_ptr<Node> >(rootContainer->children.begin(),
rootContainer->children.end());
}

/* ************************************************************************* */
/** Traversal function for PrintForest */
namespace {
struct PrintForestVisitorPre {
const KeyFormatter& formatter;
PrintForestVisitorPre(const KeyFormatter& formatter) :
formatter(formatter) {
}
template<typename NODE> std::string operator()(
const boost::shared_ptr<NODE>& node, const std::string& parentString) {
// Print the current node
node->print(parentString + "-", formatter);
// Increment the indentation
return parentString + "| ";
}
};
}

/** Print a tree, prefixing each line with \c str, and formatting keys using \c keyFormatter.
* To print each node, this function calls the \c print function of the tree nodes. */
template<class FOREST>
void PrintForest(const FOREST& forest, std::string str,
const KeyFormatter& keyFormatter) {
PrintForestVisitorPre visitor(keyFormatter);
DepthFirstForest(forest, str, visitor);
}
}

}

@ -24,6 +24,7 @@

#ifdef GTSAM_USE_TBB
# include <tbb/tbb.h>
# include <tbb/scalable_allocator.h>
# undef max // TBB seems to include windows.h and we don't want these macros
# undef min
# undef ERROR

@ -1,35 +0,0 @@
/* ----------------------------------------------------------------------------

* GTSAM Copyright 2010, Georgia Tech Research Corporation,
* Atlanta, Georgia 30332-0415
* All Rights Reserved
* Authors: Frank Dellaert, et al. (see THANKS for the full author list)

* See LICENSE for the license information

* -------------------------------------------------------------------------- */

/**
* @file types.h
* @brief Typedefs for easier changing of types
* @author Richard Roberts
* @date Aug 21, 2010
* @addtogroup base
*/

#include <gtsam/base/types.h>
#include <gtsam/inference/Symbol.h>
#include <boost/lexical_cast.hpp>

namespace gtsam {

/* ************************************************************************* */
std::string _defaultKeyFormatter(Key key) {
const Symbol asSymbol(key);
if(asSymbol.chr() > 0)
return (std::string)asSymbol;
else
return boost::lexical_cast<std::string>(key);
}

}

@ -20,19 +20,16 @@
#pragma once

#include <gtsam/dllexport.h>
#include <gtsam/config.h>
#include <boost/function/function1.hpp>
#include <boost/concept/assert.hpp>
#include <boost/range/concepts.hpp>
#include <boost/optional.hpp>
#include <gtsam/config.h> // for GTSAM_USE_TBB

#include <cstddef>
#include <string>
#include <ostream>

#ifdef GTSAM_USE_TBB
#include <tbb/task_scheduler_init.h>
#include <tbb/tbb_exception.h>
#include <tbb/scalable_allocator.h>
#include <iostream>
#endif

#ifdef GTSAM_USE_EIGEN_MKL_OPENMP

@ -58,22 +55,9 @@ namespace gtsam {
/// Integer nonlinear key type
typedef size_t Key;

/// Typedef for a function to format a key, i.e. to convert it to a string
typedef boost::function<std::string(Key)> KeyFormatter;

// Helper function for DefaultKeyFormatter
GTSAM_EXPORT std::string _defaultKeyFormatter(Key key);

/// The default KeyFormatter, which is used if no KeyFormatter is passed to
/// a nonlinear 'print' function. Automatically detects plain integer keys
/// and Symbol keys.
static const KeyFormatter DefaultKeyFormatter = &_defaultKeyFormatter;
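
Reviewer note, a hypothetical sketch (not part of this diff) of how DefaultKeyFormatter and a custom KeyFormatter behave; it assumes Key, KeyFormatter, DefaultKeyFormatter and Symbol end up declared in gtsam/inference/Key.h and Symbol.h after this move of the key typedefs.

#include <gtsam/inference/Symbol.h>
#include <iostream>
#include <sstream>
#include <string>

// A custom formatter is just a boost::function<std::string(Key)>.
std::string hexFormatter(gtsam::Key j) {
  std::ostringstream os;
  os << std::hex << j;
  return os.str();
}

void demoKeyFormatting() {
  using namespace gtsam;
  Key plain = 42;
  Key symbolic = Symbol('x', 7);
  std::cout << DefaultKeyFormatter(plain) << std::endl;     // "42" (no Symbol character)
  std::cout << DefaultKeyFormatter(symbolic) << std::endl;  // "x7"

  KeyFormatter fmt = &hexFormatter;
  std::cout << fmt(plain) << std::endl;                     // "2a"
}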

/// The index type for Eigen objects
typedef ptrdiff_t DenseIndex;

/* ************************************************************************* */
/**
* Helper class that uses templates to select between two types based on

@ -148,104 +132,6 @@ namespace gtsam {
return ListOfOneContainer<T>(element);
}

/* ************************************************************************* */
/// Base exception type that uses tbb_exception if GTSAM is compiled with TBB.
template<class DERIVED>
class ThreadsafeException :
#ifdef GTSAM_USE_TBB
public tbb::tbb_exception
#else
public std::exception
#endif
{
#ifdef GTSAM_USE_TBB
private:
typedef tbb::tbb_exception Base;
protected:
typedef std::basic_string<char, std::char_traits<char>, tbb::tbb_allocator<char> > String;
#else
private:
typedef std::exception Base;
protected:
typedef std::string String;
#endif

protected:
bool dynamic_; ///< Whether this object was moved
mutable boost::optional<String> description_; ///< Optional description

/// Default constructor is protected - may only be created from derived classes
ThreadsafeException() : dynamic_(false) {}

/// Copy constructor is protected - may only be created from derived classes
ThreadsafeException(const ThreadsafeException& other) : Base(other), dynamic_(false) {}

/// Construct with description string
ThreadsafeException(const std::string& description) : dynamic_(false), description_(String(description.begin(), description.end())) {}

/// Default destructor doesn't have the throw()
virtual ~ThreadsafeException() throw () {}

public:
// Implement functions for tbb_exception
#ifdef GTSAM_USE_TBB
virtual tbb::tbb_exception* move() throw () {
void* cloneMemory = scalable_malloc(sizeof(DERIVED));
if (!cloneMemory) {
std::cerr << "Failed allocating memory to copy thrown exception, exiting now." << std::endl;
exit(-1);
}
DERIVED* clone = ::new(cloneMemory) DERIVED(static_cast<DERIVED&>(*this));
clone->dynamic_ = true;
return clone;
}

virtual void destroy() throw() {
if (dynamic_) {
DERIVED* derivedPtr = static_cast<DERIVED*>(this);
derivedPtr->~DERIVED();
scalable_free(derivedPtr);
}
}

virtual void throw_self() {
throw *static_cast<DERIVED*>(this); }

virtual const char* name() const throw() {
return typeid(DERIVED).name(); }
#endif

virtual const char* what() const throw() {
return description_ ? description_->c_str() : ""; }
};

/* ************************************************************************* */
/// Threadsafe runtime error exception
class RuntimeErrorThreadsafe : public ThreadsafeException<RuntimeErrorThreadsafe>
{
public:
/// Construct with a string describing the exception
RuntimeErrorThreadsafe(const std::string& description) : ThreadsafeException<RuntimeErrorThreadsafe>(description) {}
};

/* ************************************************************************* */
/// Threadsafe runtime error exception
class OutOfRangeThreadsafe : public ThreadsafeException<OutOfRangeThreadsafe>
{
public:
/// Construct with a string describing the exception
OutOfRangeThreadsafe(const std::string& description) : ThreadsafeException<OutOfRangeThreadsafe>(description) {}
};

/* ************************************************************************* */
/// Threadsafe invalid argument exception
class InvalidArgumentThreadsafe : public ThreadsafeException<InvalidArgumentThreadsafe>
{
public:
/// Construct with a string describing the exception
InvalidArgumentThreadsafe(const std::string& description) : ThreadsafeException<InvalidArgumentThreadsafe>(description) {}
};
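
Reviewer note, a hypothetical sketch (not part of this diff) of throwing and catching the threadsafe exception types. On the catching side they behave like std::exception whether or not GTSAM is built with TBB; the include path assumes they are still declared in gtsam/base/types.h at this revision.

#include <gtsam/base/types.h>  // assumed location of the ThreadsafeException hierarchy here
#include <iostream>

void mightFail(int n) {
  if (n < 0)
    throw gtsam::InvalidArgumentThreadsafe("n must be non-negative");
}

void demoThreadsafeException() {
  try {
    mightFail(-1);
  } catch (const std::exception& e) {  // works for both the TBB and non-TBB builds
    std::cerr << "caught: " << e.what() << std::endl;
  }
}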

/* ************************************************************************* */
#ifdef __clang__
#  pragma clang diagnostic push

@ -39,8 +39,8 @@ namespace gtsam {
}

/* ************************************************************************* */
FastSet<Key> DiscreteFactorGraph::keys() const {
FastSet<Key> keys;
KeySet DiscreteFactorGraph::keys() const {
KeySet keys;
BOOST_FOREACH(const sharedFactor& factor, *this)
if (factor) keys.insert(factor->begin(), factor->end());
return keys;
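
Reviewer note, a hypothetical usage sketch (not part of this diff): keys() now returns a KeySet rather than FastSet<Key>. The add() overload and the KeySet typedef are assumptions about the surrounding library, not shown in this hunk.

#include <gtsam/discrete/DiscreteFactorGraph.h>
#include <boost/foreach.hpp>
#include <iostream>

void demoDiscreteKeys() {
  using namespace gtsam;
  DiscreteKey A(0, 2), B(1, 2);    // two binary variables
  DiscreteFactorGraph fg;
  fg.add(A & B, "1 2 3 4");        // one factor over both variables
  KeySet keys = fg.keys();         // expected to contain {0, 1}
  BOOST_FOREACH(Key k, keys)
    std::cout << k << std::endl;
}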

@ -120,7 +120,7 @@ public:
}

/** Return the set of variables involved in the factors (set union) */
FastSet<Key> keys() const;
KeySet keys() const;

/** return product of all factors as a single factor */
DecisionTreeFactor product() const;

@ -19,6 +19,7 @@

#include <gtsam/discrete/AlgebraicDecisionTree.h>
#include <gtsam/discrete/DiscreteKey.h>
#include <gtsam/inference/Key.h>

#include <boost/shared_ptr.hpp>
#include <set>

@ -18,7 +18,6 @@

#pragma once

#include <gtsam/base/DerivedValue.h>
#include <gtsam/geometry/Point2.h>

namespace gtsam {

@ -18,7 +18,6 @@

#pragma once

#include <gtsam/base/DerivedValue.h>
#include <gtsam/geometry/Cal3DS2_Base.h>

namespace gtsam {

@ -23,7 +23,6 @@
#pragma once

#include <gtsam/geometry/Cal3DS2_Base.h>
#include <gtsam/base/DerivedValue.h>

namespace gtsam {

@ -83,10 +83,21 @@ Point2 Cal3_S2::uncalibrate(const Point2& p, OptionalJacobian<2, 5> Dcal,
}

/* ************************************************************************* */
Point2 Cal3_S2::calibrate(const Point2& p) const {
const double u = p.x(), v = p.y();
return Point2((1 / fx_) * (u - u0_ - (s_ / fy_) * (v - v0_)),
(1 / fy_) * (v - v0_));
Point2 Cal3_S2::calibrate(const Point2& p, OptionalJacobian<2,5> Dcal,
OptionalJacobian<2,2> Dp) const {
const double u = p.x(), v = p.y();
double delta_u = u - u0_, delta_v = v - v0_;
double inv_fx = 1/ fx_, inv_fy = 1/fy_;
double inv_fy_delta_v = inv_fy * delta_v, inv_fx_s_inv_fy = inv_fx * s_ * inv_fy;
Point2 point(inv_fx * (delta_u - s_ * inv_fy_delta_v),
inv_fy_delta_v);
if(Dcal)
*Dcal << - inv_fx * point.x(), inv_fx * s_ * inv_fy * inv_fy_delta_v, -inv_fx * point.y(),
-inv_fx, inv_fx_s_inv_fy,
0, -inv_fy * point.y(), 0, 0, -inv_fy;
if(Dp)
*Dp << inv_fx, -inv_fx_s_inv_fy, 0, inv_fy;
return point;
}
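
Reviewer note, a minimal sketch (not part of this diff): calibrate() now also returns Jacobians and should invert uncalibrate() exactly. The calibration values below are arbitrary.

#include <gtsam/geometry/Cal3_S2.h>
#include <iostream>

void demoCalibrate() {
  using namespace gtsam;
  Cal3_S2 K(500.0, 500.0, 0.1, 320.0, 240.0);   // fx, fy, s, u0, v0
  Point2 intrinsic(0.1, -0.2);
  Point2 pixel = K.uncalibrate(intrinsic);

  Matrix25 Dcal;  // d(calibrate)/d(K)
  Matrix2 Dp;     // d(calibrate)/d(pixel)
  Point2 roundTrip = K.calibrate(pixel, Dcal, Dp);
  std::cout << roundTrip.equals(intrinsic, 1e-9) << std::endl;  // expect 1
}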

/* ************************************************************************* */

@ -156,9 +156,12 @@ public:
/**
* convert image coordinates uv to intrinsic coordinates xy
* @param p point in image coordinates
* @param Dcal optional 2*5 Jacobian wrpt Cal3_S2 parameters
* @param Dp optional 2*2 Jacobian wrpt intrinsic coordinates
* @return point in intrinsic coordinates
*/
Point2 calibrate(const Point2& p) const;
Point2 calibrate(const Point2& p, OptionalJacobian<2,5> Dcal = boost::none,
OptionalJacobian<2,2> Dp = boost::none) const;

/**
* convert homogeneous image coordinates to intrinsic coordinates

@ -85,8 +85,7 @@ const Pose3& PinholeBase::getPose(OptionalJacobian<6, 6> H) const {
}

/* ************************************************************************* */
Point2 PinholeBase::project_to_camera(const Point3& pc,
OptionalJacobian<2, 3> Dpoint) {
Point2 PinholeBase::Project(const Point3& pc, OptionalJacobian<2, 3> Dpoint) {
double d = 1.0 / pc.z();
const double u = pc.x() * d, v = pc.y() * d;
if (Dpoint)

@ -94,10 +93,22 @@ Point2 PinholeBase::project_to_camera(const Point3& pc,
return Point2(u, v);
}

/* ************************************************************************* */
Point2 PinholeBase::Project(const Unit3& pc, OptionalJacobian<2, 2> Dpoint) {
if (Dpoint) {
Matrix32 Dpoint3_pc;
Matrix23 Duv_point3;
Point2 uv = Project(pc.point3(Dpoint3_pc), Duv_point3);
*Dpoint = Duv_point3 * Dpoint3_pc;
return uv;
} else
return Project(pc.point3());
}

/* ************************************************************************* */
pair<Point2, bool> PinholeBase::projectSafe(const Point3& pw) const {
const Point3 pc = pose().transform_to(pw);
const Point2 pn = project_to_camera(pc);
const Point2 pn = Project(pc);
return make_pair(pn, pc.z() > 0);
}

@ -109,9 +120,9 @@ Point2 PinholeBase::project2(const Point3& point, OptionalJacobian<2, 6> Dpose,
const Point3 q = pose().transform_to(point, boost::none, Dpoint ? &Rt : 0);
#ifdef GTSAM_THROW_CHEIRALITY_EXCEPTION
if (q.z() <= 0)
throw CheiralityException();
throw CheiralityException();
#endif
const Point2 pn = project_to_camera(q);
const Point2 pn = Project(q);

if (Dpose || Dpoint) {
const double d = 1.0 / q.z();

@ -123,6 +134,32 @@ Point2 PinholeBase::project2(const Point3& point, OptionalJacobian<2, 6> Dpose,
return pn;
}

/* ************************************************************************* */
Point2 PinholeBase::project2(const Unit3& pw, OptionalJacobian<2, 6> Dpose,
OptionalJacobian<2, 2> Dpoint) const {

// world to camera coordinate
Matrix23 Dpc_rot;
Matrix2 Dpc_point;
const Unit3 pc = pose().rotation().unrotate(pw, Dpose ? &Dpc_rot : 0,
Dpose ? &Dpc_point : 0);

// camera to normalized image coordinate
Matrix2 Dpn_pc;
const Point2 pn = Project(pc, Dpose || Dpoint ? &Dpn_pc : 0);

// chain the Jacobian matrices
if (Dpose) {
// only rotation is important
Matrix26 Dpc_pose;
Dpc_pose.setZero();
Dpc_pose.leftCols<3>() = Dpc_rot;
*Dpose = Dpn_pc * Dpc_pose; // 2x2 * 2x6
}
if (Dpoint)
*Dpoint = Dpn_pc * Dpc_point; // 2x2 * 2*2
return pn;
}
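
Reviewer note, a hypothetical sketch (not part of this diff) of the renamed static Project() and its new Unit3 overload for points at infinity; the numeric values are arbitrary.

#include <gtsam/geometry/CalibratedCamera.h>  // declares PinholeBase in this change
#include <iostream>

void demoProject() {
  using namespace gtsam;
  // Finite point in camera coordinates: normalized image point is (x/z, y/z)
  Point3 pc(0.5, -0.2, 2.0);
  Point2 uv = PinholeBase::Project(pc);              // (0.25, -0.1)

  // Direction (point at infinity) in camera coordinates, with its 2x2 Jacobian
  Unit3 direction(Point3(0.0, 0.0, 1.0));            // along the optical axis
  Matrix2 Dpoint;
  Point2 pn = PinholeBase::Project(direction, Dpoint);  // (0, 0)
  std::cout << uv.x() << " " << pn.x() << std::endl;
}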

/* ************************************************************************* */
Point3 PinholeBase::backproject_from_camera(const Point2& p,
const double depth) {

@ -19,6 +19,11 @@
#pragma once

#include <gtsam/geometry/Pose3.h>
#include <gtsam/base/concepts.h>
#include <gtsam/base/Manifold.h>
#include <gtsam/base/ThreadsafeException.h>
#include <gtsam/dllexport.h>
#include <boost/serialization/nvp.hpp>

namespace gtsam {

@ -39,6 +44,18 @@ public:
*/
class GTSAM_EXPORT PinholeBase {

public:

/** Pose Concept requirements */
typedef Rot3 Rotation;
typedef Point3 Translation;

/**
* Some classes template on either PinholeCamera or StereoCamera,
* and this typedef informs those classes what "project" returns.
*/
typedef Point2 Measurement;

private:

Pose3 pose_; ///< 3D pose of camera

@ -130,6 +147,16 @@ public:
return pose_;
}

/// get rotation
const Rot3& rotation() const {
return pose_.rotation();
}

/// get translation
const Point3& translation() const {
return pose_.translation();
}

/// return pose, with derivative
const Pose3& getPose(OptionalJacobian<6, 6> H) const;

@ -142,14 +169,21 @@ public:
* Does *not* throw a CheiralityException, even if pc behind image plane
* @param pc point in camera coordinates
*/
static Point2 project_to_camera(const Point3& pc, //
static Point2 Project(const Point3& pc, //
OptionalJacobian<2, 3> Dpoint = boost::none);

/**
* Project from 3D point at infinity in camera coordinates into image
* Does *not* throw a CheiralityException, even if pc behind image plane
* @param pc point in camera coordinates
*/
static Point2 Project(const Unit3& pc, //
OptionalJacobian<2, 2> Dpoint = boost::none);

/// Project a point into the image and check depth
std::pair<Point2, bool> projectSafe(const Point3& pw) const;

/**
* Project point into the image
/** Project point into the image
* Throws a CheiralityException if point behind image plane iff GTSAM_THROW_CHEIRALITY_EXCEPTION
* @param point 3D point in world coordinates
* @return the intrinsic coordinates of the projected point

@ -157,9 +191,33 @@ public:
Point2 project2(const Point3& point, OptionalJacobian<2, 6> Dpose =
boost::none, OptionalJacobian<2, 3> Dpoint = boost::none) const;

/** Project point at infinity into the image
* Throws a CheiralityException if point behind image plane iff GTSAM_THROW_CHEIRALITY_EXCEPTION
* @param point 3D point in world coordinates
* @return the intrinsic coordinates of the projected point
*/
Point2 project2(const Unit3& point,
OptionalJacobian<2, 6> Dpose = boost::none,
OptionalJacobian<2, 2> Dpoint = boost::none) const;

/// backproject a 2-dimensional point to a 3-dimensional point at given depth
static Point3 backproject_from_camera(const Point2& p, const double depth);

/// @}
/// @name Advanced interface
/// @{

/**
* Return the start and end indices (inclusive) of the translation component of the
* exponential map parameterization
* @return a pair of [start, end] indices into the tangent space vector
*/
inline static std::pair<size_t, size_t> translationInterval() {
return std::make_pair(3, 5);
}

/// @}

private:

/** Serialization function */

@ -21,13 +21,14 @@
#include <gtsam/geometry/Point3.h>
#include <gtsam/geometry/CalibratedCamera.h> // for Cheirality exception
#include <gtsam/base/Testable.h>
#include <gtsam/base/SymmetricBlockMatrix.h>
#include <gtsam/base/FastMap.h>
#include <vector>

namespace gtsam {

/**
* @brief A set of cameras, all with their own calibration
* Assumes that a camera is laid out as 6 Pose3 parameters then calibration
*/
template<class CAMERA>
class CameraSet: public std::vector<CAMERA> {

@ -40,28 +41,46 @@ protected:
*/
typedef typename CAMERA::Measurement Z;

static const int D = traits<CAMERA>::dimension; ///< Camera dimension
static const int ZDim = traits<Z>::dimension; ///< Measurement dimension
static const int Dim = traits<CAMERA>::dimension; ///< Camera dimension

/// Make a vector of re-projection errors
static Vector ErrorVector(const std::vector<Z>& predicted,
const std::vector<Z>& measured) {

// Check size
size_t m = predicted.size();
if (measured.size() != m)
throw std::runtime_error("CameraSet::errors: size mismatch");

// Project and fill error vector
Vector b(ZDim * m);
for (size_t i = 0, row = 0; i < m; i++, row += ZDim) {
Z e = predicted[i] - measured[i];
b.segment<ZDim>(row) = e.vector();
}
return b;
}

public:

/// Definitions for blocks of F
typedef Eigen::Matrix<double, ZDim, Dim> MatrixZD; // F
typedef std::pair<Key, MatrixZD> FBlock; // Fblocks
typedef Eigen::Matrix<double, ZDim, D> MatrixZD;
typedef std::vector<MatrixZD> FBlocks;

/**
* print
* @param s optional string naming the factor
* @param keyFormatter optional formatter useful for printing Symbols
*/
void print(const std::string& s = "") const {
virtual void print(const std::string& s = "") const {
std::cout << s << "CameraSet, cameras = \n";
for (size_t k = 0; k < this->size(); ++k)
this->at(k).print();
this->at(k).print(s);
}

/// equals
virtual bool equals(const CameraSet& p, double tol = 1e-9) const {
bool equals(const CameraSet& p, double tol = 1e-9) const {
if (this->size() != p.size())
return false;
bool camerasAreEqual = true;

@ -74,31 +93,227 @@ public:
}

/**
* Project a point, with derivatives in this, point, and calibration
* Project a point (possibly Unit3 at infinity), with derivatives
* Note that F is a sparse block-diagonal matrix, so instead of a large dense
* matrix this function returns the diagonal blocks.
* throws CheiralityException
*/
std::vector<Z> project(const Point3& point, boost::optional<Matrix&> F =
boost::none, boost::optional<Matrix&> E = boost::none,
boost::optional<Matrix&> H = boost::none) const {
template<class POINT>
std::vector<Z> project2(const POINT& point, //
boost::optional<FBlocks&> Fs = boost::none, //
boost::optional<Matrix&> E = boost::none) const {

size_t nrCameras = this->size();
if (F) F->resize(ZDim * nrCameras, 6);
if (E) E->resize(ZDim * nrCameras, 3);
if (H && Dim > 6) H->resize(ZDim * nrCameras, Dim - 6);
std::vector<Z> z(nrCameras);
static const int N = FixedDimension<POINT>::value;

for (size_t i = 0; i < nrCameras; i++) {
Eigen::Matrix<double, ZDim, 6> Fi;
Eigen::Matrix<double, ZDim, 3> Ei;
Eigen::Matrix<double, ZDim, Dim - 6> Hi;
z[i] = this->at(i).project(point, F ? &Fi : 0, E ? &Ei : 0, H ? &Hi : 0);
if (F) F->block<ZDim, 6>(ZDim * i, 0) = Fi;
if (E) E->block<ZDim, 3>(ZDim * i, 0) = Ei;
if (H) H->block<ZDim, Dim - 6>(ZDim * i, 0) = Hi;
// Allocate result
size_t m = this->size();
std::vector<Z> z(m);

// Allocate derivatives
if (E)
E->resize(ZDim * m, N);
if (Fs)
Fs->resize(m);

// Project and fill derivatives
for (size_t i = 0; i < m; i++) {
MatrixZD Fi;
Eigen::Matrix<double, ZDim, N> Ei;
z[i] = this->at(i).project2(point, Fs ? &Fi : 0, E ? &Ei : 0);
if (Fs)
(*Fs)[i] = Fi;
if (E)
E->block<ZDim, N>(ZDim * i, 0) = Ei;
}

return z;
}

/// Calculate vector [project2(point)-z] of re-projection errors
template<class POINT>
Vector reprojectionError(const POINT& point, const std::vector<Z>& measured,
boost::optional<FBlocks&> Fs = boost::none, //
boost::optional<Matrix&> E = boost::none) const {
return ErrorVector(project2(point, Fs, E), measured);
}
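
Reviewer note, a hypothetical usage sketch (not part of this diff): projecting one landmark into a set of cameras and collecting the block-diagonal Jacobians. PinholeCamera, its constructors and the include paths are assumptions about the surrounding library, not shown in this hunk.

#include <gtsam/geometry/CameraSet.h>
#include <gtsam/geometry/PinholeCamera.h>
#include <gtsam/geometry/Cal3_S2.h>
#include <vector>

void demoCameraSet() {
  using namespace gtsam;
  typedef PinholeCamera<Cal3_S2> Camera;

  CameraSet<Camera> cameras;
  cameras.push_back(Camera(Pose3(), Cal3_S2(500, 500, 0, 320, 240)));
  cameras.push_back(Camera(Pose3(Rot3(), Point3(1, 0, 0)), Cal3_S2(500, 500, 0, 320, 240)));

  Point3 landmark(0.5, 0.2, 5.0);
  CameraSet<Camera>::FBlocks Fs;  // one ZDim x D block per camera
  Matrix E;                       // stacked ZDim x 3 point Jacobians
  std::vector<Point2> predicted = cameras.project2(landmark, Fs, E);

  // Residuals against some measurements (here the predictions themselves, so zero)
  Vector b = cameras.reprojectionError(landmark, predicted, Fs, E);
}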

/**
* Do Schur complement, given Jacobian as Fs,E,P, return SymmetricBlockMatrix
* G = F' * F - F' * E * P * E' * F
* g = F' * (b - E * P * E' * b)
* Fixed size version
*/
template<int N> // N = 2 or 3
static SymmetricBlockMatrix SchurComplement(const FBlocks& Fs,
const Matrix& E, const Eigen::Matrix<double, N, N>& P, const Vector& b) {

// a single point is observed in m cameras
size_t m = Fs.size();

// Create a SymmetricBlockMatrix
size_t M1 = D * m + 1;
std::vector<DenseIndex> dims(m + 1); // this also includes the b term
std::fill(dims.begin(), dims.end() - 1, D);
dims.back() = 1;
SymmetricBlockMatrix augmentedHessian(dims, Matrix::Zero(M1, M1));

// Blockwise Schur complement
for (size_t i = 0; i < m; i++) { // for each camera

const MatrixZD& Fi = Fs[i];
const Eigen::Matrix<double, 2, N> Ei_P = //
E.block(ZDim * i, 0, ZDim, N) * P;

// D = (Dx2) * ZDim
augmentedHessian(i, m) = Fi.transpose() * b.segment<ZDim>(ZDim * i) // F' * b
- Fi.transpose() * (Ei_P * (E.transpose() * b)); // D = (DxZDim) * (ZDimx3) * (N*ZDimm) * (ZDimm x 1)

// (DxD) = (DxZDim) * ( (ZDimxD) - (ZDimx3) * (3xZDim) * (ZDimxD) )
augmentedHessian(i, i) = Fi.transpose()
* (Fi - Ei_P * E.block(ZDim * i, 0, ZDim, N).transpose() * Fi);

// upper triangular part of the hessian
for (size_t j = i + 1; j < m; j++) { // for each camera
const MatrixZD& Fj = Fs[j];

// (DxD) = (Dx2) * ( (2x2) * (2xD) )
augmentedHessian(i, j) = -Fi.transpose()
* (Ei_P * E.block(ZDim * j, 0, ZDim, N).transpose() * Fj);
}
} // end of for over cameras

augmentedHessian(m, m)(0, 0) += b.squaredNorm();
return augmentedHessian;
}

/// Computes Point Covariance P, with lambda parameter
template<int N> // N = 2 or 3
static void ComputePointCovariance(Eigen::Matrix<double, N, N>& P,
const Matrix& E, double lambda, bool diagonalDamping = false) {

Matrix EtE = E.transpose() * E;

if (diagonalDamping) { // diagonal of the hessian
EtE.diagonal() += lambda * EtE.diagonal();
} else {
DenseIndex n = E.cols();
EtE += lambda * Eigen::MatrixXd::Identity(n, n);
}

P = (EtE).inverse();
}

/// Computes Point Covariance P, with lambda parameter, dynamic version
static Matrix PointCov(const Matrix& E, const double lambda = 0.0,
bool diagonalDamping = false) {
if (E.cols() == 2) {
Matrix2 P2;
ComputePointCovariance(P2, E, lambda, diagonalDamping);
return P2;
} else {
Matrix3 P3;
ComputePointCovariance(P3, E, lambda, diagonalDamping);
return P3;
}
}

/**
* Do Schur complement, given Jacobian as Fs,E,P, return SymmetricBlockMatrix
* Dynamic version
*/
static SymmetricBlockMatrix SchurComplement(const FBlocks& Fblocks,
const Matrix& E, const Vector& b, const double lambda = 0.0,
bool diagonalDamping = false) {
SymmetricBlockMatrix augmentedHessian;
if (E.cols() == 2) {
Matrix2 P;
ComputePointCovariance(P, E, lambda, diagonalDamping);
augmentedHessian = SchurComplement(Fblocks, E, P, b);
} else {
Matrix3 P;
ComputePointCovariance(P, E, lambda, diagonalDamping);
augmentedHessian = SchurComplement(Fblocks, E, P, b);
}
return augmentedHessian;
}
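
Reviewer note, a hypothetical sketch (not part of this diff) continuing the CameraSet example above: building the Schur complement G = F' * F - F' * E * P * E' * F for one landmark from the Jacobians returned by project2/reprojectionError. It uses only the static members declared in this hunk; the same includes as the previous sketch are assumed.

void demoSchur(const gtsam::CameraSet<gtsam::PinholeCamera<gtsam::Cal3_S2> >& cameras,
               const gtsam::Point3& landmark,
               const std::vector<gtsam::Point2>& measured) {
  using namespace gtsam;
  typedef CameraSet<PinholeCamera<Cal3_S2> > Cameras;

  Cameras::FBlocks Fs;
  Matrix E;
  Vector b = cameras.reprojectionError(landmark, measured, Fs, E);

  // Dynamic version: chooses the 2x2 or 3x3 point covariance internally,
  // with optional Levenberg-Marquardt style damping of E'E.
  double lambda = 1e-3;
  SymmetricBlockMatrix augmentedHessian =
      Cameras::SchurComplement(Fs, E, b, lambda, /*diagonalDamping=*/false);
}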

/**
* Applies Schur complement (exploiting block structure) to get a smart factor on cameras,
* and adds the contribution of the smart factor to a pre-allocated augmented Hessian.
*/
template<int N> // N = 2 or 3
static void UpdateSchurComplement(const FBlocks& Fs, const Matrix& E,
const Eigen::Matrix<double, N, N>& P, const Vector& b,
const FastVector<Key>& allKeys, const FastVector<Key>& keys,
/*output ->*/SymmetricBlockMatrix& augmentedHessian) {

assert(keys.size()==Fs.size());
assert(keys.size()<=allKeys.size());

FastMap<Key, size_t> KeySlotMap;
for (size_t slot = 0; slot < allKeys.size(); slot++)
KeySlotMap.insert(std::make_pair(allKeys[slot], slot));

// Schur complement trick
// G = F' * F - F' * E * P * E' * F
// g = F' * (b - E * P * E' * b)

Eigen::Matrix<double, D, D> matrixBlock;
typedef SymmetricBlockMatrix::Block Block; ///< A block from the Hessian matrix

// a single point is observed in m cameras
size_t m = Fs.size(); // cameras observing current point
size_t M = (augmentedHessian.rows() - 1) / D; // all cameras in the group
assert(allKeys.size()==M);

// Blockwise Schur complement
for (size_t i = 0; i < m; i++) { // for each camera in the current factor

const MatrixZD& Fi = Fs[i];
const Eigen::Matrix<double, 2, N> Ei_P = E.template block<ZDim, N>(
ZDim * i, 0) * P;

// D = (DxZDim) * (ZDim)
// allKeys are the list of all camera keys in the group, e.g, (1,3,4,5,7)
// we should map those to a slot in the local (grouped) hessian (0,1,2,3,4)
// Key cameraKey_i = this->keys_[i];
DenseIndex aug_i = KeySlotMap.at(keys[i]);

// information vector - store previous vector
// vectorBlock = augmentedHessian(aug_i, aug_m).knownOffDiagonal();
// add contribution of current factor
augmentedHessian(aug_i, M) = augmentedHessian(aug_i, M).knownOffDiagonal()
+ Fi.transpose() * b.segment<ZDim>(ZDim * i) // F' * b
- Fi.transpose() * (Ei_P * (E.transpose() * b)); // D = (DxZDim) * (ZDimx3) * (N*ZDimm) * (ZDimm x 1)

// (DxD) = (DxZDim) * ( (ZDimxD) - (ZDimx3) * (3xZDim) * (ZDimxD) )
// main block diagonal - store previous block
matrixBlock = augmentedHessian(aug_i, aug_i);
// add contribution of current factor
augmentedHessian(aug_i, aug_i) = matrixBlock
+ (Fi.transpose()
* (Fi - Ei_P * E.template block<ZDim, N>(ZDim * i, 0).transpose() * Fi));

// upper triangular part of the hessian
for (size_t j = i + 1; j < m; j++) { // for each camera
const MatrixZD& Fj = Fs[j];

DenseIndex aug_j = KeySlotMap.at(keys[j]);

// (DxD) = (DxZDim) * ( (ZDimxZDim) * (ZDimxD) )
// off diagonal block - store previous block
// matrixBlock = augmentedHessian(aug_i, aug_j).knownOffDiagonal();
// add contribution of current factor
augmentedHessian(aug_i, aug_j) =
augmentedHessian(aug_i, aug_j).knownOffDiagonal()
- Fi.transpose()
* (Ei_P * E.template block<ZDim, N>(ZDim * j, 0).transpose() * Fj);
}
} // end of for over cameras

augmentedHessian(M, M)(0, 0) += b.squaredNorm();
}

private:

/// Serialization function

@ -109,6 +324,9 @@ private:
}
};

template<class CAMERA>
const int CameraSet<CAMERA>::D;

template<class CAMERA>
const int CameraSet<CAMERA>::ZDim;

@ -13,6 +13,7 @@
* @file OrientedPlane3.cpp
* @date Dec 19, 2013
* @author Alex Trevor
* @author Zhaoyang Lv
* @brief A plane, represented by a normal direction and perpendicular distance
*/

@ -25,79 +26,54 @@ using namespace std;
namespace gtsam {

/* ************************************************************************* */
/// The print function
void OrientedPlane3::print(const std::string& s) const {
|
||||
Vector coeffs = planeCoefficients();
|
||||
void OrientedPlane3::print(const string& s) const {
|
||||
Vector4 coeffs = planeCoefficients();
|
||||
cout << s << " : " << coeffs << endl;
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
OrientedPlane3 OrientedPlane3::Transform(const gtsam::OrientedPlane3& plane,
|
||||
const gtsam::Pose3& xr, OptionalJacobian<3, 6> Hr,
|
||||
OptionalJacobian<3, 3> Hp) {
|
||||
Matrix n_hr;
|
||||
Matrix n_hp;
|
||||
Unit3 n_rotated = xr.rotation().unrotate(plane.n_, n_hr, n_hp);
|
||||
OrientedPlane3 OrientedPlane3::transform(const Pose3& xr, OptionalJacobian<3, 3> Hp,
|
||||
OptionalJacobian<3, 6> Hr) const {
|
||||
Matrix23 D_rotated_plane;
|
||||
Matrix22 D_rotated_pose;
|
||||
Unit3 n_rotated = xr.rotation().unrotate(n_, D_rotated_plane, D_rotated_pose);
|
||||
|
||||
Vector n_unit = plane.n_.unitVector();
|
||||
Vector unit_vec = n_rotated.unitVector();
|
||||
double pred_d = n_unit.dot(xr.translation().vector()) + plane.d_;
|
||||
OrientedPlane3 transformed_plane(unit_vec(0), unit_vec(1), unit_vec(2),
|
||||
pred_d);
|
||||
Vector3 unit_vec = n_rotated.unitVector();
|
||||
double pred_d = n_.unitVector().dot(xr.translation().vector()) + d_;
|
||||
|
||||
if (Hr) {
|
||||
*Hr = gtsam::zeros(3, 6);
|
||||
(*Hr).block<2, 3>(0, 0) = n_hr;
|
||||
(*Hr).block<1, 3>(2, 3) = unit_vec;
|
||||
*Hr = zeros(3, 6);
|
||||
Hr->block<2, 3>(0, 0) = D_rotated_plane;
|
||||
Hr->block<1, 3>(2, 3) = unit_vec;
|
||||
}
|
||||
if (Hp) {
|
||||
Vector xrp = xr.translation().vector();
|
||||
Matrix n_basis = plane.n_.basis();
|
||||
Vector hpp = n_basis.transpose() * xrp;
|
||||
*Hp = gtsam::zeros(3, 3);
|
||||
(*Hp).block<2, 2>(0, 0) = n_hp;
|
||||
(*Hp).block<1, 2>(2, 0) = hpp;
|
||||
Vector2 hpp = n_.basis().transpose() * xr.translation().vector();
|
||||
*Hp = Z_3x3;
|
||||
Hp->block<2, 2>(0, 0) = D_rotated_pose;
|
||||
Hp->block<1, 2>(2, 0) = hpp;
|
||||
(*Hp)(2, 2) = 1;
|
||||
}
|
||||
|
||||
return (transformed_plane);
|
||||
return OrientedPlane3(unit_vec(0), unit_vec(1), unit_vec(2), pred_d);
|
||||
}
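In coordinates, the new member transform above implements the following map (a restatement of the code): for a plane with unit normal \hat n and perpendicular distance d, re-expressed relative to a pose x_r = (R, t),

  \hat n' = R^\top \hat n, \qquad d' = \hat n^\top t + d,

where R^\top \hat n is what Rot3::unrotate computes; Hr and Hp are filled from the unrotate Jacobians and the tangent basis of \hat n.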
|
||||
|
||||
|
||||
/* ************************************************************************* */
|
||||
Vector3 OrientedPlane3::error(const gtsam::OrientedPlane3& plane) const {
|
||||
Vector3 OrientedPlane3::error(const OrientedPlane3& plane) const {
|
||||
Vector2 n_error = -n_.localCoordinates(plane.n_);
|
||||
double d_error = d_ - plane.d_;
|
||||
Vector3 e;
|
||||
e << n_error(0), n_error(1), d_error;
|
||||
return (e);
|
||||
return Vector3(n_error(0), n_error(1), d_ - plane.d_);
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
OrientedPlane3 OrientedPlane3::retract(const Vector3& v) const {
|
||||
// Retract the Unit3
|
||||
Vector2 n_v(v(0), v(1));
|
||||
Unit3 n_retracted = n_.retract(n_v);
|
||||
double d_retracted = d_ + v(2);
|
||||
return OrientedPlane3(n_retracted, d_retracted);
|
||||
return OrientedPlane3(n_.retract(Vector2(v(0), v(1))), d_ + v(2));
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
Vector3 OrientedPlane3::localCoordinates(const OrientedPlane3& y) const {
|
||||
Vector2 n_local = n_.localCoordinates(y.n_);
|
||||
double d_local = d_ - y.d_;
|
||||
Vector3 e;
|
||||
e << n_local(0), n_local(1), -d_local;
|
||||
return e;
|
||||
return Vector3(n_local(0), n_local(1), -d_local);
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
Vector3 OrientedPlane3::planeCoefficients() const {
|
||||
Vector unit_vec = n_.unitVector();
|
||||
Vector3 a;
|
||||
a << unit_vec[0], unit_vec[1], unit_vec[2], d_;
|
||||
return a;
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
|
||||
}
|
||||
|
|
|
@ -14,6 +14,7 @@
|
|||
* @date Dec 19, 2013
|
||||
* @author Alex Trevor
|
||||
* @author Frank Dellaert
|
||||
* @author Zhaoyang Lv
|
||||
* @brief An infinite plane, represented by a normal direction and perpendicular distance
|
||||
*/
|
||||
|
||||
|
@ -22,46 +23,56 @@
|
|||
#include <gtsam/geometry/Rot3.h>
|
||||
#include <gtsam/geometry/Unit3.h>
|
||||
#include <gtsam/geometry/Pose3.h>
|
||||
#include <gtsam/base/DerivedValue.h>
|
||||
|
||||
namespace gtsam {
|
||||
|
||||
/// Represents an infinite plane in 3D.
|
||||
class OrientedPlane3: public DerivedValue<OrientedPlane3> {
|
||||
/**
|
||||
* @brief Represents an infinite plane in 3D, which is composed of a planar normal and its
|
||||
* perpendicular distance to the origin.
|
||||
* Currently it provides a transform of the plane, and a norm 1 differencing of two planes.
|
||||
* Refer to Trevor12iros for more math details.
|
||||
*/
|
||||
class GTSAM_EXPORT OrientedPlane3 {
|
||||
|
||||
private:
|
||||
|
||||
Unit3 n_; /// The direction of the planar normal
|
||||
double d_; /// The perpendicular distance to this plane
|
||||
Unit3 n_; ///< The direction of the planar normal
|
||||
double d_; ///< The perpendicular distance to this plane
|
||||
|
||||
public:
|
||||
enum {
|
||||
dimension = 3
|
||||
};
|
||||
|
||||
/// @name Constructors
|
||||
/// @{
|
||||
|
||||
/// Default constructor
|
||||
OrientedPlane3() :
|
||||
n_(), d_(0.0) {
|
||||
n_(), d_(0.0) {
|
||||
}
|
||||
|
||||
/// Construct from a Unit3 and a distance
|
||||
OrientedPlane3(const Unit3& s, double d) :
|
||||
n_(s), d_(d) {
|
||||
n_(s), d_(d) {
|
||||
}
|
||||
|
||||
/// Construct from a vector of plane coefficients
|
||||
OrientedPlane3(const Vector4& vec) :
|
||||
n_(vec(0), vec(1), vec(2)), d_(vec(3)) {
|
||||
n_(vec(0), vec(1), vec(2)), d_(vec(3)) {
|
||||
}
|
||||
|
||||
/// Construct from a, b, c, d
|
||||
/// Construct from four numbers of plane coefficients (a, b, c, d)
|
||||
OrientedPlane3(double a, double b, double c, double d) {
|
||||
Point3 p(a, b, c);
|
||||
n_ = Unit3(p);
|
||||
d_ = d;
|
||||
}
|
||||
|
||||
/// @}
|
||||
/// @name Testable
|
||||
/// @{
|
||||
|
||||
/// The print function
|
||||
void print(const std::string& s = std::string()) const;
|
||||
|
||||
|
@ -70,13 +81,38 @@ public:
|
|||
return (n_.equals(s.n_, tol) && (fabs(d_ - s.d_) < tol));
|
||||
}
|
||||
|
||||
/// Transforms a plane to the specified pose
|
||||
static OrientedPlane3 Transform(const gtsam::OrientedPlane3& plane,
|
||||
const gtsam::Pose3& xr, OptionalJacobian<3, 6> Hr = boost::none,
|
||||
OptionalJacobian<3, 3> Hp = boost::none);
|
||||
/// @}
|
||||
|
||||
/// Computes the error between two poses
Vector3 error(const gtsam::OrientedPlane3& plane) const;
/** Transforms a plane to the specified pose
 * @param xr a transformation in current coordinate
 * @param Hp optional Jacobian w.r.t. the destination plane
 * @param Hr optional Jacobian w.r.t. the pose transformation
 * @return the transformed plane
 */
OrientedPlane3 transform(const Pose3& xr,
    OptionalJacobian<3, 3> Hp = boost::none,
    OptionalJacobian<3, 6> Hr = boost::none) const;

/**
 * @deprecated the static method has wrong Jacobian order,
 * please use the member method transform()
 * @param plane the raw plane
 * @param xr a transformation in current coordinate
 * @param Hr optional Jacobian w.r.t. the pose transformation
 * @param Hp optional Jacobian w.r.t. the destination plane
 * @return the transformed plane
 */
static OrientedPlane3 Transform(const OrientedPlane3& plane,
    const Pose3& xr, OptionalJacobian<3, 6> Hr = boost::none,
    OptionalJacobian<3, 3> Hp = boost::none) {
  return plane.transform(xr, Hp, Hr);
}
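A minimal usage sketch contrasting the two entry points; the plane and pose values loosely mirror the transform unit test further below and are otherwise arbitrary. The only point being illustrated is that the deprecated static wrapper takes its Jacobian arguments in the opposite order from the member method:

#include <gtsam/geometry/OrientedPlane3.h>
#include <gtsam/geometry/Pose3.h>
#include <cmath>
using namespace gtsam;

void transformExample() {
  OrientedPlane3 plane(-1, 0, 0, 5);  // normal (-1, 0, 0), distance 5
  Pose3 xr(Rot3::ypr(-M_PI / 4.0, 0.0, 0.0), Point3(2.0, 3.0, 4.0));

  Matrix3 Hp;   // Jacobian w.r.t. the plane
  Matrix36 Hr;  // Jacobian w.r.t. the pose
  // Preferred member method: plane Jacobian first, pose Jacobian second.
  OrientedPlane3 p1 = plane.transform(xr, Hp, Hr);
  // Deprecated static wrapper: same result, but the (Hr, Hp) order is reversed.
  OrientedPlane3 p2 = OrientedPlane3::Transform(plane, xr, Hr, Hp);
}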
|
||||
|
||||
/** Computes the error between two planes.
|
||||
* The error is a norm 1 difference in tangent space.
|
||||
* @param the other plane
|
||||
*/
|
||||
Vector3 error(const OrientedPlane3& plane) const;
|
||||
|
||||
/// Dimensionality of tangent space = 3 DOF
|
||||
inline static size_t Dim() {
|
||||
|
@ -94,22 +130,31 @@ public:
|
|||
/// The local coordinates function
|
||||
Vector3 localCoordinates(const OrientedPlane3& s) const;
|
||||
|
||||
/// Returns the plane coefficients (a, b, c, d)
|
||||
Vector3 planeCoefficients() const;
|
||||
/// Returns the plane coefficients
|
||||
inline Vector4 planeCoefficients() const {
|
||||
Vector3 unit_vec = n_.unitVector();
|
||||
return Vector4(unit_vec[0], unit_vec[1], unit_vec[2], d_);
|
||||
}
|
||||
|
||||
/// Return the normal
|
||||
inline Unit3 normal() const {
|
||||
return n_;
|
||||
}
|
||||
|
||||
/// Return the perpendicular distance to the origin
|
||||
inline double distance() const {
|
||||
return d_;
|
||||
}
|
||||
|
||||
/// @}
|
||||
};
|
||||
|
||||
template<> struct traits<OrientedPlane3> : public internal::Manifold<
|
||||
OrientedPlane3> {
|
||||
OrientedPlane3> {
|
||||
};
|
||||
|
||||
template<> struct traits<const OrientedPlane3> : public internal::Manifold<
|
||||
OrientedPlane3> {
|
||||
OrientedPlane3> {
|
||||
};
|
||||
|
||||
} // namespace gtsam
|
||||
|
|
|
@ -31,14 +31,6 @@ namespace gtsam {
|
|||
template<typename Calibration>
|
||||
class GTSAM_EXPORT PinholeCamera: public PinholeBaseK<Calibration> {
|
||||
|
||||
public:
|
||||
|
||||
/**
|
||||
* Some classes template on either PinholeCamera or StereoCamera,
|
||||
* and this typedef informs those classes what "project" returns.
|
||||
*/
|
||||
typedef Point2 Measurement;
|
||||
|
||||
private:
|
||||
|
||||
typedef PinholeBaseK<Calibration> Base; ///< base class has 3D pose as private member
|
||||
|
@ -153,7 +145,7 @@ public:
|
|||
const Pose3& getPose(OptionalJacobian<6, dimension> H) const {
|
||||
if (H) {
|
||||
H->setZero();
|
||||
H->block(0, 0, 6, 6) = I_6x6;
|
||||
H->template block<6, 6>(0, 0) = I_6x6;
|
||||
}
|
||||
return Base::pose();
|
||||
}
|
||||
|
@ -184,16 +176,15 @@ public:
|
|||
if ((size_t) d.size() == 6)
|
||||
return PinholeCamera(this->pose().retract(d), calibration());
|
||||
else
|
||||
return PinholeCamera(this->pose().retract(d.head(6)),
|
||||
return PinholeCamera(this->pose().retract(d.head<6>()),
|
||||
calibration().retract(d.tail(calibration().dim())));
|
||||
}
|
||||
|
||||
/// return canonical coordinate
|
||||
VectorK6 localCoordinates(const PinholeCamera& T2) const {
|
||||
VectorK6 d;
|
||||
// TODO: why does d.head<6>() not compile??
|
||||
d.head(6) = this->pose().localCoordinates(T2.pose());
|
||||
d.tail(DimK) = calibration().localCoordinates(T2.calibration());
|
||||
d.template head<6>() = this->pose().localCoordinates(T2.pose());
|
||||
d.template tail<DimK>() = calibration().localCoordinates(T2.calibration());
|
||||
return d;
|
||||
}
|
||||
|
||||
|
@ -208,101 +199,34 @@ public:
|
|||
|
||||
typedef Eigen::Matrix<double, 2, DimK> Matrix2K;
|
||||
|
||||
/** project a point from world coordinate to the image
|
||||
* @param pw is a point in world coordinates
|
||||
* @param Dpose is the Jacobian w.r.t. pose3
|
||||
* @param Dpoint is the Jacobian w.r.t. point3
|
||||
* @param Dcal is the Jacobian w.r.t. calibration
|
||||
/** Templated projection of a 3D point or a point at infinity into the image
|
||||
* @param pw either a Point3 or a Unit3, in world coordinates
|
||||
*/
|
||||
Point2 project(const Point3& pw, OptionalJacobian<2, 6> Dpose = boost::none,
|
||||
OptionalJacobian<2, 3> Dpoint = boost::none,
|
||||
OptionalJacobian<2, DimK> Dcal = boost::none) const {
|
||||
|
||||
// project to normalized coordinates
|
||||
const Point2 pn = PinholeBase::project2(pw, Dpose, Dpoint);
|
||||
|
||||
// uncalibrate to pixel coordinates
|
||||
Matrix2 Dpi_pn;
|
||||
const Point2 pi = calibration().uncalibrate(pn, Dcal,
|
||||
Dpose || Dpoint ? &Dpi_pn : 0);
|
||||
|
||||
// If needed, apply chain rule
|
||||
if (Dpose)
|
||||
*Dpose = Dpi_pn * *Dpose;
|
||||
if (Dpoint)
|
||||
*Dpoint = Dpi_pn * *Dpoint;
|
||||
|
||||
return pi;
|
||||
}
|
||||
|
||||
/** project a point at infinity from world coordinate to the image
|
||||
* @param pw is a point in the world coordinate (it is pw = lambda*[pw_x pw_y pw_z] with lambda->inf)
|
||||
* @param Dpose is the Jacobian w.r.t. pose3
|
||||
* @param Dpoint is the Jacobian w.r.t. point3
|
||||
* @param Dcal is the Jacobian w.r.t. calibration
|
||||
*/
|
||||
Point2 projectPointAtInfinity(const Point3& pw, OptionalJacobian<2, 6> Dpose =
|
||||
boost::none, OptionalJacobian<2, 2> Dpoint = boost::none,
|
||||
OptionalJacobian<2, DimK> Dcal = boost::none) const {
|
||||
|
||||
if (!Dpose && !Dpoint && !Dcal) {
|
||||
const Point3 pc = this->pose().rotation().unrotate(pw); // get direction in camera frame (translation does not matter)
|
||||
const Point2 pn = Base::project_to_camera(pc); // project the point to the camera
|
||||
return K_.uncalibrate(pn);
|
||||
}
|
||||
|
||||
// world to camera coordinate
|
||||
Matrix3 Dpc_rot, Dpc_point;
|
||||
const Point3 pc = this->pose().rotation().unrotate(pw, Dpc_rot, Dpc_point);
|
||||
|
||||
Matrix36 Dpc_pose;
|
||||
Dpc_pose.setZero();
|
||||
Dpc_pose.leftCols<3>() = Dpc_rot;
|
||||
|
||||
// camera to normalized image coordinate
|
||||
Matrix23 Dpn_pc; // 2*3
|
||||
const Point2 pn = Base::project_to_camera(pc, Dpn_pc);
|
||||
|
||||
// uncalibration
|
||||
Matrix2 Dpi_pn; // 2*2
|
||||
const Point2 pi = K_.uncalibrate(pn, Dcal, Dpi_pn);
|
||||
|
||||
// chain the Jacobian matrices
|
||||
const Matrix23 Dpi_pc = Dpi_pn * Dpn_pc;
|
||||
if (Dpose)
|
||||
*Dpose = Dpi_pc * Dpc_pose;
|
||||
if (Dpoint)
|
||||
*Dpoint = (Dpi_pc * Dpc_point).leftCols<2>(); // only 2dof are important for the point (direction-only)
|
||||
return pi;
|
||||
}
|
||||
|
||||
/** project a point from world coordinate to the image, fixed Jacobians
|
||||
* @param pw is a point in the world coordinate
|
||||
*/
|
||||
Point2 project2(
|
||||
const Point3& pw, //
|
||||
OptionalJacobian<2, dimension> Dcamera = boost::none,
|
||||
OptionalJacobian<2, 3> Dpoint = boost::none) const {
|
||||
|
||||
// project to normalized coordinates
|
||||
template<class POINT>
|
||||
Point2 _project2(const POINT& pw, OptionalJacobian<2, dimension> Dcamera,
|
||||
OptionalJacobian<2, FixedDimension<POINT>::value> Dpoint) const {
|
||||
// We just call 3-derivative version in Base
|
||||
Matrix26 Dpose;
|
||||
const Point2 pn = PinholeBase::project2(pw, Dpose, Dpoint);
|
||||
|
||||
// uncalibrate to pixel coordinates
|
||||
Matrix2K Dcal;
|
||||
Matrix2 Dpi_pn;
|
||||
const Point2 pi = calibration().uncalibrate(pn, Dcamera ? &Dcal : 0,
|
||||
Dcamera || Dpoint ? &Dpi_pn : 0);
|
||||
|
||||
// If needed, calculate derivatives
|
||||
Eigen::Matrix<double, 2, DimK> Dcal;
|
||||
Point2 pi = Base::project(pw, Dcamera ? &Dpose : 0, Dpoint,
|
||||
Dcamera ? &Dcal : 0);
|
||||
if (Dcamera)
|
||||
*Dcamera << Dpi_pn * Dpose, Dcal;
|
||||
if (Dpoint)
|
||||
*Dpoint = Dpi_pn * (*Dpoint);
|
||||
|
||||
*Dcamera << Dpose, Dcal;
|
||||
return pi;
|
||||
}
|
||||
|
||||
/// project a 3D point from world coordinates into the image
Point2 project2(const Point3& pw, OptionalJacobian<2, dimension> Dcamera =
    boost::none, OptionalJacobian<2, 3> Dpoint = boost::none) const {
  return _project2(pw, Dcamera, Dpoint);
}

/// project a point at infinity from world coordinates into the image
Point2 project2(const Unit3& pw, OptionalJacobian<2, dimension> Dcamera =
    boost::none, OptionalJacobian<2, 2> Dpoint = boost::none) const {
  return _project2(pw, Dcamera, Dpoint);
}
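A small sketch of the two overloads above; the Cal3_S2 values mirror the static K used in testCal3_S2 further below and are otherwise arbitrary. Passing dynamic Matrix objects for the optional Jacobians is the same pattern the unit tests use:

#include <gtsam/geometry/PinholeCamera.h>
#include <gtsam/geometry/Cal3_S2.h>
#include <gtsam/geometry/Unit3.h>
using namespace gtsam;

void project2Example() {
  PinholeCamera<Cal3_S2> camera(Pose3(), Cal3_S2(500, 500, 0.1, 320, 240));
  Matrix Dcamera, Dpoint;

  // Finite point: the point Jacobian is 2x3.
  Point2 u = camera.project2(Point3(0.1, 0.2, 5.0), Dcamera, Dpoint);

  // Point at infinity (a direction): the point Jacobian is 2x2.
  Point2 uInf = camera.project2(Unit3(0, 0, 1), Dcamera, Dpoint);
}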
|
||||
|
||||
/**
|
||||
* Calculate range to a landmark
|
||||
* @param point 3D location of landmark
|
||||
|
@ -350,7 +274,7 @@ public:
|
|||
if (Dother) {
|
||||
Dother->resize(1, 6 + CalibrationB::dimension);
|
||||
Dother->setZero();
|
||||
Dother->block(0, 0, 1, 6) = Dother_;
|
||||
Dother->block<1, 6>(0, 0) = Dother_;
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
|
|
@ -30,14 +30,19 @@ namespace gtsam {
|
|||
* @addtogroup geometry
|
||||
* \nosubgrouping
|
||||
*/
|
||||
template<typename Calibration>
|
||||
template<typename CALIBRATION>
|
||||
class GTSAM_EXPORT PinholeBaseK: public PinholeBase {
|
||||
|
||||
GTSAM_CONCEPT_MANIFOLD_TYPE(Calibration)
|
||||
private:
|
||||
|
||||
public :
|
||||
GTSAM_CONCEPT_MANIFOLD_TYPE(CALIBRATION);
|
||||
|
||||
typedef Calibration CalibrationType;
|
||||
// Get dimensions of calibration type at compile time
|
||||
static const int DimK = FixedDimension<CALIBRATION>::value;
|
||||
|
||||
public:
|
||||
|
||||
typedef CALIBRATION CalibrationType;
|
||||
|
||||
/// @name Standard Constructors
|
||||
/// @{
|
||||
|
@ -67,7 +72,7 @@ public :
|
|||
}
|
||||
|
||||
/// return calibration
|
||||
virtual const Calibration& calibration() const = 0;
|
||||
virtual const CALIBRATION& calibration() const = 0;
|
||||
|
||||
/// @}
|
||||
/// @name Transformations and measurement functions
|
||||
|
@ -80,27 +85,65 @@ public :
|
|||
return pn;
|
||||
}
|
||||
|
||||
/** project a point from world coordinate to the image, fixed Jacobians
|
||||
/** project a point from world coordinate to the image
|
||||
* @param pw is a point in the world coordinates
|
||||
*/
|
||||
Point2 project2(const Point3& pw, OptionalJacobian<2, 6> Dpose = boost::none,
|
||||
OptionalJacobian<2, 3> Dpoint = boost::none) const {
|
||||
Point2 project(const Point3& pw) const {
|
||||
const Point2 pn = PinholeBase::project2(pw); // project to normalized coordinates
|
||||
return calibration().uncalibrate(pn); // uncalibrate to pixel coordinates
|
||||
}
|
||||
|
||||
/** project a point from world coordinate to the image
|
||||
* @param pw is a point at infinity in the world coordinates
|
||||
*/
|
||||
Point2 project(const Unit3& pw) const {
|
||||
const Unit3 pc = pose().rotation().unrotate(pw); // convert to camera frame
|
||||
const Point2 pn = PinholeBase::Project(pc); // project to normalized coordinates
|
||||
return calibration().uncalibrate(pn); // uncalibrate to pixel coordinates
|
||||
}
|
||||
|
||||
/** Templated projection of a point (possibly at infinity) from world coordinate to the image
|
||||
* @param pw is a 3D point or a Unit3 (point at infinity) in world coordinates
|
||||
* @param Dpose is the Jacobian w.r.t. pose3
|
||||
* @param Dpoint is the Jacobian w.r.t. point3
|
||||
* @param Dcal is the Jacobian w.r.t. calibration
|
||||
*/
|
||||
template <class POINT>
|
||||
Point2 _project(const POINT& pw, OptionalJacobian<2, 6> Dpose,
|
||||
OptionalJacobian<2, FixedDimension<POINT>::value> Dpoint,
|
||||
OptionalJacobian<2, DimK> Dcal) const {
|
||||
|
||||
// project to normalized coordinates
|
||||
const Point2 pn = PinholeBase::project2(pw, Dpose, Dpoint);
|
||||
|
||||
// uncalibrate to pixel coordinates
|
||||
Matrix2 Dpi_pn;
|
||||
const Point2 pi = calibration().uncalibrate(pn, boost::none,
|
||||
const Point2 pi = calibration().uncalibrate(pn, Dcal,
|
||||
Dpose || Dpoint ? &Dpi_pn : 0);
|
||||
|
||||
// If needed, apply chain rule
|
||||
if (Dpose) *Dpose = Dpi_pn * (*Dpose);
|
||||
if (Dpoint) *Dpoint = Dpi_pn * (*Dpoint);
|
||||
if (Dpose)
|
||||
*Dpose = Dpi_pn * *Dpose;
|
||||
if (Dpoint)
|
||||
*Dpoint = Dpi_pn * *Dpoint;
|
||||
|
||||
return pi;
|
||||
}
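Spelled out, the chain rule _project applies (a restatement of the code): writing p_n = \Pi(x, p_w) for the normalized projection of the point (or direction) p_w by the pose x, and u = \pi_K(p_n) for the uncalibration to pixels,

  \frac{\partial u}{\partial x} = \frac{\partial \pi_K}{\partial p_n}\,\frac{\partial p_n}{\partial x}, \qquad
  \frac{\partial u}{\partial p_w} = \frac{\partial \pi_K}{\partial p_n}\,\frac{\partial p_n}{\partial p_w},

where \partial\pi_K/\partial p_n is the 2x2 Dpi_pn returned by uncalibrate, and Dcal is produced directly by the same uncalibrate call.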
|
||||
|
||||
/// project a 3D point from world coordinates into the image
|
||||
Point2 project(const Point3& pw, OptionalJacobian<2, 6> Dpose,
|
||||
OptionalJacobian<2, 3> Dpoint = boost::none,
|
||||
OptionalJacobian<2, DimK> Dcal = boost::none) const {
|
||||
return _project(pw, Dpose, Dpoint, Dcal);
|
||||
}
|
||||
|
||||
/// project a point at infinity from world coordinates into the image
|
||||
Point2 project(const Unit3& pw, OptionalJacobian<2, 6> Dpose,
|
||||
OptionalJacobian<2, 2> Dpoint = boost::none,
|
||||
OptionalJacobian<2, DimK> Dcal = boost::none) const {
|
||||
return _project(pw, Dpose, Dpoint, Dcal);
|
||||
}
|
||||
|
||||
/// backproject a 2-dimensional point to a 3-dimensional point at given depth
|
||||
Point3 backproject(const Point2& p, double depth) const {
|
||||
const Point2 pn = calibration().calibrate(p);
|
||||
|
@ -108,9 +151,9 @@ public :
|
|||
}
|
||||
|
||||
/// backproject a 2-dimensional point to a 3-dimensional point at infinity
|
||||
Point3 backprojectPointAtInfinity(const Point2& p) const {
|
||||
Unit3 backprojectPointAtInfinity(const Point2& p) const {
|
||||
const Point2 pn = calibration().calibrate(p);
|
||||
const Point3 pc(pn.x(), pn.y(), 1.0); //by convention the last element is 1
|
||||
const Unit3 pc(pn.x(), pn.y(), 1.0); //by convention the last element is 1
|
||||
return pose().rotation().rotate(pc);
|
||||
}
|
||||
|
||||
|
@ -178,13 +221,13 @@ private:
|
|||
* @addtogroup geometry
|
||||
* \nosubgrouping
|
||||
*/
|
||||
template<typename Calibration>
|
||||
class GTSAM_EXPORT PinholePose: public PinholeBaseK<Calibration> {
|
||||
template<typename CALIBRATION>
|
||||
class GTSAM_EXPORT PinholePose: public PinholeBaseK<CALIBRATION> {
|
||||
|
||||
private:
|
||||
|
||||
typedef PinholeBaseK<Calibration> Base; ///< base class has 3D pose as private member
|
||||
boost::shared_ptr<Calibration> K_; ///< shared pointer to fixed calibration
|
||||
typedef PinholeBaseK<CALIBRATION> Base; ///< base class has 3D pose as private member
|
||||
boost::shared_ptr<CALIBRATION> K_; ///< shared pointer to fixed calibration
|
||||
|
||||
public:
|
||||
|
||||
|
@ -201,11 +244,11 @@ public:
|
|||
|
||||
/** constructor with pose, uses default calibration */
|
||||
explicit PinholePose(const Pose3& pose) :
|
||||
Base(pose), K_(new Calibration()) {
|
||||
Base(pose), K_(new CALIBRATION()) {
|
||||
}
|
||||
|
||||
/** constructor with pose and calibration */
|
||||
PinholePose(const Pose3& pose, const boost::shared_ptr<Calibration>& K) :
|
||||
PinholePose(const Pose3& pose, const boost::shared_ptr<CALIBRATION>& K) :
|
||||
Base(pose), K_(K) {
|
||||
}
|
||||
|
||||
|
@ -220,14 +263,14 @@ public:
|
|||
* (theta 0 = looking in direction of positive X axis)
|
||||
* @param height camera height
|
||||
*/
|
||||
static PinholePose Level(const boost::shared_ptr<Calibration>& K,
|
||||
static PinholePose Level(const boost::shared_ptr<CALIBRATION>& K,
|
||||
const Pose2& pose2, double height) {
|
||||
return PinholePose(Base::LevelPose(pose2, height), K);
|
||||
}
|
||||
|
||||
/// PinholePose::level with default calibration
|
||||
static PinholePose Level(const Pose2& pose2, double height) {
|
||||
return PinholePose::Level(boost::make_shared<Calibration>(), pose2, height);
|
||||
return PinholePose::Level(boost::make_shared<CALIBRATION>(), pose2, height);
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -240,8 +283,8 @@ public:
|
|||
* @param K optional calibration parameter
|
||||
*/
|
||||
static PinholePose Lookat(const Point3& eye, const Point3& target,
|
||||
const Point3& upVector, const boost::shared_ptr<Calibration>& K =
|
||||
boost::make_shared<Calibration>()) {
|
||||
const Point3& upVector, const boost::shared_ptr<CALIBRATION>& K =
|
||||
boost::make_shared<CALIBRATION>()) {
|
||||
return PinholePose(Base::LookatPose(eye, target, upVector), K);
|
||||
}
|
||||
|
||||
|
@ -251,12 +294,12 @@ public:
|
|||
|
||||
/// Init from 6D vector
|
||||
explicit PinholePose(const Vector &v) :
|
||||
Base(v), K_(new Calibration()) {
|
||||
Base(v), K_(new CALIBRATION()) {
|
||||
}
|
||||
|
||||
/// Init from Vector and calibration
|
||||
PinholePose(const Vector &v, const Vector &K) :
|
||||
Base(v), K_(new Calibration(K)) {
|
||||
Base(v), K_(new CALIBRATION(K)) {
|
||||
}
|
||||
|
||||
/// @}
|
||||
|
@ -286,10 +329,26 @@ public:
|
|||
}
|
||||
|
||||
/// return calibration
|
||||
virtual const Calibration& calibration() const {
|
||||
virtual const CALIBRATION& calibration() const {
|
||||
return *K_;
|
||||
}
|
||||
|
||||
/** project a point from world coordinate to the image, 2 derivatives only
|
||||
* @param pw is a point in world coordinates
|
||||
* @param Dpose is the Jacobian w.r.t. the whole camera (really only the pose)
|
||||
* @param Dpoint is the Jacobian w.r.t. point3
|
||||
*/
|
||||
Point2 project2(const Point3& pw, OptionalJacobian<2, 6> Dpose = boost::none,
|
||||
OptionalJacobian<2, 3> Dpoint = boost::none) const {
|
||||
return Base::project(pw, Dpose, Dpoint);
|
||||
}
|
||||
|
||||
/// project2 version for point at infinity
|
||||
Point2 project2(const Unit3& pw, OptionalJacobian<2, 6> Dpose = boost::none,
|
||||
OptionalJacobian<2, 2> Dpoint = boost::none) const {
|
||||
return Base::project(pw, Dpose, Dpoint);
|
||||
}
|
||||
|
||||
/// @}
|
||||
/// @name Manifold
|
||||
/// @{
|
||||
|
@ -336,14 +395,14 @@ private:
|
|||
};
|
||||
// end of class PinholePose
|
||||
|
||||
template<typename Calibration>
|
||||
struct traits<PinholePose<Calibration> > : public internal::Manifold<
|
||||
PinholePose<Calibration> > {
|
||||
template<typename CALIBRATION>
|
||||
struct traits<PinholePose<CALIBRATION> > : public internal::Manifold<
|
||||
PinholePose<CALIBRATION> > {
|
||||
};
|
||||
|
||||
template<typename Calibration>
|
||||
struct traits<const PinholePose<Calibration> > : public internal::Manifold<
|
||||
PinholePose<Calibration> > {
|
||||
template<typename CALIBRATION>
|
||||
struct traits<const PinholePose<CALIBRATION> > : public internal::Manifold<
|
||||
PinholePose<CALIBRATION> > {
|
||||
};
|
||||
|
||||
} // \ gtsam
|
||||
|
|
|
@ -0,0 +1,85 @@
|
|||
/* ----------------------------------------------------------------------------
|
||||
|
||||
* GTSAM Copyright 2010, Georgia Tech Research Corporation,
|
||||
* Atlanta, Georgia 30332-0415
|
||||
* All Rights Reserved
|
||||
* Authors: Frank Dellaert, et al. (see THANKS for the full author list)
|
||||
|
||||
* See LICENSE for the license information
|
||||
|
||||
* -------------------------------------------------------------------------- */
|
||||
|
||||
/**
|
||||
* @file PinholeSet.h
|
||||
* @brief A CameraSet of either CalibratedCamera, PinholePose, or PinholeCamera
|
||||
* @author Frank Dellaert
|
||||
*/
|
||||
|
||||
#pragma once
|
||||
|
||||
#include <gtsam/geometry/CameraSet.h>
|
||||
#include <gtsam/geometry/triangulation.h>
|
||||
#include <boost/optional.hpp>
|
||||
#include <boost/foreach.hpp>
|
||||
|
||||
namespace gtsam {
|
||||
|
||||
/**
|
||||
* PinholeSet: triangulates point and keeps an estimate of it around.
|
||||
*/
|
||||
template<class CAMERA>
|
||||
class PinholeSet: public CameraSet<CAMERA> {
|
||||
|
||||
private:
|
||||
typedef CameraSet<CAMERA> Base;
|
||||
typedef PinholeSet<CAMERA> This;
|
||||
|
||||
protected:
|
||||
|
||||
public:
|
||||
|
||||
/** Virtual destructor */
|
||||
virtual ~PinholeSet() {
|
||||
}
|
||||
|
||||
/// @name Testable
|
||||
/// @{
|
||||
|
||||
/// print
|
||||
virtual void print(const std::string& s = "") const {
|
||||
Base::print(s);
|
||||
}
|
||||
|
||||
/// equals
|
||||
bool equals(const PinholeSet& p, double tol = 1e-9) const {
|
||||
return Base::equals(p, tol); // TODO all flags
|
||||
}
|
||||
|
||||
/// @}
|
||||
|
||||
/// triangulateSafe
|
||||
TriangulationResult triangulateSafe(
|
||||
const std::vector<Point2>& measured,
|
||||
const TriangulationParameters& params) const {
|
||||
return gtsam::triangulateSafe(*this, measured, params);
|
||||
}
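A hedged usage sketch for the new PinholeSet (not part of the diff): it assumes TriangulationParameters is default-constructible and relies only on the push_back interface that testCameraSet below also uses; camera poses and the landmark are arbitrary:

#include <gtsam/geometry/PinholeSet.h>
#include <gtsam/geometry/PinholeCamera.h>
#include <gtsam/geometry/Cal3_S2.h>
#include <vector>
using namespace gtsam;

void pinholeSetExample() {
  typedef PinholeCamera<Cal3_S2> Camera;
  Camera camA(Pose3(Rot3(), Point3(-1, 0, 0)), Cal3_S2());
  Camera camB(Pose3(Rot3(), Point3(1, 0, 0)), Cal3_S2());

  PinholeSet<Camera> cameras;
  cameras.push_back(camA);
  cameras.push_back(camB);

  // Measurements of a common landmark, here generated synthetically.
  std::vector<Point2> measured;
  measured.push_back(camA.project(Point3(0, 0, 5)));
  measured.push_back(camB.project(Point3(0, 0, 5)));

  TriangulationParameters params;
  TriangulationResult result = cameras.triangulateSafe(measured, params);
}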
|
||||
|
||||
private:
|
||||
|
||||
/// Serialization function
|
||||
friend class boost::serialization::access;
|
||||
template<class ARCHIVE>
|
||||
void serialize(ARCHIVE & ar, const unsigned int version) {
|
||||
ar & BOOST_SERIALIZATION_BASE_OBJECT_NVP(Base);
|
||||
}
|
||||
};
|
||||
|
||||
template<class CAMERA>
|
||||
struct traits<PinholeSet<CAMERA> > : public Testable<PinholeSet<CAMERA> > {
|
||||
};
|
||||
|
||||
template<class CAMERA>
|
||||
struct traits<const PinholeSet<CAMERA> > : public Testable<PinholeSet<CAMERA> > {
|
||||
};
|
||||
|
||||
} // \ namespace gtsam
|
|
@ -32,7 +32,7 @@ GTSAM_CONCEPT_POSE_INST(Pose3);
|
|||
|
||||
/* ************************************************************************* */
|
||||
Pose3::Pose3(const Pose2& pose2) :
|
||||
R_(Rot3::rodriguez(0, 0, pose2.theta())), t_(
|
||||
R_(Rot3::Rodrigues(0, 0, pose2.theta())), t_(
|
||||
Point3(pose2.x(), pose2.y(), 0)) {
|
||||
}
|
||||
|
||||
|
@@ -112,24 +112,20 @@ bool Pose3::equals(const Pose3& pose, double tol) const {
/* ************************************************************************* */
/** Modified from Murray94book version (which assumes w and v normalized?) */
Pose3 Pose3::Expmap(const Vector6& xi, OptionalJacobian<6, 6> H) {
  if (H) {
    *H = ExpmapDerivative(xi);
  }
  if (H) *H = ExpmapDerivative(xi);

  // get angular velocity omega and translational velocity v from twist xi
  Point3 w(xi(0), xi(1), xi(2)), v(xi(3), xi(4), xi(5));
  Point3 omega(xi(0), xi(1), xi(2)), v(xi(3), xi(4), xi(5));

  double theta = w.norm();
  if (theta < 1e-10) {
    static const Rot3 I;
    return Pose3(I, v);
  } else {
    Point3 n(w / theta); // axis unit vector
    Rot3 R = Rot3::rodriguez(n.vector(), theta);
    double vn = n.dot(v); // translation parallel to n
    Point3 n_cross_v = n.cross(v); // points towards axis
    Point3 t = (n_cross_v - R * n_cross_v) / theta + vn * n;
  Rot3 R = Rot3::Expmap(omega.vector());
  double theta2 = omega.dot(omega);
  if (theta2 > std::numeric_limits<double>::epsilon()) {
    double omega_v = omega.dot(v); // translation parallel to axis
    Point3 omega_cross_v = omega.cross(v); // points towards axis
    Point3 t = (omega_cross_v - R * omega_cross_v + omega_v * omega) / theta2;
    return Pose3(R, t);
  } else {
    return Pose3(R, v);
  }
}
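Written out, the closed form the new branch implements (restating the code, with \omega and v the rotational and translational parts of the twist \xi, and \theta^2 = \omega^\top\omega):

  R = \exp([\omega]_\times), \qquad
  t = \frac{(\omega \times v) - R\,(\omega \times v) + (\omega^\top v)\,\omega}{\theta^2},

with the fallback t = v when \theta^2 is below machine epsilon, in which case R already comes from the small-angle path of Rot3::Expmap.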
|
||||
|
||||
|
|
|
@ -20,6 +20,7 @@
|
|||
#include <gtsam/base/Lie.h>
|
||||
#include <gtsam/base/concepts.h>
|
||||
#include <gtsam/geometry/SO3.h> // Logmap/Expmap derivatives
|
||||
#include <limits>
|
||||
|
||||
#define QUATERNION_TYPE Eigen::Quaternion<_Scalar,_Options>
|
||||
|
||||
|
@ -73,14 +74,22 @@ struct traits<QUATERNION_TYPE> {
|
|||
return g.inverse();
|
||||
}
|
||||
|
||||
/// Exponential map, simply be converting omega to axis/angle representation
|
||||
/// Exponential map, using the inlined code from Eigen's conversion from axis/angle
|
||||
static Q Expmap(const Eigen::Ref<const TangentVector>& omega,
|
||||
ChartJacobian H = boost::none) {
|
||||
if(H) *H = SO3::ExpmapDerivative(omega);
|
||||
if (omega.isZero()) return Q::Identity();
|
||||
else {
|
||||
_Scalar angle = omega.norm();
|
||||
return Q(Eigen::AngleAxis<_Scalar>(angle, omega / angle));
|
||||
ChartJacobian H = boost::none) {
|
||||
using std::cos;
|
||||
using std::sin;
|
||||
if (H) *H = SO3::ExpmapDerivative(omega.template cast<double>());
|
||||
_Scalar theta2 = omega.dot(omega);
|
||||
if (theta2 > std::numeric_limits<_Scalar>::epsilon()) {
|
||||
_Scalar theta = std::sqrt(theta2);
|
||||
_Scalar ha = _Scalar(0.5) * theta;
|
||||
Vector3 vec = (sin(ha) / theta) * omega;
|
||||
return Q(cos(ha), vec.x(), vec.y(), vec.z());
|
||||
} else {
|
||||
// first order approximation sin(theta/2)/theta = 0.5
|
||||
Vector3 vec = _Scalar(0.5) * omega;
|
||||
return Q(1.0, vec.x(), vec.y(), vec.z());
|
||||
}
|
||||
}
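The inlined conversion above corresponds to the usual axis/angle-to-quaternion formula (restating the code, with \theta = \lVert\omega\rVert):

  q = \Bigl(\cos\tfrac{\theta}{2},\; \tfrac{\sin(\theta/2)}{\theta}\,\omega\Bigr),

with the first-order fallback \sin(\theta/2)/\theta \approx 1/2, i.e. q \approx (1, \omega/2), when \theta^2 is below machine epsilon.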
|
||||
|
||||
|
@ -93,9 +102,9 @@ struct traits<QUATERNION_TYPE> {
|
|||
static const double twoPi = 2.0 * M_PI, NearlyOne = 1.0 - 1e-10,
|
||||
NearlyNegativeOne = -1.0 + 1e-10;
|
||||
|
||||
Vector3 omega;
|
||||
TangentVector omega;
|
||||
|
||||
const double qw = q.w();
|
||||
const _Scalar qw = q.w();
|
||||
// See Quaternion-Logmap.nb in doc for Taylor expansions
|
||||
if (qw > NearlyOne) {
|
||||
// Taylor expansion of (angle / s) at 1
|
||||
|
@ -107,7 +116,7 @@ struct traits<QUATERNION_TYPE> {
|
|||
omega = (-8. / 3. - 2. / 3. * qw) * q.vec();
|
||||
} else {
|
||||
// Normal, away from zero case
|
||||
double angle = 2 * acos(qw), s = sqrt(1 - qw * qw);
|
||||
_Scalar angle = 2 * acos(qw), s = sqrt(1 - qw * qw);
|
||||
// Important: convert to [-pi,pi] to keep error continuous
|
||||
if (angle > M_PI)
|
||||
angle -= twoPi;
|
||||
|
@ -116,7 +125,7 @@ struct traits<QUATERNION_TYPE> {
|
|||
omega = (angle / s) * q.vec();
|
||||
}
|
||||
|
||||
if(H) *H = SO3::LogmapDerivative(omega);
|
||||
if(H) *H = SO3::LogmapDerivative(omega.template cast<double>());
|
||||
return omega;
|
||||
}
|
||||
|
||||
|
|
|
@ -34,29 +34,12 @@ void Rot3::print(const std::string& s) const {
|
|||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
Rot3 Rot3::rodriguez(const Point3& w, double theta) {
|
||||
return rodriguez((Vector)w.vector(),theta);
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
Rot3 Rot3::rodriguez(const Unit3& w, double theta) {
|
||||
return rodriguez(w.point3(),theta);
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
Rot3 Rot3::Random(boost::mt19937 & rng) {
|
||||
Rot3 Rot3::Random(boost::mt19937& rng) {
|
||||
// TODO allow any engine without including all of boost :-(
|
||||
Unit3 w = Unit3::Random(rng);
|
||||
boost::uniform_real<double> randomAngle(-M_PI,M_PI);
|
||||
Unit3 axis = Unit3::Random(rng);
|
||||
boost::uniform_real<double> randomAngle(-M_PI, M_PI);
|
||||
double angle = randomAngle(rng);
|
||||
return rodriguez(w,angle);
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
Rot3 Rot3::rodriguez(const Vector3& w) {
|
||||
double t = w.norm();
|
||||
if (t < 1e-10) return Rot3();
|
||||
return rodriguez(w/t, t);
|
||||
return AxisAngle(axis, angle);
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
|
|
|
@ -22,8 +22,9 @@
|
|||
|
||||
#pragma once
|
||||
|
||||
#include <gtsam/geometry/Quaternion.h>
|
||||
#include <gtsam/geometry/Unit3.h>
|
||||
#include <gtsam/geometry/Quaternion.h>
|
||||
#include <gtsam/geometry/SO3.h>
|
||||
#include <gtsam/base/concepts.h>
|
||||
#include <gtsam/config.h> // Get GTSAM_USE_QUATERNIONS macro
|
||||
|
||||
|
@ -140,7 +141,7 @@ namespace gtsam {
|
|||
* as described in http://www.sedris.org/wg8home/Documents/WG80462.pdf.
|
||||
* Assumes vehicle coordinate frame X forward, Y right, Z down.
|
||||
*/
|
||||
static Rot3 ypr (double y, double p, double r) { return RzRyRx(r,p,y);}
|
||||
static Rot3 ypr(double y, double p, double r) { return RzRyRx(r,p,y);}
|
||||
|
||||
/** Create from Quaternion coefficients */
|
||||
static Rot3 quaternion(double w, double x, double y, double z) {
|
||||
|
@ -149,45 +150,58 @@ namespace gtsam {
|
|||
}
|
||||
|
||||
/**
|
||||
* Rodriguez' formula to compute an incremental rotation matrix
|
||||
* @param w is the rotation axis, unit length
|
||||
* @param theta rotation angle
|
||||
* @return incremental rotation matrix
|
||||
* Convert from axis/angle representation
|
||||
* @param axis is the rotation axis, unit length
|
||||
* @param angle rotation angle
|
||||
* @return incremental rotation
|
||||
*/
|
||||
static Rot3 rodriguez(const Vector3& w, double theta);
|
||||
static Rot3 AxisAngle(const Vector3& axis, double angle) {
|
||||
#ifdef GTSAM_USE_QUATERNIONS
|
||||
return Quaternion(Eigen::AngleAxis<double>(angle, axis));
|
||||
#else
|
||||
return SO3::AxisAngle(axis,angle);
|
||||
#endif
|
||||
}
|
||||
|
||||
/**
|
||||
* Rodriguez' formula to compute an incremental rotation matrix
|
||||
* @param w is the rotation axis, unit length
|
||||
* @param theta rotation angle
|
||||
* @return incremental rotation matrix
|
||||
* Convert from axis/angle representation
|
||||
* @param axis is the rotation axis, unit length
|
||||
* @param angle rotation angle
|
||||
* @return incremental rotation
|
||||
*/
|
||||
static Rot3 rodriguez(const Point3& w, double theta);
|
||||
static Rot3 AxisAngle(const Point3& axis, double angle) {
|
||||
return AxisAngle(axis.vector(),angle);
|
||||
}
|
||||
|
||||
/**
|
||||
* Rodriguez' formula to compute an incremental rotation matrix
|
||||
* @param w is the rotation axis
|
||||
* @param theta rotation angle
|
||||
* @return incremental rotation matrix
|
||||
* Convert from axis/angle representation
|
||||
* @param axis is the rotation axis
|
||||
* @param angle rotation angle
|
||||
* @return incremental rotation
|
||||
*/
|
||||
static Rot3 rodriguez(const Unit3& w, double theta);
|
||||
static Rot3 AxisAngle(const Unit3& axis, double angle) {
|
||||
return AxisAngle(axis.unitVector(),angle);
|
||||
}
|
||||
|
||||
/**
|
||||
* Rodriguez' formula to compute an incremental rotation matrix
|
||||
* @param v a vector of incremental roll,pitch,yaw
|
||||
* @return incremental rotation matrix
|
||||
* Rodrigues' formula to compute an incremental rotation
|
||||
* @param w a vector of incremental roll,pitch,yaw
|
||||
* @return incremental rotation
|
||||
*/
|
||||
static Rot3 rodriguez(const Vector3& v);
|
||||
static Rot3 Rodrigues(const Vector3& w) {
|
||||
return Rot3::Expmap(w);
|
||||
}
|
||||
|
||||
/**
|
||||
* Rodriguez' formula to compute an incremental rotation matrix
|
||||
* Rodrigues' formula to compute an incremental rotation
|
||||
* @param wx Incremental roll (about X)
|
||||
* @param wy Incremental pitch (about Y)
|
||||
* @param wz Incremental yaw (about Z)
|
||||
* @return incremental rotation matrix
|
||||
* @return incremental rotation
|
||||
*/
|
||||
static Rot3 rodriguez(double wx, double wy, double wz)
|
||||
{ return rodriguez(Vector3(wx, wy, wz));}
|
||||
static Rot3 Rodrigues(double wx, double wy, double wz) {
|
||||
return Rodrigues(Vector3(wx, wy, wz));
|
||||
}
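A short sketch of the renamed constructors (values arbitrary); the lowercase rodriguez spellings remain available as deprecated wrappers, added further below:

#include <gtsam/geometry/Rot3.h>
#include <cmath>
using namespace gtsam;

void axisAngleExample() {
  // Axis/angle: a unit-length axis plus an angle in radians.
  Rot3 R1 = Rot3::AxisAngle(Unit3(0, 0, 1), M_PI / 6);
  // Rodrigues: a single rotation vector (axis scaled by angle), i.e. Expmap.
  Rot3 R2 = Rot3::Rodrigues(Vector3(0, 0, M_PI / 6));
  // R1 and R2 represent the same rotation.
}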
|
||||
|
||||
/// @}
|
||||
/// @name Testable
|
||||
|
@ -272,11 +286,15 @@ namespace gtsam {
|
|||
|
||||
/**
|
||||
* Exponential map at identity - create a rotation from canonical coordinates
|
||||
* \f$ [R_x,R_y,R_z] \f$ using Rodriguez' formula
|
||||
* \f$ [R_x,R_y,R_z] \f$ using Rodrigues' formula
|
||||
*/
|
||||
static Rot3 Expmap(const Vector& v, OptionalJacobian<3,3> H = boost::none) {
|
||||
if(H) *H = Rot3::ExpmapDerivative(v);
|
||||
if (zero(v)) return Rot3(); else return rodriguez(v);
|
||||
#ifdef GTSAM_USE_QUATERNIONS
|
||||
return traits<Quaternion>::Expmap(v);
|
||||
#else
|
||||
return traits<SO3>::Expmap(v);
|
||||
#endif
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -422,6 +440,14 @@ namespace gtsam {
|
|||
GTSAM_EXPORT friend std::ostream &operator<<(std::ostream &os, const Rot3& p);
|
||||
|
||||
/// @}
|
||||
/// @name Deprecated
|
||||
/// @{
|
||||
static Rot3 rodriguez(const Vector3& axis, double angle) { return AxisAngle(axis, angle); }
|
||||
static Rot3 rodriguez(const Point3& axis, double angle) { return AxisAngle(axis, angle); }
|
||||
static Rot3 rodriguez(const Unit3& axis, double angle) { return AxisAngle(axis, angle); }
|
||||
static Rot3 rodriguez(const Vector3& w) { return Rodrigues(w); }
|
||||
static Rot3 rodriguez(double wx, double wy, double wz) { return Rodrigues(wx, wy, wz); }
|
||||
/// @}
|
||||
|
||||
private:
|
||||
|
||||
|
|
|
@ -117,11 +117,6 @@ Rot3 Rot3::RzRyRx(double x, double y, double z) {
|
|||
);
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
Rot3 Rot3::rodriguez(const Vector3& w, double theta) {
|
||||
return SO3::Rodrigues(w,theta);
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
Rot3 Rot3::operator*(const Rot3& R2) const {
|
||||
return Rot3(Matrix3(rot_*R2.rot_));
|
||||
|
|
|
@ -83,10 +83,6 @@ namespace gtsam {
|
|||
Quaternion(Eigen::AngleAxisd(x, Eigen::Vector3d::UnitX())));
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
Rot3 Rot3::rodriguez(const Vector3& w, double theta) {
|
||||
return Quaternion(Eigen::AngleAxis<double>(theta, w));
|
||||
}
|
||||
/* ************************************************************************* */
|
||||
Rot3 Rot3::operator*(const Rot3& R2) const {
|
||||
return Rot3(quaternion_ * R2.quaternion_);
|
||||
|
|
|
@ -19,47 +19,69 @@
|
|||
#include <gtsam/geometry/SO3.h>
|
||||
#include <gtsam/base/concepts.h>
|
||||
#include <cmath>
|
||||
|
||||
using namespace std;
|
||||
#include <limits>
|
||||
|
||||
namespace gtsam {
|
||||
|
||||
/* ************************************************************************* */
|
||||
SO3 SO3::Rodrigues(const Vector3& axis, double theta) {
|
||||
using std::cos;
|
||||
using std::sin;
|
||||
|
||||
// get components of axis \omega
|
||||
double wx = axis(0), wy = axis(1), wz = axis(2);
|
||||
|
||||
double c = cos(theta), s = sin(theta), c_1 = 1 - c;
|
||||
double wwTxx = wx * wx, wwTyy = wy * wy, wwTzz = wz * wz;
|
||||
double swx = wx * s, swy = wy * s, swz = wz * s;
|
||||
|
||||
double C00 = c_1 * wwTxx, C01 = c_1 * wx * wy, C02 = c_1 * wx * wz;
|
||||
double C11 = c_1 * wwTyy, C12 = c_1 * wy * wz;
|
||||
double C22 = c_1 * wwTzz;
|
||||
|
||||
// Near zero, we just have I + skew(omega)
|
||||
static SO3 FirstOrder(const Vector3& omega) {
|
||||
Matrix3 R;
|
||||
R << c + C00, -swz + C01, swy + C02, //
|
||||
swz + C01, c + C11, -swx + C12, //
|
||||
-swy + C02, swx + C12, c + C22;
|
||||
|
||||
R(0, 0) = 1.;
|
||||
R(1, 0) = omega.z();
|
||||
R(2, 0) = -omega.y();
|
||||
R(0, 1) = -omega.z();
|
||||
R(1, 1) = 1.;
|
||||
R(2, 1) = omega.x();
|
||||
R(0, 2) = omega.y();
|
||||
R(1, 2) = -omega.x();
|
||||
R(2, 2) = 1.;
|
||||
return R;
|
||||
}
|
||||
|
||||
SO3 SO3::AxisAngle(const Vector3& axis, double theta) {
|
||||
if (theta*theta > std::numeric_limits<double>::epsilon()) {
|
||||
using std::cos;
|
||||
using std::sin;
|
||||
|
||||
// get components of axis \omega, which is a unit vector
|
||||
const double& wx = axis.x(), wy = axis.y(), wz = axis.z();
|
||||
|
||||
const double costheta = cos(theta), sintheta = sin(theta), c_1 = 1 - costheta;
|
||||
const double wx_sintheta = wx * sintheta, wy_sintheta = wy * sintheta,
|
||||
wz_sintheta = wz * sintheta;
|
||||
|
||||
const double C00 = c_1 * wx * wx, C01 = c_1 * wx * wy, C02 = c_1 * wx * wz;
|
||||
const double C11 = c_1 * wy * wy, C12 = c_1 * wy * wz;
|
||||
const double C22 = c_1 * wz * wz;
|
||||
|
||||
Matrix3 R;
|
||||
R(0, 0) = costheta + C00;
|
||||
R(1, 0) = wz_sintheta + C01;
|
||||
R(2, 0) = -wy_sintheta + C02;
|
||||
R(0, 1) = -wz_sintheta + C01;
|
||||
R(1, 1) = costheta + C11;
|
||||
R(2, 1) = wx_sintheta + C12;
|
||||
R(0, 2) = wy_sintheta + C02;
|
||||
R(1, 2) = -wx_sintheta + C12;
|
||||
R(2, 2) = costheta + C22;
|
||||
return R;
|
||||
} else {
|
||||
return FirstOrder(axis*theta);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
/// simply convert omega to axis/angle representation
|
||||
SO3 SO3::Expmap(const Vector3& omega,
|
||||
ChartJacobian H) {
|
||||
SO3 SO3::Expmap(const Vector3& omega, ChartJacobian H) {
|
||||
if (H) *H = ExpmapDerivative(omega);
|
||||
|
||||
if (H)
|
||||
*H = ExpmapDerivative(omega);
|
||||
|
||||
if (omega.isZero())
|
||||
return Identity();
|
||||
else {
|
||||
double angle = omega.norm();
|
||||
return Rodrigues(omega / angle, angle);
|
||||
double theta2 = omega.dot(omega);
|
||||
if (theta2 > std::numeric_limits<double>::epsilon()) {
|
||||
double theta = std::sqrt(theta2);
|
||||
return AxisAngle(omega / theta, theta);
|
||||
} else {
|
||||
return FirstOrder(omega);
|
||||
}
|
||||
}
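For reference, the closed form that AxisAngle expands element by element, and its small-angle fallback (a restatement of the code): with \hat w the unit axis and \theta the angle,

  R = \cos\theta\, I + \sin\theta\, [\hat w]_\times + (1 - \cos\theta)\, \hat w \hat w^\top
    = I + \sin\theta\, [\hat w]_\times + (1 - \cos\theta)\, [\hat w]_\times^2,

while FirstOrder returns I + [\omega]_\times when \theta^2 is below machine epsilon.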
|
||||
|
||||
|
@ -111,8 +133,9 @@ Matrix3 SO3::ExpmapDerivative(const Vector3& omega) {
|
|||
using std::cos;
|
||||
using std::sin;
|
||||
|
||||
if(zero(omega)) return I_3x3;
|
||||
double theta = omega.norm(); // rotation angle
|
||||
double theta2 = omega.dot(omega);
|
||||
if (theta2 <= std::numeric_limits<double>::epsilon()) return I_3x3;
|
||||
double theta = std::sqrt(theta2); // rotation angle
|
||||
#ifdef DUY_VERSION
|
||||
/// Follow Iserles05an, B10, pg 147, with a sign change in the second term (left version)
|
||||
Matrix3 X = skewSymmetric(omega);
|
||||
|
@ -142,8 +165,9 @@ Matrix3 SO3::LogmapDerivative(const Vector3& omega) {
|
|||
using std::cos;
|
||||
using std::sin;
|
||||
|
||||
if(zero(omega)) return I_3x3;
|
||||
double theta = omega.norm();
|
||||
double theta2 = omega.dot(omega);
|
||||
if (theta2 <= std::numeric_limits<double>::epsilon()) return I_3x3;
|
||||
double theta = std::sqrt(theta2); // rotation angle
|
||||
#ifdef DUY_VERSION
|
||||
/// Follow Iserles05an, B11, pg 147, with a sign change in the second term (left version)
|
||||
Matrix3 X = skewSymmetric(omega);
|
||||
|
|
|
@ -59,7 +59,7 @@ public:
|
|||
}
|
||||
|
||||
/// Static, named constructor TODO think about relation with above
|
||||
static SO3 Rodrigues(const Vector3& axis, double theta);
|
||||
static SO3 AxisAngle(const Vector3& axis, double theta);
|
||||
|
||||
/// @}
|
||||
/// @name Testable
|
||||
|
@ -93,7 +93,7 @@ public:
|
|||
|
||||
/**
|
||||
* Exponential map at identity - create a rotation from canonical coordinates
|
||||
* \f$ [R_x,R_y,R_z] \f$ using Rodriguez' formula
|
||||
* \f$ [R_x,R_y,R_z] \f$ using Rodrigues' formula
|
||||
*/
|
||||
static SO3 Expmap(const Vector3& omega, ChartJacobian H = boost::none);
|
||||
|
||||
|
|
|
@ -91,8 +91,42 @@ namespace gtsam {
|
|||
double Z = K_->baseline() * K_->fx() / (measured[0] - measured[1]);
|
||||
double X = Z * (measured[0] - K_->px()) / K_->fx();
|
||||
double Y = Z * (measured[2] - K_->py()) / K_->fy();
|
||||
Point3 world_point = leftCamPose_.transform_from(Point3(X, Y, Z));
|
||||
return world_point;
|
||||
Point3 point = leftCamPose_.transform_from(Point3(X, Y, Z));
|
||||
return point;
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
Point3 StereoCamera::backproject2(const StereoPoint2& z, OptionalJacobian<3, 6> H1,
    OptionalJacobian<3, 3> H2) const {
  const Cal3_S2Stereo& K = *K_;
  const double fx = K.fx(), fy = K.fy(), cx = K.px(), cy = K.py(), b = K.baseline();

  double uL = z.uL(), uR = z.uR(), v = z.v();
  double disparity = uL - uR;

  double local_z = b * fx / disparity;
  const Point3 local(local_z * (uL - cx) / fx, local_z * (v - cy) / fy, local_z);

  if (H1 || H2) {
    double z_partial_uR = local_z / disparity;
    double x_partial_uR = local.x() / disparity;
    double y_partial_uR = local.y() / disparity;
    Matrix3 D_local_z;
    D_local_z << -x_partial_uR + local.z() / fx, x_partial_uR, 0,
        -y_partial_uR, y_partial_uR, local.z() / fy,
        -z_partial_uR, z_partial_uR, 0;

    Matrix3 D_point_local;
    const Point3 point = leftCamPose_.transform_from(local, H1, D_point_local);

    if (H2) {
      *H2 = D_point_local * D_local_z;
    }

    return point;
  }

  return leftCamPose_.transform_from(local);
}
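The geometry behind backproject2, restated from the code: with baseline b, disparity u_L - u_R, and left-camera intrinsics (f_x, f_y, c_x, c_y), the point in the left camera frame is

  Z = \frac{b\, f_x}{u_L - u_R}, \qquad
  X = \frac{Z\,(u_L - c_x)}{f_x}, \qquad
  Y = \frac{Z\,(v - c_y)}{f_y},

which transform_from then maps into the world frame; H1 and H2 chain the pose Jacobian and the 3x3 measurement Jacobian D_local_z accordingly.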
|
||||
|
||||
}
|
||||
|
|
|
@ -137,6 +137,14 @@ public:
|
|||
/// back-project a measurement
|
||||
Point3 backproject(const StereoPoint2& z) const;
|
||||
|
||||
/** Back-project the 2D point and compute optional derivatives
|
||||
* @param H1 derivative with respect to pose
|
||||
* @param H2 derivative with respect to point
|
||||
*/
|
||||
Point3 backproject2(const StereoPoint2& z,
|
||||
OptionalJacobian<3, 6> H1 = boost::none,
|
||||
OptionalJacobian<3, 3> H2 = boost::none) const;
|
||||
|
||||
/// @}
|
||||
/// @name Deprecated
|
||||
/// @{
|
||||
|
|
|
@ -21,6 +21,7 @@
|
|||
#include <gtsam/geometry/Unit3.h>
|
||||
#include <gtsam/geometry/Point2.h>
|
||||
#include <boost/random/mersenne_twister.hpp>
|
||||
#include <gtsam/config.h> // for GTSAM_USE_TBB
|
||||
|
||||
#ifdef __clang__
|
||||
# pragma clang diagnostic push
|
||||
|
|
|
@ -22,7 +22,7 @@
|
|||
|
||||
#include <gtsam/base/Manifold.h>
|
||||
#include <gtsam/geometry/Point3.h>
|
||||
#include <gtsam/base/DerivedValue.h>
|
||||
|
||||
#include <boost/random/mersenne_twister.hpp>
|
||||
#include <boost/optional.hpp>
|
||||
|
||||
|
|
|
@ -26,6 +26,8 @@ GTSAM_CONCEPT_MANIFOLD_INST(Cal3_S2)
|
|||
|
||||
static Cal3_S2 K(500, 500, 0.1, 640 / 2, 480 / 2);
|
||||
static Point2 p(1, -2);
|
||||
static Point2 p_uv(1320.3, 1740);
|
||||
static Point2 p_xy(2, 3);
|
||||
|
||||
/* ************************************************************************* */
|
||||
TEST( Cal3_S2, easy_constructor)
|
||||
|
@ -73,6 +75,27 @@ TEST( Cal3_S2, Duncalibrate2)
|
|||
CHECK(assert_equal(numerical,computed,1e-9));
|
||||
}
|
||||
|
||||
Point2 calibrate_(const Cal3_S2& k, const Point2& pt) {return k.calibrate(pt); }
|
||||
/* ************************************************************************* */
|
||||
TEST(Cal3_S2, Dcalibrate1)
|
||||
{
|
||||
Matrix computed;
|
||||
Point2 expected = K.calibrate(p_uv, computed, boost::none);
|
||||
Matrix numerical = numericalDerivative21(calibrate_, K, p_uv);
|
||||
CHECK(assert_equal(expected, p_xy, 1e-8));
|
||||
CHECK(assert_equal(numerical, computed, 1e-8));
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
TEST(Cal3_S2, Dcalibrate2)
|
||||
{
|
||||
Matrix computed;
|
||||
Point2 expected = K.calibrate(p_uv, boost::none, computed);
|
||||
Matrix numerical = numericalDerivative22(calibrate_, K, p_uv);
|
||||
CHECK(assert_equal(expected, p_xy, 1e-8));
|
||||
CHECK(assert_equal(numerical, computed, 1e-8));
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
TEST( Cal3_S2, assert_equal)
|
||||
{
|
||||
|
|
|
@ -88,13 +88,30 @@ TEST( CalibratedCamera, project)
|
|||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
TEST( CalibratedCamera, Dproject_to_camera1) {
|
||||
Point3 pp(155,233,131);
|
||||
Matrix actual;
|
||||
CalibratedCamera::project_to_camera(pp, actual);
|
||||
Matrix expected_numerical = numericalDerivative11<Point2,Point3>(
|
||||
boost::bind(CalibratedCamera::project_to_camera, _1, boost::none), pp);
|
||||
CHECK(assert_equal(expected_numerical, actual));
|
||||
static Point2 Project1(const Point3& point) {
|
||||
return PinholeBase::Project(point);
|
||||
}
|
||||
|
||||
TEST( CalibratedCamera, DProject1) {
|
||||
Point3 pp(155, 233, 131);
|
||||
Matrix test1;
|
||||
CalibratedCamera::Project(pp, test1);
|
||||
Matrix test2 = numericalDerivative11<Point2, Point3>(Project1, pp);
|
||||
CHECK(assert_equal(test1, test2));
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
static Point2 Project2(const Unit3& point) {
|
||||
return PinholeBase::Project(point);
|
||||
}
|
||||
|
||||
Unit3 pointAtInfinity(0, 0, 1000);
|
||||
TEST( CalibratedCamera, DProjectInfinity) {
|
||||
Matrix test1;
|
||||
CalibratedCamera::Project(pointAtInfinity, test1);
|
||||
Matrix test2 = numericalDerivative11<Point2, Unit3>(Project2,
|
||||
pointAtInfinity);
|
||||
CHECK(assert_equal(test1, test2));
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
|
@ -128,6 +145,36 @@ TEST( CalibratedCamera, Dproject_point_pose2)
|
|||
CHECK(assert_equal(numerical_point, Dpoint, 1e-7));
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
static Point2 projectAtInfinity(const CalibratedCamera& camera, const Unit3& point) {
|
||||
return camera.project2(point);
|
||||
}
|
||||
|
||||
TEST( CalibratedCamera, Dproject_point_pose_infinity)
|
||||
{
|
||||
Matrix Dpose, Dpoint;
|
||||
Point2 result = camera.project2(pointAtInfinity, Dpose, Dpoint);
|
||||
Matrix numerical_pose = numericalDerivative21(projectAtInfinity, camera, pointAtInfinity);
|
||||
Matrix numerical_point = numericalDerivative22(projectAtInfinity, camera, pointAtInfinity);
|
||||
CHECK(assert_equal(Point2(), result));
|
||||
CHECK(assert_equal(numerical_pose, Dpose, 1e-7));
|
||||
CHECK(assert_equal(numerical_point, Dpoint, 1e-7));
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
// Add a test with more arbitrary rotation
|
||||
TEST( CalibratedCamera, Dproject_point_pose2_infinity)
|
||||
{
|
||||
static const Pose3 pose1(Rot3::ypr(0.1, -0.1, 0.4), Point3(0, 0, -10));
|
||||
static const CalibratedCamera camera(pose1);
|
||||
Matrix Dpose, Dpoint;
|
||||
camera.project2(pointAtInfinity, Dpose, Dpoint);
|
||||
Matrix numerical_pose = numericalDerivative21(projectAtInfinity, camera, pointAtInfinity);
|
||||
Matrix numerical_point = numericalDerivative22(projectAtInfinity, camera, pointAtInfinity);
|
||||
CHECK(assert_equal(numerical_pose, Dpose, 1e-7));
|
||||
CHECK(assert_equal(numerical_point, Dpoint, 1e-7));
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
int main() { TestResult tr; return TestRegistry::runAllTests(tr); }
|
||||
/* ************************************************************************* */
|
||||
|
|
|
@ -31,43 +31,105 @@ using namespace gtsam;
|
|||
#include <gtsam/geometry/Cal3Bundler.h>
|
||||
TEST(CameraSet, Pinhole) {
|
||||
typedef PinholeCamera<Cal3Bundler> Camera;
|
||||
typedef CameraSet<Camera> Set;
|
||||
typedef vector<Point2> ZZ;
|
||||
CameraSet<Camera> set;
|
||||
Set set;
|
||||
Camera camera;
|
||||
set.push_back(camera);
|
||||
set.push_back(camera);
|
||||
Point3 p(0, 0, 1);
|
||||
CHECK(assert_equal(set, set));
|
||||
CameraSet<Camera> set2 = set;
|
||||
EXPECT(assert_equal(set, set));
|
||||
Set set2 = set;
|
||||
set2.push_back(camera);
|
||||
CHECK(!set.equals(set2));
|
||||
EXPECT(!set.equals(set2));
|
||||
|
||||
// Check measurements
|
||||
Point2 expected;
|
||||
ZZ z = set.project(p);
|
||||
CHECK(assert_equal(expected, z[0]));
|
||||
CHECK(assert_equal(expected, z[1]));
|
||||
ZZ z = set.project2(p);
|
||||
EXPECT(assert_equal(expected, z[0]));
|
||||
EXPECT(assert_equal(expected, z[1]));
|
||||
|
||||
// Calculate expected derivatives using Pinhole
|
||||
Matrix46 actualF;
|
||||
Matrix43 actualE;
|
||||
Matrix43 actualH;
|
||||
Matrix actualE;
|
||||
Matrix29 F1;
|
||||
{
|
||||
Matrix26 F1;
|
||||
Matrix23 E1;
|
||||
Matrix23 H1;
|
||||
camera.project(p, F1, E1, H1);
|
||||
camera.project2(p, F1, E1);
|
||||
actualE.resize(4,3);
|
||||
actualE << E1, E1;
|
||||
actualF << F1, F1;
|
||||
actualH << H1, H1;
|
||||
}
|
||||
|
||||
// Check computed derivatives
|
||||
Matrix F, E, H;
|
||||
set.project(p, F, E, H);
|
||||
CHECK(assert_equal(actualF, F));
|
||||
CHECK(assert_equal(actualE, E));
|
||||
CHECK(assert_equal(actualH, H));
|
||||
Set::FBlocks Fs;
|
||||
Matrix E;
|
||||
set.project2(p, Fs, E);
|
||||
LONGS_EQUAL(2, Fs.size());
|
||||
EXPECT(assert_equal(F1, Fs[0]));
|
||||
EXPECT(assert_equal(F1, Fs[1]));
|
||||
EXPECT(assert_equal(actualE, E));
|
||||
|
||||
// Check errors
|
||||
ZZ measured;
|
||||
measured.push_back(Point2(1, 2));
|
||||
measured.push_back(Point2(3, 4));
|
||||
Vector4 expectedV;
|
||||
|
||||
// reprojectionError
|
||||
expectedV << -1, -2, -3, -4;
|
||||
Vector actualV = set.reprojectionError(p, measured);
|
||||
EXPECT(assert_equal(expectedV, actualV));
|
||||
|
||||
// Check Schur complement
|
||||
Matrix F(4, 18);
|
||||
F << F1, Matrix29::Zero(), Matrix29::Zero(), F1;
|
||||
Matrix Ft = F.transpose();
|
||||
Matrix34 Et = E.transpose();
|
||||
Matrix3 P = Et * E;
|
||||
Matrix schur(19, 19);
|
||||
Vector4 b = actualV;
|
||||
Vector v = Ft * (b - E * P * Et * b);
|
||||
schur << Ft * F - Ft * E * P * Et * F, v, v.transpose(), 30;
|
||||
SymmetricBlockMatrix actualReduced = Set::SchurComplement(Fs, E, P, b);
|
||||
EXPECT(assert_equal(schur, actualReduced.matrix()));
|
||||
|
||||
// Check Schur complement update, same order, should just double
|
||||
FastVector<Key> allKeys, keys;
|
||||
allKeys.push_back(1);
|
||||
allKeys.push_back(2);
|
||||
keys.push_back(1);
|
||||
keys.push_back(2);
|
||||
Set::UpdateSchurComplement(Fs, E, P, b, allKeys, keys, actualReduced);
|
||||
EXPECT(assert_equal((Matrix )(2.0 * schur), actualReduced.matrix()));
|
||||
|
||||
// Check Schur complement update, keys reversed
|
||||
FastVector<Key> keys2;
|
||||
keys2.push_back(2);
|
||||
keys2.push_back(1);
|
||||
Set::UpdateSchurComplement(Fs, E, P, b, allKeys, keys2, actualReduced);
|
||||
Vector4 reverse_b;
|
||||
reverse_b << b.tail<2>(), b.head<2>();
|
||||
Vector reverse_v = Ft * (reverse_b - E * P * Et * reverse_b);
|
||||
Matrix A(19, 19);
|
||||
A << Ft * F - Ft * E * P * Et * F, reverse_v, reverse_v.transpose(), 30;
|
||||
EXPECT(assert_equal((Matrix )(2.0 * schur + A), actualReduced.matrix()));
|
||||
|
||||
// reprojectionErrorAtInfinity
|
||||
Unit3 pointAtInfinity(0, 0, 1000);
|
||||
EXPECT(
|
||||
assert_equal(pointAtInfinity,
|
||||
camera.backprojectPointAtInfinity(Point2())));
|
||||
actualV = set.reprojectionError(pointAtInfinity, measured, Fs, E);
|
||||
EXPECT(assert_equal(expectedV, actualV));
|
||||
LONGS_EQUAL(2, Fs.size());
|
||||
{
|
||||
Matrix22 E1;
|
||||
camera.project2(pointAtInfinity, F1, E1);
|
||||
actualE.resize(4,2);
|
||||
actualE << E1, E1;
|
||||
}
|
||||
EXPECT(assert_equal(F1, Fs[0]));
|
||||
EXPECT(assert_equal(F1, Fs[1]));
|
||||
EXPECT(assert_equal(actualE, E));
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
@@ -83,26 +145,27 @@ TEST(CameraSet, Stereo) {
// Check measurements
StereoPoint2 expected(0, -1, 0);
ZZ z = set.project(p);
CHECK(assert_equal(expected, z[0]));
CHECK(assert_equal(expected, z[1]));
ZZ z = set.project2(p);
EXPECT(assert_equal(expected, z[0]));
EXPECT(assert_equal(expected, z[1]));

// Calculate expected derivatives using Pinhole
Matrix66 actualF;
Matrix63 actualE;
Matrix F1;
{
Matrix36 F1;
Matrix33 E1;
camera.project(p, F1, E1);
camera.project2(p, F1, E1);
actualE << E1, E1;
actualF << F1, F1;
}

// Check computed derivatives
Matrix F, E;
set.project(p, F, E);
CHECK(assert_equal(actualF, F));
CHECK(assert_equal(actualE, E));
CameraSet<StereoCamera>::FBlocks Fs;
Matrix E;
set.project2(p, Fs, E);
LONGS_EQUAL(2, Fs.size());
EXPECT(assert_equal(F1, Fs[0]));
EXPECT(assert_equal(F1, Fs[1]));
EXPECT(assert_equal(actualE, E));
}

/* ************************************************************************* */
@@ -13,6 +13,7 @@
* @file testOrientedPlane3.cpp
* @date Dec 19, 2012
* @author Alex Trevor
* @author Zhaoyang Lv
* @brief Tests the OrientedPlane3 class
*/

@@ -30,39 +31,67 @@ GTSAM_CONCEPT_TESTABLE_INST(OrientedPlane3)
GTSAM_CONCEPT_MANIFOLD_INST(OrientedPlane3)

//*******************************************************************************
TEST (OrientedPlane3, getMethods) {
Vector4 c;
c << -1, 0, 0, 5;
OrientedPlane3 plane1(c);
OrientedPlane3 plane2(c[0], c[1], c[2], c[3]);
Vector4 coefficient1 = plane1.planeCoefficients();
double distance1 = plane1.distance();
EXPECT(assert_equal(coefficient1, c, 1e-8));
EXPECT(assert_equal(Unit3(-1,0,0).unitVector(), plane1.normal().unitVector()));
EXPECT_DOUBLES_EQUAL(distance1, 5, 1e-8);
Vector4 coefficient2 = plane2.planeCoefficients();
double distance2 = plane2.distance();
EXPECT(assert_equal(coefficient2, c, 1e-8));
EXPECT_DOUBLES_EQUAL(distance2, 5, 1e-8);
EXPECT(assert_equal(Unit3(-1,0,0).unitVector(), plane2.normal().unitVector()));
}


//*******************************************************************************
OrientedPlane3 Transform_(const OrientedPlane3& plane, const Pose3& xr) {
return OrientedPlane3::Transform(plane, xr);
}

OrientedPlane3 transform_(const OrientedPlane3& plane, const Pose3& xr) {
return plane.transform(xr);
}

TEST (OrientedPlane3, transform) {
// Test transforming a plane to a pose
gtsam::Pose3 pose(gtsam::Rot3::ypr(-M_PI / 4.0, 0.0, 0.0),
gtsam::Point3(2.0, 3.0, 4.0));
OrientedPlane3 plane(-1, 0, 0, 5);
OrientedPlane3 expected_meas(-sqrt(2.0) / 2.0, -sqrt(2.0) / 2.0, 0.0, 3);
OrientedPlane3 transformed_plane = OrientedPlane3::Transform(plane, pose,
OrientedPlane3 expectedPlane(-sqrt(2.0) / 2.0, -sqrt(2.0) / 2.0, 0.0, 3);
OrientedPlane3 transformedPlane1 = OrientedPlane3::Transform(plane, pose,
none, none);
EXPECT(assert_equal(expected_meas, transformed_plane, 1e-9));
OrientedPlane3 transformedPlane2 = plane.transform(pose, none, none);
EXPECT(assert_equal(expectedPlane, transformedPlane1, 1e-9));
EXPECT(assert_equal(expectedPlane, transformedPlane2, 1e-9));

// Test the jacobians of transform
Matrix actualH1, expectedH1, actualH2, expectedH2;
{
expectedH1 = numericalDerivative11<OrientedPlane3, Pose3>(
boost::bind(&OrientedPlane3::Transform, plane, _1, none, none), pose);

OrientedPlane3 tformed = OrientedPlane3::Transform(plane, pose, actualH1,
none);
// because the Transform class uses a wrong order of Jacobians in interface
OrientedPlane3::Transform(plane, pose, actualH1, none);
expectedH1 = numericalDerivative22(Transform_, plane, pose);
EXPECT(assert_equal(expectedH1, actualH1, 1e-9));
}
{
expectedH2 = numericalDerivative11<OrientedPlane3, OrientedPlane3>(
boost::bind(&OrientedPlane3::Transform, _1, pose, none, none), plane);

OrientedPlane3 tformed = OrientedPlane3::Transform(plane, pose, none,
actualH2);
OrientedPlane3::Transform(plane, pose, none, actualH2);
expectedH2 = numericalDerivative21(Transform_, plane, pose);
EXPECT(assert_equal(expectedH2, actualH2, 1e-9));
}
{
plane.transform(pose, actualH1, none);
expectedH1 = numericalDerivative21(transform_, plane, pose);
EXPECT(assert_equal(expectedH1, actualH1, 1e-9));
plane.transform(pose, none, actualH2);
expectedH2 = numericalDerivative22(Transform_, plane, pose);
EXPECT(assert_equal(expectedH2, actualH2, 1e-9));
}

}

//*******************************************************************************
// Returns a random vector -- copied from testUnit3.cpp
// Returns a any size random vector
inline static Vector randomVector(const Vector& minLimits,
const Vector& maxLimits) {

@@ -82,11 +111,11 @@ inline static Vector randomVector(const Vector& minLimits,
TEST(OrientedPlane3, localCoordinates_retract) {

size_t numIterations = 10000;
gtsam::Vector minPlaneLimit(4), maxPlaneLimit(4);
Vector4 minPlaneLimit, maxPlaneLimit;
minPlaneLimit << -1.0, -1.0, -1.0, 0.01;
maxPlaneLimit << 1.0, 1.0, 1.0, 10.0;

Vector minXiLimit(3), maxXiLimit(3);
Vector3 minXiLimit, maxXiLimit;
minXiLimit << -M_PI, -M_PI, -10.0;
maxXiLimit << M_PI, M_PI, 10.0;
for (size_t i = 0; i < numIterations; i++) {
@@ -98,15 +127,15 @@ TEST(OrientedPlane3, localCoordinates_retract) {
Vector v12 = randomVector(minXiLimit, maxXiLimit);

// Magnitude of the rotation can be at most pi
if (v12.segment<3>(0).norm() > M_PI)
v12.segment<3>(0) = v12.segment<3>(0) / M_PI;
if (v12.head<3>().norm() > M_PI)
v12.head<3>() = v12.head<3>() / M_PI;
OrientedPlane3 p2 = p1.retract(v12);

// Check if the local coordinates and retract return the same results.
Vector actual_v12 = p1.localCoordinates(p2);
EXPECT(assert_equal(v12, actual_v12, 1e-3));
EXPECT(assert_equal(v12, actual_v12, 1e-6));
OrientedPlane3 actual_p2 = p1.retract(actual_v12);
EXPECT(assert_equal(p2, actual_p2, 1e-3));
EXPECT(assert_equal(p2, actual_p2, 1e-6));
}
}
@@ -21,10 +21,6 @@
#include <gtsam/geometry/Pose2.h>
#include <gtsam/base/Testable.h>
#include <gtsam/base/numericalDerivative.h>
#include <gtsam/slam/dataset.h>
#include <gtsam/slam/GeneralSFMFactor.h>
#include <gtsam/inference/Symbol.h>
#include <gtsam/nonlinear/LevenbergMarquardtOptimizer.h>

#include <CppUnitLite/TestHarness.h>

@@ -49,10 +45,10 @@ static const Point3 point2(-0.08, 0.08, 0.0);
static const Point3 point3( 0.08, 0.08, 0.0);
static const Point3 point4( 0.08,-0.08, 0.0);

static const Point3 point1_inf(-0.16,-0.16, -1.0);
static const Point3 point2_inf(-0.16, 0.16, -1.0);
static const Point3 point3_inf( 0.16, 0.16, -1.0);
static const Point3 point4_inf( 0.16,-0.16, -1.0);
static const Unit3 point1_inf(-0.16,-0.16, -1.0);
static const Unit3 point2_inf(-0.16, 0.16, -1.0);
static const Unit3 point3_inf( 0.16, 0.16, -1.0);
static const Unit3 point4_inf( 0.16,-0.16, -1.0);

/* ************************************************************************* */
TEST( PinholeCamera, constructor)
@@ -158,9 +154,9 @@ TEST( PinholeCamera, backprojectInfinity2)
Rot3 rot(1., 0., 0., 0., 0., 1., 0., -1., 0.); // a camera1 looking down
Camera camera(Pose3(rot, origin), K);

Point3 actual = camera.backprojectPointAtInfinity(Point2());
Point3 expected(0., 1., 0.);
Point2 x = camera.projectPointAtInfinity(expected);
Unit3 actual = camera.backprojectPointAtInfinity(Point2());
Unit3 expected(0., 1., 0.);
Point2 x = camera.project(expected);

EXPECT(assert_equal(expected, actual));
EXPECT(assert_equal(Point2(), x));
@@ -173,9 +169,9 @@ TEST( PinholeCamera, backprojectInfinity3)
Rot3 rot(1., 0., 0., 0., 1., 0., 0., 0., 1.); // identity
Camera camera(Pose3(rot, origin), K);

Point3 actual = camera.backprojectPointAtInfinity(Point2());
Point3 expected(0., 0., 1.);
Point2 x = camera.projectPointAtInfinity(expected);
Unit3 actual = camera.backprojectPointAtInfinity(Point2());
Unit3 expected(0., 0., 1.);
Point2 x = camera.project(expected);

EXPECT(assert_equal(expected, actual));
EXPECT(assert_equal(Point2(), x));
@@ -201,17 +197,17 @@ TEST( PinholeCamera, Dproject)
}

/* ************************************************************************* */
static Point2 projectInfinity3(const Pose3& pose, const Point3& point3D, const Cal3_S2& cal) {
return Camera(pose,cal).projectPointAtInfinity(point3D);
static Point2 projectInfinity3(const Pose3& pose, const Unit3& point3D, const Cal3_S2& cal) {
return Camera(pose,cal).project(point3D);
}

TEST( PinholeCamera, Dproject_Infinity)
{
Matrix Dpose, Dpoint, Dcal;
Point3 point3D(point1.x(), point1.y(), -10.0); // a point in front of the camera1
Unit3 point3D(point1.x(), point1.y(), -10.0); // a point in front of the camera1

// test Projection
Point2 actual = camera.projectPointAtInfinity(point3D, Dpose, Dpoint, Dcal);
Point2 actual = camera.project(point3D, Dpose, Dpoint, Dcal);
Point2 expected(-5.0, 5.0);
EXPECT(assert_equal(actual, expected, 1e-7));

@@ -325,34 +321,6 @@ TEST( PinholeCamera, range3) {
EXPECT(assert_equal(Hexpected2, D2, 1e-7));
}

/* ************************************************************************* */
typedef GeneralSFMFactor<Camera2, Point3> sfmFactor;
using symbol_shorthand::P;

/* ************************************************************************* */
TEST( PinholeCamera, BAL) {
string filename = findExampleDataFile("dubrovnik-3-7-pre");
SfM_data db;
bool success = readBAL(filename, db);
if (!success) throw runtime_error("Could not access file!");

SharedNoiseModel unit2 = noiseModel::Unit::Create(2);
NonlinearFactorGraph graph;

for (size_t j = 0; j < db.number_tracks(); j++) {
BOOST_FOREACH(const SfM_Measurement& m, db.tracks[j].measurements)
graph.push_back(sfmFactor(m.second, unit2, m.first, P(j)));
}

Values initial = initialCamerasAndPointsEstimate(db);

LevenbergMarquardtOptimizer lm(graph, initial);

Values actual = lm.optimize();
double actualError = graph.error(actual);
EXPECT_DOUBLES_EQUAL(0.0199833, actualError, 1e-7);
}

/* ************************************************************************* */
int main() { TestResult tr; return TestRegistry::runAllTests(tr); }
/* ************************************************************************* */
@@ -46,10 +46,10 @@ static const Point3 point2(-0.08, 0.08, 0.0);
static const Point3 point3( 0.08, 0.08, 0.0);
static const Point3 point4( 0.08,-0.08, 0.0);

static const Point3 point1_inf(-0.16,-0.16, -1.0);
static const Point3 point2_inf(-0.16, 0.16, -1.0);
static const Point3 point3_inf( 0.16, 0.16, -1.0);
static const Point3 point4_inf( 0.16,-0.16, -1.0);
static const Unit3 point1_inf(-0.16,-0.16, -1.0);
static const Unit3 point2_inf(-0.16, 0.16, -1.0);
static const Unit3 point3_inf( 0.16, 0.16, -1.0);
static const Unit3 point4_inf( 0.16,-0.16, -1.0);

/* ************************************************************************* */
TEST( PinholePose, constructor)
@@ -144,11 +144,11 @@ TEST( PinholePose, Dproject)
{
Matrix Dpose, Dpoint;
Point2 result = camera.project2(point1, Dpose, Dpoint);
Matrix numerical_pose = numericalDerivative31(project3, pose, point1, K);
Matrix Hexpected2 = numericalDerivative32(project3, pose, point1, K);
Matrix expectedDcamera = numericalDerivative31(project3, pose, point1, K);
Matrix expectedDpoint = numericalDerivative32(project3, pose, point1, K);
EXPECT(assert_equal(Point2(-100, 100), result));
EXPECT(assert_equal(numerical_pose, Dpose, 1e-7));
EXPECT(assert_equal(Hexpected2, Dpoint, 1e-7));
EXPECT(assert_equal(expectedDcamera, Dpose, 1e-7));
EXPECT(assert_equal(expectedDpoint, Dpoint, 1e-7));
}

/* ************************************************************************* */
@@ -161,11 +161,11 @@ TEST( PinholePose, Dproject2)
{
Matrix Dcamera, Dpoint;
Point2 result = camera.project2(point1, Dcamera, Dpoint);
Matrix Hexpected1 = numericalDerivative21(project4, camera, point1);
Matrix Hexpected2 = numericalDerivative22(project4, camera, point1);
Matrix expectedDcamera = numericalDerivative21(project4, camera, point1);
Matrix expectedDpoint = numericalDerivative22(project4, camera, point1);
EXPECT(assert_equal(result, Point2(-100, 100) ));
EXPECT(assert_equal(Hexpected1, Dcamera, 1e-7));
EXPECT(assert_equal(Hexpected2, Dpoint, 1e-7));
EXPECT(assert_equal(expectedDcamera, Dcamera, 1e-7));
EXPECT(assert_equal(expectedDpoint, Dpoint, 1e-7));
}

/* ************************************************************************* */
@@ -176,12 +176,31 @@ TEST( CalibratedCamera, Dproject3)
static const Camera camera(pose1);
Matrix Dpose, Dpoint;
camera.project2(point1, Dpose, Dpoint);
Matrix numerical_pose = numericalDerivative21(project4, camera, point1);
Matrix expectedDcamera = numericalDerivative21(project4, camera, point1);
Matrix numerical_point = numericalDerivative22(project4, camera, point1);
CHECK(assert_equal(numerical_pose, Dpose, 1e-7));
CHECK(assert_equal(expectedDcamera, Dpose, 1e-7));
CHECK(assert_equal(numerical_point, Dpoint, 1e-7));
}

/* ************************************************************************* */
static Point2 project(const Pose3& pose, const Unit3& pointAtInfinity,
const Cal3_S2::shared_ptr& cal) {
return Camera(pose, cal).project(pointAtInfinity);
}

/* ************************************************************************* */
TEST( PinholePose, DprojectAtInfinity2)
{
Unit3 pointAtInfinity(0,0,1000);
Matrix Dpose, Dpoint;
Point2 result = camera.project2(pointAtInfinity, Dpose, Dpoint);
Matrix expectedDcamera = numericalDerivative31(project, pose, pointAtInfinity, K);
Matrix expectedDpoint = numericalDerivative32(project, pose, pointAtInfinity, K);
EXPECT(assert_equal(Point2(0,0), result));
EXPECT(assert_equal(expectedDcamera, Dpose, 1e-7));
EXPECT(assert_equal(expectedDpoint, Dpoint, 1e-7));
}

/* ************************************************************************* */
static double range0(const Camera& camera, const Point3& point) {
return camera.range(point);
@@ -191,12 +210,12 @@ static double range0(const Camera& camera, const Point3& point) {
TEST( PinholePose, range0) {
Matrix D1; Matrix D2;
double result = camera.range(point1, D1, D2);
Matrix Hexpected1 = numericalDerivative21(range0, camera, point1);
Matrix Hexpected2 = numericalDerivative22(range0, camera, point1);
Matrix expectedDcamera = numericalDerivative21(range0, camera, point1);
Matrix expectedDpoint = numericalDerivative22(range0, camera, point1);
EXPECT_DOUBLES_EQUAL(point1.distance(camera.pose().translation()), result,
1e-9);
EXPECT(assert_equal(Hexpected1, D1, 1e-7));
EXPECT(assert_equal(Hexpected2, D2, 1e-7));
EXPECT(assert_equal(expectedDcamera, D1, 1e-7));
EXPECT(assert_equal(expectedDpoint, D2, 1e-7));
}

/* ************************************************************************* */
@@ -208,11 +227,11 @@ static double range1(const Camera& camera, const Pose3& pose) {
TEST( PinholePose, range1) {
Matrix D1; Matrix D2;
double result = camera.range(pose1, D1, D2);
Matrix Hexpected1 = numericalDerivative21(range1, camera, pose1);
Matrix Hexpected2 = numericalDerivative22(range1, camera, pose1);
Matrix expectedDcamera = numericalDerivative21(range1, camera, pose1);
Matrix expectedDpoint = numericalDerivative22(range1, camera, pose1);
EXPECT_DOUBLES_EQUAL(1, result, 1e-9);
EXPECT(assert_equal(Hexpected1, D1, 1e-7));
EXPECT(assert_equal(Hexpected2, D2, 1e-7));
EXPECT(assert_equal(expectedDcamera, D1, 1e-7));
EXPECT(assert_equal(expectedDpoint, D2, 1e-7));
}

/* ************************************************************************* */
@@ -228,11 +247,11 @@ static double range2(const Camera& camera, const Camera2& camera2) {
TEST( PinholePose, range2) {
Matrix D1; Matrix D2;
double result = camera.range<Cal3Bundler>(camera2, D1, D2);
Matrix Hexpected1 = numericalDerivative21(range2, camera, camera2);
Matrix Hexpected2 = numericalDerivative22(range2, camera, camera2);
Matrix expectedDcamera = numericalDerivative21(range2, camera, camera2);
Matrix expectedDpoint = numericalDerivative22(range2, camera, camera2);
EXPECT_DOUBLES_EQUAL(1, result, 1e-9);
EXPECT(assert_equal(Hexpected1, D1, 1e-7));
EXPECT(assert_equal(Hexpected2, D2, 1e-7));
EXPECT(assert_equal(expectedDcamera, D1, 1e-7));
EXPECT(assert_equal(expectedDpoint, D2, 1e-7));
}

/* ************************************************************************* */
@@ -245,11 +264,11 @@ static double range3(const Camera& camera, const CalibratedCamera& camera3) {
TEST( PinholePose, range3) {
Matrix D1; Matrix D2;
double result = camera.range(camera3, D1, D2);
Matrix Hexpected1 = numericalDerivative21(range3, camera, camera3);
Matrix Hexpected2 = numericalDerivative22(range3, camera, camera3);
Matrix expectedDcamera = numericalDerivative21(range3, camera, camera3);
Matrix expectedDpoint = numericalDerivative22(range3, camera, camera3);
EXPECT_DOUBLES_EQUAL(1, result, 1e-9);
EXPECT(assert_equal(Hexpected1, D1, 1e-7));
EXPECT(assert_equal(Hexpected2, D2, 1e-7));
EXPECT(assert_equal(expectedDcamera, D1, 1e-7));
EXPECT(assert_equal(expectedDpoint, D2, 1e-7));
}

/* ************************************************************************* */
@@ -0,0 +1,158 @@
/* ----------------------------------------------------------------------------

* GTSAM Copyright 2010, Georgia Tech Research Corporation,
* Atlanta, Georgia 30332-0415
* All Rights Reserved
* Authors: Frank Dellaert, et al. (see THANKS for the full author list)

* See LICENSE for the license information

* -------------------------------------------------------------------------- */

/**
* @file testCameraSet.cpp
* @brief Unit tests for testCameraSet Class
* @author Frank Dellaert
* @date Feb 19, 2015
*/

#include <gtsam/geometry/PinholeSet.h>
#include <gtsam/geometry/Pose3.h>
#include <gtsam/base/numericalDerivative.h>
#include <CppUnitLite/TestHarness.h>
#include <boost/bind.hpp>

using namespace std;
using namespace gtsam;

/* ************************************************************************* */
#include <gtsam/geometry/CalibratedCamera.h>
TEST(PinholeSet, Stereo) {
typedef vector<Point2> ZZ;
PinholeSet<CalibratedCamera> set;
CalibratedCamera camera;
set.push_back(camera);
set.push_back(camera);
// set.print("set: ");
Point3 p(0, 0, 1);
EXPECT_LONGS_EQUAL(6, traits<CalibratedCamera>::dimension);

// Check measurements
Point2 expected(0, 0);
ZZ z = set.project2(p);
EXPECT(assert_equal(expected, z[0]));
EXPECT(assert_equal(expected, z[1]));

// Calculate expected derivatives using Pinhole
Matrix43 actualE;
Matrix F1;
{
Matrix23 E1;
camera.project2(p, F1, E1);
actualE << E1, E1;
}

// Check computed derivatives
PinholeSet<CalibratedCamera>::FBlocks Fs;
Matrix E;
set.project2(p, Fs, E);
LONGS_EQUAL(2, Fs.size());
EXPECT(assert_equal(F1, Fs[0]));
EXPECT(assert_equal(F1, Fs[1]));
EXPECT(assert_equal(actualE, E));

// Instantiate triangulateSafe
// TODO triangulation does not work yet for CalibratedCamera
// PinholeSet<CalibratedCamera>::Result actual = set.triangulateSafe(z);
}

/* ************************************************************************* */
// Cal3Bundler test
#include <gtsam/geometry/PinholeCamera.h>
#include <gtsam/geometry/Cal3Bundler.h>
TEST(PinholeSet, Pinhole) {
typedef PinholeCamera<Cal3Bundler> Camera;
typedef vector<Point2> ZZ;
PinholeSet<Camera> set;
Camera camera;
set.push_back(camera);
set.push_back(camera);
Point3 p(0, 0, 1);
EXPECT(assert_equal(set, set));
PinholeSet<Camera> set2 = set;
set2.push_back(camera);
EXPECT(!set.equals(set2));

// Check measurements
Point2 expected;
ZZ z = set.project2(p);
EXPECT(assert_equal(expected, z[0]));
EXPECT(assert_equal(expected, z[1]));

// Calculate expected derivatives using Pinhole
Matrix actualE;
Matrix F1;
{
Matrix23 E1;
camera.project2(p, F1, E1);
actualE.resize(4, 3);
actualE << E1, E1;
}

// Check computed derivatives
{
PinholeSet<Camera>::FBlocks Fs;
Matrix E;
set.project2(p, Fs, E);
EXPECT(assert_equal(actualE, E));
LONGS_EQUAL(2, Fs.size());
EXPECT(assert_equal(F1, Fs[0]));
EXPECT(assert_equal(F1, Fs[1]));
}

// Check errors
ZZ measured;
measured.push_back(Point2(1, 2));
measured.push_back(Point2(3, 4));
Vector4 expectedV;

// reprojectionError
expectedV << -1, -2, -3, -4;
Vector actualV = set.reprojectionError(p, measured);
EXPECT(assert_equal(expectedV, actualV));

// reprojectionErrorAtInfinity
Unit3 pointAtInfinity(0, 0, 1000);
{
Matrix22 E1;
camera.project2(pointAtInfinity, F1, E1);
actualE.resize(4, 2);
actualE << E1, E1;
}
EXPECT(
assert_equal(pointAtInfinity,
camera.backprojectPointAtInfinity(Point2())));
{
PinholeSet<Camera>::FBlocks Fs;
Matrix E;
actualV = set.reprojectionError(pointAtInfinity, measured, Fs, E);
EXPECT(assert_equal(actualE, E));
LONGS_EQUAL(2, Fs.size());
EXPECT(assert_equal(F1, Fs[0]));
EXPECT(assert_equal(F1, Fs[1]));
}
EXPECT(assert_equal(expectedV, actualV));

// Instantiate triangulateSafe
TriangulationParameters params;
TriangulationResult actual = set.triangulateSafe(z, params);
CHECK(actual.degenerate());
}

/* ************************************************************************* */
int main() {
TestResult tr;
return TestRegistry::runAllTests(tr);
}
/* ************************************************************************* */
@@ -32,10 +32,10 @@ GTSAM_CONCEPT_TESTABLE_INST(Pose3)
GTSAM_CONCEPT_LIE_INST(Pose3)

static Point3 P(0.2,0.7,-2);
static Rot3 R = Rot3::rodriguez(0.3,0,0);
static Rot3 R = Rot3::Rodrigues(0.3,0,0);
static Pose3 T(R,Point3(3.5,-8.2,4.2));
static Pose3 T2(Rot3::rodriguez(0.3,0.2,0.1),Point3(3.5,-8.2,4.2));
static Pose3 T3(Rot3::rodriguez(-90, 0, 0), Point3(1, 2, 3));
static Pose3 T2(Rot3::Rodrigues(0.3,0.2,0.1),Point3(3.5,-8.2,4.2));
static Pose3 T3(Rot3::Rodrigues(-90, 0, 0), Point3(1, 2, 3));
const double tol=1e-5;

/* ************************************************************************* */
@@ -50,7 +50,7 @@ TEST( Pose3, equals)
/* ************************************************************************* */
TEST( Pose3, constructors)
{
Pose3 expected(Rot3::rodriguez(0,0,3),Point3(1,2,0));
Pose3 expected(Rot3::Rodrigues(0,0,3),Point3(1,2,0));
Pose2 pose2(1,2,3);
EXPECT(assert_equal(expected,Pose3(pose2)));
}
@@ -103,7 +103,7 @@ TEST(Pose3, expmap_b)
{
Pose3 p1(Rot3(), Point3(100, 0, 0));
Pose3 p2 = p1.retract((Vector(6) << 0.0, 0.0, 0.1, 0.0, 0.0, 0.0).finished());
Pose3 expected(Rot3::rodriguez(0.0, 0.0, 0.1), Point3(100.0, 0.0, 0.0));
Pose3 expected(Rot3::Rodrigues(0.0, 0.0, 0.1), Point3(100.0, 0.0, 0.0));
EXPECT(assert_equal(expected, p2,1e-2));
}

@@ -266,7 +266,7 @@ TEST( Pose3, inverse)
/* ************************************************************************* */
TEST( Pose3, inverseDerivatives2)
{
Rot3 R = Rot3::rodriguez(0.3,0.4,-0.5);
Rot3 R = Rot3::Rodrigues(0.3,0.4,-0.5);
Point3 t(3.5,-8.2,4.2);
Pose3 T(R,t);

@@ -388,7 +388,7 @@ TEST( Pose3, transform_to_translate)
/* ************************************************************************* */
TEST( Pose3, transform_to_rotate)
{
Pose3 transform(Rot3::rodriguez(0,0,-1.570796), Point3());
Pose3 transform(Rot3::Rodrigues(0,0,-1.570796), Point3());
Point3 actual = transform.transform_to(Point3(2,1,10));
Point3 expected(-1,2,10);
EXPECT(assert_equal(expected, actual, 0.001));
@@ -397,7 +397,7 @@ TEST( Pose3, transform_to_rotate)
/* ************************************************************************* */
TEST( Pose3, transform_to)
{
Pose3 transform(Rot3::rodriguez(0,0,-1.570796), Point3(2,4, 0));
Pose3 transform(Rot3::Rodrigues(0,0,-1.570796), Point3(2,4, 0));
Point3 actual = transform.transform_to(Point3(3,2,10));
Point3 expected(2,1,10);
EXPECT(assert_equal(expected, actual, 0.001));
@@ -439,7 +439,7 @@ TEST( Pose3, transformPose_to_itself)
TEST( Pose3, transformPose_to_translation)
{
// transform translation only
Rot3 r = Rot3::rodriguez(-1.570796,0,0);
Rot3 r = Rot3::Rodrigues(-1.570796,0,0);
Pose3 pose2(r, Point3(21.,32.,13.));
Pose3 actual = pose2.transform_to(Pose3(Rot3(), Point3(1,2,3)));
Pose3 expected(r, Point3(20.,30.,10.));
@@ -450,7 +450,7 @@ TEST( Pose3, transformPose_to_translation)
TEST( Pose3, transformPose_to_simple_rotate)
{
// transform translation only
Rot3 r = Rot3::rodriguez(0,0,-1.570796);
Rot3 r = Rot3::Rodrigues(0,0,-1.570796);
Pose3 pose2(r, Point3(21.,32.,13.));
Pose3 transform(r, Point3(1,2,3));
Pose3 actual = pose2.transform_to(transform);
@@ -462,12 +462,12 @@ TEST( Pose3, transformPose_to_simple_rotate)
TEST( Pose3, transformPose_to)
{
// transform to
Rot3 r = Rot3::rodriguez(0,0,-1.570796); //-90 degree yaw
Rot3 r2 = Rot3::rodriguez(0,0,0.698131701); //40 degree yaw
Rot3 r = Rot3::Rodrigues(0,0,-1.570796); //-90 degree yaw
Rot3 r2 = Rot3::Rodrigues(0,0,0.698131701); //40 degree yaw
Pose3 pose2(r2, Point3(21.,32.,13.));
Pose3 transform(r, Point3(1,2,3));
Pose3 actual = pose2.transform_to(transform);
Pose3 expected(Rot3::rodriguez(0,0,2.26892803), Point3(-30.,20.,10.));
Pose3 expected(Rot3::Rodrigues(0,0,2.26892803), Point3(-30.,20.,10.));
EXPECT(assert_equal(expected, actual, 0.001));
}
@@ -33,7 +33,7 @@ using namespace gtsam;
GTSAM_CONCEPT_TESTABLE_INST(Rot3)
GTSAM_CONCEPT_LIE_INST(Rot3)

static Rot3 R = Rot3::rodriguez(0.1, 0.4, 0.2);
static Rot3 R = Rot3::Rodrigues(0.1, 0.4, 0.2);
static Point3 P(0.2, 0.7, -2.0);
static double error = 1e-9, epsilon = 0.001;

@@ -95,7 +95,7 @@ TEST( Rot3, equals)
/* ************************************************************************* */
// Notice this uses J^2 whereas fast uses w*w', and has cos(t)*I + ....
Rot3 slow_but_correct_rodriguez(const Vector& w) {
Rot3 slow_but_correct_Rodrigues(const Vector& w) {
double t = norm_2(w);
Matrix J = skewSymmetric(w / t);
if (t < 1e-5) return Rot3();
@@ -104,20 +104,20 @@ Rot3 slow_but_correct_rodriguez(const Vector& w) {
}

/* ************************************************************************* */
TEST( Rot3, rodriguez)
TEST( Rot3, Rodrigues)
{
Rot3 R1 = Rot3::rodriguez(epsilon, 0, 0);
Rot3 R1 = Rot3::Rodrigues(epsilon, 0, 0);
Vector w = (Vector(3) << epsilon, 0., 0.).finished();
Rot3 R2 = slow_but_correct_rodriguez(w);
Rot3 R2 = slow_but_correct_Rodrigues(w);
CHECK(assert_equal(R2,R1));
}

/* ************************************************************************* */
TEST( Rot3, rodriguez2)
TEST( Rot3, Rodrigues2)
{
Vector axis = Vector3(0., 1., 0.); // rotation around Y
double angle = 3.14 / 4.0;
Rot3 actual = Rot3::rodriguez(axis, angle);
Rot3 actual = Rot3::AxisAngle(axis, angle);
Rot3 expected(0.707388, 0, 0.706825,
0, 1, 0,
-0.706825, 0, 0.707388);
@@ -125,26 +125,26 @@ TEST( Rot3, rodriguez2)
}

/* ************************************************************************* */
TEST( Rot3, rodriguez3)
TEST( Rot3, Rodrigues3)
{
Vector w = Vector3(0.1, 0.2, 0.3);
Rot3 R1 = Rot3::rodriguez(w / norm_2(w), norm_2(w));
Rot3 R2 = slow_but_correct_rodriguez(w);
Rot3 R1 = Rot3::AxisAngle(w / norm_2(w), norm_2(w));
Rot3 R2 = slow_but_correct_Rodrigues(w);
CHECK(assert_equal(R2,R1));
}

/* ************************************************************************* */
TEST( Rot3, rodriguez4)
TEST( Rot3, Rodrigues4)
{
Vector axis = Vector3(0., 0., 1.); // rotation around Z
double angle = M_PI/2.0;
Rot3 actual = Rot3::rodriguez(axis, angle);
Rot3 actual = Rot3::AxisAngle(axis, angle);
double c=cos(angle),s=sin(angle);
Rot3 expected(c,-s, 0,
s, c, 0,
0, 0, 1);
CHECK(assert_equal(expected,actual));
CHECK(assert_equal(slow_but_correct_rodriguez(axis*angle),actual));
CHECK(assert_equal(slow_but_correct_Rodrigues(axis*angle),actual));
}

/* ************************************************************************* */
@@ -168,7 +168,7 @@ TEST(Rot3, log)

#define CHECK_OMEGA(X,Y,Z) \
w = (Vector(3) << (double)X, (double)Y, double(Z)).finished(); \
R = Rot3::rodriguez(w); \
R = Rot3::Rodrigues(w); \
EXPECT(assert_equal(w, Rot3::Logmap(R),1e-12));

// Check zero
@@ -201,7 +201,7 @@ TEST(Rot3, log)
// Windows and Linux have flipped sign in quaternion mode
#if !defined(__APPLE__) && defined (GTSAM_USE_QUATERNIONS)
w = (Vector(3) << x*PI, y*PI, z*PI).finished();
R = Rot3::rodriguez(w);
R = Rot3::Rodrigues(w);
EXPECT(assert_equal(Vector(-w), Rot3::Logmap(R),1e-12));
#else
CHECK_OMEGA(x*PI,y*PI,z*PI)
@@ -210,7 +210,7 @@ TEST(Rot3, log)
// Check 360 degree rotations
#define CHECK_OMEGA_ZERO(X,Y,Z) \
w = (Vector(3) << (double)X, (double)Y, double(Z)).finished(); \
R = Rot3::rodriguez(w); \
R = Rot3::Rodrigues(w); \
EXPECT(assert_equal(zero(3), Rot3::Logmap(R)));

CHECK_OMEGA_ZERO( 2.0*PI, 0, 0)
@@ -312,8 +312,8 @@ TEST( Rot3, jacobianLogmap )
/* ************************************************************************* */
TEST(Rot3, manifold_expmap)
{
Rot3 gR1 = Rot3::rodriguez(0.1, 0.4, 0.2);
Rot3 gR2 = Rot3::rodriguez(0.3, 0.1, 0.7);
Rot3 gR1 = Rot3::Rodrigues(0.1, 0.4, 0.2);
Rot3 gR2 = Rot3::Rodrigues(0.3, 0.1, 0.7);
Rot3 origin;

// log behaves correctly
@@ -399,8 +399,8 @@ TEST( Rot3, unrotate)
/* ************************************************************************* */
TEST( Rot3, compose )
{
Rot3 R1 = Rot3::rodriguez(0.1, 0.2, 0.3);
Rot3 R2 = Rot3::rodriguez(0.2, 0.3, 0.5);
Rot3 R1 = Rot3::Rodrigues(0.1, 0.2, 0.3);
Rot3 R2 = Rot3::Rodrigues(0.2, 0.3, 0.5);

Rot3 expected = R1 * R2;
Matrix actualH1, actualH2;
@@ -419,7 +419,7 @@ TEST( Rot3, compose )
/* ************************************************************************* */
TEST( Rot3, inverse )
{
Rot3 R = Rot3::rodriguez(0.1, 0.2, 0.3);
Rot3 R = Rot3::Rodrigues(0.1, 0.2, 0.3);

Rot3 I;
Matrix3 actualH;
@@ -444,13 +444,13 @@ TEST( Rot3, between )
0.0, 0.0, 1.0).finished();
EXPECT(assert_equal(expectedr1, r1.matrix()));

Rot3 R = Rot3::rodriguez(0.1, 0.4, 0.2);
Rot3 R = Rot3::Rodrigues(0.1, 0.4, 0.2);
Rot3 origin;
EXPECT(assert_equal(R, origin.between(R)));
EXPECT(assert_equal(R.inverse(), R.between(origin)));

Rot3 R1 = Rot3::rodriguez(0.1, 0.2, 0.3);
Rot3 R2 = Rot3::rodriguez(0.2, 0.3, 0.5);
Rot3 R1 = Rot3::Rodrigues(0.1, 0.2, 0.3);
Rot3 R2 = Rot3::Rodrigues(0.2, 0.3, 0.5);

Rot3 expected = R1.inverse() * R2;
Matrix actualH1, actualH2;
@@ -652,8 +652,8 @@ TEST( Rot3, slerp)
}

//******************************************************************************
Rot3 T1(Rot3::rodriguez(Vector3(0, 0, 1), 1));
Rot3 T2(Rot3::rodriguez(Vector3(0, 1, 0), 2));
Rot3 T1(Rot3::AxisAngle(Vector3(0, 0, 1), 1));
Rot3 T2(Rot3::AxisAngle(Vector3(0, 1, 0), 2));

//******************************************************************************
TEST(Rot3 , Invariants) {
@@ -28,14 +28,14 @@ using namespace gtsam;
GTSAM_CONCEPT_TESTABLE_INST(Rot3)
GTSAM_CONCEPT_LIE_INST(Rot3)

static Rot3 R = Rot3::rodriguez(0.1, 0.4, 0.2);
static Rot3 R = Rot3::Rodrigues(0.1, 0.4, 0.2);
static Point3 P(0.2, 0.7, -2.0);

/* ************************************************************************* */
TEST(Rot3, manifold_cayley)
{
Rot3 gR1 = Rot3::rodriguez(0.1, 0.4, 0.2);
Rot3 gR2 = Rot3::rodriguez(0.3, 0.1, 0.7);
Rot3 gR1 = Rot3::Rodrigues(0.1, 0.4, 0.2);
Rot3 gR2 = Rot3::Rodrigues(0.3, 0.1, 0.7);
Rot3 origin;

// log behaves correctly
@@ -99,13 +99,13 @@ TEST( StereoCamera, Dproject)
Matrix actual1; stereoCam.project2(landmark, actual1, boost::none);
CHECK(assert_equal(expected1,actual1,1e-7));

Matrix expected2 = numericalDerivative32(project3,camPose, landmark, *K);
Matrix expected2 = numericalDerivative32(project3, camPose, landmark, *K);
Matrix actual2; stereoCam.project2(landmark, boost::none, actual2);
CHECK(assert_equal(expected2,actual2,1e-7));
}

/* ************************************************************************* */
TEST( StereoCamera, backproject)
TEST( StereoCamera, backproject_case1)
{
Cal3_S2Stereo::shared_ptr K2(new Cal3_S2Stereo(1500, 1500, 0, 320, 240, 0.5));
StereoCamera stereoCam2(Pose3(), K2);

@@ -117,7 +117,7 @@ TEST( StereoCamera, backproject)
}

/* ************************************************************************* */
TEST( StereoCamera, backproject2)
TEST( StereoCamera, backproject_case2)
{
Rot3 R(0.589511291, -0.804859792, 0.0683931805,
-0.804435942, -0.592650676, -0.0405925523,
@@ -132,6 +132,53 @@ TEST( StereoCamera, backproject2)
CHECK(assert_equal(z, actual, 1e-3));
}

static Point3 backproject3(const Pose3& pose, const StereoPoint2& point, const Cal3_S2Stereo& K) {
return StereoCamera(pose, boost::make_shared<Cal3_S2Stereo>(K)).backproject(point);
}

/* ************************************************************************* */
TEST( StereoCamera, backproject2_case1)
{
Cal3_S2Stereo::shared_ptr K2(new Cal3_S2Stereo(1500, 1500, 0, 320, 240, 0.5));
StereoCamera stereoCam2(Pose3(), K2);

Point3 expected_point(1.2, 2.3, 4.5);
StereoPoint2 stereo_point = stereoCam2.project(expected_point);

Matrix actual_jacobian_1, actual_jacobian_2;
Point3 actual_point = stereoCam2.backproject2(stereo_point, actual_jacobian_1, actual_jacobian_2);
CHECK(assert_equal(expected_point, actual_point, 1e-8));

Matrix expected_jacobian_to_pose = numericalDerivative31(backproject3, Pose3(), stereo_point, *K2);
CHECK(assert_equal(expected_jacobian_to_pose, actual_jacobian_1, 1e-6));

Matrix expected_jacobian_to_point = numericalDerivative32(backproject3, Pose3(), stereo_point, *K2);
CHECK(assert_equal(expected_jacobian_to_point, actual_jacobian_2, 1e-6));
}

TEST( StereoCamera, backproject2_case2)
{
Rot3 R(0.589511291, -0.804859792, 0.0683931805,
-0.804435942, -0.592650676, -0.0405925523,
0.0732045588, -0.0310882277, -0.996832359);
Point3 t(53.5239823, 23.7866016, -4.42379876);
Cal3_S2Stereo::shared_ptr K(new Cal3_S2Stereo(1733.75, 1733.75, 0, 689.645, 508.835, 0.0699612));
StereoCamera camera(Pose3(R,t), K);
StereoPoint2 z(184.812, 129.068, 714.768);

Matrix actual_jacobian_1, actual_jacobian_2;
Point3 l = camera.backproject2(z, actual_jacobian_1, actual_jacobian_2);

StereoPoint2 actual = camera.project(l);
CHECK(assert_equal(z, actual, 1e-3));

Matrix expected_jacobian_to_pose = numericalDerivative31(backproject3, Pose3(R,t), z, *K);
CHECK(assert_equal(expected_jacobian_to_pose, actual_jacobian_1, 1e-6));

Matrix expected_jacobian_to_point = numericalDerivative32(backproject3, Pose3(R,t), z, *K);
CHECK(assert_equal(expected_jacobian_to_point, actual_jacobian_2, 1e-6));
}

/* ************************************************************************* */
int main() { TestResult tr; return TestRegistry::runAllTests(tr);}
/* ************************************************************************* */
@@ -17,6 +17,7 @@
*/

#include <gtsam/geometry/triangulation.h>
#include <gtsam/geometry/SimpleCamera.h>
#include <gtsam/geometry/Cal3Bundler.h>
#include <CppUnitLite/TestHarness.h>

@@ -49,6 +50,7 @@ Point2 z1 = camera1.project(landmark);
Point2 z2 = camera2.project(landmark);

//******************************************************************************
// Simple test with a well-behaved two camera situation
TEST( triangulation, twoPoses) {

vector<Pose3> poses;
@@ -57,24 +59,37 @@ TEST( triangulation, twoPoses) {
poses += pose1, pose2;
measurements += z1, z2;

bool optimize = true;
double rank_tol = 1e-9;

boost::optional<Point3> triangulated_landmark = triangulatePoint3(poses,
sharedCal, measurements, rank_tol, optimize);
EXPECT(assert_equal(landmark, *triangulated_landmark, 1e-2));
// 1. Test simple DLT, perfect in no noise situation
bool optimize = false;
boost::optional<Point3> actual1 = //
triangulatePoint3(poses, sharedCal, measurements, rank_tol, optimize);
EXPECT(assert_equal(landmark, *actual1, 1e-7));

// 2. Add some noise and try again: result should be ~ (4.995, 0.499167, 1.19814)
// 2. test with optimization on, same answer
optimize = true;
boost::optional<Point3> actual2 = //
triangulatePoint3(poses, sharedCal, measurements, rank_tol, optimize);
EXPECT(assert_equal(landmark, *actual2, 1e-7));

// 3. Add some noise and try again: result should be ~ (4.995, 0.499167, 1.19814)
measurements.at(0) += Point2(0.1, 0.5);
measurements.at(1) += Point2(-0.2, 0.3);
optimize = false;
boost::optional<Point3> actual3 = //
triangulatePoint3(poses, sharedCal, measurements, rank_tol, optimize);
EXPECT(assert_equal(Point3(4.995, 0.499167, 1.19814), *actual3, 1e-4));

boost::optional<Point3> triangulated_landmark_noise = triangulatePoint3(poses,
sharedCal, measurements, rank_tol, optimize);
EXPECT(assert_equal(landmark, *triangulated_landmark_noise, 1e-2));
// 4. Now with optimization on
optimize = true;
boost::optional<Point3> actual4 = //
triangulatePoint3(poses, sharedCal, measurements, rank_tol, optimize);
EXPECT(assert_equal(Point3(4.995, 0.499167, 1.19814), *actual4, 1e-4));
}

//******************************************************************************

// Similar, but now with Bundler calibration
TEST( triangulation, twoPosesBundler) {

boost::shared_ptr<Cal3Bundler> bundlerCal = //
@@ -95,17 +110,17 @@ TEST( triangulation, twoPosesBundler) {
bool optimize = true;
double rank_tol = 1e-9;

boost::optional<Point3> triangulated_landmark = triangulatePoint3(poses,
bundlerCal, measurements, rank_tol, optimize);
EXPECT(assert_equal(landmark, *triangulated_landmark, 1e-2));
boost::optional<Point3> actual = //
triangulatePoint3(poses, bundlerCal, measurements, rank_tol, optimize);
EXPECT(assert_equal(landmark, *actual, 1e-7));

// 2. Add some noise and try again: result should be ~ (4.995, 0.499167, 1.19814)
// Add some noise and try again
measurements.at(0) += Point2(0.1, 0.5);
measurements.at(1) += Point2(-0.2, 0.3);

boost::optional<Point3> triangulated_landmark_noise = triangulatePoint3(poses,
bundlerCal, measurements, rank_tol, optimize);
EXPECT(assert_equal(landmark, *triangulated_landmark_noise, 1e-2));
boost::optional<Point3> actual2 = //
triangulatePoint3(poses, bundlerCal, measurements, rank_tol, optimize);
EXPECT(assert_equal(Point3(4.995, 0.499167, 1.19847), *actual2, 1e-4));
}

//******************************************************************************
@@ -116,17 +131,17 @@ TEST( triangulation, fourPoses) {
poses += pose1, pose2;
measurements += z1, z2;

boost::optional<Point3> triangulated_landmark = triangulatePoint3(poses,
sharedCal, measurements);
EXPECT(assert_equal(landmark, *triangulated_landmark, 1e-2));
boost::optional<Point3> actual = triangulatePoint3(poses, sharedCal,
measurements);
EXPECT(assert_equal(landmark, *actual, 1e-2));

// 2. Add some noise and try again: result should be ~ (4.995, 0.499167, 1.19814)
measurements.at(0) += Point2(0.1, 0.5);
measurements.at(1) += Point2(-0.2, 0.3);

boost::optional<Point3> triangulated_landmark_noise = //
boost::optional<Point3> actual2 = //
triangulatePoint3(poses, sharedCal, measurements);
EXPECT(assert_equal(landmark, *triangulated_landmark_noise, 1e-2));
EXPECT(assert_equal(landmark, *actual2, 1e-2));

// 3. Add a slightly rotated third camera above, again with measurement noise
Pose3 pose3 = pose1 * Pose3(Rot3::ypr(0.1, 0.2, 0.1), Point3(0.1, -2, -.1));
@@ -150,7 +165,7 @@ TEST( triangulation, fourPoses) {
SimpleCamera camera4(pose4, *sharedCal);

#ifdef GTSAM_THROW_CHEIRALITY_EXCEPTION
CHECK_EXCEPTION(camera4.project(landmark);, CheiralityException);
CHECK_EXCEPTION(camera4.project(landmark), CheiralityException);

poses += pose4;
measurements += Point2(400, 400);
@@ -180,17 +195,17 @@ TEST( triangulation, fourPoses_distinct_Ks) {
cameras += camera1, camera2;
measurements += z1, z2;

boost::optional<Point3> triangulated_landmark = //
boost::optional<Point3> actual = //
triangulatePoint3(cameras, measurements);
EXPECT(assert_equal(landmark, *triangulated_landmark, 1e-2));
EXPECT(assert_equal(landmark, *actual, 1e-2));

// 2. Add some noise and try again: result should be ~ (4.995, 0.499167, 1.19814)
measurements.at(0) += Point2(0.1, 0.5);
measurements.at(1) += Point2(-0.2, 0.3);

boost::optional<Point3> triangulated_landmark_noise = //
boost::optional<Point3> actual2 = //
triangulatePoint3(cameras, measurements);
EXPECT(assert_equal(landmark, *triangulated_landmark_noise, 1e-2));
EXPECT(assert_equal(landmark, *actual2, 1e-2));

// 3. Add a slightly rotated third camera above, again with measurement noise
Pose3 pose3 = pose1 * Pose3(Rot3::ypr(0.1, 0.2, 0.1), Point3(0.1, -2, -.1));
@@ -216,7 +231,7 @@ TEST( triangulation, fourPoses_distinct_Ks) {
SimpleCamera camera4(pose4, K4);

#ifdef GTSAM_THROW_CHEIRALITY_EXCEPTION
CHECK_EXCEPTION(camera4.project(landmark);, CheiralityException);
CHECK_EXCEPTION(camera4.project(landmark), CheiralityException);

cameras += camera4;
measurements += Point2(400, 400);
@@ -244,23 +259,19 @@ TEST( triangulation, twoIdenticalPoses) {
}

//******************************************************************************
/*
TEST( triangulation, onePose) {
// we expect this test to fail with a TriangulationUnderconstrainedException
// because there's only one camera observation
TEST( triangulation, onePose) {
// we expect this test to fail with a TriangulationUnderconstrainedException
// because there's only one camera observation

Cal3_S2 *sharedCal(1500, 1200, 0, 640, 480);
vector<Pose3> poses;
vector<Point2> measurements;

vector<Pose3> poses;
vector<Point2> measurements;
poses += Pose3();
measurements += Point2();

poses += Pose3();
measurements += Point2();

CHECK_EXCEPTION(triangulatePoint3(poses, measurements, *sharedCal),
TriangulationUnderconstrainedException);
}
*/
CHECK_EXCEPTION(triangulatePoint3(poses, sharedCal, measurements),
TriangulationUnderconstrainedException);
}

//******************************************************************************
int main() {
@@ -46,7 +46,6 @@ Vector4 triangulateHomogeneousDLT(
double error;
Vector v;
boost::tie(rank, error, v) = DLT(A, rank_tol);
// std::cout << "s " << s.transpose() << std:endl;

if (rank < 3)
throw(TriangulationUnderconstrainedException());
@ -18,6 +18,7 @@
|
|||
|
||||
#pragma once
|
||||
|
||||
#include <gtsam/geometry/PinholeCamera.h>
|
||||
#include <gtsam/slam/TriangulationFactor.h>
|
||||
#include <gtsam/slam/PriorFactor.h>
|
||||
#include <gtsam/nonlinear/NonlinearFactorGraph.h>
|
||||
|
@ -64,7 +65,6 @@ GTSAM_EXPORT Point3 triangulateDLT(
|
|||
const std::vector<Matrix34>& projection_matrices,
|
||||
const std::vector<Point2>& measurements, double rank_tol = 1e-9);
|
||||
|
||||
///
|
||||
/**
|
||||
* Create a factor graph with projection factors from poses and one calibration
|
||||
* @param poses Camera poses
|
||||
|
@ -86,8 +86,9 @@ std::pair<NonlinearFactorGraph, Values> triangulationGraph(
|
|||
static SharedNoiseModel prior_model(noiseModel::Isotropic::Sigma(6, 1e-6));
|
||||
for (size_t i = 0; i < measurements.size(); i++) {
|
||||
const Pose3& pose_i = poses[i];
|
||||
PinholeCamera<CALIBRATION> camera_i(pose_i, *sharedCal);
|
||||
graph.push_back(TriangulationFactor<CALIBRATION> //
|
||||
typedef PinholePose<CALIBRATION> Camera;
|
||||
Camera camera_i(pose_i, sharedCal);
|
||||
graph.push_back(TriangulationFactor<Camera> //
|
||||
(camera_i, measurements[i], unit2, landmarkKey));
|
||||
}
|
||||
return std::make_pair(graph, values);
|
||||
|
@ -114,13 +115,22 @@ std::pair<NonlinearFactorGraph, Values> triangulationGraph(
|
|||
static SharedNoiseModel prior_model(noiseModel::Isotropic::Sigma(6, 1e-6));
|
||||
for (size_t i = 0; i < measurements.size(); i++) {
|
||||
const CAMERA& camera_i = cameras[i];
|
||||
graph.push_back(TriangulationFactor<typename CAMERA::CalibrationType> //
|
||||
graph.push_back(TriangulationFactor<CAMERA> //
|
||||
(camera_i, measurements[i], unit2, landmarkKey));
|
||||
}
|
||||
return std::make_pair(graph, values);
|
||||
}
|
||||
|
||||
///
|
||||
/// PinholeCamera specific version
|
||||
template<class CALIBRATION>
|
||||
std::pair<NonlinearFactorGraph, Values> triangulationGraph(
|
||||
const std::vector<PinholeCamera<CALIBRATION> >& cameras,
|
||||
const std::vector<Point2>& measurements, Key landmarkKey,
|
||||
const Point3& initialEstimate) {
|
||||
return triangulationGraph<PinholeCamera<CALIBRATION> > //
|
||||
(cameras, measurements, landmarkKey, initialEstimate);
|
||||
}
|
||||
|
||||
/**
|
||||
* Optimize for triangulation
|
||||
* @param graph nonlinear factors for projection
|
||||
|
@ -147,8 +157,8 @@ Point3 triangulateNonlinear(const std::vector<Pose3>& poses,
|
|||
// Create a factor graph and initial values
|
||||
Values values;
|
||||
NonlinearFactorGraph graph;
|
||||
boost::tie(graph, values) = triangulationGraph(poses, sharedCal, measurements,
|
||||
Symbol('p', 0), initialEstimate);
|
||||
boost::tie(graph, values) = triangulationGraph<CALIBRATION> //
|
||||
(poses, sharedCal, measurements, Symbol('p', 0), initialEstimate);
|
||||
|
||||
return optimize(graph, values, Symbol('p', 0));
|
||||
}
|
||||
|
@ -168,12 +178,21 @@ Point3 triangulateNonlinear(
|
|||
// Create a factor graph and initial values
|
||||
Values values;
|
||||
NonlinearFactorGraph graph;
|
||||
boost::tie(graph, values) = triangulationGraph(cameras, measurements,
|
||||
Symbol('p', 0), initialEstimate);
|
||||
boost::tie(graph, values) = triangulationGraph<CAMERA> //
|
||||
(cameras, measurements, Symbol('p', 0), initialEstimate);
|
||||
|
||||
return optimize(graph, values, Symbol('p', 0));
|
||||
}
|
||||
|
||||
/// PinholeCamera specific version
|
||||
template<class CALIBRATION>
|
||||
Point3 triangulateNonlinear(
|
||||
const std::vector<PinholeCamera<CALIBRATION> >& cameras,
|
||||
const std::vector<Point2>& measurements, const Point3& initialEstimate) {
|
||||
return triangulateNonlinear<PinholeCamera<CALIBRATION> > //
|
||||
(cameras, measurements, initialEstimate);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a 3*4 camera projection matrix from calibration and pose.
|
||||
* Functor for partial application on calibration
|
||||
|
@ -224,12 +243,13 @@ Point3 triangulatePoint3(const std::vector<Pose3>& poses,
|
|||
// Triangulate linearly
|
||||
Point3 point = triangulateDLT(projection_matrices, measurements, rank_tol);
|
||||
|
||||
// The n refine using non-linear optimization
|
||||
// Then refine using non-linear optimization
|
||||
if (optimize)
|
||||
point = triangulateNonlinear(poses, sharedCal, measurements, point);
|
||||
point = triangulateNonlinear<CALIBRATION> //
|
||||
(poses, sharedCal, measurements, point);
|
||||
|
||||
#ifdef GTSAM_THROW_CHEIRALITY_EXCEPTION
|
||||
// verify that the triangulated point lies infront of all cameras
|
||||
// verify that the triangulated point lies in front of all cameras
|
||||
BOOST_FOREACH(const Pose3& pose, poses) {
|
||||
const Point3& p_local = pose.transform_to(point);
|
||||
if (p_local.z() <= 0)
|
||||
|
@ -265,9 +285,8 @@ Point3 triangulatePoint3(
|
|||
throw(TriangulationUnderconstrainedException());
|
||||
|
||||
// construct projection matrices from poses & calibration
|
||||
typedef PinholeCamera<typename CAMERA::CalibrationType> Camera;
|
||||
std::vector<Matrix34> projection_matrices;
|
||||
BOOST_FOREACH(const Camera& camera, cameras)
|
||||
BOOST_FOREACH(const CAMERA& camera, cameras)
|
||||
projection_matrices.push_back(
|
||||
CameraProjectionMatrix<typename CAMERA::CalibrationType>(camera.calibration())(
|
||||
camera.pose()));
|
||||
|
@ -275,11 +294,11 @@ Point3 triangulatePoint3(
|
|||
|
||||
// The n refine using non-linear optimization
|
||||
if (optimize)
|
||||
point = triangulateNonlinear(cameras, measurements, point);
|
||||
point = triangulateNonlinear<CAMERA>(cameras, measurements, point);
|
||||
|
||||
#ifdef GTSAM_THROW_CHEIRALITY_EXCEPTION
|
||||
// verify that the triangulated point lies infront of all cameras
|
||||
BOOST_FOREACH(const Camera& camera, cameras) {
|
||||
// verify that the triangulated point lies in front of all cameras
|
||||
BOOST_FOREACH(const CAMERA& camera, cameras) {
|
||||
const Point3& p_local = camera.pose().transform_to(point);
|
||||
if (p_local.z() <= 0)
|
||||
throw(TriangulationCheiralityException());
|
||||
|
@ -289,5 +308,160 @@ Point3 triangulatePoint3(
|
|||
return point;
|
||||
}
|
||||
|
||||
/// Pinhole-specific version
|
||||
template<class CALIBRATION>
|
||||
Point3 triangulatePoint3(
|
||||
const std::vector<PinholeCamera<CALIBRATION> >& cameras,
|
||||
const std::vector<Point2>& measurements, double rank_tol = 1e-9,
|
||||
bool optimize = false) {
|
||||
return triangulatePoint3<PinholeCamera<CALIBRATION> > //
|
||||
(cameras, measurements, rank_tol, optimize);
|
||||
}
|
||||
|
||||
struct TriangulationParameters {
|
||||
|
||||
double rankTolerance; ///< threshold to decide whether triangulation is result.degenerate
|
||||
bool enableEPI; ///< if set to true, will refine triangulation using LM
|
||||
|
||||
/**
|
||||
* if the landmark is triangulated at distance larger than this,
|
||||
* result is flagged as degenerate.
|
||||
*/
|
||||
double landmarkDistanceThreshold; //
|
||||
|
||||
/**
|
||||
* If this is nonnegative the we will check if the average reprojection error
|
||||
* is smaller than this threshold after triangulation, otherwise result is
|
||||
* flagged as degenerate.
|
||||
*/
|
||||
double dynamicOutlierRejectionThreshold;
|
||||
|
||||
/**
|
||||
* Constructor
|
||||
* @param rankTol tolerance used to check if point triangulation is degenerate
|
||||
* @param enableEPI if true refine triangulation with embedded LM iterations
|
||||
* @param landmarkDistanceThreshold flag as degenerate if point further than this
|
||||
* @param dynamicOutlierRejectionThreshold or if average error larger than this
|
||||
*
|
||||
*/
|
||||
TriangulationParameters(const double _rankTolerance = 1.0,
|
||||
const bool _enableEPI = false, double _landmarkDistanceThreshold = -1,
|
||||
double _dynamicOutlierRejectionThreshold = -1) :
|
||||
rankTolerance(_rankTolerance), enableEPI(_enableEPI), //
|
||||
landmarkDistanceThreshold(_landmarkDistanceThreshold), //
|
||||
dynamicOutlierRejectionThreshold(_dynamicOutlierRejectionThreshold) {
|
||||
}
|
||||
|
||||
// stream to output
|
||||
friend std::ostream &operator<<(std::ostream &os,
|
||||
const TriangulationParameters& p) {
|
||||
os << "rankTolerance = " << p.rankTolerance << std::endl;
|
||||
os << "enableEPI = " << p.enableEPI << std::endl;
|
||||
os << "landmarkDistanceThreshold = " << p.landmarkDistanceThreshold
|
||||
<< std::endl;
|
||||
os << "dynamicOutlierRejectionThreshold = "
|
||||
<< p.dynamicOutlierRejectionThreshold << std::endl;
|
||||
return os;
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* TriangulationResult is an optional point, along with the reasons why it is invalid.
|
||||
*/
|
||||
class TriangulationResult: public boost::optional<Point3> {
|
||||
enum Status {
|
||||
VALID, DEGENERATE, BEHIND_CAMERA
|
||||
};
|
||||
Status status_;
|
||||
TriangulationResult(Status s) :
|
||||
status_(s) {
|
||||
}
|
||||
public:
|
||||
TriangulationResult(const Point3& p) :
|
||||
status_(VALID) {
|
||||
reset(p);
|
||||
}
|
||||
static TriangulationResult Degenerate() {
|
||||
return TriangulationResult(DEGENERATE);
|
||||
}
|
||||
static TriangulationResult BehindCamera() {
|
||||
return TriangulationResult(BEHIND_CAMERA);
|
||||
}
|
||||
bool degenerate() const {
|
||||
return status_ == DEGENERATE;
|
||||
}
|
||||
bool behindCamera() const {
|
||||
return status_ == BEHIND_CAMERA;
|
||||
}
|
||||
// stream to output
|
||||
friend std::ostream &operator<<(std::ostream &os,
|
||||
const TriangulationResult& result) {
|
||||
if (result)
|
||||
os << "point = " << *result << std::endl;
|
||||
else
|
||||
os << "no point, status = " << result.status_ << std::endl;
|
||||
return os;
|
||||
}
|
||||
};
|
||||
|
||||
/// triangulateSafe: extensive checking of the outcome
|
||||
template<class CAMERA>
|
||||
TriangulationResult triangulateSafe(const std::vector<CAMERA>& cameras,
|
||||
const std::vector<Point2>& measured,
|
||||
const TriangulationParameters& params) {
|
||||
|
||||
size_t m = cameras.size();
|
||||
|
||||
// if we have a single pose the corresponding factor is uninformative
|
||||
if (m < 2)
|
||||
return TriangulationResult::Degenerate();
|
||||
else
|
||||
// We triangulate the 3D position of the landmark
|
||||
try {
|
||||
Point3 point = triangulatePoint3<CAMERA>(cameras, measured,
|
||||
params.rankTolerance, params.enableEPI);
|
||||
|
||||
// Check landmark distance and re-projection errors to avoid outliers
|
||||
size_t i = 0;
|
||||
double totalReprojError = 0.0;
|
||||
BOOST_FOREACH(const CAMERA& camera, cameras) {
|
||||
const Pose3& pose = camera.pose();
|
||||
if (params.landmarkDistanceThreshold > 0
|
||||
&& pose.translation().distance(point)
|
||||
> params.landmarkDistanceThreshold)
|
||||
return TriangulationResult::Degenerate();
|
||||
#ifdef GTSAM_THROW_CHEIRALITY_EXCEPTION
|
||||
// verify that the triangulated point lies in front of all cameras
|
||||
// Only needed if this was not yet handled by exception
|
||||
const Point3& p_local = pose.transform_to(point);
|
||||
if (p_local.z() <= 0)
|
||||
return TriangulationResult::BehindCamera();
|
||||
#endif
|
||||
// Check reprojection error
|
||||
if (params.dynamicOutlierRejectionThreshold > 0) {
|
||||
const Point2& zi = measured.at(i);
|
||||
Point2 reprojectionError(camera.project(point) - zi);
|
||||
totalReprojError += reprojectionError.vector().norm();
|
||||
}
|
||||
i += 1;
|
||||
}
|
||||
// Flag as degenerate if average reprojection error is too large
|
||||
if (params.dynamicOutlierRejectionThreshold > 0
|
||||
&& totalReprojError / m > params.dynamicOutlierRejectionThreshold)
|
||||
return TriangulationResult::Degenerate();
|
||||
|
||||
// all good!
|
||||
return TriangulationResult(point);
|
||||
} catch (TriangulationUnderconstrainedException&) {
|
||||
// This exception is thrown if
|
||||
// 1) There is a single pose for triangulation - this should not happen because we checked the number of poses before
|
||||
// 2) The rank of the matrix used for triangulation is < 3: rotation-only, parallel cameras (or motion towards the landmark)
|
||||
return TriangulationResult::Degenerate();
|
||||
} catch (TriangulationCheiralityException&) {
|
||||
// point is behind one of the cameras: this can happen with close-to-parallel cameras or with outliers
|
||||
return TriangulationResult::BehindCamera();
|
||||
}
|
||||
}
|
||||
|
||||
} // \namespace gtsam
|
||||
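Putting the pieces together, a hedged sketch of calling triangulateSafe with the parameter and result types defined above; the cameras and measurements are assumed to be set up as in the earlier PinholeCamera<Cal3_S2> sketch.

TriangulationParameters params(1e-9, false, 50.0, 2.0);
TriangulationResult result = triangulateSafe(cameras, measurements, params);
if (result)                        // boost::optional<Point3>: a valid point is present
  std::cout << "triangulated at " << *result << std::endl;
else if (result.degenerate())
  std::cout << "degenerate, distant, or high-error configuration" << std::endl;
else if (result.behindCamera())
  std::cout << "cheirality failure: point behind a camera" << std::endl;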
|
||||
|
|
|
@ -344,7 +344,7 @@ namespace gtsam {
|
|||
gttic(Full_root_factoring);
|
||||
boost::shared_ptr<typename EliminationTraitsType::BayesTreeType> p_C1_B; {
|
||||
FastVector<Key> C1_minus_B; {
|
||||
FastSet<Key> C1_minus_B_set(C1->conditional()->beginParents(), C1->conditional()->endParents());
|
||||
KeySet C1_minus_B_set(C1->conditional()->beginParents(), C1->conditional()->endParents());
|
||||
BOOST_FOREACH(const Key j, *B->conditional()) {
|
||||
C1_minus_B_set.erase(j); }
|
||||
C1_minus_B.assign(C1_minus_B_set.begin(), C1_minus_B_set.end());
|
||||
|
@ -356,7 +356,7 @@ namespace gtsam {
|
|||
}
|
||||
boost::shared_ptr<typename EliminationTraitsType::BayesTreeType> p_C2_B; {
|
||||
FastVector<Key> C2_minus_B; {
|
||||
FastSet<Key> C2_minus_B_set(C2->conditional()->beginParents(), C2->conditional()->endParents());
|
||||
KeySet C2_minus_B_set(C2->conditional()->beginParents(), C2->conditional()->endParents());
|
||||
BOOST_FOREACH(const Key j, *B->conditional()) {
|
||||
C2_minus_B_set.erase(j); }
|
||||
C2_minus_B.assign(C2_minus_B_set.begin(), C2_minus_B_set.end());
|
||||
|
|
|
@ -19,13 +19,13 @@
|
|||
|
||||
#pragma once
|
||||
|
||||
#include <string>
|
||||
|
||||
#include <gtsam/base/types.h>
|
||||
#include <gtsam/inference/Key.h>
|
||||
#include <gtsam/base/FastList.h>
|
||||
#include <gtsam/base/ConcurrentMap.h>
|
||||
#include <gtsam/base/FastVector.h>
|
||||
|
||||
#include <string>
|
||||
|
||||
namespace gtsam {
|
||||
|
||||
// Forward declarations
|
||||
|
|
|
@ -44,8 +44,8 @@ namespace gtsam {
|
|||
FastVector<Key>
|
||||
BayesTreeCliqueBase<DERIVED, FACTORGRAPH>::separator_setminus_B(const derived_ptr& B) const
|
||||
{
|
||||
FastSet<Key> p_F_S_parents(this->conditional()->beginParents(), this->conditional()->endParents());
|
||||
FastSet<Key> indicesB(B->conditional()->begin(), B->conditional()->end());
|
||||
KeySet p_F_S_parents(this->conditional()->beginParents(), this->conditional()->endParents());
|
||||
KeySet indicesB(B->conditional()->begin(), B->conditional()->end());
|
||||
FastVector<Key> S_setminus_B;
|
||||
std::set_difference(p_F_S_parents.begin(), p_F_S_parents.end(),
|
||||
indicesB.begin(), indicesB.end(), back_inserter(S_setminus_B));
|
||||
|
@ -58,8 +58,8 @@ namespace gtsam {
|
|||
const derived_ptr& B, const FactorGraphType& p_Cp_B) const
|
||||
{
|
||||
gttic(shortcut_indices);
|
||||
FastSet<Key> allKeys = p_Cp_B.keys();
|
||||
FastSet<Key> indicesB(B->conditional()->begin(), B->conditional()->end());
|
||||
KeySet allKeys = p_Cp_B.keys();
|
||||
KeySet indicesB(B->conditional()->begin(), B->conditional()->end());
|
||||
FastVector<Key> S_setminus_B = separator_setminus_B(B);
|
||||
FastVector<Key> keep;
|
||||
// keep = S\B intersect allKeys (S_setminus_B is already sorted)
|
||||
|
|
|
@ -7,180 +7,233 @@
|
|||
* @brief Collects factor graph fragments defined on variable clusters, arranged in a tree
|
||||
*/
|
||||
|
||||
#include <gtsam/base/timing.h>
|
||||
#include <gtsam/base/treeTraversal-inst.h>
|
||||
#include <gtsam/inference/ClusterTree.h>
|
||||
#include <gtsam/inference/BayesTree.h>
|
||||
#include <gtsam/inference/Ordering.h>
|
||||
#include <gtsam/base/timing.h>
|
||||
#include <gtsam/base/treeTraversal-inst.h>
|
||||
|
||||
#include <boost/foreach.hpp>
|
||||
#include <boost/bind.hpp>
|
||||
|
||||
namespace gtsam
|
||||
{
|
||||
namespace
|
||||
{
|
||||
/* ************************************************************************* */
|
||||
// Elimination traversal data - stores a pointer to the parent data and collects the factors
|
||||
// resulting from elimination of the children. Also sets up BayesTree cliques with parent and
|
||||
// child pointers.
|
||||
template<class CLUSTERTREE>
|
||||
struct EliminationData {
|
||||
EliminationData* const parentData;
|
||||
size_t myIndexInParent;
|
||||
FastVector<typename CLUSTERTREE::sharedFactor> childFactors;
|
||||
boost::shared_ptr<typename CLUSTERTREE::BayesTreeType::Node> bayesTreeNode;
|
||||
EliminationData(EliminationData* _parentData, size_t nChildren) :
|
||||
parentData(_parentData),
|
||||
bayesTreeNode(boost::make_shared<typename CLUSTERTREE::BayesTreeType::Node>())
|
||||
{
|
||||
if(parentData) {
|
||||
myIndexInParent = parentData->childFactors.size();
|
||||
parentData->childFactors.push_back(typename CLUSTERTREE::sharedFactor());
|
||||
} else {
|
||||
myIndexInParent = 0;
|
||||
}
|
||||
// Set up BayesTree parent and child pointers
|
||||
if(parentData) {
|
||||
if(parentData->parentData) // If our parent is not the dummy node
|
||||
bayesTreeNode->parent_ = parentData->bayesTreeNode;
|
||||
parentData->bayesTreeNode->children.push_back(bayesTreeNode);
|
||||
}
|
||||
}
|
||||
};
|
||||
namespace gtsam {
|
||||
|
||||
/* ************************************************************************* */
|
||||
// Elimination pre-order visitor - just creates the EliminationData structure for the visited
|
||||
// node.
|
||||
template<class CLUSTERTREE>
|
||||
EliminationData<CLUSTERTREE> eliminationPreOrderVisitor(
|
||||
const typename CLUSTERTREE::sharedNode& node, EliminationData<CLUSTERTREE>& parentData)
|
||||
{
|
||||
EliminationData<CLUSTERTREE> myData(&parentData, node->children.size());
|
||||
myData.bayesTreeNode->problemSize_ = node->problemSize();
|
||||
return myData;
|
||||
/* ************************************************************************* */
|
||||
// Elimination traversal data - stores a pointer to the parent data and collects
|
||||
// the factors resulting from elimination of the children. Also sets up BayesTree
|
||||
// cliques with parent and child pointers.
|
||||
template<class CLUSTERTREE>
|
||||
struct EliminationData {
|
||||
// Typedefs
|
||||
typedef typename CLUSTERTREE::sharedFactor sharedFactor;
|
||||
typedef typename CLUSTERTREE::FactorType FactorType;
|
||||
typedef typename CLUSTERTREE::FactorGraphType FactorGraphType;
|
||||
typedef typename CLUSTERTREE::ConditionalType ConditionalType;
|
||||
typedef typename CLUSTERTREE::BayesTreeType::Node BTNode;
|
||||
|
||||
EliminationData* const parentData;
|
||||
size_t myIndexInParent;
|
||||
FastVector<sharedFactor> childFactors;
|
||||
boost::shared_ptr<BTNode> bayesTreeNode;
|
||||
EliminationData(EliminationData* _parentData, size_t nChildren) :
|
||||
parentData(_parentData), bayesTreeNode(boost::make_shared<BTNode>()) {
|
||||
if (parentData) {
|
||||
myIndexInParent = parentData->childFactors.size();
|
||||
parentData->childFactors.push_back(sharedFactor());
|
||||
} else {
|
||||
myIndexInParent = 0;
|
||||
}
|
||||
// Set up BayesTree parent and child pointers
|
||||
if (parentData) {
|
||||
if (parentData->parentData) // If our parent is not the dummy node
|
||||
bayesTreeNode->parent_ = parentData->bayesTreeNode;
|
||||
parentData->bayesTreeNode->children.push_back(bayesTreeNode);
|
||||
}
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
// Elimination post-order visitor - combine the child factors with our own factors, add the
|
||||
// resulting conditional to the BayesTree, and add the remaining factor to the parent.
|
||||
template<class CLUSTERTREE>
|
||||
struct EliminationPostOrderVisitor
|
||||
{
|
||||
const typename CLUSTERTREE::Eliminate& eliminationFunction;
|
||||
typename CLUSTERTREE::BayesTreeType::Nodes& nodesIndex;
|
||||
EliminationPostOrderVisitor(const typename CLUSTERTREE::Eliminate& eliminationFunction,
|
||||
// Elimination pre-order visitor - creates the EliminationData structure for the visited node.
|
||||
static EliminationData EliminationPreOrderVisitor(
|
||||
const typename CLUSTERTREE::sharedNode& node,
|
||||
EliminationData& parentData) {
|
||||
assert(node);
|
||||
EliminationData myData(&parentData, node->children.size());
|
||||
myData.bayesTreeNode->problemSize_ = node->problemSize();
|
||||
return myData;
|
||||
}
|
||||
|
||||
// Elimination post-order visitor - combine the child factors with our own factors, add the
|
||||
// resulting conditional to the BayesTree, and add the remaining factor to the parent.
|
||||
class EliminationPostOrderVisitor {
|
||||
const typename CLUSTERTREE::Eliminate& eliminationFunction_;
|
||||
typename CLUSTERTREE::BayesTreeType::Nodes& nodesIndex_;
|
||||
|
||||
public:
|
||||
// Construct functor
|
||||
EliminationPostOrderVisitor(
|
||||
const typename CLUSTERTREE::Eliminate& eliminationFunction,
|
||||
typename CLUSTERTREE::BayesTreeType::Nodes& nodesIndex) :
|
||||
eliminationFunction(eliminationFunction), nodesIndex(nodesIndex) {}
|
||||
void operator()(const typename CLUSTERTREE::sharedNode& node, EliminationData<CLUSTERTREE>& myData)
|
||||
{
|
||||
// Typedefs
|
||||
typedef typename CLUSTERTREE::sharedFactor sharedFactor;
|
||||
typedef typename CLUSTERTREE::FactorType FactorType;
|
||||
typedef typename CLUSTERTREE::FactorGraphType FactorGraphType;
|
||||
typedef typename CLUSTERTREE::ConditionalType ConditionalType;
|
||||
typedef typename CLUSTERTREE::BayesTreeType::Node BTNode;
|
||||
|
||||
// Gather factors
|
||||
FactorGraphType gatheredFactors;
|
||||
gatheredFactors.reserve(node->factors.size() + node->children.size());
|
||||
gatheredFactors += node->factors;
|
||||
gatheredFactors += myData.childFactors;
|
||||
|
||||
// Check for Bayes tree orphan subtrees, and add them to our children
|
||||
BOOST_FOREACH(const sharedFactor& f, node->factors)
|
||||
{
|
||||
if(const BayesTreeOrphanWrapper<BTNode>* asSubtree = dynamic_cast<const BayesTreeOrphanWrapper<BTNode>*>(f.get()))
|
||||
{
|
||||
myData.bayesTreeNode->children.push_back(asSubtree->clique);
|
||||
asSubtree->clique->parent_ = myData.bayesTreeNode;
|
||||
}
|
||||
}
|
||||
|
||||
// Do dense elimination step
|
||||
std::pair<boost::shared_ptr<ConditionalType>, boost::shared_ptr<FactorType> > eliminationResult =
|
||||
eliminationFunction(gatheredFactors, Ordering(node->keys));
|
||||
|
||||
// Store conditional in BayesTree clique, and in the case of ISAM2Clique also store the remaining factor
|
||||
myData.bayesTreeNode->setEliminationResult(eliminationResult);
|
||||
|
||||
// Fill nodes index - we do this here instead of calling insertRoot at the end to avoid
|
||||
// putting orphan subtrees in the index - they'll already be in the index of the ISAM2
|
||||
// object they're added to.
|
||||
BOOST_FOREACH(const Key& j, myData.bayesTreeNode->conditional()->frontals())
|
||||
nodesIndex.insert(std::make_pair(j, myData.bayesTreeNode));
|
||||
|
||||
// Store remaining factor in parent's gathered factors
|
||||
if(!eliminationResult.second->empty())
|
||||
myData.parentData->childFactors[myData.myIndexInParent] = eliminationResult.second;
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
template<class BAYESTREE, class GRAPH>
|
||||
void ClusterTree<BAYESTREE,GRAPH>::Cluster::print(
|
||||
const std::string& s, const KeyFormatter& keyFormatter) const
|
||||
{
|
||||
std::cout << s;
|
||||
BOOST_FOREACH(Key j, keys)
|
||||
std::cout << j << " ";
|
||||
std::cout << "problemSize = " << problemSize_ << std::endl;
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
template<class BAYESTREE, class GRAPH>
|
||||
void ClusterTree<BAYESTREE,GRAPH>::print(
|
||||
const std::string& s, const KeyFormatter& keyFormatter) const
|
||||
{
|
||||
treeTraversal::PrintForest(*this, s, keyFormatter);
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
template<class BAYESTREE, class GRAPH>
|
||||
ClusterTree<BAYESTREE,GRAPH>& ClusterTree<BAYESTREE,GRAPH>::operator=(const This& other)
|
||||
{
|
||||
// Start by duplicating the tree.
|
||||
roots_ = treeTraversal::CloneForest(other);
|
||||
|
||||
// Assign the remaining factors - these are pointers to factors in the original factor graph and
|
||||
// we do not clone them.
|
||||
remainingFactors_ = other.remainingFactors_;
|
||||
|
||||
return *this;
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
template<class BAYESTREE, class GRAPH>
|
||||
std::pair<boost::shared_ptr<BAYESTREE>, boost::shared_ptr<GRAPH> >
|
||||
ClusterTree<BAYESTREE,GRAPH>::eliminate(const Eliminate& function) const
|
||||
{
|
||||
gttic(ClusterTree_eliminate);
|
||||
// Do elimination (depth-first traversal). The rootsContainer stores a 'dummy' BayesTree node
|
||||
// that contains all of the roots as its children. rootsContainer also stores the remaining
|
||||
// uneliminated factors passed up from the roots.
|
||||
boost::shared_ptr<BayesTreeType> result = boost::make_shared<BayesTreeType>();
|
||||
EliminationData<This> rootsContainer(0, roots_.size());
|
||||
EliminationPostOrderVisitor<This> visitorPost(function, result->nodes_);
|
||||
{
|
||||
TbbOpenMPMixedScope threadLimiter; // Limits OpenMP threads since we're mixing TBB and OpenMP
|
||||
treeTraversal::DepthFirstForestParallel(*this, rootsContainer,
|
||||
eliminationPreOrderVisitor<This>, visitorPost, 10);
|
||||
eliminationFunction_(eliminationFunction), nodesIndex_(nodesIndex) {
|
||||
}
|
||||
|
||||
// Create BayesTree from roots stored in the dummy BayesTree node.
|
||||
result->roots_.insert(result->roots_.end(), rootsContainer.bayesTreeNode->children.begin(), rootsContainer.bayesTreeNode->children.end());
|
||||
// Function that does the HEAVY lifting
|
||||
void operator()(const typename CLUSTERTREE::sharedNode& node,
|
||||
EliminationData& myData) {
|
||||
assert(node);
|
||||
|
||||
// Add remaining factors that were not involved with eliminated variables
|
||||
boost::shared_ptr<FactorGraphType> allRemainingFactors = boost::make_shared<FactorGraphType>();
|
||||
allRemainingFactors->reserve(remainingFactors_.size() + rootsContainer.childFactors.size());
|
||||
allRemainingFactors->push_back(remainingFactors_.begin(), remainingFactors_.end());
|
||||
BOOST_FOREACH(const sharedFactor& factor, rootsContainer.childFactors)
|
||||
if(factor)
|
||||
allRemainingFactors->push_back(factor);
|
||||
// Gather factors
|
||||
FactorGraphType gatheredFactors;
|
||||
gatheredFactors.reserve(node->factors.size() + node->children.size());
|
||||
gatheredFactors += node->factors;
|
||||
gatheredFactors += myData.childFactors;
|
||||
|
||||
// Return result
|
||||
return std::make_pair(result, allRemainingFactors);
|
||||
// Check for Bayes tree orphan subtrees, and add them to our children
|
||||
BOOST_FOREACH(const sharedFactor& f, node->factors) {
|
||||
if (const BayesTreeOrphanWrapper<BTNode>* asSubtree =
|
||||
dynamic_cast<const BayesTreeOrphanWrapper<BTNode>*>(f.get())) {
|
||||
myData.bayesTreeNode->children.push_back(asSubtree->clique);
|
||||
asSubtree->clique->parent_ = myData.bayesTreeNode;
|
||||
}
|
||||
}
|
||||
|
||||
// >>>>>>>>>>>>>> Do dense elimination step >>>>>>>>>>>>>>>>>>>>>>>>>>>>>
|
||||
std::pair<boost::shared_ptr<ConditionalType>,
|
||||
boost::shared_ptr<FactorType> > eliminationResult =
|
||||
eliminationFunction_(gatheredFactors, node->orderedFrontalKeys);
|
||||
// <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<
|
||||
|
||||
// Store conditional in BayesTree clique, and in the case of ISAM2Clique also store the remaining factor
|
||||
myData.bayesTreeNode->setEliminationResult(eliminationResult);
|
||||
|
||||
// Fill nodes index - we do this here instead of calling insertRoot at the end to avoid
|
||||
// putting orphan subtrees in the index - they'll already be in the index of the ISAM2
|
||||
// object they're added to.
|
||||
BOOST_FOREACH(const Key& j, myData.bayesTreeNode->conditional()->frontals())
|
||||
nodesIndex_.insert(std::make_pair(j, myData.bayesTreeNode));
|
||||
|
||||
// Store remaining factor in parent's gathered factors
|
||||
if (!eliminationResult.second->empty())
|
||||
myData.parentData->childFactors[myData.myIndexInParent] =
|
||||
eliminationResult.second;
|
||||
}
|
||||
};
|
||||
};
|
||||
|
||||
/* ************************************************************************* */
|
||||
template<class BAYESTREE, class GRAPH>
|
||||
void ClusterTree<BAYESTREE, GRAPH>::Cluster::print(const std::string& s,
|
||||
const KeyFormatter& keyFormatter) const {
|
||||
std::cout << s << " (" << problemSize_ << ")";
|
||||
PrintKeyVector(orderedFrontalKeys);
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
template<class BAYESTREE, class GRAPH>
|
||||
void ClusterTree<BAYESTREE, GRAPH>::Cluster::mergeChildren(
|
||||
const std::vector<bool>& merge) {
|
||||
gttic(Cluster_mergeChildren);
|
||||
|
||||
// Count how many keys, factors and children we'll end up with
|
||||
size_t nrKeys = orderedFrontalKeys.size();
|
||||
size_t nrFactors = factors.size();
|
||||
size_t nrNewChildren = 0;
|
||||
// Loop over children
|
||||
size_t i = 0;
|
||||
BOOST_FOREACH(const sharedNode& child, children) {
|
||||
if (merge[i]) {
|
||||
nrKeys += child->orderedFrontalKeys.size();
|
||||
nrFactors += child->factors.size();
|
||||
nrNewChildren += child->children.size();
|
||||
} else {
|
||||
nrNewChildren += 1; // we keep the child
|
||||
}
|
||||
++i;
|
||||
}
|
||||
|
||||
// now reserve space, and really merge
|
||||
orderedFrontalKeys.reserve(nrKeys);
|
||||
factors.reserve(nrFactors);
|
||||
typename Node::Children newChildren;
|
||||
newChildren.reserve(nrNewChildren);
|
||||
i = 0;
|
||||
BOOST_FOREACH(const sharedNode& child, children) {
|
||||
if (merge[i]) {
|
||||
// Merge keys. For efficiency, we add keys in reverse order at the end, then call std::reverse afterwards.
|
||||
orderedFrontalKeys.insert(orderedFrontalKeys.end(),
|
||||
child->orderedFrontalKeys.rbegin(), child->orderedFrontalKeys.rend());
|
||||
// Merge keys, factors, and children.
|
||||
factors.insert(factors.end(), child->factors.begin(),
|
||||
child->factors.end());
|
||||
newChildren.insert(newChildren.end(), child->children.begin(),
|
||||
child->children.end());
|
||||
// Increment problem size
|
||||
problemSize_ = std::max(problemSize_, child->problemSize_);
|
||||
// Increment number of frontal variables
|
||||
} else {
|
||||
newChildren.push_back(child); // we keep the child
|
||||
}
|
||||
++i;
|
||||
}
|
||||
children = newChildren;
|
||||
std::reverse(orderedFrontalKeys.begin(), orderedFrontalKeys.end());
|
||||
}
|
||||
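The merge mask drives which children are absorbed. A small hypothetical call, assuming a cluster with three children of which the first and third should be collapsed into their parent:

// merge[i] == true means child i is absorbed: its frontal keys, factors, and
// grandchildren move up into this cluster and the child node itself disappears.
std::vector<bool> merge(3, false);
merge[0] = true;
merge[2] = true;
cluster->mergeChildren(merge);  // 'cluster' is a hypothetical sharedNode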
|
||||
/* ************************************************************************* */
|
||||
template<class BAYESTREE, class GRAPH>
|
||||
void ClusterTree<BAYESTREE, GRAPH>::print(const std::string& s,
|
||||
const KeyFormatter& keyFormatter) const {
|
||||
treeTraversal::PrintForest(*this, s, keyFormatter);
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
template<class BAYESTREE, class GRAPH>
|
||||
ClusterTree<BAYESTREE, GRAPH>& ClusterTree<BAYESTREE, GRAPH>::operator=(
|
||||
const This& other) {
|
||||
// Start by duplicating the tree.
|
||||
roots_ = treeTraversal::CloneForest(other);
|
||||
|
||||
// Assign the remaining factors - these are pointers to factors in the original factor graph and
|
||||
// we do not clone them.
|
||||
remainingFactors_ = other.remainingFactors_;
|
||||
|
||||
return *this;
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
template<class BAYESTREE, class GRAPH>
|
||||
std::pair<boost::shared_ptr<BAYESTREE>, boost::shared_ptr<GRAPH> > ClusterTree<
|
||||
BAYESTREE, GRAPH>::eliminate(const Eliminate& function) const {
|
||||
gttic(ClusterTree_eliminate);
|
||||
// Do elimination (depth-first traversal). The rootsContainer stores a 'dummy' BayesTree node
|
||||
// that contains all of the roots as its children. rootsContainer also stores the remaining
|
||||
// uneliminated factors passed up from the roots.
|
||||
boost::shared_ptr<BayesTreeType> result = boost::make_shared<BayesTreeType>();
|
||||
typedef EliminationData<This> Data;
|
||||
Data rootsContainer(0, roots_.size());
|
||||
typename Data::EliminationPostOrderVisitor visitorPost(function,
|
||||
result->nodes_);
|
||||
{
|
||||
TbbOpenMPMixedScope threadLimiter; // Limits OpenMP threads since we're mixing TBB and OpenMP
|
||||
treeTraversal::DepthFirstForestParallel(*this, rootsContainer,
|
||||
Data::EliminationPreOrderVisitor, visitorPost, 10);
|
||||
}
|
||||
|
||||
// Create BayesTree from roots stored in the dummy BayesTree node.
|
||||
result->roots_.insert(result->roots_.end(),
|
||||
rootsContainer.bayesTreeNode->children.begin(),
|
||||
rootsContainer.bayesTreeNode->children.end());
|
||||
|
||||
// Add remaining factors that were not involved with eliminated variables
|
||||
boost::shared_ptr<FactorGraphType> remaining = boost::make_shared<
|
||||
FactorGraphType>();
|
||||
remaining->reserve(
|
||||
remainingFactors_.size() + rootsContainer.childFactors.size());
|
||||
remaining->push_back(remainingFactors_.begin(), remainingFactors_.end());
|
||||
BOOST_FOREACH(const sharedFactor& factor, rootsContainer.childFactors) {
|
||||
if (factor)
|
||||
remaining->push_back(factor);
|
||||
}
|
||||
// Return result
|
||||
return std::make_pair(result, remaining);
|
||||
}
|
||||
|
||||
}
|
||||
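For orientation, eliminate() above is usually reached through a derived junction tree rather than called directly. A hedged sketch of that path for the Gaussian case; the graph contents are invented, and the concrete class names (GaussianEliminationTree, GaussianJunctionTree, EliminateCholesky) are assumptions about how GTSAM instantiates these templates rather than anything shown in this diff.

GaussianFactorGraph graph;  // ... factors added elsewhere ...
Ordering ordering = Ordering::Colamd(VariableIndex(graph));
GaussianEliminationTree etree(graph, ordering);
GaussianJunctionTree jtree(etree);  // derives from ClusterTree, so this uses the
                                    // eliminate() defined above
std::pair<GaussianBayesTree::shared_ptr, GaussianFactorGraph::shared_ptr> r =
    jtree.eliminate(EliminateCholesky);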
|
|
|
@ -11,115 +11,124 @@
|
|||
|
||||
#include <gtsam/base/Testable.h>
|
||||
#include <gtsam/base/FastVector.h>
|
||||
#include <gtsam/inference/Key.h>
|
||||
#include <gtsam/inference/Ordering.h>
|
||||
|
||||
namespace gtsam
|
||||
{
|
||||
namespace gtsam {
|
||||
|
||||
/**
|
||||
* A cluster-tree is associated with a factor graph and is defined as in Koller-Friedman:
|
||||
* each node k represents a subset \f$ C_k \sub X \f$, and the tree is family preserving, in that
|
||||
* each factor \f$ f_i \f$ is associated with a single cluster and \f$ scope(f_i) \sub C_k \f$.
|
||||
* \nosubgrouping
|
||||
*/
|
||||
template<class BAYESTREE, class GRAPH>
|
||||
class ClusterTree
|
||||
{
|
||||
public:
|
||||
typedef GRAPH FactorGraphType; ///< The factor graph type
|
||||
typedef typename GRAPH::FactorType FactorType; ///< The type of factors
|
||||
typedef ClusterTree<BAYESTREE, GRAPH> This; ///< This class
|
||||
typedef boost::shared_ptr<This> shared_ptr; ///< Shared pointer to this class
|
||||
typedef boost::shared_ptr<FactorType> sharedFactor; ///< Shared pointer to a factor
|
||||
typedef BAYESTREE BayesTreeType; ///< The BayesTree type produced by elimination
|
||||
typedef typename BayesTreeType::ConditionalType ConditionalType; ///< The type of conditionals
|
||||
typedef boost::shared_ptr<ConditionalType> sharedConditional; ///< Shared pointer to a conditional
|
||||
typedef typename FactorGraphType::Eliminate Eliminate; ///< Typedef for an eliminate subroutine
|
||||
/**
|
||||
* A cluster-tree is associated with a factor graph and is defined as in Koller-Friedman:
|
||||
* each node k represents a subset \f$ C_k \subset X \f$, and the tree is family preserving, in that
|
||||
* each factor \f$ f_i \f$ is associated with a single cluster and \f$ scope(f_i) \subset C_k \f$.
|
||||
* \nosubgrouping
|
||||
*/
|
||||
template<class BAYESTREE, class GRAPH>
|
||||
class ClusterTree {
|
||||
public:
|
||||
typedef GRAPH FactorGraphType; ///< The factor graph type
|
||||
typedef typename GRAPH::FactorType FactorType; ///< The type of factors
|
||||
typedef ClusterTree<BAYESTREE, GRAPH> This; ///< This class
|
||||
typedef boost::shared_ptr<This> shared_ptr; ///< Shared pointer to this class
|
||||
typedef boost::shared_ptr<FactorType> sharedFactor; ///< Shared pointer to a factor
|
||||
typedef BAYESTREE BayesTreeType; ///< The BayesTree type produced by elimination
|
||||
typedef typename BayesTreeType::ConditionalType ConditionalType; ///< The type of conditionals
|
||||
typedef boost::shared_ptr<ConditionalType> sharedConditional; ///< Shared pointer to a conditional
|
||||
typedef typename FactorGraphType::Eliminate Eliminate; ///< Typedef for an eliminate subroutine
|
||||
|
||||
struct Cluster {
|
||||
typedef FastVector<Key> Keys;
|
||||
typedef FastVector<sharedFactor> Factors;
|
||||
typedef FastVector<boost::shared_ptr<Cluster> > Children;
|
||||
struct Cluster {
|
||||
typedef Ordering Keys;
|
||||
typedef FastVector<sharedFactor> Factors;
|
||||
typedef FastVector<boost::shared_ptr<Cluster> > Children;
|
||||
|
||||
Keys keys; ///< Frontal keys of this node
|
||||
Factors factors; ///< Factors associated with this node
|
||||
Children children; ///< sub-trees
|
||||
int problemSize_;
|
||||
Cluster() {
|
||||
}
|
||||
Cluster(Key key, const Factors& factors) :
|
||||
factors(factors) {
|
||||
orderedFrontalKeys.push_back(key);
|
||||
}
|
||||
|
||||
int problemSize() const { return problemSize_; }
|
||||
Keys orderedFrontalKeys; ///< Frontal keys of this node
|
||||
Factors factors; ///< Factors associated with this node
|
||||
Children children; ///< sub-trees
|
||||
int problemSize_;
|
||||
|
||||
/** print this node */
|
||||
void print(const std::string& s = "", const KeyFormatter& keyFormatter = DefaultKeyFormatter) const;
|
||||
};
|
||||
int problemSize() const {
|
||||
return problemSize_;
|
||||
}
|
||||
|
||||
typedef boost::shared_ptr<Cluster> sharedCluster; ///< Shared pointer to Cluster
|
||||
typedef Cluster Node; ///< Define Node=Cluster for compatibility with tree traversal functions
|
||||
typedef sharedCluster sharedNode; ///< Define Node=Cluster for compatibility with tree traversal functions
|
||||
|
||||
/** concept check */
|
||||
GTSAM_CONCEPT_TESTABLE_TYPE(FactorType);
|
||||
|
||||
protected:
|
||||
FastVector<sharedNode> roots_;
|
||||
FastVector<sharedFactor> remainingFactors_;
|
||||
|
||||
/// @name Standard Constructors
|
||||
/// @{
|
||||
|
||||
/** Copy constructor - makes a deep copy of the tree structure, but only pointers to factors are
|
||||
* copied, factors are not cloned. */
|
||||
ClusterTree(const This& other) { *this = other; }
|
||||
|
||||
/// @}
|
||||
|
||||
public:
|
||||
/// @name Testable
|
||||
/// @{
|
||||
|
||||
/** Print the cluster tree */
|
||||
void print(const std::string& s = "", const KeyFormatter& keyFormatter = DefaultKeyFormatter) const;
|
||||
|
||||
/// @}
|
||||
|
||||
/// @name Standard Interface
|
||||
/// @{
|
||||
|
||||
/** Eliminate the factors to a Bayes tree and remaining factor graph
|
||||
* @param function The function to use to eliminate, see the namespace functions
|
||||
* in GaussianFactorGraph.h
|
||||
* @return The Bayes tree and factor graph resulting from elimination
|
||||
*/
|
||||
std::pair<boost::shared_ptr<BayesTreeType>, boost::shared_ptr<FactorGraphType> >
|
||||
eliminate(const Eliminate& function) const;
|
||||
|
||||
/// @}
|
||||
|
||||
/// @name Advanced Interface
|
||||
/// @{
|
||||
|
||||
/** Return the set of roots (one for a tree, multiple for a forest) */
|
||||
const FastVector<sharedNode>& roots() const { return roots_; }
|
||||
|
||||
/** Return the remaining factors that are not pulled into elimination */
|
||||
const FastVector<sharedFactor>& remainingFactors() const { return remainingFactors_; }
|
||||
|
||||
/// @}
|
||||
|
||||
protected:
|
||||
/// @name Details
|
||||
|
||||
/// Assignment operator - makes a deep copy of the tree structure, but only pointers to factors
|
||||
/// are copied, factors are not cloned.
|
||||
This& operator=(const This& other);
|
||||
|
||||
/// Default constructor to be used in derived classes
|
||||
ClusterTree() {}
|
||||
|
||||
/// @}
|
||||
/// print this node
|
||||
void print(const std::string& s = "", const KeyFormatter& keyFormatter =
|
||||
DefaultKeyFormatter) const;
|
||||
|
||||
/// Merge all children for which bit is set into this node
|
||||
void mergeChildren(const std::vector<bool>& merge);
|
||||
};
|
||||
|
||||
typedef boost::shared_ptr<Cluster> sharedCluster; ///< Shared pointer to Cluster
|
||||
typedef Cluster Node; ///< Define Node=Cluster for compatibility with tree traversal functions
|
||||
typedef sharedCluster sharedNode; ///< Define Node=Cluster for compatibility with tree traversal functions
|
||||
|
||||
/** concept check */
|
||||
GTSAM_CONCEPT_TESTABLE_TYPE(FactorType);
|
||||
|
||||
protected:
|
||||
FastVector<sharedNode> roots_;
|
||||
FastVector<sharedFactor> remainingFactors_;
|
||||
|
||||
/// @name Standard Constructors
|
||||
/// @{
|
||||
|
||||
/** Copy constructor - makes a deep copy of the tree structure, but only pointers to factors are
|
||||
* copied, factors are not cloned. */
|
||||
ClusterTree(const This& other) {*this = other;}
|
||||
|
||||
/// @}
|
||||
|
||||
public:
|
||||
/// @name Testable
|
||||
/// @{
|
||||
|
||||
/** Print the cluster tree */
|
||||
void print(const std::string& s = "", const KeyFormatter& keyFormatter = DefaultKeyFormatter) const;
|
||||
|
||||
/// @}
|
||||
|
||||
/// @name Standard Interface
|
||||
/// @{
|
||||
|
||||
/** Eliminate the factors to a Bayes tree and remaining factor graph
|
||||
* @param function The function to use to eliminate, see the namespace functions
|
||||
* in GaussianFactorGraph.h
|
||||
* @return The Bayes tree and factor graph resulting from elimination
|
||||
*/
|
||||
std::pair<boost::shared_ptr<BayesTreeType>, boost::shared_ptr<FactorGraphType> >
|
||||
eliminate(const Eliminate& function) const;
|
||||
|
||||
/// @}
|
||||
|
||||
/// @name Advanced Interface
|
||||
/// @{
|
||||
|
||||
/** Return the set of roots (one for a tree, multiple for a forest) */
|
||||
const FastVector<sharedNode>& roots() const {return roots_;}
|
||||
|
||||
/** Return the remaining factors that are not pulled into elimination */
|
||||
const FastVector<sharedFactor>& remainingFactors() const {return remainingFactors_;}
|
||||
|
||||
/// @}
|
||||
|
||||
protected:
|
||||
/// @name Details
|
||||
|
||||
/// Assignment operator - makes a deep copy of the tree structure, but only pointers to factors
|
||||
/// are copied, factors are not cloned.
|
||||
This& operator=(const This& other);
|
||||
|
||||
/// Default constructor to be used in derived classes
|
||||
ClusterTree() {}
|
||||
|
||||
/// @}
|
||||
|
||||
};
|
||||
|
||||
}
|
||||
|
||||
|
||||
|
|
|
@ -55,9 +55,9 @@ namespace gtsam {
|
|||
// have a VariableIndex already here because we computed one if needed in the previous 'else'
|
||||
// block.
|
||||
if (orderingType == Ordering::METIS)
|
||||
return eliminateSequential(Ordering::metis(asDerived()), function, variableIndex, orderingType);
|
||||
else
|
||||
return eliminateSequential(Ordering::colamd(*variableIndex), function, variableIndex, orderingType);
|
||||
return eliminateSequential(Ordering::Metis(asDerived()), function, variableIndex, orderingType);
|
||||
else
|
||||
return eliminateSequential(Ordering::Colamd(*variableIndex), function, variableIndex, orderingType);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -93,9 +93,9 @@ namespace gtsam {
|
|||
// have a VariableIndex already here because we computed one if needed in the previous 'else'
|
||||
// block.
|
||||
if (orderingType == Ordering::METIS)
|
||||
return eliminateMultifrontal(Ordering::metis(asDerived()), function, variableIndex, orderingType);
|
||||
return eliminateMultifrontal(Ordering::Metis(asDerived()), function, variableIndex, orderingType);
|
||||
else
|
||||
return eliminateMultifrontal(Ordering::colamd(*variableIndex), function, variableIndex, orderingType);
|
||||
return eliminateMultifrontal(Ordering::Colamd(*variableIndex), function, variableIndex, orderingType);
|
||||
}
|
||||
}
|
||||
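Both elimination entry points above fall back to a computed ordering when none is given. A hedged usage sketch with an explicit ordering, using the renamed static constructors (graph contents invented):

GaussianFactorGraph graph;  // ... factors added elsewhere ...
Ordering ordering = Ordering::Colamd(VariableIndex(graph));
GaussianBayesNet::shared_ptr bayesNet = graph.eliminateSequential(ordering);
GaussianBayesTree::shared_ptr bayesTree = graph.eliminateMultifrontal(ordering);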
|
||||
|
@ -125,7 +125,7 @@ namespace gtsam {
|
|||
if(variableIndex) {
|
||||
gttic(eliminatePartialSequential);
|
||||
// Compute full ordering
|
||||
Ordering fullOrdering = Ordering::colamdConstrainedFirst(*variableIndex, variables);
|
||||
Ordering fullOrdering = Ordering::ColamdConstrainedFirst(*variableIndex, variables);
|
||||
|
||||
// Split off the part of the ordering for the variables being eliminated
|
||||
Ordering ordering(fullOrdering.begin(), fullOrdering.begin() + variables.size());
|
||||
|
@ -163,7 +163,7 @@ namespace gtsam {
|
|||
if(variableIndex) {
|
||||
gttic(eliminatePartialMultifrontal);
|
||||
// Compute full ordering
|
||||
Ordering fullOrdering = Ordering::colamdConstrainedFirst(*variableIndex, variables);
|
||||
Ordering fullOrdering = Ordering::ColamdConstrainedFirst(*variableIndex, variables);
|
||||
|
||||
// Split off the part of the ordering for the variables being eliminated
|
||||
Ordering ordering(fullOrdering.begin(), fullOrdering.begin() + variables.size());
|
||||
|
@ -216,7 +216,7 @@ namespace gtsam {
|
|||
boost::get<const Ordering&>(&variables) : boost::get<const std::vector<Key>&>(&variables);
|
||||
|
||||
Ordering totalOrdering =
|
||||
Ordering::colamdConstrainedLast(*variableIndex, *variablesOrOrdering, unmarginalizedAreOrdered);
|
||||
Ordering::ColamdConstrainedLast(*variableIndex, *variablesOrOrdering, unmarginalizedAreOrdered);
|
||||
|
||||
// Split up ordering
|
||||
const size_t nVars = variablesOrOrdering->size();
|
||||
|
@ -275,7 +275,7 @@ namespace gtsam {
|
|||
boost::get<const Ordering&>(&variables) : boost::get<const std::vector<Key>&>(&variables);
|
||||
|
||||
Ordering totalOrdering =
|
||||
Ordering::colamdConstrainedLast(*variableIndex, *variablesOrOrdering, unmarginalizedAreOrdered);
|
||||
Ordering::ColamdConstrainedLast(*variableIndex, *variablesOrOrdering, unmarginalizedAreOrdered);
|
||||
|
||||
// Split up ordering
|
||||
const size_t nVars = variablesOrOrdering->size();
|
||||
|
@ -301,7 +301,7 @@ namespace gtsam {
|
|||
if(variableIndex)
|
||||
{
|
||||
// Compute a total ordering for all variables
|
||||
Ordering totalOrdering = Ordering::colamdConstrainedLast(*variableIndex, variables);
|
||||
Ordering totalOrdering = Ordering::ColamdConstrainedLast(*variableIndex, variables);
|
||||
|
||||
// Split out the part for the marginalized variables
|
||||
Ordering marginalizationOrdering(totalOrdering.begin(), totalOrdering.end() - variables.size());
|
||||
|
|
|
@ -128,7 +128,8 @@ namespace gtsam {
|
|||
OptionalOrderingType orderingType = boost::none) const;
|
||||
|
||||
/** Do multifrontal elimination of all variables to produce a Bayes tree. If an ordering is not
|
||||
* provided, the ordering provided by COLAMD will be used.
|
||||
* provided, the ordering will be computed using either COLAMD or METIS, depending on
|
||||
* the parameter orderingType (Ordering::COLAMD or Ordering::METIS)
|
||||
*
|
||||
* <b> Example - Full Cholesky elimination in COLAMD order: </b>
|
||||
* \code
|
||||
|
|
|
@ -101,10 +101,12 @@ namespace gtsam {
|
|||
{
|
||||
// Retrieve the factors involving this variable and create the current node
|
||||
const VariableIndex::Factors& factors = structure[order[j]];
|
||||
nodes[j] = boost::make_shared<Node>();
|
||||
nodes[j]->key = order[j];
|
||||
const sharedNode node = boost::make_shared<Node>();
|
||||
node->key = order[j];
|
||||
|
||||
// for row i \in Struct[A*j] do
|
||||
node->children.reserve(factors.size());
|
||||
node->factors.reserve(factors.size());
|
||||
BOOST_FOREACH(const size_t i, factors) {
|
||||
// If we already hit a variable in this factor, make the subtree containing the previous
|
||||
// variable in this factor a child of the current node. This means that the variables
|
||||
|
@ -123,16 +125,16 @@ namespace gtsam {
|
|||
if (r != j) {
|
||||
// Now that we found the root, hook up parent and child pointers in the nodes.
|
||||
parents[r] = j;
|
||||
nodes[j]->children.push_back(nodes[r]);
|
||||
node->children.push_back(nodes[r]);
|
||||
}
|
||||
} else {
|
||||
// Add the current factor to the current node since we are at the first variable in this
|
||||
// factor.
|
||||
nodes[j]->factors.push_back(graph[i]);
|
||||
// Add the factor to the current node since we are at the first variable in this factor.
|
||||
node->factors.push_back(graph[i]);
|
||||
factorUsed[i] = true;
|
||||
}
|
||||
prevCol[i] = j;
|
||||
}
|
||||
nodes[j] = node;
|
||||
}
|
||||
} catch(std::invalid_argument& e) {
|
||||
// If this is thrown from structure[order[j]] above, it means that it was requested to
|
||||
|
|
|
@ -28,6 +28,7 @@
|
|||
|
||||
#include <stdio.h>
|
||||
#include <sstream>
|
||||
#include <iostream> // for cout :-(
|
||||
|
||||
namespace gtsam {
|
||||
|
||||
|
@ -72,8 +73,8 @@ namespace gtsam {
|
|||
|
||||
/* ************************************************************************* */
|
||||
template<class FACTOR>
|
||||
FastSet<Key> FactorGraph<FACTOR>::keys() const {
|
||||
FastSet<Key> allKeys;
|
||||
KeySet FactorGraph<FACTOR>::keys() const {
|
||||
KeySet allKeys;
|
||||
BOOST_FOREACH(const sharedFactor& factor, factors_)
|
||||
if (factor)
|
||||
allKeys.insert(factor->begin(), factor->end());
|
||||
|
|
|
@ -332,7 +332,7 @@ namespace gtsam {
|
|||
size_t nrFactors() const;
|
||||
|
||||
/** Potentially very slow function to return all keys involved */
|
||||
FastSet<Key> keys() const;
|
||||
KeySet keys() const;
|
||||
|
||||
/** MATLAB interface utility: Checks whether a factor index idx exists in the graph and is a live pointer */
|
||||
inline bool exists(size_t idx) const { return idx < size() && at(idx); }
|
||||
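A small sketch of the retyped keys() accessor in use (graph contents invented):

GaussianFactorGraph graph;  // ... factors added elsewhere ...
KeySet usedKeys = graph.keys();  // now a KeySet rather than FastSet<Key>
BOOST_FOREACH(Key j, usedKeys)
  std::cout << DefaultKeyFormatter(j) << " ";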
|
|
|
@ -29,7 +29,7 @@ namespace gtsam {
|
|||
// Remove the contaminated part of the Bayes tree
|
||||
BayesNetType bn;
|
||||
if (!this->empty()) {
|
||||
const FastSet<Key> newFactorKeys = newFactors.keys();
|
||||
const KeySet newFactorKeys = newFactors.keys();
|
||||
this->removeTop(std::vector<Key>(newFactorKeys.begin(), newFactorKeys.end()), bn, orphans);
|
||||
}
|
||||
|
||||
|
@ -44,9 +44,9 @@ namespace gtsam {
|
|||
|
||||
// eliminate into a Bayes net
|
||||
const VariableIndex varIndex(factors);
|
||||
const FastSet<Key> newFactorKeys = newFactors.keys();
|
||||
const KeySet newFactorKeys = newFactors.keys();
|
||||
const Ordering constrainedOrdering =
|
||||
Ordering::colamdConstrainedLast(varIndex, std::vector<Key>(newFactorKeys.begin(), newFactorKeys.end()));
|
||||
Ordering::ColamdConstrainedLast(varIndex, std::vector<Key>(newFactorKeys.begin(), newFactorKeys.end()));
|
||||
Base bayesTree = *factors.eliminateMultifrontal(constrainedOrdering, function, varIndex);
|
||||
this->roots_.insert(this->roots_.end(), bayesTree.roots().begin(), bayesTree.roots().end());
|
||||
this->nodes_.insert(bayesTree.nodes().begin(), bayesTree.nodes().end());
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
/* ----------------------------------------------------------------------------
|
||||
|
||||
* GTSAM Copyright 2010, Georgia Tech Research Corporation,
|
||||
* GTSAM Copyright 2010, Georgia Tech Research Corporation,
|
||||
* Atlanta, Georgia 30332-0415
|
||||
* All Rights Reserved
|
||||
* Authors: Frank Dellaert, et al. (see THANKS for the full author list)
|
||||
|
@ -26,123 +26,132 @@
|
|||
#include <gtsam/symbolic/SymbolicFactor-inst.h>
|
||||
|
||||
namespace gtsam {
|
||||
|
||||
namespace {
|
||||
/* ************************************************************************* */
|
||||
template<class BAYESTREE, class GRAPH>
|
||||
struct ConstructorTraversalData {
|
||||
ConstructorTraversalData* const parentData;
|
||||
typename JunctionTree<BAYESTREE,GRAPH>::sharedNode myJTNode;
|
||||
FastVector<SymbolicConditional::shared_ptr> childSymbolicConditionals;
|
||||
FastVector<SymbolicFactor::shared_ptr> childSymbolicFactors;
|
||||
ConstructorTraversalData(ConstructorTraversalData* _parentData) : parentData(_parentData) {}
|
||||
};
|
||||
|
||||
/* ************************************************************************* */
|
||||
// Pre-order visitor function
|
||||
template<class BAYESTREE, class GRAPH, class ETREE_NODE>
|
||||
ConstructorTraversalData<BAYESTREE,GRAPH> ConstructorTraversalVisitorPre(
|
||||
template<class BAYESTREE, class GRAPH, class ETREE_NODE>
|
||||
struct ConstructorTraversalData {
|
||||
typedef typename JunctionTree<BAYESTREE, GRAPH>::Node Node;
|
||||
typedef typename JunctionTree<BAYESTREE, GRAPH>::sharedNode sharedNode;
|
||||
|
||||
ConstructorTraversalData* const parentData;
|
||||
sharedNode myJTNode;
|
||||
FastVector<SymbolicConditional::shared_ptr> childSymbolicConditionals;
|
||||
FastVector<SymbolicFactor::shared_ptr> childSymbolicFactors;
|
||||
|
||||
// Small inner class to store symbolic factors
|
||||
class SymbolicFactors: public FactorGraph<Factor> {
|
||||
};
|
||||
|
||||
ConstructorTraversalData(ConstructorTraversalData* _parentData) :
|
||||
parentData(_parentData) {
|
||||
}
|
||||
|
||||
// Pre-order visitor function
|
||||
static ConstructorTraversalData ConstructorTraversalVisitorPre(
|
||||
const boost::shared_ptr<ETREE_NODE>& node,
|
||||
ConstructorTraversalData<BAYESTREE,GRAPH>& parentData)
|
||||
{
|
||||
// On the pre-order pass, before children have been visited, we just set up a traversal data
|
||||
// structure with its own JT node, and create a child pointer in its parent.
|
||||
ConstructorTraversalData<BAYESTREE,GRAPH> myData = ConstructorTraversalData<BAYESTREE,GRAPH>(&parentData);
|
||||
myData.myJTNode = boost::make_shared<typename JunctionTree<BAYESTREE,GRAPH>::Node>();
|
||||
myData.myJTNode->keys.push_back(node->key);
|
||||
myData.myJTNode->factors.insert(myData.myJTNode->factors.begin(), node->factors.begin(), node->factors.end());
|
||||
parentData.myJTNode->children.push_back(myData.myJTNode);
|
||||
return myData;
|
||||
}
|
||||
ConstructorTraversalData& parentData) {
|
||||
// On the pre-order pass, before children have been visited, we just set up
|
||||
// a traversal data structure with its own JT node, and create a child
|
||||
// pointer in its parent.
|
||||
ConstructorTraversalData myData = ConstructorTraversalData(&parentData);
|
||||
myData.myJTNode = boost::make_shared<Node>(node->key, node->factors);
|
||||
parentData.myJTNode->children.push_back(myData.myJTNode);
|
||||
return myData;
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
// Post-order visitor function
|
||||
template<class BAYESTREE, class GRAPH, class ETREE_NODE>
|
||||
void ConstructorTraversalVisitorPostAlg2(
|
||||
// Post-order visitor function
|
||||
static void ConstructorTraversalVisitorPostAlg2(
|
||||
const boost::shared_ptr<ETREE_NODE>& ETreeNode,
|
||||
const ConstructorTraversalData<BAYESTREE, GRAPH>& myData)
|
||||
{
|
||||
// In this post-order visitor, we combine the symbolic elimination results from the
|
||||
// elimination tree children and symbolically eliminate the current elimination tree node. We
|
||||
// then check whether each of our elimination tree child nodes should be merged with us. The
|
||||
// check for this is that our number of symbolic elimination parents is exactly 1 less than
|
||||
// our child's symbolic elimination parents - this condition indicates that eliminating the
|
||||
// current node did not introduce any parents beyond those already in the child.
|
||||
const ConstructorTraversalData& myData) {
|
||||
// In this post-order visitor, we combine the symbolic elimination results
|
||||
// from the elimination tree children and symbolically eliminate the current
|
||||
// elimination tree node. We then check whether each of our elimination
|
||||
// tree child nodes should be merged with us. The check for this is that
|
||||
// our number of symbolic elimination parents is exactly 1 less than
|
||||
// our child's symbolic elimination parents - this condition indicates that
|
||||
// eliminating the current node did not introduce any parents beyond those
|
||||
// already in the child.
|
||||
|
||||
// Do symbolic elimination for this node
|
||||
class : public FactorGraph<Factor> {} symbolicFactors;
|
||||
symbolicFactors.reserve(ETreeNode->factors.size() + myData.childSymbolicFactors.size());
|
||||
// Add ETree node factors
|
||||
symbolicFactors += ETreeNode->factors;
|
||||
// Add symbolic factors passed up from children
|
||||
symbolicFactors += myData.childSymbolicFactors;
|
||||
// Do symbolic elimination for this node
|
||||
SymbolicFactors symbolicFactors;
|
||||
symbolicFactors.reserve(
|
||||
ETreeNode->factors.size() + myData.childSymbolicFactors.size());
|
||||
// Add ETree node factors
|
||||
symbolicFactors += ETreeNode->factors;
|
||||
// Add symbolic factors passed up from children
|
||||
symbolicFactors += myData.childSymbolicFactors;
|
||||
|
||||
Ordering keyAsOrdering; keyAsOrdering.push_back(ETreeNode->key);
|
||||
std::pair<SymbolicConditional::shared_ptr, SymbolicFactor::shared_ptr> symbolicElimResult =
|
||||
internal::EliminateSymbolic(symbolicFactors, keyAsOrdering);
|
||||
Ordering keyAsOrdering;
|
||||
keyAsOrdering.push_back(ETreeNode->key);
|
||||
SymbolicConditional::shared_ptr myConditional;
|
||||
SymbolicFactor::shared_ptr mySeparatorFactor;
|
||||
boost::tie(myConditional, mySeparatorFactor) = internal::EliminateSymbolic(
|
||||
symbolicFactors, keyAsOrdering);
|
||||
|
||||
// Store symbolic elimination results in the parent
|
||||
myData.parentData->childSymbolicConditionals.push_back(symbolicElimResult.first);
|
||||
myData.parentData->childSymbolicFactors.push_back(symbolicElimResult.second);
|
||||
// Store symbolic elimination results in the parent
|
||||
myData.parentData->childSymbolicConditionals.push_back(myConditional);
|
||||
myData.parentData->childSymbolicFactors.push_back(mySeparatorFactor);
|
||||
|
||||
// Merge our children if they are in our clique - if our conditional has exactly one fewer
|
||||
// parent than our child's conditional.
|
||||
size_t myNrFrontals = 1;
|
||||
const size_t myNrParents = symbolicElimResult.first->nrParents();
|
||||
size_t nrMergedChildren = 0;
|
||||
assert(myData.myJTNode->children.size() == myData.childSymbolicConditionals.size());
|
||||
// Loop over children
|
||||
int combinedProblemSize = (int) (symbolicElimResult.first->size() * symbolicFactors.size());
|
||||
for(size_t child = 0; child < myData.childSymbolicConditionals.size(); ++child) {
|
||||
// Check if we should merge the child
|
||||
if(myNrParents + myNrFrontals == myData.childSymbolicConditionals[child]->nrParents()) {
|
||||
// Get a reference to the child, adjusting the index to account for children previously
|
||||
// merged and removed from the child list.
|
||||
const typename JunctionTree<BAYESTREE, GRAPH>::Node& childToMerge =
|
||||
*myData.myJTNode->children[child - nrMergedChildren];
|
||||
// Merge keys, factors, and children.
|
||||
myData.myJTNode->keys.insert(myData.myJTNode->keys.begin(), childToMerge.keys.begin(), childToMerge.keys.end());
|
||||
myData.myJTNode->factors.insert(myData.myJTNode->factors.end(), childToMerge.factors.begin(), childToMerge.factors.end());
|
||||
myData.myJTNode->children.insert(myData.myJTNode->children.end(), childToMerge.children.begin(), childToMerge.children.end());
|
||||
// Increment problem size
|
||||
combinedProblemSize = std::max(combinedProblemSize, childToMerge.problemSize_);
|
||||
// Increment number of frontal variables
|
||||
myNrFrontals += childToMerge.keys.size();
|
||||
// Remove child from list.
|
||||
myData.myJTNode->children.erase(myData.myJTNode->children.begin() + (child - nrMergedChildren));
|
||||
// Increment number of merged children
|
||||
++ nrMergedChildren;
|
||||
}
|
||||
sharedNode node = myData.myJTNode;
|
||||
const FastVector<SymbolicConditional::shared_ptr>& childConditionals =
|
||||
myData.childSymbolicConditionals;
|
||||
node->problemSize_ = (int) (myConditional->size() * symbolicFactors.size());
|
||||
|
||||
// Merge our children if they are in our clique - if our conditional has
|
||||
// exactly one fewer parent than our child's conditional.
|
||||
const size_t myNrParents = myConditional->nrParents();
|
||||
const size_t nrChildren = node->children.size();
|
||||
assert(childConditionals.size() == nrChildren);
|
||||
|
||||
// decide which children to merge, as index into children
|
||||
std::vector<bool> merge(nrChildren, false);
|
||||
size_t myNrFrontals = 1, i = 0;
|
||||
BOOST_FOREACH(const sharedNode& child, node->children) {
|
||||
// Check if we should merge the i^th child
|
||||
if (myNrParents + myNrFrontals == childConditionals[i]->nrParents()) {
|
||||
// Increment number of frontal variables
|
||||
myNrFrontals += child->orderedFrontalKeys.size();
|
||||
merge[i] = true;
|
||||
}
|
||||
myData.myJTNode->problemSize_ = combinedProblemSize;
|
||||
++i;
|
||||
}
|
||||
|
||||
// now really merge
|
||||
node->mergeChildren(merge);
|
||||
}
|
||||
};
|
||||
|
||||
/* ************************************************************************* */
|
||||
template<class BAYESTREE, class GRAPH>
|
||||
template<class ETREE_BAYESNET, class ETREE_GRAPH>
|
||||
JunctionTree<BAYESTREE,GRAPH>::JunctionTree(const EliminationTree<ETREE_BAYESNET, ETREE_GRAPH>& eliminationTree)
|
||||
{
|
||||
gttic(JunctionTree_FromEliminationTree);
|
||||
// Here we rely on the BayesNet having been produced by this elimination tree, such that the
|
||||
// conditionals are arranged in DFS post-order. We traverse the elimination tree, and inspect
|
||||
// the symbolic conditional corresponding to each node. The elimination tree node is added to
|
||||
// the same clique with its parent if it has exactly one more Bayes net conditional parent than
|
||||
// does its elimination tree parent.
|
||||
/* ************************************************************************* */
|
||||
template<class BAYESTREE, class GRAPH>
|
||||
template<class ETREE_BAYESNET, class ETREE_GRAPH>
|
||||
JunctionTree<BAYESTREE, GRAPH>::JunctionTree(
|
||||
const EliminationTree<ETREE_BAYESNET, ETREE_GRAPH>& eliminationTree) {
|
||||
gttic(JunctionTree_FromEliminationTree);
|
||||
// Here we rely on the BayesNet having been produced by this elimination tree,
|
||||
// such that the conditionals are arranged in DFS post-order. We traverse the
|
||||
// elimination tree, and inspect the symbolic conditional corresponding to
|
||||
// each node. The elimination tree node is added to the same clique with its
|
||||
// parent if it has exactly one more Bayes net conditional parent than
|
||||
// does its elimination tree parent.
|
||||
|
||||
// Traverse the elimination tree, doing symbolic elimination and merging nodes as we go. Gather
|
||||
// the created junction tree roots in a dummy Node.
|
||||
typedef typename EliminationTree<ETREE_BAYESNET, ETREE_GRAPH>::Node ETreeNode;
|
||||
ConstructorTraversalData<BAYESTREE, GRAPH> rootData(0);
|
||||
rootData.myJTNode = boost::make_shared<typename Base::Node>(); // Make a dummy node to gather the junction tree roots
|
||||
treeTraversal::DepthFirstForest(eliminationTree, rootData,
|
||||
ConstructorTraversalVisitorPre<BAYESTREE,GRAPH,ETreeNode>, ConstructorTraversalVisitorPostAlg2<BAYESTREE,GRAPH,ETreeNode>);
|
||||
// Traverse the elimination tree, doing symbolic elimination and merging nodes
|
||||
// as we go. Gather the created junction tree roots in a dummy Node.
|
||||
typedef typename EliminationTree<ETREE_BAYESNET, ETREE_GRAPH>::Node ETreeNode;
|
||||
typedef ConstructorTraversalData<BAYESTREE, GRAPH, ETreeNode> Data;
|
||||
Data rootData(0);
|
||||
rootData.myJTNode = boost::make_shared<typename Base::Node>(); // Make a dummy node to gather
|
||||
// the junction tree roots
|
||||
treeTraversal::DepthFirstForest(eliminationTree, rootData,
|
||||
Data::ConstructorTraversalVisitorPre,
|
||||
Data::ConstructorTraversalVisitorPostAlg2);
|
||||
|
||||
// Assign roots from the dummy node
|
||||
Base::roots_ = rootData.myJTNode->children;
|
||||
// Assign roots from the dummy node
|
||||
typedef typename JunctionTree<BAYESTREE, GRAPH>::Node Node;
|
||||
const typename Node::Children& children = rootData.myJTNode->children;
|
||||
Base::roots_.reserve(children.size());
|
||||
Base::roots_.insert(Base::roots_.begin(), children.begin(), children.end());
|
||||
|
||||
// Transfer remaining factors from elimination tree
|
||||
Base::remainingFactors_ = eliminationTree.remainingFactors();
|
||||
}
|
||||
// Transfer remaining factors from elimination tree
|
||||
Base::remainingFactors_ = eliminationTree.remainingFactors();
|
||||
}
|
||||
|
||||
} //namespace gtsam
|
||||
} // namespace gtsam
|
||||
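The merge test above is easiest to see with concrete numbers: if the current node's symbolic conditional has 2 parents and the node holds 1 frontal key so far, a child whose conditional has exactly 3 parents introduces no new separator variables and can share the clique. A tiny standalone sketch of that check (values invented):

// Mirrors the condition used above to fill the 'merge' mask.
size_t myNrParents = 2;     // parents of this node's symbolic conditional
size_t myNrFrontals = 1;    // frontal count, starts at 1 for this node's key
size_t childNrParents = 3;  // parents of the child's symbolic conditional
bool absorbChild = (myNrParents + myNrFrontals == childNrParents);  // true here
if (absorbChild)
  myNrFrontals += 1;  // plus the child's frontal keys, as in the loop above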
|
|
|
@ -17,57 +17,72 @@
|
|||
* @date Feb 20, 2012
|
||||
*/
|
||||
|
||||
#include <iostream>
|
||||
|
||||
#include <boost/lexical_cast.hpp>
|
||||
#include <boost/foreach.hpp>
|
||||
|
||||
#include <gtsam/inference/Key.h>
|
||||
#include <gtsam/inference/LabeledSymbol.h>
|
||||
|
||||
#include <boost/foreach.hpp>
|
||||
#include <boost/lexical_cast.hpp>
|
||||
#include <iostream>
|
||||
|
||||
using namespace std;
|
||||
|
||||
namespace gtsam {
|
||||
|
||||
/* ************************************************************************* */
string _defaultKeyFormatter(Key key) {
  const Symbol asSymbol(key);
  if (asSymbol.chr() > 0)
    return (string) asSymbol;
  else
    return boost::lexical_cast<string>(key);
}

/* ************************************************************************* */
void PrintKey(Key key, const string& s, const KeyFormatter& keyFormatter) {
  cout << s << keyFormatter(key);
}

/* ************************************************************************* */
string _multirobotKeyFormatter(Key key) {
  const LabeledSymbol asLabeledSymbol(key);
  if (asLabeledSymbol.chr() > 0 && asLabeledSymbol.label() > 0)
    return (string) asLabeledSymbol;

  const Symbol asSymbol(key);
  if (asLabeledSymbol.chr() > 0)
    return (string) asSymbol;
  else
    return boost::lexical_cast<string>(key);
}

/* ************************************************************************* */
template<class CONTAINER>
static void Print(const CONTAINER& keys, const string& s,
    const KeyFormatter& keyFormatter) {
  cout << s << " ";
  if (keys.empty())
    cout << "(none)" << endl;
  else {
    BOOST_FOREACH(const Key& key, keys)
      cout << keyFormatter(key) << " ";
    cout << endl;
  }
}

/* ************************************************************************* */
void PrintKeyList(const KeyList& keys, const string& s,
    const KeyFormatter& keyFormatter) {
  Print(keys, s, keyFormatter);
}
/* ************************************************************************* */
void PrintKeyVector(const KeyVector& keys, const string& s,
    const KeyFormatter& keyFormatter) {
  Print(keys, s, keyFormatter);
}
/* ************************************************************************* */
void PrintKeySet(const KeySet& keys, const string& s,
    const KeyFormatter& keyFormatter) {
  Print(keys, s, keyFormatter);
}
|
||||
/* ************************************************************************* */
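// Usage sketch (illustrative, assuming the usual implicit Key conversions of
// Symbol and LabeledSymbol): how the formatters above render raw integer keys.
void formatterDemo() {
  Key plain = 12345;
  Key sym = Symbol('x', 7);
  Key labeled = LabeledSymbol('x', 'A', 3);
  PrintKey(plain);                                  // prints "12345"
  PrintKey(sym);                                    // prints "x7"
  PrintKey(labeled, " ", MultiRobotKeyFormatter);   // prints something like " xA3"
}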
@ -17,45 +17,75 @@
|
|||
*/
|
||||
#pragma once
|
||||
|
||||
#include <boost/function.hpp>
|
||||
#include <string>
|
||||
|
||||
#include <gtsam/global_includes.h>
|
||||
#include <gtsam/base/FastVector.h>
|
||||
#include <gtsam/base/FastList.h>
|
||||
#include <gtsam/base/FastSet.h>
|
||||
#include <gtsam/base/FastMap.h>
|
||||
#include <gtsam/base/FastSet.h>
|
||||
#include <gtsam/base/FastVector.h>
|
||||
#include <gtsam/base/Testable.h>
|
||||
#include <gtsam/base/types.h>
|
||||
#include <gtsam/dllexport.h>
|
||||
|
||||
#include <boost/function.hpp>
|
||||
|
||||
namespace gtsam {
|
||||
|
||||
/// Typedef for a function to format a key, i.e. to convert it to a string
|
||||
typedef boost::function<std::string(Key)> KeyFormatter;
|
||||
|
||||
// Helper function for Multi-robot Key Formatter
|
||||
GTSAM_EXPORT std::string _multirobotKeyFormatter(gtsam::Key key);
|
||||
// Helper function for DefaultKeyFormatter
|
||||
GTSAM_EXPORT std::string _defaultKeyFormatter(Key key);
|
||||
|
||||
///
|
||||
/// A KeyFormatter that will check for LabeledSymbol keys, as well as Symbol and plain
|
||||
/// integer keys. This keyformatter will need to be passed in to override the default
|
||||
/// formatter in print functions.
|
||||
///
|
||||
/// Checks for LabeledSymbol, Symbol and then plain keys, in order.
|
||||
static const gtsam::KeyFormatter MultiRobotKeyFormatter = &_multirobotKeyFormatter;
|
||||
/// The default KeyFormatter, which is used if no KeyFormatter is passed to
|
||||
/// a nonlinear 'print' function. Automatically detects plain integer keys
|
||||
/// and Symbol keys.
|
||||
static const KeyFormatter DefaultKeyFormatter = &_defaultKeyFormatter;
|
||||
|
||||
/// Useful typedefs for operations with Values - allow for matlab interfaces
|
||||
typedef FastList<Key> KeyList;
|
||||
typedef FastVector<Key> KeyVector;
|
||||
typedef FastSet<Key> KeySet;
|
||||
typedef FastMap<Key,int> KeyGroupMap;
|
||||
// Helper function for Multi-robot Key Formatter
|
||||
GTSAM_EXPORT std::string _multirobotKeyFormatter(gtsam::Key key);
|
||||
|
||||
/// Utility function to print sets of keys with optional prefix
|
||||
GTSAM_EXPORT void printKeyList(const KeyList& keys, const std::string& s = "",
|
||||
const KeyFormatter& keyFormatter = DefaultKeyFormatter);
|
||||
///
|
||||
/// A KeyFormatter that will check for LabeledSymbol keys, as well as Symbol and plain
|
||||
/// integer keys. This keyformatter will need to be passed in to override the default
|
||||
/// formatter in print functions.
|
||||
///
|
||||
/// Checks for LabeledSymbol, Symbol and then plain keys, in order.
|
||||
static const gtsam::KeyFormatter MultiRobotKeyFormatter =
|
||||
&_multirobotKeyFormatter;
|
||||
|
||||
/// Utility function to print sets of keys with optional prefix
|
||||
GTSAM_EXPORT void printKeyVector(const KeyVector& keys, const std::string& s = "",
|
||||
const KeyFormatter& keyFormatter = DefaultKeyFormatter);
|
||||
/// Useful typedef for operations with Values - allows for matlab interface
|
||||
typedef FastVector<Key> KeyVector;
|
||||
|
||||
/// Utility function to print sets of keys with optional prefix
|
||||
GTSAM_EXPORT void printKeySet(const KeySet& keys, const std::string& s = "",
|
||||
const KeyFormatter& keyFormatter = DefaultKeyFormatter);
|
||||
}
|
||||
// TODO(frank): Nothing fast about these :-(
|
||||
typedef FastList<Key> KeyList;
|
||||
typedef FastSet<Key> KeySet;
|
||||
typedef FastMap<Key, int> KeyGroupMap;
|
||||
|
||||
/// Utility function to print one key with optional prefix
|
||||
GTSAM_EXPORT void PrintKey(Key key, const std::string& s = "",
|
||||
const KeyFormatter& keyFormatter = DefaultKeyFormatter);
|
||||
|
||||
/// Utility function to print sets of keys with optional prefix
|
||||
GTSAM_EXPORT void PrintKeyList(const KeyList& keys, const std::string& s = "",
|
||||
const KeyFormatter& keyFormatter = DefaultKeyFormatter);
|
||||
|
||||
/// Utility function to print sets of keys with optional prefix
|
||||
GTSAM_EXPORT void PrintKeyVector(const KeyVector& keys, const std::string& s =
|
||||
"", const KeyFormatter& keyFormatter = DefaultKeyFormatter);
|
||||
|
||||
/// Utility function to print sets of keys with optional prefix
|
||||
GTSAM_EXPORT void PrintKeySet(const KeySet& keys, const std::string& s = "",
|
||||
const KeyFormatter& keyFormatter = DefaultKeyFormatter);
|
||||
|
||||
// Define Key to be Testable by specializing gtsam::traits
|
||||
template<typename T> struct traits;
|
||||
template<> struct traits<Key> {
|
||||
static void Print(const Key& key, const std::string& str = "") {
|
||||
PrintKey(key, str);
|
||||
}
|
||||
static bool Equals(const Key& key1, const Key& key2, double tol = 1e-8) {
|
||||
return key1 == key2;
|
||||
}
|
||||
};
|
||||
|
||||
} // namespace gtsam
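// Usage sketch (illustrative, assumes <gtsam/inference/Symbol.h> is included):
// any callable with signature std::string(Key) can serve as a KeyFormatter and
// be handed to the Print* utilities declared above; bracketFormatter is made up.
std::string bracketFormatter(gtsam::Key key) {
  return "[" + gtsam::DefaultKeyFormatter(key) + "]";
}

void customFormatterDemo() {
  gtsam::KeyVector keys;
  keys.push_back(gtsam::Symbol('x', 1));
  keys.push_back(2);
  gtsam::PrintKeyVector(keys, "keys:", bracketFormatter);  // keys: [x1] [2]
}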
@ -26,8 +26,8 @@ namespace gtsam {
|
|||
/* ************************************************************************* */
|
||||
template<class FACTOR>
|
||||
void MetisIndex::augment(const FactorGraph<FACTOR>& factors) {
|
||||
std::map<int32_t, FastSet<int32_t> > iAdjMap; // Stores a set of keys that are adjacent to key x, with adjMap.first
|
||||
std::map<int32_t, FastSet<int32_t> >::iterator iAdjMapIt;
|
||||
std::map<int32_t, std::set<int32_t> > iAdjMap; // Stores a set of keys that are adjacent to key x, with adjMap.first
|
||||
std::map<int32_t, std::set<int32_t> >::iterator iAdjMapIt;
|
||||
std::set<Key> keySet;
|
||||
|
||||
/* ********** Convert to CSR format ********** */
@ -29,242 +29,236 @@ using namespace std;
|
|||
|
||||
namespace gtsam {
|
||||
|
||||
/* ************************************************************************* */
|
||||
FastMap<Key, size_t> Ordering::invert() const
|
||||
{
|
||||
FastMap<Key, size_t> inverted;
|
||||
for(size_t pos = 0; pos < this->size(); ++pos)
|
||||
inverted.insert(make_pair((*this)[pos], pos));
|
||||
return inverted;
|
||||
/* ************************************************************************* */
|
||||
FastMap<Key, size_t> Ordering::invert() const {
|
||||
FastMap<Key, size_t> inverted;
|
||||
for (size_t pos = 0; pos < this->size(); ++pos)
|
||||
inverted.insert(make_pair((*this)[pos], pos));
|
||||
return inverted;
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
Ordering Ordering::Colamd(const VariableIndex& variableIndex) {
|
||||
// Call constrained version with all groups set to zero
|
||||
vector<int> dummy_groups(variableIndex.size(), 0);
|
||||
return Ordering::ColamdConstrained(variableIndex, dummy_groups);
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
Ordering Ordering::ColamdConstrained(const VariableIndex& variableIndex,
|
||||
std::vector<int>& cmember) {
|
||||
gttic(Ordering_COLAMDConstrained);
|
||||
|
||||
gttic(Prepare);
|
||||
size_t nEntries = variableIndex.nEntries(), nFactors =
|
||||
variableIndex.nFactors(), nVars = variableIndex.size();
|
||||
// Convert to compressed column major format colamd wants it in (== MATLAB format!)
|
||||
size_t Alen = ccolamd_recommended((int) nEntries, (int) nFactors,
|
||||
(int) nVars); /* colamd arg 3: size of the array A */
|
||||
vector<int> A = vector<int>(Alen); /* colamd arg 4: row indices of A, of size Alen */
|
||||
vector<int> p = vector<int>(nVars + 1); /* colamd arg 5: column pointers of A, of size n_col+1 */
|
||||
|
||||
// Fill in input data for COLAMD
|
||||
p[0] = 0;
|
||||
int count = 0;
|
||||
vector<Key> keys(nVars); // Array to store the keys in the order we add them so we can retrieve them in permuted order
|
||||
size_t index = 0;
|
||||
BOOST_FOREACH(const VariableIndex::value_type key_factors, variableIndex) {
|
||||
// Arrange factor indices into COLAMD format
|
||||
const VariableIndex::Factors& column = key_factors.second;
|
||||
size_t lastFactorId = numeric_limits<size_t>::max();
|
||||
BOOST_FOREACH(size_t factorIndex, column) {
|
||||
if (lastFactorId != numeric_limits<size_t>::max())
|
||||
assert(factorIndex > lastFactorId);
|
||||
A[count++] = (int) factorIndex; // copy sparse column
|
||||
}
|
||||
p[index + 1] = count; // column j (base 1) goes from A[j-1] to A[j]-1
|
||||
// Store key in array and increment index
|
||||
keys[index] = key_factors.first;
|
||||
++index;
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
Ordering Ordering::colamd(const VariableIndex& variableIndex)
|
||||
{
|
||||
// Call constrained version with all groups set to zero
|
||||
vector<int> dummy_groups(variableIndex.size(), 0);
|
||||
return Ordering::colamdConstrained(variableIndex, dummy_groups);
|
||||
assert((size_t)count == variableIndex.nEntries());
|
||||
|
||||
//double* knobs = NULL; /* colamd arg 6: parameters (uses defaults if NULL) */
|
||||
double knobs[CCOLAMD_KNOBS];
|
||||
ccolamd_set_defaults(knobs);
|
||||
knobs[CCOLAMD_DENSE_ROW] = -1;
|
||||
knobs[CCOLAMD_DENSE_COL] = -1;
|
||||
|
||||
int stats[CCOLAMD_STATS]; /* colamd arg 7: colamd output statistics and error codes */
|
||||
|
||||
gttoc(Prepare);
|
||||
|
||||
// call colamd, result will be in p
|
||||
/* returns (1) if successful, (0) otherwise*/
|
||||
if (nVars > 0) {
|
||||
gttic(ccolamd);
|
||||
int rv = ccolamd((int) nFactors, (int) nVars, (int) Alen, &A[0], &p[0],
|
||||
knobs, stats, &cmember[0]);
|
||||
if (rv != 1)
|
||||
throw runtime_error(
|
||||
(boost::format("ccolamd failed with return value %1%") % rv).str());
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
Ordering Ordering::colamdConstrained(
|
||||
const VariableIndex& variableIndex, std::vector<int>& cmember)
|
||||
{
|
||||
gttic(Ordering_COLAMDConstrained);
|
||||
// ccolamd_report(stats);
|
||||
|
||||
gttic(Prepare);
|
||||
size_t nEntries = variableIndex.nEntries(), nFactors = variableIndex.nFactors(), nVars = variableIndex.size();
|
||||
// Convert to compressed column major format colamd wants it in (== MATLAB format!)
|
||||
size_t Alen = ccolamd_recommended((int)nEntries, (int)nFactors, (int)nVars); /* colamd arg 3: size of the array A */
|
||||
vector<int> A = vector<int>(Alen); /* colamd arg 4: row indices of A, of size Alen */
|
||||
vector<int> p = vector<int>(nVars + 1); /* colamd arg 5: column pointers of A, of size n_col+1 */
|
||||
gttic(Fill_Ordering);
|
||||
// Convert elimination ordering in p to an ordering
|
||||
Ordering result;
|
||||
result.resize(nVars);
|
||||
for (size_t j = 0; j < nVars; ++j)
|
||||
result[j] = keys[p[j]];
|
||||
gttoc(Fill_Ordering);
|
||||
|
||||
// Fill in input data for COLAMD
|
||||
p[0] = 0;
|
||||
int count = 0;
|
||||
vector<Key> keys(nVars); // Array to store the keys in the order we add them so we can retrieve them in permuted order
|
||||
size_t index = 0;
|
||||
BOOST_FOREACH(const VariableIndex::value_type key_factors, variableIndex) {
|
||||
// Arrange factor indices into COLAMD format
|
||||
const VariableIndex::Factors& column = key_factors.second;
|
||||
size_t lastFactorId = numeric_limits<size_t>::max();
|
||||
BOOST_FOREACH(size_t factorIndex, column) {
|
||||
if(lastFactorId != numeric_limits<size_t>::max())
|
||||
assert(factorIndex > lastFactorId);
|
||||
A[count++] = (int)factorIndex; // copy sparse column
|
||||
}
|
||||
p[index+1] = count; // column j (base 1) goes from A[j-1] to A[j]-1
|
||||
// Store key in array and increment index
|
||||
keys[index] = key_factors.first;
|
||||
++ index;
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
assert((size_t)count == variableIndex.nEntries());
|
||||
/* ************************************************************************* */
|
||||
Ordering Ordering::ColamdConstrainedLast(const VariableIndex& variableIndex,
|
||||
const std::vector<Key>& constrainLast, bool forceOrder) {
|
||||
gttic(Ordering_COLAMDConstrainedLast);
|
||||
|
||||
//double* knobs = NULL; /* colamd arg 6: parameters (uses defaults if NULL) */
|
||||
double knobs[CCOLAMD_KNOBS];
|
||||
ccolamd_set_defaults(knobs);
|
||||
knobs[CCOLAMD_DENSE_ROW]=-1;
|
||||
knobs[CCOLAMD_DENSE_COL]=-1;
|
||||
size_t n = variableIndex.size();
|
||||
std::vector<int> cmember(n, 0);
|
||||
|
||||
int stats[CCOLAMD_STATS]; /* colamd arg 7: colamd output statistics and error codes */
|
||||
// Build a mapping to look up sorted Key indices by Key
|
||||
FastMap<Key, size_t> keyIndices;
|
||||
size_t j = 0;
|
||||
BOOST_FOREACH(const VariableIndex::value_type key_factors, variableIndex)
|
||||
keyIndices.insert(keyIndices.end(), make_pair(key_factors.first, j++));
|
||||
|
||||
gttoc(Prepare);
|
||||
// If at least some variables are not constrained to be last, constrain the
|
||||
// ones that should be constrained.
|
||||
int group = (constrainLast.size() != n ? 1 : 0);
|
||||
BOOST_FOREACH(Key key, constrainLast) {
|
||||
cmember[keyIndices.at(key)] = group;
|
||||
if (forceOrder)
|
||||
++group;
|
||||
}
|
||||
|
||||
// call colamd, result will be in p
|
||||
/* returns (1) if successful, (0) otherwise*/
|
||||
if(nVars > 0) {
|
||||
gttic(ccolamd);
|
||||
int rv = ccolamd((int)nFactors, (int)nVars, (int)Alen, &A[0], &p[0], knobs, stats, &cmember[0]);
|
||||
if(rv != 1)
|
||||
throw runtime_error((boost::format("ccolamd failed with return value %1%")%rv).str());
|
||||
}
|
||||
return Ordering::ColamdConstrained(variableIndex, cmember);
|
||||
}
|
||||
|
||||
// ccolamd_report(stats);
|
||||
/* ************************************************************************* */
|
||||
Ordering Ordering::ColamdConstrainedFirst(const VariableIndex& variableIndex,
|
||||
const std::vector<Key>& constrainFirst, bool forceOrder) {
|
||||
gttic(Ordering_COLAMDConstrainedFirst);
|
||||
|
||||
gttic(Fill_Ordering);
|
||||
// Convert elimination ordering in p to an ordering
|
||||
Ordering result;
|
||||
result.resize(nVars);
|
||||
for(size_t j = 0; j < nVars; ++j)
|
||||
result[j] = keys[p[j]];
|
||||
gttoc(Fill_Ordering);
|
||||
const int none = -1;
|
||||
size_t n = variableIndex.size();
|
||||
std::vector<int> cmember(n, none);
|
||||
|
||||
// Build a mapping to look up sorted Key indices by Key
|
||||
FastMap<Key, size_t> keyIndices;
|
||||
size_t j = 0;
|
||||
BOOST_FOREACH(const VariableIndex::value_type key_factors, variableIndex)
|
||||
keyIndices.insert(keyIndices.end(), make_pair(key_factors.first, j++));
|
||||
|
||||
// If at least some variables are not constrained to be last, constrain the
|
||||
// ones that should be constrained.
|
||||
int group = 0;
|
||||
BOOST_FOREACH(Key key, constrainFirst) {
|
||||
cmember[keyIndices.at(key)] = group;
|
||||
if (forceOrder)
|
||||
++group;
|
||||
}
|
||||
|
||||
if (!forceOrder && !constrainFirst.empty())
|
||||
++group;
|
||||
BOOST_FOREACH(int& c, cmember)
|
||||
if (c == none)
|
||||
c = group;
|
||||
|
||||
return Ordering::ColamdConstrained(variableIndex, cmember);
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
Ordering Ordering::ColamdConstrained(const VariableIndex& variableIndex,
|
||||
const FastMap<Key, int>& groups) {
|
||||
gttic(Ordering_COLAMDConstrained);
|
||||
size_t n = variableIndex.size();
|
||||
std::vector<int> cmember(n, 0);
|
||||
|
||||
// Build a mapping to look up sorted Key indices by Key
|
||||
FastMap<Key, size_t> keyIndices;
|
||||
size_t j = 0;
|
||||
BOOST_FOREACH(const VariableIndex::value_type key_factors, variableIndex)
|
||||
keyIndices.insert(keyIndices.end(), make_pair(key_factors.first, j++));
|
||||
|
||||
// Assign groups
|
||||
typedef FastMap<Key, int>::value_type key_group;
|
||||
BOOST_FOREACH(const key_group& p, groups) {
|
||||
// FIXME: check that no groups are skipped
|
||||
cmember[keyIndices.at(p.first)] = p.second;
|
||||
}
|
||||
|
||||
return Ordering::ColamdConstrained(variableIndex, cmember);
|
||||
}
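// Usage sketch (illustrative; the chain graph is hypothetical): constraining a
// variable to the back of the ordering, which ends up as the cmember groups
// passed to ccolamd above.
void constrainedOrderingDemo() {
  SymbolicFactorGraph chain;
  chain.push_factor(0, 1);
  chain.push_factor(1, 2);
  chain.push_factor(2, 3);
  std::vector<Key> last(1, 3);  // keep key 3 at the very end
  Ordering ord = Ordering::ColamdConstrainedLast(VariableIndex(chain), last);
  // ord.back() == 3; the remaining keys are arranged by CCOLAMD for low fill-in.
}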
|
||||
|
||||
/* ************************************************************************* */
|
||||
Ordering Ordering::Metis(const MetisIndex& met) {
|
||||
gttic(Ordering_METIS);
|
||||
|
||||
vector<idx_t> xadj = met.xadj();
|
||||
vector<idx_t> adj = met.adj();
|
||||
vector<idx_t> perm, iperm;
|
||||
|
||||
idx_t size = met.nValues();
|
||||
for (idx_t i = 0; i < size; i++) {
|
||||
perm.push_back(0);
|
||||
iperm.push_back(0);
|
||||
}
|
||||
|
||||
int outputError;
|
||||
|
||||
outputError = METIS_NodeND(&size, &xadj[0], &adj[0], NULL, NULL, &perm[0],
|
||||
&iperm[0]);
|
||||
Ordering result;
|
||||
|
||||
if (outputError != METIS_OK) {
|
||||
std::cout << "METIS failed during Nested Dissection ordering!\n";
|
||||
return result;
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
Ordering Ordering::colamdConstrainedLast(
|
||||
const VariableIndex& variableIndex, const std::vector<Key>& constrainLast, bool forceOrder)
|
||||
{
|
||||
gttic(Ordering_COLAMDConstrainedLast);
|
||||
|
||||
size_t n = variableIndex.size();
|
||||
std::vector<int> cmember(n, 0);
|
||||
|
||||
// Build a mapping to look up sorted Key indices by Key
|
||||
FastMap<Key, size_t> keyIndices;
|
||||
size_t j = 0;
|
||||
BOOST_FOREACH(const VariableIndex::value_type key_factors, variableIndex)
|
||||
keyIndices.insert(keyIndices.end(), make_pair(key_factors.first, j++));
|
||||
|
||||
// If at least some variables are not constrained to be last, constrain the
|
||||
// ones that should be constrained.
|
||||
int group = (constrainLast.size() != n ? 1 : 0);
|
||||
BOOST_FOREACH(Key key, constrainLast) {
|
||||
cmember[keyIndices.at(key)] = group;
|
||||
if(forceOrder)
|
||||
++ group;
|
||||
}
|
||||
|
||||
return Ordering::colamdConstrained(variableIndex, cmember);
|
||||
result.resize(size);
|
||||
for (size_t j = 0; j < (size_t) size; ++j) {
|
||||
// We have to add the minKey value back to obtain the original key in the Values
|
||||
result[j] = met.intToKey(perm[j]);
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
Ordering Ordering::colamdConstrainedFirst(
|
||||
const VariableIndex& variableIndex, const std::vector<Key>& constrainFirst, bool forceOrder)
|
||||
{
|
||||
gttic(Ordering_COLAMDConstrainedFirst);
|
||||
|
||||
const int none = -1;
|
||||
size_t n = variableIndex.size();
|
||||
std::vector<int> cmember(n, none);
|
||||
|
||||
// Build a mapping to look up sorted Key indices by Key
|
||||
FastMap<Key, size_t> keyIndices;
|
||||
size_t j = 0;
|
||||
BOOST_FOREACH(const VariableIndex::value_type key_factors, variableIndex)
|
||||
keyIndices.insert(keyIndices.end(), make_pair(key_factors.first, j++));
|
||||
|
||||
// If at least some variables are not constrained to be last, constrain the
|
||||
// ones that should be constrained.
|
||||
int group = 0;
|
||||
BOOST_FOREACH(Key key, constrainFirst) {
|
||||
cmember[keyIndices.at(key)] = group;
|
||||
if(forceOrder)
|
||||
++ group;
|
||||
}
|
||||
|
||||
if(!forceOrder && !constrainFirst.empty())
|
||||
++ group;
|
||||
BOOST_FOREACH(int& c, cmember)
|
||||
if(c == none)
|
||||
c = group;
|
||||
|
||||
return Ordering::colamdConstrained(variableIndex, cmember);
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
Ordering Ordering::colamdConstrained(const VariableIndex& variableIndex,
|
||||
const FastMap<Key, int>& groups)
|
||||
{
|
||||
gttic(Ordering_COLAMDConstrained);
|
||||
size_t n = variableIndex.size();
|
||||
std::vector<int> cmember(n, 0);
|
||||
|
||||
// Build a mapping to look up sorted Key indices by Key
|
||||
FastMap<Key, size_t> keyIndices;
|
||||
size_t j = 0;
|
||||
BOOST_FOREACH(const VariableIndex::value_type key_factors, variableIndex)
|
||||
keyIndices.insert(keyIndices.end(), make_pair(key_factors.first, j++));
|
||||
|
||||
// Assign groups
|
||||
typedef FastMap<Key, int>::value_type key_group;
|
||||
BOOST_FOREACH(const key_group& p, groups) {
|
||||
// FIXME: check that no groups are skipped
|
||||
cmember[keyIndices.at(p.first)] = p.second;
|
||||
}
|
||||
|
||||
return Ordering::colamdConstrained(variableIndex, cmember);
|
||||
}
|
||||
|
||||
|
||||
/* ************************************************************************* */
|
||||
Ordering Ordering::metis(const MetisIndex& met)
|
||||
{
|
||||
gttic(Ordering_METIS);
|
||||
|
||||
vector<idx_t> xadj = met.xadj();
|
||||
vector<idx_t> adj = met.adj();
|
||||
vector<idx_t> perm, iperm;
|
||||
|
||||
idx_t size = met.nValues();
|
||||
for (idx_t i = 0; i < size; i++)
|
||||
{
|
||||
perm.push_back(0);
|
||||
iperm.push_back(0);
|
||||
}
|
||||
|
||||
int outputError;
|
||||
|
||||
outputError = METIS_NodeND(&size, &xadj[0], &adj[0], NULL, NULL, &perm[0], &iperm[0]);
|
||||
Ordering result;
|
||||
|
||||
if (outputError != METIS_OK)
|
||||
{
|
||||
std::cout << "METIS failed during Nested Dissection ordering!\n";
|
||||
return result;
|
||||
}
|
||||
|
||||
result.resize(size);
|
||||
for (size_t j = 0; j < (size_t)size; ++j){
|
||||
// We have to add the minKey value back to obtain the original key in the Values
|
||||
result[j] = met.intToKey(perm[j]);
|
||||
}
|
||||
return result;
|
||||
}
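// Usage sketch (illustrative): a METIS nested-dissection ordering for a factor
// graph. MetisIndex renumbers the keys into the contiguous ids METIS_NodeND
// expects; as above, an empty Ordering is returned if METIS reports an error.
void metisOrderingDemo(const SymbolicFactorGraph& graph) {
  Ordering metisOrder = Ordering::Metis(MetisIndex(graph));
  metisOrder.print("METIS ordering: ");
}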
|
||||
|
||||
/* ************************************************************************* */
void Ordering::print(const std::string& str,
    const KeyFormatter& keyFormatter) const {
  cout << str;
  // Print ordering in index order
  // Print the ordering with varsPerLine ordering entries printed on each line,
  // for compactness.
  static const size_t varsPerLine = 10;
  bool endedOnNewline = false;
  for (size_t i = 0; i < size(); ++i) {
    if (i % varsPerLine == 0)
      cout << "Position " << i << ": ";
    if (i % varsPerLine != 0)
      cout << ", ";
    cout << keyFormatter(at(i));
    if (i % varsPerLine == varsPerLine - 1) {
      cout << "\n";
      endedOnNewline = true;
    } else {
      endedOnNewline = false;
    }
  }
  if (!endedOnNewline)
    cout << "\n";
  cout.flush();
}

/* ************************************************************************* */
bool Ordering::equals(const Ordering& other, double tol) const {
  return (*this) == other;
}

}
@ -18,182 +18,224 @@
|
|||
|
||||
#pragma once
|
||||
|
||||
#include <algorithm>
|
||||
#include <vector>
|
||||
#include <boost/assign/list_inserter.hpp>
|
||||
|
||||
#include <gtsam/base/FastSet.h>
|
||||
#include <gtsam/inference/Key.h>
|
||||
#include <gtsam/inference/VariableIndex.h>
|
||||
#include <gtsam/inference/MetisIndex.h>
|
||||
#include <gtsam/inference/FactorGraph.h>
|
||||
#include <gtsam/base/FastSet.h>
|
||||
|
||||
#include <boost/assign/list_inserter.hpp>
|
||||
#include <algorithm>
|
||||
#include <vector>
|
||||
|
||||
namespace gtsam {
|
||||
|
||||
class Ordering : public std::vector<Key> {
|
||||
protected:
|
||||
typedef std::vector<Key> Base;
|
||||
class Ordering: public std::vector<Key> {
|
||||
protected:
|
||||
typedef std::vector<Key> Base;
|
||||
|
||||
public:
|
||||
public:
|
||||
|
||||
/// Type of ordering to use
|
||||
enum OrderingType {
|
||||
COLAMD, METIS, CUSTOM
|
||||
};
|
||||
|
||||
typedef Ordering This; ///< Typedef to this class
|
||||
typedef boost::shared_ptr<This> shared_ptr; ///< shared_ptr to this class
|
||||
|
||||
/// Create an empty ordering
|
||||
GTSAM_EXPORT Ordering() {}
|
||||
|
||||
/// Create from a container
|
||||
template<typename KEYS>
|
||||
explicit Ordering(const KEYS& keys) : Base(keys.begin(), keys.end()) {}
|
||||
|
||||
/// Create an ordering using iterators over keys
|
||||
template<typename ITERATOR>
|
||||
Ordering(ITERATOR firstKey, ITERATOR lastKey) : Base(firstKey, lastKey) {}
|
||||
|
||||
/// Add new variables to the ordering as ordering += key1, key2, ... Equivalent to calling
|
||||
/// push_back.
|
||||
boost::assign::list_inserter<boost::assign_detail::call_push_back<This> >
|
||||
operator+=(Key key) {
|
||||
return boost::assign::make_list_inserter(boost::assign_detail::call_push_back<This>(*this))(key);
|
||||
}
|
||||
|
||||
/// Invert (not reverse) the ordering - returns a map from key to order position
|
||||
FastMap<Key, size_t> invert() const;
|
||||
|
||||
/// @name Fill-reducing Orderings @{
|
||||
|
||||
/// Compute a fill-reducing ordering using COLAMD from a factor graph (see details for note on
|
||||
/// performance). This internally builds a VariableIndex so if you already have a VariableIndex,
|
||||
/// it is faster to use COLAMD(const VariableIndex&)
|
||||
template<class FACTOR>
|
||||
static Ordering colamd(const FactorGraph<FACTOR>& graph) {
|
||||
return colamd(VariableIndex(graph)); }
|
||||
|
||||
/// Compute a fill-reducing ordering using COLAMD from a VariableIndex.
|
||||
static GTSAM_EXPORT Ordering colamd(const VariableIndex& variableIndex);
|
||||
|
||||
/// Compute a fill-reducing ordering using constrained COLAMD from a factor graph (see details
|
||||
/// for note on performance). This internally builds a VariableIndex so if you already have a
|
||||
/// VariableIndex, it is faster to use COLAMD(const VariableIndex&). This function constrains
|
||||
/// the variables in \c constrainLast to the end of the ordering, and orders all other variables
|
||||
/// before in a fill-reducing ordering. If \c forceOrder is true, the variables in \c
|
||||
/// constrainLast will be ordered in the same order specified in the vector<Key> \c
|
||||
/// constrainLast. If \c forceOrder is false, the variables in \c constrainLast will be
|
||||
/// ordered after all the others, but will be rearranged by CCOLAMD to reduce fill-in as well.
|
||||
template<class FACTOR>
|
||||
static Ordering colamdConstrainedLast(const FactorGraph<FACTOR>& graph,
|
||||
const std::vector<Key>& constrainLast, bool forceOrder = false) {
|
||||
return colamdConstrainedLast(VariableIndex(graph), constrainLast, forceOrder); }
|
||||
|
||||
/// Compute a fill-reducing ordering using constrained COLAMD from a VariableIndex. This
|
||||
/// function constrains the variables in \c constrainLast to the end of the ordering, and orders
|
||||
/// all other variables before in a fill-reducing ordering. If \c forceOrder is true, the
|
||||
/// variables in \c constrainLast will be ordered in the same order specified in the vector<Key>
|
||||
/// \c constrainLast. If \c forceOrder is false, the variables in \c constrainLast will be
|
||||
/// ordered after all the others, but will be rearranged by CCOLAMD to reduce fill-in as well.
|
||||
static GTSAM_EXPORT Ordering colamdConstrainedLast(const VariableIndex& variableIndex,
|
||||
const std::vector<Key>& constrainLast, bool forceOrder = false);
|
||||
|
||||
/// Compute a fill-reducing ordering using constrained COLAMD from a factor graph (see details
|
||||
/// for note on performance). This internally builds a VariableIndex so if you already have a
|
||||
/// VariableIndex, it is faster to use COLAMD(const VariableIndex&). This function constrains
|
||||
/// the variables in \c constrainLast to the end of the ordering, and orders all other variables
|
||||
/// before in a fill-reducing ordering. If \c forceOrder is true, the variables in \c
|
||||
/// constrainLast will be ordered in the same order specified in the vector<Key> \c
|
||||
/// constrainLast. If \c forceOrder is false, the variables in \c constrainFirst will be
|
||||
/// ordered after all the others, but will be rearranged by CCOLAMD to reduce fill-in as well.
|
||||
template<class FACTOR>
|
||||
static Ordering colamdConstrainedFirst(const FactorGraph<FACTOR>& graph,
|
||||
const std::vector<Key>& constrainFirst, bool forceOrder = false) {
|
||||
return colamdConstrainedFirst(VariableIndex(graph), constrainFirst, forceOrder); }
|
||||
|
||||
/// Compute a fill-reducing ordering using constrained COLAMD from a VariableIndex. This
|
||||
/// function constrains the variables in \c constrainFirst to the front of the ordering, and
|
||||
/// orders all other variables after in a fill-reducing ordering. If \c forceOrder is true, the
|
||||
/// variables in \c constrainFirst will be ordered in the same order specified in the
|
||||
/// vector<Key> \c constrainFirst. If \c forceOrder is false, the variables in \c
|
||||
/// constrainFirst will be ordered after all the others, but will be rearranged by CCOLAMD to
|
||||
/// reduce fill-in as well.
|
||||
static GTSAM_EXPORT Ordering colamdConstrainedFirst(const VariableIndex& variableIndex,
|
||||
const std::vector<Key>& constrainFirst, bool forceOrder = false);
|
||||
|
||||
/// Compute a fill-reducing ordering using constrained COLAMD from a factor graph (see details
|
||||
/// for note on performance). This internally builds a VariableIndex so if you already have a
|
||||
/// VariableIndex, it is faster to use COLAMD(const VariableIndex&). In this function, a group
|
||||
/// for each variable should be specified in \c groups, and each group of variables will appear
|
||||
/// in the ordering in group index order. \c groups should be a map from Key to group index.
|
||||
/// The group indices used should be consecutive starting at 0, but may appear in \c groups in
|
||||
/// arbitrary order. Any variables not present in \c groups will be assigned to group 0. This
|
||||
/// function simply fills the \c cmember argument to CCOLAMD with the supplied indices, see the
|
||||
/// CCOLAMD documentation for more information.
|
||||
template<class FACTOR>
|
||||
static Ordering colamdConstrained(const FactorGraph<FACTOR>& graph,
|
||||
const FastMap<Key, int>& groups) {
|
||||
return colamdConstrained(VariableIndex(graph), groups); }
|
||||
|
||||
/// Compute a fill-reducing ordering using constrained COLAMD from a VariableIndex. In this
|
||||
/// function, a group for each variable should be specified in \c groups, and each group of
|
||||
/// variables will appear in the ordering in group index order. \c groups should be a map from
|
||||
/// Key to group index. The group indices used should be consecutive starting at 0, but may
|
||||
/// appear in \c groups in arbitrary order. Any variables not present in \c groups will be
|
||||
/// assigned to group 0. This function simply fills the \c cmember argument to CCOLAMD with the
|
||||
/// supplied indices, see the CCOLAMD documentation for more information.
|
||||
static GTSAM_EXPORT Ordering colamdConstrained(const VariableIndex& variableIndex,
|
||||
const FastMap<Key, int>& groups);
|
||||
|
||||
/// Return a natural Ordering. Typically used by iterative solvers
|
||||
template <class FACTOR>
|
||||
static Ordering Natural(const FactorGraph<FACTOR> &fg) {
|
||||
FastSet<Key> src = fg.keys();
|
||||
std::vector<Key> keys(src.begin(), src.end());
|
||||
std::stable_sort(keys.begin(), keys.end());
|
||||
return Ordering(keys);
|
||||
}
|
||||
|
||||
/// METIS Formatting function
|
||||
template<class FACTOR>
|
||||
static GTSAM_EXPORT void CSRFormat(std::vector<int>& xadj, std::vector<int>& adj, const FactorGraph<FACTOR>& graph);
|
||||
|
||||
/// Compute an ordering determined by METIS from a VariableIndex
|
||||
static GTSAM_EXPORT Ordering metis(const MetisIndex& met);
|
||||
|
||||
template<class FACTOR>
|
||||
static Ordering metis(const FactorGraph<FACTOR>& graph)
|
||||
{
|
||||
return metis(MetisIndex(graph));
|
||||
}
|
||||
|
||||
/// @}
|
||||
|
||||
/// @name Testable @{
|
||||
|
||||
GTSAM_EXPORT void print(const std::string& str = "", const KeyFormatter& keyFormatter = DefaultKeyFormatter) const;
|
||||
|
||||
GTSAM_EXPORT bool equals(const Ordering& other, double tol = 1e-9) const;
|
||||
|
||||
/// @}
|
||||
|
||||
private:
|
||||
/// Internal COLAMD function
|
||||
static GTSAM_EXPORT Ordering colamdConstrained(
|
||||
const VariableIndex& variableIndex, std::vector<int>& cmember);
|
||||
|
||||
|
||||
/** Serialization function */
|
||||
friend class boost::serialization::access;
|
||||
template<class ARCHIVE>
|
||||
void serialize(ARCHIVE & ar, const unsigned int /*version*/) {
|
||||
ar & BOOST_SERIALIZATION_BASE_OBJECT_NVP(Base);
|
||||
}
|
||||
/// Type of ordering to use
|
||||
enum OrderingType {
|
||||
COLAMD, METIS, NATURAL, CUSTOM
|
||||
};
|
||||
|
||||
/// traits
|
||||
template<> struct traits<Ordering> : public Testable<Ordering> {};
|
||||
typedef Ordering This; ///< Typedef to this class
|
||||
typedef boost::shared_ptr<This> shared_ptr; ///< shared_ptr to this class
|
||||
|
||||
/// Create an empty ordering
|
||||
GTSAM_EXPORT
|
||||
Ordering() {
|
||||
}
|
||||
|
||||
/// Create from a container
|
||||
template<typename KEYS>
|
||||
explicit Ordering(const KEYS& keys) :
|
||||
Base(keys.begin(), keys.end()) {
|
||||
}
|
||||
|
||||
/// Create an ordering using iterators over keys
|
||||
template<typename ITERATOR>
|
||||
Ordering(ITERATOR firstKey, ITERATOR lastKey) :
|
||||
Base(firstKey, lastKey) {
|
||||
}
|
||||
|
||||
/// Add new variables to the ordering as ordering += key1, key2, ... Equivalent to calling
|
||||
/// push_back.
|
||||
boost::assign::list_inserter<boost::assign_detail::call_push_back<This> > operator+=(
|
||||
Key key) {
|
||||
return boost::assign::make_list_inserter(
|
||||
boost::assign_detail::call_push_back<This>(*this))(key);
|
||||
}
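  // Usage sketch (illustrative, not from the diff); equivalent to three push_back calls:
  //   Ordering ordering;
  //   ordering += 3, 1, 2;   // ordering now holds [3, 1, 2]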
|
||||
|
||||
/// Invert (not reverse) the ordering - returns a map from key to order position
|
||||
FastMap<Key, size_t> invert() const;
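  // Usage sketch (illustrative):
  //   Ordering ord = Ordering(list_of(7)(3)(5));  // boost::assign, as in the unit tests
  //   FastMap<Key, size_t> pos = ord.invert();    // pos[7] == 0, pos[3] == 1, pos[5] == 2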
|
||||
|
||||
/// @name Fill-reducing Orderings @{
|
||||
|
||||
/// Compute a fill-reducing ordering using COLAMD from a factor graph (see details for note on
|
||||
/// performance). This internally builds a VariableIndex so if you already have a VariableIndex,
|
||||
/// it is faster to use COLAMD(const VariableIndex&)
|
||||
template<class FACTOR>
|
||||
static Ordering Colamd(const FactorGraph<FACTOR>& graph) {
|
||||
return Colamd(VariableIndex(graph));
|
||||
}
|
||||
|
||||
/// Compute a fill-reducing ordering using COLAMD from a VariableIndex.
|
||||
static GTSAM_EXPORT Ordering Colamd(const VariableIndex& variableIndex);
|
||||
|
||||
/// Compute a fill-reducing ordering using constrained COLAMD from a factor graph (see details
|
||||
/// for note on performance). This internally builds a VariableIndex so if you already have a
|
||||
/// VariableIndex, it is faster to use COLAMD(const VariableIndex&). This function constrains
|
||||
/// the variables in \c constrainLast to the end of the ordering, and orders all other variables
|
||||
/// before in a fill-reducing ordering. If \c forceOrder is true, the variables in \c
|
||||
/// constrainLast will be ordered in the same order specified in the vector<Key> \c
|
||||
/// constrainLast. If \c forceOrder is false, the variables in \c constrainLast will be
|
||||
/// ordered after all the others, but will be rearranged by CCOLAMD to reduce fill-in as well.
|
||||
template<class FACTOR>
|
||||
static Ordering ColamdConstrainedLast(const FactorGraph<FACTOR>& graph,
|
||||
const std::vector<Key>& constrainLast, bool forceOrder = false) {
|
||||
return ColamdConstrainedLast(VariableIndex(graph), constrainLast,
|
||||
forceOrder);
|
||||
}
|
||||
|
||||
/// Compute a fill-reducing ordering using constrained COLAMD from a VariableIndex. This
|
||||
/// function constrains the variables in \c constrainLast to the end of the ordering, and orders
|
||||
/// all other variables before in a fill-reducing ordering. If \c forceOrder is true, the
|
||||
/// variables in \c constrainLast will be ordered in the same order specified in the vector<Key>
|
||||
/// \c constrainLast. If \c forceOrder is false, the variables in \c constrainLast will be
|
||||
/// ordered after all the others, but will be rearranged by CCOLAMD to reduce fill-in as well.
|
||||
static GTSAM_EXPORT Ordering ColamdConstrainedLast(
|
||||
const VariableIndex& variableIndex, const std::vector<Key>& constrainLast,
|
||||
bool forceOrder = false);
|
||||
|
||||
/// Compute a fill-reducing ordering using constrained COLAMD from a factor graph (see details
|
||||
/// for note on performance). This internally builds a VariableIndex so if you already have a
|
||||
/// VariableIndex, it is faster to use COLAMD(const VariableIndex&). This function constrains
|
||||
/// the variables in \c constrainLast to the end of the ordering, and orders all other variables
|
||||
/// before in a fill-reducing ordering. If \c forceOrder is true, the variables in \c
|
||||
/// constrainFirst will be ordered in the same order specified in the vector<Key> \c
|
||||
/// constrainFirst. If \c forceOrder is false, the variables in \c constrainFirst will be
|
||||
/// ordered before all the others, but will be rearranged by CCOLAMD to reduce fill-in as well.
|
||||
template<class FACTOR>
|
||||
static Ordering ColamdConstrainedFirst(const FactorGraph<FACTOR>& graph,
|
||||
const std::vector<Key>& constrainFirst, bool forceOrder = false) {
|
||||
return ColamdConstrainedFirst(VariableIndex(graph), constrainFirst,
|
||||
forceOrder);
|
||||
}
|
||||
|
||||
/// Compute a fill-reducing ordering using constrained COLAMD from a VariableIndex. This
|
||||
/// function constrains the variables in \c constrainFirst to the front of the ordering, and
|
||||
/// orders all other variables after in a fill-reducing ordering. If \c forceOrder is true, the
|
||||
/// variables in \c constrainFirst will be ordered in the same order specified in the
|
||||
/// vector<Key> \c constrainFirst. If \c forceOrder is false, the variables in \c
|
||||
/// constrainFirst will be ordered before all the others, but will be rearranged by CCOLAMD to
|
||||
/// reduce fill-in as well.
|
||||
static GTSAM_EXPORT Ordering ColamdConstrainedFirst(
|
||||
const VariableIndex& variableIndex,
|
||||
const std::vector<Key>& constrainFirst, bool forceOrder = false);
|
||||
|
||||
/// Compute a fill-reducing ordering using constrained COLAMD from a factor graph (see details
|
||||
/// for note on performance). This internally builds a VariableIndex so if you already have a
|
||||
/// VariableIndex, it is faster to use COLAMD(const VariableIndex&). In this function, a group
|
||||
/// for each variable should be specified in \c groups, and each group of variables will appear
|
||||
/// in the ordering in group index order. \c groups should be a map from Key to group index.
|
||||
/// The group indices used should be consecutive starting at 0, but may appear in \c groups in
|
||||
/// arbitrary order. Any variables not present in \c groups will be assigned to group 0. This
|
||||
/// function simply fills the \c cmember argument to CCOLAMD with the supplied indices, see the
|
||||
/// CCOLAMD documentation for more information.
|
||||
template<class FACTOR>
|
||||
static Ordering ColamdConstrained(const FactorGraph<FACTOR>& graph,
|
||||
const FastMap<Key, int>& groups) {
|
||||
return ColamdConstrained(VariableIndex(graph), groups);
|
||||
}
|
||||
|
||||
/// Compute a fill-reducing ordering using constrained COLAMD from a VariableIndex. In this
|
||||
/// function, a group for each variable should be specified in \c groups, and each group of
|
||||
/// variables will appear in the ordering in group index order. \c groups should be a map from
|
||||
/// Key to group index. The group indices used should be consecutive starting at 0, but may
|
||||
/// appear in \c groups in arbitrary order. Any variables not present in \c groups will be
|
||||
/// assigned to group 0. This function simply fills the \c cmember argument to CCOLAMD with the
|
||||
/// supplied indices, see the CCOLAMD documentation for more information.
|
||||
static GTSAM_EXPORT Ordering ColamdConstrained(
|
||||
const VariableIndex& variableIndex, const FastMap<Key, int>& groups);
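  // Usage sketch (illustrative), mirroring the grouped_constrained_ordering test,
  // with 'graph' standing for any previously built factor graph:
  //   FastMap<Key, int> groups;
  //   groups[2] = 1; groups[4] = 1;  // middle group
  //   groups[5] = 2;                 // eliminated last; unlisted keys default to group 0
  //   Ordering ord = Ordering::ColamdConstrained(graph, groups);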
|
||||
|
||||
/// Return a natural Ordering. Typically used by iterative solvers
|
||||
template<class FACTOR>
|
||||
static Ordering Natural(const FactorGraph<FACTOR> &fg) {
|
||||
KeySet src = fg.keys();
|
||||
std::vector<Key> keys(src.begin(), src.end());
|
||||
std::stable_sort(keys.begin(), keys.end());
|
||||
return Ordering(keys);
|
||||
}
|
||||
|
||||
/// METIS Formatting function
|
||||
template<class FACTOR>
|
||||
static GTSAM_EXPORT void CSRFormat(std::vector<int>& xadj,
|
||||
std::vector<int>& adj, const FactorGraph<FACTOR>& graph);
|
||||
|
||||
/// Compute an ordering determined by METIS from a VariableIndex
|
||||
static GTSAM_EXPORT Ordering Metis(const MetisIndex& met);
|
||||
|
||||
template<class FACTOR>
|
||||
static Ordering Metis(const FactorGraph<FACTOR>& graph) {
|
||||
return Metis(MetisIndex(graph));
|
||||
}
|
||||
|
||||
/// @}
|
||||
|
||||
/// @name Named Constructors @{
|
||||
|
||||
template<class FACTOR>
|
||||
static Ordering Create(OrderingType orderingType,
|
||||
const FactorGraph<FACTOR>& graph) {
|
||||
|
||||
switch (orderingType) {
|
||||
case COLAMD:
|
||||
return Colamd(graph);
|
||||
case METIS:
|
||||
return Metis(graph);
|
||||
case NATURAL:
|
||||
return Natural(graph);
|
||||
case CUSTOM:
|
||||
throw std::runtime_error(
|
||||
"Ordering::Create error: called with CUSTOM ordering type.");
|
||||
default:
|
||||
throw std::runtime_error(
|
||||
"Ordering::Create error: called with unknown ordering type.");
|
||||
}
|
||||
}
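  // Usage sketch (illustrative): selecting the strategy at run time,
  // with 'graph' a previously built factor graph:
  //   Ordering ord = Ordering::Create(Ordering::METIS, graph);
  // CUSTOM deliberately throws: callers with their own ordering construct the
  // Ordering directly from a key sequence instead of going through Create.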
|
||||
|
||||
/// @}
|
||||
|
||||
/// @name Testable @{
|
||||
|
||||
GTSAM_EXPORT
|
||||
void print(const std::string& str = "", const KeyFormatter& keyFormatter =
|
||||
DefaultKeyFormatter) const;
|
||||
|
||||
GTSAM_EXPORT
|
||||
bool equals(const Ordering& other, double tol = 1e-9) const;
|
||||
|
||||
/// @}
|
||||
|
||||
private:
|
||||
/// Internal COLAMD function
|
||||
static GTSAM_EXPORT Ordering ColamdConstrained(
|
||||
const VariableIndex& variableIndex, std::vector<int>& cmember);
|
||||
|
||||
/** Serialization function */
|
||||
friend class boost::serialization::access;
|
||||
template<class ARCHIVE>
|
||||
void serialize(ARCHIVE & ar, const unsigned int version) {
|
||||
ar & BOOST_SERIALIZATION_BASE_OBJECT_NVP(Base);
|
||||
}
|
||||
};
|
||||
|
||||
/// traits
|
||||
template<> struct traits<Ordering> : public Testable<Ordering> {
|
||||
};
|
||||
|
||||
}
|
||||
@ -18,6 +18,8 @@
|
|||
#pragma once
|
||||
|
||||
#include <gtsam/inference/VariableIndex.h>
|
||||
#include <gtsam/base/timing.h>
|
||||
#include <boost/foreach.hpp>
|
||||
|
||||
namespace gtsam {
|
||||
@ -18,16 +18,14 @@
|
|||
#pragma once
|
||||
|
||||
#include <gtsam/inference/Key.h>
|
||||
#include <gtsam/base/FastList.h>
|
||||
#include <gtsam/base/FastMap.h>
|
||||
#include <gtsam/base/Testable.h>
|
||||
#include <gtsam/base/types.h>
|
||||
#include <gtsam/base/timing.h>
|
||||
#include <gtsam/base/FastVector.h>
|
||||
#include <gtsam/dllexport.h>
|
||||
|
||||
#include <boost/foreach.hpp>
|
||||
#include <boost/optional/optional.hpp>
|
||||
#include <boost/smart_ptr/shared_ptr.hpp>
|
||||
|
||||
#include <vector>
|
||||
#include <deque>
|
||||
#include <cassert>
|
||||
#include <stdexcept>
|
||||
|
||||
namespace gtsam {
|
||||
|
@ -45,7 +43,7 @@ class GTSAM_EXPORT VariableIndex {
|
|||
public:
|
||||
|
||||
typedef boost::shared_ptr<VariableIndex> shared_ptr;
|
||||
typedef FastList<size_t> Factors;
|
||||
typedef FastVector<size_t> Factors;
|
||||
typedef Factors::iterator Factor_iterator;
|
||||
typedef Factors::const_iterator Factor_const_iterator;
|
||||
@ -14,6 +14,7 @@
|
|||
* @author Alex Cunningham
|
||||
*/
|
||||
|
||||
#include <gtsam/inference/Key.h>
|
||||
#include <gtsam/inference/Symbol.h>
|
||||
#include <gtsam/base/Testable.h>
|
||||
#include <gtsam/base/TestableAssertions.h>
|
||||
@ -28,45 +28,47 @@ using namespace std;
|
|||
using namespace gtsam;
|
||||
using namespace boost::assign;
|
||||
|
||||
namespace example {
|
||||
SymbolicFactorGraph symbolicChain() {
|
||||
SymbolicFactorGraph sfg;
|
||||
sfg.push_factor(0, 1);
|
||||
sfg.push_factor(1, 2);
|
||||
sfg.push_factor(2, 3);
|
||||
sfg.push_factor(3, 4);
|
||||
sfg.push_factor(4, 5);
|
||||
return sfg;
|
||||
}
|
||||
}
|
||||
/* ************************************************************************* */
|
||||
TEST(Ordering, constrained_ordering) {
|
||||
SymbolicFactorGraph sfg;
|
||||
|
||||
// create graph with wanted variable set = 2, 4
|
||||
sfg.push_factor(0,1);
|
||||
sfg.push_factor(1,2);
|
||||
sfg.push_factor(2,3);
|
||||
sfg.push_factor(3,4);
|
||||
sfg.push_factor(4,5);
|
||||
SymbolicFactorGraph sfg = example::symbolicChain();
|
||||
|
||||
// unconstrained version
|
||||
Ordering actUnconstrained = Ordering::colamd(sfg);
|
||||
Ordering actUnconstrained = Ordering::Colamd(sfg);
|
||||
Ordering expUnconstrained = Ordering(list_of(0)(1)(2)(3)(4)(5));
|
||||
EXPECT(assert_equal(expUnconstrained, actUnconstrained));
|
||||
|
||||
// constrained version - push one set to the end
|
||||
Ordering actConstrained = Ordering::colamdConstrainedLast(sfg, list_of(2)(4));
|
||||
Ordering actConstrained = Ordering::ColamdConstrainedLast(sfg, list_of(2)(4));
|
||||
Ordering expConstrained = Ordering(list_of(0)(1)(5)(3)(4)(2));
|
||||
EXPECT(assert_equal(expConstrained, actConstrained));
|
||||
|
||||
// constrained version - push one set to the start
|
||||
Ordering actConstrained2 = Ordering::colamdConstrainedFirst(sfg, list_of(2)(4));
|
||||
Ordering actConstrained2 = Ordering::ColamdConstrainedFirst(sfg,
|
||||
list_of(2)(4));
|
||||
Ordering expConstrained2 = Ordering(list_of(2)(4)(0)(1)(3)(5));
|
||||
EXPECT(assert_equal(expConstrained2, actConstrained2));
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
TEST(Ordering, grouped_constrained_ordering) {
|
||||
SymbolicFactorGraph sfg;
|
||||
|
||||
// create graph with constrained groups:
|
||||
// 1: 2, 4
|
||||
// 2: 5
|
||||
sfg.push_factor(0,1);
|
||||
sfg.push_factor(1,2);
|
||||
sfg.push_factor(2,3);
|
||||
sfg.push_factor(3,4);
|
||||
sfg.push_factor(4,5);
|
||||
SymbolicFactorGraph sfg = example::symbolicChain();
|
||||
|
||||
// constrained version - push one set to the end
|
||||
FastMap<size_t, int> constraints;
|
||||
|
@ -74,50 +76,48 @@ TEST(Ordering, grouped_constrained_ordering) {
|
|||
constraints[4] = 1;
|
||||
constraints[5] = 2;
|
||||
|
||||
Ordering actConstrained = Ordering::colamdConstrained(sfg, constraints);
|
||||
Ordering actConstrained = Ordering::ColamdConstrained(sfg, constraints);
|
||||
Ordering expConstrained = list_of(0)(1)(3)(2)(4)(5);
|
||||
EXPECT(assert_equal(expConstrained, actConstrained));
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
TEST(Ordering, csr_format) {
  // Example in METIS manual
  SymbolicFactorGraph sfg;
  sfg.push_factor(0, 1);
  sfg.push_factor(1, 2);
  sfg.push_factor(2, 3);
  sfg.push_factor(3, 4);
  sfg.push_factor(5, 6);
  sfg.push_factor(6, 7);
  sfg.push_factor(7, 8);
  sfg.push_factor(8, 9);
  sfg.push_factor(10, 11);
  sfg.push_factor(11, 12);
  sfg.push_factor(12, 13);
  sfg.push_factor(13, 14);

  sfg.push_factor(0, 5);
  sfg.push_factor(5, 10);
  sfg.push_factor(1, 6);
  sfg.push_factor(6, 11);
  sfg.push_factor(2, 7);
  sfg.push_factor(7, 12);
  sfg.push_factor(3, 8);
  sfg.push_factor(8, 13);
  sfg.push_factor(4, 9);
  sfg.push_factor(9, 14);

  MetisIndex mi(sfg);

  vector<int> xadjExpected, adjExpected;
  xadjExpected += 0, 2, 5, 8, 11, 13, 16, 20, 24, 28, 31, 33, 36, 39, 42, 44;
  adjExpected += 1, 5, 0, 2, 6, 1, 3, 7, 2, 4, 8, 3, 9, 0, 6, 10, 1, 5, 7, 11,
      2, 6, 8, 12, 3, 7, 9, 13, 4, 8, 14, 5, 11, 6, 10, 12, 7, 11,
      13, 8, 12, 14, 9, 13;

  EXPECT(xadjExpected == mi.xadj());
  EXPECT(adjExpected.size() == mi.adj().size());
  EXPECT(adjExpected == mi.adj());
}
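// Illustrative helper (not part of the test): expanding the CSR arrays checked
// above into per-node neighbour lists, i.e. the inverse of what MetisIndex builds.
static std::vector<std::vector<int> > csrToAdjacencyLists(
    const std::vector<int>& xadj, const std::vector<int>& adj) {
  std::vector<std::vector<int> > lists(xadj.size() - 1);
  for (size_t i = 0; i + 1 < xadj.size(); ++i)
    for (int k = xadj[i]; k < xadj[i + 1]; ++k)
      lists[i].push_back(adj[k]);  // e.g. node 0 of the 5x3 grid -> {1, 5}
  return lists;
}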
|
||||
|
||||
/* ************************************************************************* */
|
||||
|
@ -135,12 +135,11 @@ TEST(Ordering, csr_format_2) {
|
|||
|
||||
vector<int> xadjExpected, adjExpected;
|
||||
xadjExpected += 0, 1, 4, 6, 8, 10;
|
||||
adjExpected += 1, 0, 2, 4, 1, 3, 2, 4, 1, 3;
|
||||
|
||||
EXPECT(xadjExpected == mi.xadj());
|
||||
EXPECT(adjExpected.size() == mi.adj().size());
|
||||
EXPECT(adjExpected == mi.adj());
|
||||
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
|
@ -170,7 +169,6 @@ TEST(Ordering, csr_format_3) {
|
|||
EXPECT(xadjExpected == mi.xadj());
|
||||
EXPECT(adjExpected.size() == mi.adj().size());
|
||||
EXPECT(adjExpected == adjAcutal);
|
||||
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
|
@ -197,7 +195,7 @@ TEST(Ordering, csr_format_4) {
|
|||
EXPECT(adjExpected.size() == mi.adj().size());
|
||||
EXPECT(adjExpected == adjAcutal);
|
||||
|
||||
Ordering metOrder = Ordering::metis(sfg);
|
||||
Ordering metOrder = Ordering::Metis(sfg);
|
||||
|
||||
// Test different symbol types
|
||||
sfg.push_factor(Symbol('l', 1));
|
||||
|
@ -206,8 +204,7 @@ TEST(Ordering, csr_format_4) {
|
|||
sfg.push_factor(Symbol('x', 3), Symbol('l', 1));
|
||||
sfg.push_factor(Symbol('x', 4), Symbol('l', 1));
|
||||
|
||||
Ordering metOrder2 = Ordering::metis(sfg);
|
||||
|
||||
Ordering metOrder2 = Ordering::Metis(sfg);
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
|
@ -229,8 +226,77 @@ TEST(Ordering, metis) {
|
|||
EXPECT(adjExpected.size() == mi.adj().size());
|
||||
EXPECT(adjExpected == mi.adj());
|
||||
|
||||
Ordering metis = Ordering::metis(sfg);
|
||||
Ordering metis = Ordering::Metis(sfg);
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
TEST(Ordering, MetisLoop) {
|
||||
|
||||
// create linear graph
|
||||
SymbolicFactorGraph sfg = example::symbolicChain();
|
||||
|
||||
// add loop closure
|
||||
sfg.push_factor(0, 5);
|
||||
|
||||
// METIS
|
||||
#if !defined(__APPLE__)
|
||||
{
|
||||
Ordering actual = Ordering::Create(Ordering::METIS, sfg);
|
||||
// - P( 0 4 1)
|
||||
// | - P( 2 | 4 1)
|
||||
// | | - P( 3 | 4 2)
|
||||
// | - P( 5 | 0 1)
|
||||
Ordering expected = Ordering(list_of(3)(2)(5)(0)(4)(1));
|
||||
EXPECT(assert_equal(expected, actual));
|
||||
}
|
||||
#else
|
||||
{
|
||||
Ordering actual = Ordering::Create(Ordering::METIS, sfg);
|
||||
// - P( 1 0 3)
|
||||
// | - P( 4 | 0 3)
|
||||
// | | - P( 5 | 0 4)
|
||||
// | - P( 2 | 1 3)
|
||||
Ordering expected = Ordering(list_of(5)(4)(2)(1)(0)(3));
|
||||
EXPECT(assert_equal(expected, actual));
|
||||
}
|
||||
#endif
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
TEST(Ordering, Create) {
|
||||
|
||||
// create chain graph
|
||||
SymbolicFactorGraph sfg = example::symbolicChain();
|
||||
|
||||
// COLAMD
|
||||
{
|
||||
//- P( 4 5)
|
||||
//| - P( 3 | 4)
|
||||
//| | - P( 2 | 3)
|
||||
//| | | - P( 1 | 2)
|
||||
//| | | | - P( 0 | 1)
|
||||
Ordering actual = Ordering::Create(Ordering::COLAMD, sfg);
|
||||
Ordering expected = Ordering(list_of(0)(1)(2)(3)(4)(5));
|
||||
EXPECT(assert_equal(expected, actual));
|
||||
}
|
||||
|
||||
// METIS
|
||||
{
|
||||
Ordering actual = Ordering::Create(Ordering::METIS, sfg);
|
||||
//- P( 1 0 2)
|
||||
//| - P( 3 4 | 2)
|
||||
//| | - P( 5 | 4)
|
||||
Ordering expected = Ordering(list_of(5)(3)(4)(1)(0)(2));
|
||||
EXPECT(assert_equal(expected, actual));
|
||||
}
|
||||
|
||||
// CUSTOM
|
||||
CHECK_EXCEPTION(Ordering::Create(Ordering::CUSTOM, sfg), runtime_error);
|
||||
}
|
||||
|
||||
/* ************************************************************************* */
|
||||
int main() { TestResult tr; return TestRegistry::runAllTests(tr); }
/* ************************************************************************* */
@ -0,0 +1,91 @@
|
|||
/* ----------------------------------------------------------------------------
|
||||
|
||||
* GTSAM Copyright 2010, Georgia Tech Research Corporation,
|
||||
* Atlanta, Georgia 30332-0415
|
||||
* All Rights Reserved
|
||||
* Authors: Frank Dellaert, et al. (see THANKS for the full author list)
|
||||
|
||||
* See LICENSE for the license information
|
||||
|
||||
* -------------------------------------------------------------------------- */
|
||||
|
||||
/**
|
||||
* @file BinaryJacobianFactor.h
|
||||
*
|
||||
* @brief A binary JacobianFactor specialization that uses fixed matrix math for speed
|
||||
*
|
||||
* @date June 2015
|
||||
* @author Frank Dellaert
|
||||
*/
|
||||
|
||||
#pragma once
|
||||
|
||||
#include <gtsam/linear/JacobianFactor.h>
|
||||
#include <gtsam/base/SymmetricBlockMatrix.h>
|
||||
#include <gtsam/base/timing.h>
|
||||
|
||||
namespace gtsam {
|
||||
|
||||
/**
|
||||
* A binary JacobianFactor specialization that uses fixed matrix math for speed
|
||||
*/
|
||||
template<int M, int N1, int N2>
|
||||
struct BinaryJacobianFactor: JacobianFactor {
|
||||
|
||||
/// Constructor
|
||||
BinaryJacobianFactor(Key key1, const Eigen::Matrix<double, M, N1>& A1,
|
||||
Key key2, const Eigen::Matrix<double, M, N2>& A2,
|
||||
const Eigen::Matrix<double, M, 1>& b, //
|
||||
const SharedDiagonal& model = SharedDiagonal()) :
|
||||
JacobianFactor(key1, A1, key2, A2, b, model) {
|
||||
}
|
||||
|
||||
inline Key key1() const {
|
||||
return keys_[0];
|
||||
}
|
||||
inline Key key2() const {
|
||||
return keys_[1];
|
||||
}
|
||||
|
||||
// Fixed-size matrix update
|
||||
void updateHessian(const FastVector<Key>& infoKeys,
|
||||
SymmetricBlockMatrix* info) const {
|
||||
gttic(updateHessian_BinaryJacobianFactor);
|
||||
// Whiten the factor if it has a noise model
|
||||
const SharedDiagonal& model = get_model();
|
||||
if (model && !model->isUnit()) {
|
||||
if (model->isConstrained())
|
||||
throw std::invalid_argument(
|
||||
"BinaryJacobianFactor::updateHessian: cannot update information with "
|
||||
"constrained noise model");
|
||||
BinaryJacobianFactor whitenedFactor(key1(), model->Whiten(getA(begin())),
|
||||
key2(), model->Whiten(getA(end())), model->whiten(getb()));
|
||||
whitenedFactor.updateHessian(infoKeys, info);
|
||||
} else {
|
||||
// First build an array of slots
|
||||
DenseIndex slot1 = Slot(infoKeys, key1());
|
||||
DenseIndex slot2 = Slot(infoKeys, key2());
|
||||
DenseIndex slotB = info->nBlocks() - 1;
|
||||
|
||||
const Matrix& Ab = Ab_.matrix();
|
||||
Eigen::Block<const Matrix, M, N1> A1(Ab, 0, 0);
|
||||
Eigen::Block<const Matrix, M, N2> A2(Ab, 0, N1);
|
||||
Eigen::Block<const Matrix, M, 1> b(Ab, 0, N1 + N2);
|
||||
|
||||
// We perform I += A'*A to the upper triangle
|
||||
(*info)(slot1, slot1).selfadjointView().rankUpdate(A1.transpose());
|
||||
(*info)(slot1, slot2).knownOffDiagonal() += A1.transpose() * A2;
|
||||
(*info)(slot1, slotB).knownOffDiagonal() += A1.transpose() * b;
|
||||
(*info)(slot2, slot2).selfadjointView().rankUpdate(A2.transpose());
|
||||
(*info)(slot2, slotB).knownOffDiagonal() += A2.transpose() * b;
|
||||
(*info)(slotB, slotB)(0, 0) += b.transpose() * b;
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
template<int M, int N1, int N2>
|
||||
struct traits<BinaryJacobianFactor<M, N1, N2> > : Testable<
|
||||
BinaryJacobianFactor<M, N1, N2> > {
|
||||
};
|
||||
|
||||
} //namespace gtsam
|
|
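A hedged usage sketch, not part of the diff: constructing a BinaryJacobianFactor with fixed sizes M=3, N1=2, N2=2. Only the constructor shown above is assumed; the include path for the new header and the helper function name are illustrative guesses. During elimination, updateHessian scatters the upper-triangular blocks of [A1 A2 b]' * [A1 A2 b] into the target SymmetricBlockMatrix using fixed-size Eigen blocks, which is where the speed-up over the dynamic-size JacobianFactor path comes from.

#include <gtsam/linear/BinaryJacobianFactor.h>  // assumed install location of the header above
#include <gtsam/linear/NoiseModel.h>
#include <gtsam/inference/Symbol.h>

// Illustrative helper (hypothetical name): build a 3-row factor on two 2-dof variables.
void binaryJacobianExample() {
  using namespace gtsam;
  Eigen::Matrix<double, 3, 2> A1, A2;
  A1.setRandom();
  A2.setRandom();
  Eigen::Matrix<double, 3, 1> b;
  b.setZero();
  // M=3, N1=2, N2=2 are compile-time constants, so updateHessian can use fixed-size blocks.
  BinaryJacobianFactor<3, 2, 2> factor(Symbol('x', 1), A1, Symbol('x', 2), A2, b,
      noiseModel::Isotropic::Sigma(3, 0.5));
  factor.print("binary factor");
}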
@@ -28,6 +28,8 @@ namespace gtsam {

  // Forward declarations
  class VectorValues;
+ class Scatter;
+ class SymmetricBlockMatrix;

  /**
   * An abstract virtual base class for JacobianFactor and HessianFactor. A GaussianFactor has a

@@ -119,6 +121,14 @@ namespace gtsam {
     */
    virtual GaussianFactor::shared_ptr negate() const = 0;

+   /** Update an information matrix by adding the information corresponding to this factor
+    * (used internally during elimination).
+    * @param scatter A mapping from variable index to slot index in this HessianFactor
+    * @param info The information matrix to be updated
+    */
+   virtual void updateHessian(const FastVector<Key>& keys,
+       SymmetricBlockMatrix* info) const = 0;
+
    /// y += alpha * A'*A*x
    virtual void multiplyHessianAdd(double alpha, const VectorValues& x, VectorValues& y) const = 0;

@@ -131,6 +141,12 @@ namespace gtsam {
    /// Gradient wrt a key at any values
    virtual Vector gradient(Key key, const VectorValues& x) const = 0;

+   // Determine position of a given key
+   template <typename CONTAINER>
+   static DenseIndex Slot(const CONTAINER& keys, Key key) {
+     return std::find(keys.begin(), keys.end(), key) - keys.begin();
+   }
+
   private:
    /** Serialization function */
    friend class boost::serialization::access;

@@ -140,7 +156,7 @@ namespace gtsam {
    }

  }; // GaussianFactor


  /// traits
  template<>
  struct traits<GaussianFactor> : public Testable<GaussianFactor> {
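The Slot helper added above is a plain linear search that maps a key to its block index in the caller-supplied key list; updateHessian implementations (such as the BinaryJacobianFactor one earlier in this diff) use it to decide where to add their A'A contributions. A minimal self-contained illustration of what it returns, using stand-in typedefs rather than the real gtsam headers:

#include <algorithm>
#include <cassert>
#include <cstddef>
#include <cstdint>
#include <vector>

typedef std::uint64_t Key;          // stand-in for gtsam::Key
typedef std::ptrdiff_t DenseIndex;  // stand-in for gtsam::DenseIndex

// Same logic as GaussianFactor::Slot above: index of key in keys, or keys.size() if absent.
template <typename CONTAINER>
static DenseIndex Slot(const CONTAINER& keys, Key key) {
  return std::find(keys.begin(), keys.end(), key) - keys.begin();
}

int main() {
  std::vector<Key> infoKeys;  // keys of the information matrix being assembled
  infoKeys.push_back(7);
  infoKeys.push_back(3);
  infoKeys.push_back(9);
  assert(Slot(infoKeys, 3) == 1);  // second block row/column
  assert(Slot(infoKeys, 5) == 3);  // not found: one past the last variable block
  return 0;
}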
@@ -47,7 +47,7 @@ namespace gtsam {

  /* ************************************************************************* */
  GaussianFactorGraph::Keys GaussianFactorGraph::keys() const {
-   FastSet<Key> keys;
+   KeySet keys;
    BOOST_FOREACH(const sharedFactor& factor, *this)
    if (factor)
      keys.insert(factor->begin(), factor->end());

@@ -123,26 +123,32 @@ namespace gtsam {
  /* ************************************************************************* */
  vector<boost::tuple<size_t, size_t, double> > GaussianFactorGraph::sparseJacobian() const {
    // First find dimensions of each variable
-   vector<size_t> dims;
+   typedef std::map<Key, size_t> KeySizeMap;
+   KeySizeMap dims;
    BOOST_FOREACH(const sharedFactor& factor, *this) {
-     for (GaussianFactor::const_iterator pos = factor->begin();
-         pos != factor->end(); ++pos) {
-       if (dims.size() <= *pos)
-         dims.resize(*pos + 1, 0);
-       dims[*pos] = factor->getDim(pos);
+     if (!static_cast<bool>(factor)) continue;
+
+     for (GaussianFactor::const_iterator key = factor->begin();
+         key != factor->end(); ++key) {
+       dims[*key] = factor->getDim(key);
      }
    }

    // Compute first scalar column of each variable
-   vector<size_t> columnIndices(dims.size() + 1, 0);
-   for (size_t j = 1; j < dims.size() + 1; ++j)
-     columnIndices[j] = columnIndices[j - 1] + dims[j - 1];
+   size_t currentColIndex = 0;
+   KeySizeMap columnIndices = dims;
+   BOOST_FOREACH(const KeySizeMap::value_type& col, dims) {
+     columnIndices[col.first] = currentColIndex;
+     currentColIndex += dims[col.first];
+   }

    // Iterate over all factors, adding sparse scalar entries
    typedef boost::tuple<size_t, size_t, double> triplet;
    vector<triplet> entries;
    size_t row = 0;
    BOOST_FOREACH(const sharedFactor& factor, *this) {
+     if (!static_cast<bool>(factor)) continue;
+
      // Convert to JacobianFactor if necessary
      JacobianFactor::shared_ptr jacobianFactor(
          boost::dynamic_pointer_cast<JacobianFactor>(factor));

@@ -159,11 +165,11 @@ namespace gtsam {
      // Whiten the factor and add entries for it
      // iterate over all variables in the factor
      const JacobianFactor whitened(jacobianFactor->whiten());
-     for (JacobianFactor::const_iterator pos = whitened.begin();
-         pos < whitened.end(); ++pos) {
-       JacobianFactor::constABlock whitenedA = whitened.getA(pos);
+     for (JacobianFactor::const_iterator key = whitened.begin();
+         key < whitened.end(); ++key) {
+       JacobianFactor::constABlock whitenedA = whitened.getA(key);
        // find first column index for this key
-       size_t column_start = columnIndices[*pos];
+       size_t column_start = columnIndices[*key];
        for (size_t i = 0; i < (size_t) whitenedA.rows(); i++)
          for (size_t j = 0; j < (size_t) whitenedA.cols(); j++) {
            double s = whitenedA(i, j);

@@ -173,7 +179,7 @@ namespace gtsam {
      }

      JacobianFactor::constBVector whitenedb(whitened.getb());
-     size_t bcolumn = columnIndices.back();
+     size_t bcolumn = currentColIndex;
      for (size_t i = 0; i < (size_t) whitenedb.size(); i++)
        entries.push_back(boost::make_tuple(row + i, bcolumn, whitenedb(i)));
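The reworked sparseJacobian still returns (row, column, value) triplets over the whitened augmented system [A | b]; only the column bookkeeping changed from a dense vector to a key-indexed map. A hedged sketch, not taken from the diff, of turning those triplets into an Eigen sparse matrix; the helper name and the explicit row/column counts are illustrative:

#include <cstddef>
#include <vector>
#include <boost/foreach.hpp>
#include <boost/tuple/tuple.hpp>
#include <Eigen/SparseCore>

typedef boost::tuple<std::size_t, std::size_t, double> SparseTriplet;

// Illustrative helper: assemble the triplets from sparseJacobian() into a sparse matrix.
Eigen::SparseMatrix<double> buildSparse(const std::vector<SparseTriplet>& entries,
                                        std::size_t rows, std::size_t cols) {
  std::vector<Eigen::Triplet<double> > triplets;
  triplets.reserve(entries.size());
  BOOST_FOREACH(const SparseTriplet& e, entries)
    triplets.push_back(Eigen::Triplet<double>(e.get<0>(), e.get<1>(), e.get<2>()));
  Eigen::SparseMatrix<double> A(rows, cols);
  A.setFromTriplets(triplets.begin(), triplets.end());
  return A;  // augmented Jacobian [A | b]; the last column holds the whitened b entries
}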
Some files were not shown because too many files have changed in this diff.