Coding convention: convert tabs to two spaces
parent 57b4c79cad
commit 699153ece9
@@ -157,7 +157,7 @@ struct LieMatrix : public Matrix, public DerivedValue<LieMatrix> {
        result.data(), p.rows(), p.cols()) = p;
    return result;
  }

  /// @}

private:
@@ -85,7 +85,7 @@ namespace gtsam {
    dims_accumulated.resize(dims.size()+1,0);
    dims_accumulated[0]=0;
    for (size_t i=1; i<dims_accumulated.size(); i++)
      dims_accumulated[i] = dims_accumulated[i-1]+dims[i-1];
    return dims_accumulated;
  }
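For reference, the loop above is an exclusive prefix sum over the variable dimensions: entry i holds the offset of variable i in the stacked vector, with one extra entry for the total. A minimal standalone sketch (not part of this commit; the dimension values are hypothetical):

#include <cstddef>
#include <iostream>
#include <vector>

int main() {
  // hypothetical per-variable dimensions: three variables of size 3, 2, and 6
  std::vector<std::size_t> dims;
  dims.push_back(3); dims.push_back(2); dims.push_back(6);

  // same scheme as the hunk above
  std::vector<std::size_t> dims_accumulated(dims.size()+1, 0);
  for (std::size_t i = 1; i < dims_accumulated.size(); i++)
    dims_accumulated[i] = dims_accumulated[i-1] + dims[i-1];

  for (std::size_t i = 0; i < dims_accumulated.size(); i++)
    std::cout << dims_accumulated[i] << " ";   // prints: 0 3 5 11
  std::cout << std::endl;
  return 0;
}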
@@ -358,8 +358,8 @@ namespace gtsam {
/* ************************************************************************* */
void GaussianFactorGraph::multiplyHessianAdd(double alpha,
    const double* x, double* y) const {
  vector<size_t> FactorKeys = getkeydim();
  BOOST_FOREACH(const GaussianFactor::shared_ptr& f, *this)
    f->multiplyHessianAdd(alpha, x, y, FactorKeys);
}
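As context for the raw-pointer overload above: each factor accumulates its own contribution, so over the whole graph the method realizes the update y <- y + alpha * H * x. A minimal Eigen sketch of that contract (illustrative only, not the GTSAM implementation):

#include <Eigen/Dense>
#include <iostream>

int main() {
  // stand-ins for the Hessian H, the input x, and the accumulator y
  Eigen::MatrixXd H = Eigen::MatrixXd::Identity(3, 3);
  Eigen::VectorXd x = Eigen::VectorXd::Ones(3);
  Eigen::VectorXd y = Eigen::VectorXd::Zero(3);

  const double alpha = 2.0;
  y += alpha * H * x;   // the update each factor contributes a piece of

  std::cout << y.transpose() << std::endl;   // prints: 2 2 2
  return 0;
}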
@@ -538,7 +538,7 @@ void HessianFactor::multiplyHessianAdd(double alpha, const VectorValues& x,

  // copy to yvalues
  for(DenseIndex i = 0; i < (DenseIndex)size(); ++i) {
    bool didNotExist;
    VectorValues::iterator it;
    boost::tie(it, didNotExist) = yvalues.tryInsert(keys_[i], Vector());
    if (didNotExist)
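The tryInsert/boost::tie pattern above mirrors the standard insert-or-locate idiom of std::map, where the returned bool reports whether the key was newly inserted. A self-contained sketch of the same idiom (names hypothetical, not from the diff):

#include <iostream>
#include <map>
#include <utility>

int main() {
  std::map<int, double> yvalues;
  yvalues[7] = 1.5;

  // std::map::insert returns (iterator, bool); the bool is true exactly when
  // the key was absent, which is the role didNotExist plays above
  std::pair<std::map<int, double>::iterator, bool> r =
      yvalues.insert(std::make_pair(7, 0.0));

  std::cout << r.second << " " << r.first->second << std::endl;  // prints: 0 1.5
  return 0;
}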
@@ -345,12 +345,12 @@ namespace gtsam {

    /** Constructor */
    CombinedImuFactor(
        Key pose_i, ///< previous pose key
        Key vel_i, ///< previous velocity key
        Key pose_j, ///< current pose key
        Key vel_j, ///< current velocity key
        Key bias_i, ///< previous bias key
        Key bias_j, ///< current bias key
        const CombinedPreintegratedMeasurements& preintegratedMeasurements, ///< Preintegrated IMU measurements
        const Vector3& gravity, ///< gravity vector
        const Vector3& omegaCoriolis, ///< rotation rate of inertial frame
@@ -480,33 +480,33 @@ namespace gtsam {
      Matrix3 dfPdPi;
      Matrix3 dfVdPi;
      if(use2ndOrderCoriolis_){
        dfPdPi = - Rot_i.matrix() + 0.5 * skewSymmetric(omegaCoriolis_) * skewSymmetric(omegaCoriolis_) * Rot_i.matrix() * deltaTij*deltaTij;
        dfVdPi = skewSymmetric(omegaCoriolis_) * skewSymmetric(omegaCoriolis_) * Rot_i.matrix() * deltaTij;
      }
      else{
        dfPdPi = - Rot_i.matrix();
        dfVdPi = Matrix3::Zero();
      }

      (*H1) <<
          // dfP/dRi
          Rot_i.matrix() * skewSymmetric(preintegratedMeasurements_.deltaPij
          + preintegratedMeasurements_.delPdelBiasOmega * biasOmegaIncr + preintegratedMeasurements_.delPdelBiasAcc * biasAccIncr),
          // dfP/dPi
          dfPdPi,
          // dfV/dRi
          Rot_i.matrix() * skewSymmetric(preintegratedMeasurements_.deltaVij
          + preintegratedMeasurements_.delVdelBiasOmega * biasOmegaIncr + preintegratedMeasurements_.delVdelBiasAcc * biasAccIncr),
          // dfV/dPi
          dfVdPi,
          // dfR/dRi
          Jrinv_fRhat * (- Rot_j.between(Rot_i).matrix() - fRhat.inverse().matrix() * Jtheta),
          // dfR/dPi
          Matrix3::Zero(),
          //dBiasAcc/dPi
          Matrix3::Zero(), Matrix3::Zero(),
          //dBiasOmega/dPi
          Matrix3::Zero(), Matrix3::Zero();
    }

    if(H2) {
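Transcribing the second-order Coriolis branch above into math (a sketch read directly off the code, writing $[\omega]_\times$ for skewSymmetric(omegaCoriolis_) and $R_i$ for Rot_i.matrix()):

\frac{\partial f_P}{\partial p_i} = -R_i + \tfrac{1}{2}\,[\omega]_\times [\omega]_\times R_i\,\Delta t_{ij}^2,
\qquad
\frac{\partial f_V}{\partial p_i} = [\omega]_\times [\omega]_\times R_i\,\Delta t_{ij}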
@@ -517,13 +517,13 @@ namespace gtsam {
          + skewSymmetric(omegaCoriolis_) * deltaTij * deltaTij, // Coriolis term - we got rid of the 2 wrt ins paper
          // dfV/dVi
          - Matrix3::Identity()
          + 2 * skewSymmetric(omegaCoriolis_) * deltaTij, // Coriolis term
          // dfR/dVi
          Matrix3::Zero(),
          //dBiasAcc/dVi
          Matrix3::Zero(),
          //dBiasOmega/dVi
          Matrix3::Zero();
    }

    if(H3) {
@@ -643,21 +643,21 @@ namespace gtsam {
    // Predict state at time j
    /* ---------------------------------------------------------------------------------------------------- */
    Vector3 pos_j = pos_i + Rot_i.matrix() * (preintegratedMeasurements.deltaPij
        + preintegratedMeasurements.delPdelBiasAcc * biasAccIncr
        + preintegratedMeasurements.delPdelBiasOmega * biasOmegaIncr)
        + vel_i * deltaTij
        - skewSymmetric(omegaCoriolis) * vel_i * deltaTij*deltaTij // Coriolis term - we got rid of the 2 wrt ins paper
        + 0.5 * gravity * deltaTij*deltaTij;

    vel_j = LieVector(vel_i + Rot_i.matrix() * (preintegratedMeasurements.deltaVij
        + preintegratedMeasurements.delVdelBiasAcc * biasAccIncr
        + preintegratedMeasurements.delVdelBiasOmega * biasOmegaIncr)
        - 2 * skewSymmetric(omegaCoriolis) * vel_i * deltaTij // Coriolis term
        + gravity * deltaTij);

    if(use2ndOrderCoriolis){
      pos_j += - 0.5 * skewSymmetric(omegaCoriolis) * skewSymmetric(omegaCoriolis) * pos_i * deltaTij*deltaTij; // 2nd order coriolis term for position
      vel_j += - skewSymmetric(omegaCoriolis) * skewSymmetric(omegaCoriolis) * pos_i * deltaTij; // 2nd order term for velocity
    }

    const Rot3 deltaRij_biascorrected = preintegratedMeasurements.deltaRij.retract(preintegratedMeasurements.delRdelBiasOmega * biasOmegaIncr, Rot3::EXPMAP);
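Written out, the predicted state above is (a sketch read off the code; $\delta b_a$ and $\delta b_\omega$ are the accelerometer and gyro bias increments, $g$ the gravity vector, $\omega$ the Coriolis rate):

p_j = p_i + R_i\Big(\Delta p_{ij} + \tfrac{\partial \Delta p}{\partial b_a}\,\delta b_a + \tfrac{\partial \Delta p}{\partial b_\omega}\,\delta b_\omega\Big) + v_i\,\Delta t_{ij} - [\omega]_\times v_i\,\Delta t_{ij}^2 + \tfrac{1}{2}\,g\,\Delta t_{ij}^2

v_j = v_i + R_i\Big(\Delta v_{ij} + \tfrac{\partial \Delta v}{\partial b_a}\,\delta b_a + \tfrac{\partial \Delta v}{\partial b_\omega}\,\delta b_\omega\Big) - 2\,[\omega]_\times v_i\,\Delta t_{ij} + g\,\Delta t_{ij}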
@@ -308,11 +308,11 @@ namespace gtsam {

    /** Constructor */
    ImuFactor(
        Key pose_i, ///< previous pose key
        Key vel_i, ///< previous velocity key
        Key pose_j, ///< current pose key
        Key vel_j, ///< current velocity key
        Key bias, ///< previous bias key
        const PreintegratedMeasurements& preintegratedMeasurements, ///< preintegrated IMU measurements
        const Vector3& gravity, ///< gravity vector
        const Vector3& omegaCoriolis, ///< rotation rate of the inertial frame
@@ -419,29 +419,29 @@ namespace gtsam {
      Matrix3 dfPdPi;
      Matrix3 dfVdPi;
      if(use2ndOrderCoriolis_){
        dfPdPi = - Rot_i.matrix() + 0.5 * skewSymmetric(omegaCoriolis_) * skewSymmetric(omegaCoriolis_) * Rot_i.matrix() * deltaTij*deltaTij;
        dfVdPi = skewSymmetric(omegaCoriolis_) * skewSymmetric(omegaCoriolis_) * Rot_i.matrix() * deltaTij;
      }
      else{
        dfPdPi = - Rot_i.matrix();
        dfVdPi = Matrix3::Zero();
      }

      (*H1) <<
          // dfP/dRi
          Rot_i.matrix() * skewSymmetric(preintegratedMeasurements_.deltaPij
          + preintegratedMeasurements_.delPdelBiasOmega * biasOmegaIncr + preintegratedMeasurements_.delPdelBiasAcc * biasAccIncr),
          // dfP/dPi
          dfPdPi,
          // dfV/dRi
          Rot_i.matrix() * skewSymmetric(preintegratedMeasurements_.deltaVij
          + preintegratedMeasurements_.delVdelBiasOmega * biasOmegaIncr + preintegratedMeasurements_.delVdelBiasAcc * biasAccIncr),
          // dfV/dPi
          dfVdPi,
          // dfR/dRi
          Jrinv_fRhat * (- Rot_j.between(Rot_i).matrix() - fRhat.inverse().matrix() * Jtheta),
          // dfR/dPi
          Matrix3::Zero();
    }

    if(H2) {
@@ -540,22 +540,22 @@ namespace gtsam {

    // Predict state at time j
    /* ---------------------------------------------------------------------------------------------------- */
    Vector3 pos_j = pos_i + Rot_i.matrix() * (preintegratedMeasurements.deltaPij
        + preintegratedMeasurements.delPdelBiasAcc * biasAccIncr
        + preintegratedMeasurements.delPdelBiasOmega * biasOmegaIncr)
        + vel_i * deltaTij
        - skewSymmetric(omegaCoriolis) * vel_i * deltaTij*deltaTij // Coriolis term - we got rid of the 2 wrt ins paper
        + 0.5 * gravity * deltaTij*deltaTij;

    vel_j = LieVector(vel_i + Rot_i.matrix() * (preintegratedMeasurements.deltaVij
        + preintegratedMeasurements.delVdelBiasAcc * biasAccIncr
        + preintegratedMeasurements.delVdelBiasOmega * biasOmegaIncr)
        - 2 * skewSymmetric(omegaCoriolis) * vel_i * deltaTij // Coriolis term
        + gravity * deltaTij);

    if(use2ndOrderCoriolis){
      pos_j += - 0.5 * skewSymmetric(omegaCoriolis) * skewSymmetric(omegaCoriolis) * pos_i * deltaTij*deltaTij; // 2nd order coriolis term for position
      vel_j += - skewSymmetric(omegaCoriolis) * skewSymmetric(omegaCoriolis) * pos_i * deltaTij; // 2nd order term for velocity
    }

    const Rot3 deltaRij_biascorrected = preintegratedMeasurements.deltaRij.retract(preintegratedMeasurements.delRdelBiasOmega * biasOmegaIncr, Rot3::EXPMAP);
@@ -176,11 +176,11 @@ void NonlinearFactorGraph::saveGraph(std::ostream &stm, const Values& values,
    stm << "];\n";

    if (firstTimePoses) {
      lastKey = key;
      firstTimePoses = false;
    } else {
      stm << " var" << key << "--" << "var" << lastKey << ";\n";
      lastKey = key;
    }
  }
  stm << "\n";
@@ -219,37 +219,37 @@ void NonlinearFactorGraph::saveGraph(std::ostream &stm, const Values& values,
  // Create factors and variable connections
  for(size_t i = 0; i < this->size(); ++i) {
    if(graphvizFormatting.plotFactorPoints){
      // Make each factor a dot
      stm << " factor" << i << "[label=\"\", shape=point";
      {
        map<size_t, Point2>::const_iterator pos = graphvizFormatting.factorPositions.find(i);
        if(pos != graphvizFormatting.factorPositions.end())
          stm << ", pos=\"" << graphvizFormatting.scale*(pos->second.x() - minX) << "," << graphvizFormatting.scale*(pos->second.y() - minY) << "!\"";
      }
      stm << "];\n";

      // Make factor-variable connections
      if(graphvizFormatting.connectKeysToFactor && this->at(i)) {
        BOOST_FOREACH(Key key, *this->at(i)) {
          stm << " var" << key << "--" << "factor" << i << ";\n";
        }
      }

    }
    else {
      if(this->at(i)) {
        Key k;
        bool firstTime = true;
        BOOST_FOREACH(Key key, *this->at(i)) {
          if(firstTime){
            k = key;
            firstTime = false;
            continue;
          }
          stm << " var" << key << "--" << "var" << k << ";\n";
          k = key;
        }
      }
    }
  }
}
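For orientation, these statements emit standard Graphviz DOT edges and nodes. A hedged sketch of the fragment they produce for a single factor (not part of the commit; factor index, keys, and coordinates are hypothetical):

#include <iostream>

int main() {
  // mirrors the stm << ... statements above for one factor joining keys 1 and 2
  int i = 0;   // hypothetical factor index
  std::cout << " factor" << i << "[label=\"\", shape=point";
  std::cout << ", pos=\"" << 1.5 << "," << 2.0 << "!\"";   // hypothetical position
  std::cout << "];\n";
  std::cout << " var" << 1 << "--" << "factor" << i << ";\n";
  std::cout << " var" << 2 << "--" << "factor" << i << ";\n";
  // prints:
  //  factor0[label="", shape=point, pos="1.5,2!"];
  //  var1--factor0;
  //  var2--factor0;
  return 0;
}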
@@ -34,89 +34,89 @@ class DSFMap {

 protected:

  /// We store the forest in an STL map, but parents are done with pointers
  struct Entry {
    typename std::map<KEY, Entry>::iterator parent_;
    size_t rank_;
    Entry() {}
  };

  typedef typename std::map<KEY, Entry> Map;
  typedef typename Map::iterator iterator;
  mutable Map entries_;

  /// Given key, find iterator to initial entry
  iterator find__(const KEY& key) const {
    static const Entry empty;
    iterator it = entries_.find(key);
    // if key does not exist, create and return itself
    if (it == entries_.end()) {
      it = entries_.insert(std::make_pair(key, empty)).first;
      it->second.parent_ = it;
      it->second.rank_ = 0;
    }
    return it;
  }

  /// Given iterator to initial entry, find the root Entry
  iterator find_(const iterator& it) const {
    // follow parent pointers until we reach set representative
    iterator& parent = it->second.parent_;
    if (parent != it)
      parent = find_(parent); // not yet, recurse!
    return parent;
  }

  /// Given key, find the root Entry
  inline iterator find_(const KEY& key) const {
    iterator initial = find__(key);
    return find_(initial);
  }

 public:

  typedef std::set<KEY> Set;

  /// constructor
  DSFMap() {
  }

  /// Given key, find the representative key for the set in which it lives
  inline KEY find(const KEY& key) const {
    iterator root = find_(key);
    return root->first;
  }

  /// Merge two sets
  void merge(const KEY& x, const KEY& y) {

    // straight from http://en.wikipedia.org/wiki/Disjoint-set_data_structure
    iterator xRoot = find_(x);
    iterator yRoot = find_(y);
    if (xRoot == yRoot)
      return;

    // Merge sets
    if (xRoot->second.rank_ < yRoot->second.rank_)
      xRoot->second.parent_ = yRoot;
    else if (xRoot->second.rank_ > yRoot->second.rank_)
      yRoot->second.parent_ = xRoot;
    else {
      yRoot->second.parent_ = xRoot;
      xRoot->second.rank_ = xRoot->second.rank_ + 1;
    }
  }

  /// return all sets, i.e. a partition of all elements
  std::map<KEY, Set> sets() const {
    std::map<KEY, Set> sets;
    iterator it = entries_.begin();
    for (; it != entries_.end(); it++) {
      iterator root = find_(it);
      sets[root->first].insert(it->first);
    }
    return sets;
  }

};
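The class above is a disjoint-set forest (union-find) with path compression in find_ and union by rank in merge. A minimal usage sketch of the find/merge/sets API it exposes (not part of the commit; the include path and gtsam namespace are assumptions, and the keys are hypothetical):

#include <gtsam/base/DSFMap.h>   // assumed header path for the class above
#include <cstddef>
#include <iostream>
#include <map>
#include <set>

int main() {
  gtsam::DSFMap<std::size_t> dsf;   // assuming the class lives in namespace gtsam
  dsf.merge(1, 2);   // {1,2}
  dsf.merge(2, 3);   // {1,2,3}
  dsf.merge(7, 8);   // {7,8}

  std::cout << (dsf.find(1) == dsf.find(3)) << std::endl;   // 1: same set
  std::cout << (dsf.find(1) == dsf.find(7)) << std::endl;   // 0: different sets

  std::map<std::size_t, std::set<std::size_t> > sets = dsf.sets();
  std::cout << sets.size() << std::endl;   // prints: 2
  return 0;
}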
@@ -1,8 +1,8 @@
/**
 * @file testLoopyBelief.cpp
 * @brief
 * @author Duy-Nguyen Ta
 * @date Oct 11, 2013
 */

#include <gtsam/inference/VariableIndex.h>
@@ -26,539 +26,539 @@ extern "C" {

namespace gtsam { namespace partition {

  typedef boost::shared_array<idx_t> sharedInts;

  /* ************************************************************************* */
  /**
   * Return the size of the separator and the partition indices {part}
   * Part [j] is 0, 1, or 2, depending on
   * whether node j is in the left part of the graph, the right part, or the
   * separator, respectively
   */
  std::pair<int, sharedInts> separatorMetis(idx_t n, const sharedInts& xadj,
      const sharedInts& adjncy, const sharedInts& adjwgt, bool verbose) {

    // control parameters
    idx_t vwgt[n]; // the weights of the vertices
    idx_t options[METIS_NOPTIONS];
    METIS_SetDefaultOptions(options); // use defaults
    idx_t sepsize; // the size of the separator, output
    sharedInts part_(new idx_t[n]); // the partition of each vertex, output

    // set uniform weights on the vertices
    std::fill(vwgt, vwgt+n, 1);

    // TODO: Fix at later time
    //boost::timer::cpu_timer TOTALTmr;
    if (verbose) {
      printf("**********************************************************************\n");
      printf("Graph Information ---------------------------------------------------\n");
      printf(" #Vertices: %d, #Edges: %u\n", n, *(xadj.get()+n) / 2);
      printf("\nND Partitioning... -------------------------------------------\n");
      //TOTALTmr.start()
    }

    // call metis partition routine
    METIS_ComputeVertexSeparator(&n, xadj.get(), adjncy.get(),
        vwgt, options, &sepsize, part_.get());

    if (verbose) {
      //boost::cpu_times const elapsed_times(timer.elapsed());
      //printf("\nTiming Information --------------------------------------------------\n");
      //printf(" Total: \t\t %7.3f\n", elapsed_times);
      printf(" Sep size: \t\t %d\n", sepsize);
      printf("**********************************************************************\n");
    }

    return std::make_pair(sepsize, part_);
  }
  /* ************************************************************************* */
  void modefied_EdgeComputeSeparator(idx_t *nvtxs, idx_t *xadj, idx_t *adjncy, idx_t *vwgt,
      idx_t *adjwgt, idx_t *options, idx_t *edgecut, idx_t *part)
  {
    idx_t i, ncon;
    graph_t *graph;
    real_t *tpwgts2;
    ctrl_t *ctrl;
    ctrl = SetupCtrl(METIS_OP_OMETIS, options, 1, 3, NULL, NULL);
    ctrl->iptype = METIS_IPTYPE_GROW;
    //if () == NULL)
    //  return METIS_ERROR_INPUT;

    InitRandom(ctrl->seed);

    graph = SetupGraph(ctrl, *nvtxs, 1, xadj, adjncy, vwgt, NULL, NULL);

    AllocateWorkSpace(ctrl, graph);

    ncon = graph->ncon;
    ctrl->ncuts = 1;

    /* determine the weights of the two partitions as a function of the weight of the
       target partition weights */
    tpwgts2 = rwspacemalloc(ctrl, 2*ncon);
    for (i=0; i<ncon; i++) {
      tpwgts2[i] = rsum((2>>1), ctrl->tpwgts+i, ncon);
      tpwgts2[ncon+i] = 1.0 - tpwgts2[i];
    }
    /* perform the bisection */
    *edgecut = MultilevelBisect(ctrl, graph, tpwgts2);

    // ConstructMinCoverSeparator(&ctrl, &graph, 1.05);
    // *edgecut = graph->mincut;
    // *sepsize = graph.pwgts[2];
    icopy(*nvtxs, graph->where, part);
    std::cout << "Finished bisection:" << *edgecut << std::endl;
    FreeGraph(&graph);

    FreeCtrl(&ctrl);
  }
  /* ************************************************************************* */
  /**
   * Return the number of edge cuts and the partition indices {part}
   * Part [j] is 0 or 1, depending on
   * whether node j is in the left part of the graph or the right part respectively
   */
  std::pair<int, sharedInts> edgeMetis(idx_t n, const sharedInts& xadj, const sharedInts& adjncy,
      const sharedInts& adjwgt, bool verbose) {

    // control parameters
    idx_t vwgt[n]; // the weights of the vertices
    idx_t options[METIS_NOPTIONS];
    METIS_SetDefaultOptions(options); // use defaults
    idx_t edgecut; // the number of edge cuts, output
    sharedInts part_(new idx_t[n]); // the partition of each vertex, output

    // set uniform weights on the vertices
    std::fill(vwgt, vwgt+n, 1);

    //TODO: Fix later
    //boost::timer TOTALTmr;
    if (verbose) {
      printf("**********************************************************************\n");
      printf("Graph Information ---------------------------------------------------\n");
      printf(" #Vertices: %d, #Edges: %u\n", n, *(xadj.get()+n) / 2);
      printf("\nND Partitioning... -------------------------------------------\n");
      //cleartimer(TOTALTmr);
      //starttimer(TOTALTmr);
    }

    //int wgtflag = 1; // only edge weights
    //int numflag = 0; // c style numbering starting from 0
    //int nparts = 2; // partition the graph to 2 submaps
    modefied_EdgeComputeSeparator(&n, xadj.get(), adjncy.get(), vwgt, adjwgt.get(),
        options, &edgecut, part_.get());

    if (verbose) {
      //stoptimer(TOTALTmr);
      printf("\nTiming Information --------------------------------------------------\n");
      //printf(" Total: \t\t %7.3f\n", gettimer(TOTALTmr));
      printf(" Edge cuts: \t\t %d\n", edgecut);
      printf("**********************************************************************\n");
    }

    return std::make_pair(edgecut, part_);
  }
  /* ************************************************************************* */
  /**
   * Prepare the data structures {xadj} and {adjncy} required by metis.
   * xadj always has size equal to the number of nodes plus 1;
   * adjncy always has size equal to twice the number of edges in the Metis graph.
   */
  template <class GenericGraph>
  void prepareMetisGraph(const GenericGraph& graph, const std::vector<size_t>& keys, WorkSpace& workspace,
      sharedInts* ptr_xadj, sharedInts* ptr_adjncy, sharedInts* ptr_adjwgt) {

    typedef int Weight;
    typedef std::vector<int> Weights;
    typedef std::vector<int> Neighbors;
    typedef std::pair<Neighbors, Weights> NeighborsInfo;

    // set up dictionary
    std::vector<int>& dictionary = workspace.dictionary;
    workspace.prepareDictionary(keys);

    // prepare for {adjacencyMap}, a pair of neighbor indices and the corresponding edge weights
    int numNodes = keys.size();
    int numEdges = 0;
    std::vector<NeighborsInfo> adjacencyMap;
    adjacencyMap.resize(numNodes);
    std::cout << "Number of nodes: " << adjacencyMap.size() << std::endl;
    int index1, index2;

    BOOST_FOREACH(const typename GenericGraph::value_type& factor, graph){
      index1 = dictionary[factor->key1.index];
      index2 = dictionary[factor->key2.index];
      std::cout << "index1: " << index1 << std::endl;
      std::cout << "index2: " << index2 << std::endl;
      // if both nodes are in the current graph, i.e. not a joint factor between frontal and separator
      if (index1 >= 0 && index2 >= 0) {
        std::pair<Neighbors, Weights>& adjacencyMap1 = adjacencyMap[index1];
        std::pair<Neighbors, Weights>& adjacencyMap2 = adjacencyMap[index2];
        try{
          adjacencyMap1.first.push_back(index2);
          adjacencyMap1.second.push_back(factor->weight);
          adjacencyMap2.first.push_back(index1);
          adjacencyMap2.second.push_back(factor->weight);
        }catch(std::exception& e){
          std::cout << e.what() << std::endl;
        }
        numEdges++;
      }
    }

    // prepare for {xadj}, {adjncy}, and {adjwgt}
    *ptr_xadj = sharedInts(new idx_t[numNodes+1]);
    *ptr_adjncy = sharedInts(new idx_t[numEdges*2]);
    *ptr_adjwgt = sharedInts(new idx_t[numEdges*2]);
    sharedInts& xadj = *ptr_xadj;
    sharedInts& adjncy = *ptr_adjncy;
    sharedInts& adjwgt = *ptr_adjwgt;
    int ind_xadj = 0, ind_adjncy = 0;
    BOOST_FOREACH(const NeighborsInfo& info, adjacencyMap) {
      *(xadj.get() + ind_xadj) = ind_adjncy;
      std::copy(info.first .begin(), info.first .end(), adjncy.get() + ind_adjncy);
      std::copy(info.second.begin(), info.second.end(), adjwgt.get() + ind_adjncy);
      assert(info.first.size() == info.second.size());
      ind_adjncy += info.first.size();
      ind_xadj ++;
    }
    if (ind_xadj != numNodes) throw std::runtime_error("prepareMetisGraph_: ind_xadj != numNodes");
    *(xadj.get() + ind_xadj) = ind_adjncy;
  }
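The doc comment above describes the standard CSR (compressed sparse row) adjacency layout METIS consumes: neighbors of node v occupy adjncy[xadj[v]] .. adjncy[xadj[v+1]-1]. A small hand-worked sketch (not part of the commit; the triangle graph is hypothetical):

#include <metis.h>   // defines idx_t, as used throughout this file

int main() {
  // Triangle graph 0-1, 1-2, 0-2: 3 nodes, 3 undirected edges.
  // xadj has numNodes+1 = 4 entries; adjncy has 2*numEdges = 6 entries,
  // because every undirected edge appears once per endpoint.
  idx_t xadj[]   = {0, 2, 4, 6};
  idx_t adjncy[] = {1, 2,    // neighbors of node 0
                    0, 2,    // neighbors of node 1
                    0, 1};   // neighbors of node 2
  idx_t adjwgt[] = {1, 1, 1, 1, 1, 1};  // one weight per adjncy entry
  (void)xadj; (void)adjncy; (void)adjwgt;
  return 0;
}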
  /* ************************************************************************* */
  template<class GenericGraph>
  boost::optional<MetisResult> separatorPartitionByMetis(const GenericGraph& graph,
      const std::vector<size_t>& keys, WorkSpace& workspace, bool verbose) {
    // create a metis graph
    size_t numKeys = keys.size();
    if (verbose)
      std::cout << graph.size() << " factors,\t" << numKeys << " nodes;\t" << std::endl;

    sharedInts xadj, adjncy, adjwgt;

    prepareMetisGraph<GenericGraph>(graph, keys, workspace, &xadj, &adjncy, &adjwgt);

    // run ND on the graph
    size_t sepsize;
    sharedInts part;
    boost::tie(sepsize, part) = separatorMetis(numKeys, xadj, adjncy, adjwgt, verbose);
    if (!sepsize) return boost::optional<MetisResult>();

    // convert the 0-1-2 from Metis to 1-2-0, so that the separator is 0, as later
    // we will have more submaps
    MetisResult result;
    result.C.reserve(sepsize);
    result.A.reserve(numKeys - sepsize);
    result.B.reserve(numKeys - sepsize);
    int* ptr_part = part.get();
    std::vector<size_t>::const_iterator itKey = keys.begin();
    std::vector<size_t>::const_iterator itKeyLast = keys.end();
    while(itKey != itKeyLast) {
      switch(*(ptr_part++)) {
      case 0: result.A.push_back(*(itKey++)); break;
      case 1: result.B.push_back(*(itKey++)); break;
      case 2: result.C.push_back(*(itKey++)); break;
      default: throw std::runtime_error("separatorPartitionByMetis: invalid results from Metis ND!");
      }
    }

    if (verbose) {
      std::cout << "total key: " << keys.size()
          << " result(A,B,C) = " << result.A.size() << ", " << result.B.size() << ", "
          << result.C.size() << "; sepsize from Metis = " << sepsize << std::endl;
      //throw runtime_error("separatorPartitionByMetis:stop for debug");
    }

    if(result.C.size() != sepsize) {
      std::cout << "total key: " << keys.size()
          << " result(A,B,C) = " << result.A.size() << ", " << result.B.size() << ", " << result.C.size()
          << "; sepsize from Metis = " << sepsize << std::endl;
      throw std::runtime_error("separatorPartitionByMetis: invalid sepsize from Metis ND!");
    }

    return boost::make_optional<MetisResult >(result);
  }
  /* *************************************************************************/
  template<class GenericGraph>
  boost::optional<MetisResult> edgePartitionByMetis(const GenericGraph& graph,
      const std::vector<size_t>& keys, WorkSpace& workspace, bool verbose) {

    // a small hack for handling the camera1-camera2 case used in the unit tests
    if (graph.size() == 1 && keys.size() == 2) {
      MetisResult result;
      result.A.push_back(keys.front());
      result.B.push_back(keys.back());
      return result;
    }

    // create a metis graph
    size_t numKeys = keys.size();
    if (verbose) std::cout << graph.size() << " factors,\t" << numKeys << " nodes;\t" << std::endl;
    sharedInts xadj, adjncy, adjwgt;
    prepareMetisGraph<GenericGraph>(graph, keys, workspace, &xadj, &adjncy, &adjwgt);

    // run metis on the graph
    int edgecut;
    sharedInts part;
    boost::tie(edgecut, part) = edgeMetis(numKeys, xadj, adjncy, adjwgt, verbose);

    // convert the 0-1-2 from Metis to 1-2-0, so that the separator is 0, as later we will have more submaps
    MetisResult result;
    result.A.reserve(numKeys);
    result.B.reserve(numKeys);
    int* ptr_part = part.get();
    std::vector<size_t>::const_iterator itKey = keys.begin();
    std::vector<size_t>::const_iterator itKeyLast = keys.end();
    while(itKey != itKeyLast) {
      if (*ptr_part != 0 && *ptr_part != 1)
        std::cout << *ptr_part << "!!!" << std::endl;
      switch(*(ptr_part++)) {
      case 0: result.A.push_back(*(itKey++)); break;
      case 1: result.B.push_back(*(itKey++)); break;
      default: throw std::runtime_error("edgePartitionByMetis: invalid results from Metis ND!");
      }
    }

    if (verbose) {
      std::cout << "the size of two submaps in the reduced graph: " << result.A.size()
          << " " << result.B.size() << std::endl;
      int edgeCut = 0;

      BOOST_FOREACH(const typename GenericGraph::value_type& factor, graph){
        int key1 = factor->key1.index;
        int key2 = factor->key2.index;
        // print keys and their subgraph assignment
        std::cout << key1;
        if (std::find(result.A.begin(), result.A.end(), key1) != result.A.end()) std::cout <<"A ";
        if (std::find(result.B.begin(), result.B.end(), key1) != result.B.end()) std::cout <<"B ";

        std::cout << key2;
        if (std::find(result.A.begin(), result.A.end(), key2) != result.A.end()) std::cout <<"A ";
        if (std::find(result.B.begin(), result.B.end(), key2) != result.B.end()) std::cout <<"B ";
        std::cout << "weight " << factor->weight;

        // find vertices that were assigned to sets A & B. Their edge will be cut
        if ((std::find(result.A.begin(), result.A.end(), key1) != result.A.end() &&
             std::find(result.B.begin(), result.B.end(), key2) != result.B.end()) ||
            (std::find(result.B.begin(), result.B.end(), key1) != result.B.end() &&
             std::find(result.A.begin(), result.A.end(), key2) != result.A.end())){
          edgeCut ++;
          std::cout << " CUT ";
        }
        std::cout << std::endl;
      }
      std::cout << "edgeCut: " << edgeCut << std::endl;
    }

    return boost::make_optional<MetisResult >(result);
  }
  /* ************************************************************************* */
  bool isLargerIsland(const std::vector<size_t>& island1, const std::vector<size_t>& island2) {
    return island1.size() > island2.size();
  }

  /* ************************************************************************* */
  // debug functions
  void printIsland(const std::vector<size_t>& island) {
    std::cout << "island: ";
    BOOST_FOREACH(const size_t key, island)
      std::cout << key << " ";
    std::cout << std::endl;
  }

  void printIslands(const std::list<std::vector<size_t> >& islands) {
    BOOST_FOREACH(const std::vector<std::size_t>& island, islands)
      printIsland(island);
  }

  void printNumCamerasLandmarks(const std::vector<size_t>& keys, const std::vector<Symbol>& int2symbol) {
    int numCamera = 0, numLandmark = 0;
    BOOST_FOREACH(const size_t key, keys)
      if (int2symbol[key].chr() == 'x')
        numCamera++;
      else
        numLandmark++;
    std::cout << "numCamera: " << numCamera << " numLandmark: " << numLandmark << std::endl;
  }
  /* ************************************************************************* */
  template<class GenericGraph>
  void addLandmarkToPartitionResult(const GenericGraph& graph, const std::vector<size_t>& landmarkKeys,
      MetisResult& partitionResult, WorkSpace& workspace) {

    // set up cameras in the dictionary
    std::vector<size_t>& A = partitionResult.A;
    std::vector<size_t>& B = partitionResult.B;
    std::vector<size_t>& C = partitionResult.C;
    std::vector<int>& dictionary = workspace.dictionary;
    std::fill(dictionary.begin(), dictionary.end(), -1);
    BOOST_FOREACH(const size_t a, A)
      dictionary[a] = 1;
    BOOST_FOREACH(const size_t b, B)
      dictionary[b] = 2;
    if (!C.empty())
      throw std::runtime_error("addLandmarkToPartitionResult: C is not empty");

    // set up landmarks
    size_t i,j;
    BOOST_FOREACH(const typename GenericGraph::value_type& factor, graph) {
      i = factor->key1.index;
      j = factor->key2.index;
      if (dictionary[j] == 0) // if the landmark is already in the separator, continue
        continue;
      else if (dictionary[j] == -1)
        dictionary[j] = dictionary[i];
      else {
        if (dictionary[j] != dictionary[i])
          dictionary[j] = 0;
      }
      // if (j == 67980)
      //   std::cout << "dictionary[67980]" << dictionary[j] << std::endl;
    }

    BOOST_FOREACH(const size_t j, landmarkKeys) {
      switch(dictionary[j]) {
      case 0: C.push_back(j); break;
      case 1: A.push_back(j); break;
      case 2: B.push_back(j); break;
      default: std::cout << j << ": " << dictionary[j] << std::endl;
        throw std::runtime_error("addLandmarkToPartitionResult: wrong status for landmark");
      }
    }
  }
#define REDUCE_CAMERA_GRAPH
|
||||
|
||||
/* ************************************************************************* */
template<class GenericGraph>
boost::optional<MetisResult> findPartitoning(const GenericGraph& graph, const std::vector<size_t>& keys,
    WorkSpace& workspace, bool verbose,
    const boost::optional<std::vector<Symbol> >& int2symbol, const bool reduceGraph) {
  boost::optional<MetisResult> result;
  GenericGraph reducedGraph;
  std::vector<size_t> keyToPartition;
  std::vector<size_t> cameraKeys, landmarkKeys;
  if (reduceGraph) {
    if (!int2symbol.is_initialized())
      throw std::invalid_argument("findSeparator: int2symbol must be valid!");

    // collect all the landmark keys, which are to be eliminated
    cameraKeys.reserve(keys.size());
    landmarkKeys.reserve(keys.size());
    BOOST_FOREACH(const size_t key, keys) {
      if ((*int2symbol)[key].chr() == 'x')
        cameraKeys.push_back(key);
      else
        landmarkKeys.push_back(key);
    }

    keyToPartition = cameraKeys;
    workspace.prepareDictionary(keyToPartition);
    const std::vector<int>& dictionary = workspace.dictionary;
    reduceGenericGraph(graph, cameraKeys, landmarkKeys, dictionary, reducedGraph);
    std::cout << "original graph: V" << keys.size() << ", E" << graph.size()
        << " --> reduced graph: V" << cameraKeys.size() << ", E" << reducedGraph.size() << std::endl;
    result = edgePartitionByMetis(reducedGraph, keyToPartition, workspace, verbose);
  } else // call Metis to partition the graph to A, B, C
    result = separatorPartitionByMetis(graph, keys, workspace, verbose);

  if (!result.is_initialized()) {
    std::cout << "metis failed!" << std::endl;
    return boost::none;
  }

  if (reduceGraph) {
    addLandmarkToPartitionResult(graph, landmarkKeys, *result, workspace);
    std::cout << "the separator size: " << result->C.size() << " landmarks" << std::endl;
  }

  return result;
}

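// Usage sketch (illustrative only; `graph`, `keys`, `int2symbol` and the
// WorkSpace sizing are assumptions about the caller's setup):
//
//   WorkSpace workspace(nrVariables); // dictionary sized to the largest index
//   boost::optional<MetisResult> cut =
//       findPartitoning(graph, keys, workspace, verbose, int2symbol, true);
//   if (cut) {
//     // cut->A and cut->B are the two frontal blocks, cut->C the separator
//   }
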
/* ************************************************************************* */
template<class GenericGraph>
int findSeparator(const GenericGraph& graph, const std::vector<size_t>& keys,
    const int minNodesPerMap, WorkSpace& workspace, bool verbose,
    const boost::optional<std::vector<Symbol> >& int2symbol, const bool reduceGraph,
    const int minNrConstraintsPerCamera, const int minNrConstraintsPerLandmark) {

  boost::optional<MetisResult> result = findPartitoning(graph, keys, workspace,
      verbose, int2symbol, reduceGraph);

  // find the islands in A and B, and make them separate submaps
  typedef std::vector<size_t> Island;
  std::list<Island> islands;

  std::list<Island> islands_in_A = findIslands(graph, result->A, workspace,
      minNrConstraintsPerCamera, minNrConstraintsPerLandmark);

  std::list<Island> islands_in_B = findIslands(graph, result->B, workspace,
      minNrConstraintsPerCamera, minNrConstraintsPerLandmark);

  islands.insert(islands.end(), islands_in_A.begin(), islands_in_A.end());
  islands.insert(islands.end(), islands_in_B.begin(), islands_in_B.end());
  islands.sort(isLargerIsland);
  size_t numIsland0 = islands.size();

#ifdef NDEBUG
//  verbose = true;
//  if (!int2symbol) throw std::invalid_argument("findSeparator: int2symbol is not set!");
//  std::cout << "sep size: " << result->C.size() << "; ";
//  printNumCamerasLandmarks(result->C, *int2symbol);
//  std::cout << "no. of island: " << islands.size() << "; ";
//  std::cout << "island size: ";
//  BOOST_FOREACH(const Island& island, islands)
//    std::cout << island.size() << " ";
//  std::cout << std::endl;

//  BOOST_FOREACH(const Island& island, islands) {
//    printNumCamerasLandmarks(island, int2symbol);
//  }
#endif

  // absorb small components into the separator
  size_t oldSize = islands.size();
  while (true) {
    if (islands.size() < 2) {
      std::cout << "numIsland: " << numIsland0 << std::endl;
      throw std::runtime_error("findSeparator: found fewer than 2 submaps!");
    }

    std::list<Island>::reference island = islands.back();
    if ((int)island.size() >= minNodesPerMap) break;
    result->C.insert(result->C.end(), island.begin(), island.end());
    islands.pop_back();
  }
  if (islands.size() != oldSize) {
    if (verbose) std::cout << oldSize << "-" << oldSize - islands.size() << " submap(s);\t" << std::endl;
  }
  else {
    if (verbose) std::cout << oldSize << " submap(s);\t" << std::endl;
  }

  // generate the node map
  std::vector<int>& partitionTable = workspace.partitionTable;
  std::fill(partitionTable.begin(), partitionTable.end(), -1);
  BOOST_FOREACH(const size_t key, result->C)
    partitionTable[key] = 0;
  int idx = 0;
  BOOST_FOREACH(const Island& island, islands) {
    idx++;
    BOOST_FOREACH(const size_t key, island) {
      partitionTable[key] = idx;
    }
  }

  return islands.size();
}

}} //namespace

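// Sketch of the resulting partition table (illustrative only; the threshold
// values are made up):
//
//   int nrSubmaps = findSeparator(graph, keys, minNodesPerMap, workspace,
//       verbose, int2symbol, reduceGraph, 7, 2);
//   // workspace.partitionTable[key] == -1 : key not used in this subproblem
//   // workspace.partitionTable[key] ==  0 : key lies on the separator
//   // workspace.partitionTable[key] ==  k : key belongs to submap k (1..nrSubmaps)
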
@@ -16,29 +16,29 @@

namespace gtsam { namespace partition {

  // typedef std::map<size_t, size_t> PartitionTable; // from the key to the partition: 0 - separator, > 1: submap id

  /** the Metis nested dissection result */
  struct MetisResult {
    std::vector<size_t> A, B; // frontals
    std::vector<size_t> C;    // separator
  };

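  // Illustrative example (the key values are made up): after a successful cut
  // of keys {0..5}, one possible MetisResult is
  //
  //   cut.A = {0, 1}   // first frontal block
  //   cut.B = {3, 4}   // second frontal block
  //   cut.C = {2, 5}   // separator: keys touching both sides
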
  /**
   * use the Metis library to partition the graph; returns the size of the separator and the optional partition table
   * the size of the dictionary must be equal to the number of variables in the original graph (the largest one)
   */
  template<class GenericGraph>
  boost::optional<MetisResult> separatorPartitionByMetis(const GenericGraph& graph, const std::vector<size_t>& keys,
      WorkSpace& workspace, bool verbose);

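  // Call sketch (illustrative only; the WorkSpace constructor argument is an
  // assumption about how the dictionary is sized):
  //
  //   WorkSpace workspace(largestVariableIndex + 1);
  //   boost::optional<MetisResult> cut =
  //       separatorPartitionByMetis(graph, keys, workspace, false);
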
  /**
   * return the number of submaps and the partition table of the partitioned graph (**stored in workspace.partitionTable**).
   * returns 0 if the partitioning failed. Note that the original output of Metis is 0,1 for the submaps, and 2 for the separator.
   */
  template<class GenericGraph>
  int findSeparator(const GenericGraph& graph, const std::vector<size_t>& keys,
      const int minNodesPerMap, WorkSpace& workspace, bool verbose, const boost::optional<std::vector<Symbol> >& int2symbol,
      const bool reduceGraph, const int minNrConstraintsPerCamera, const int minNrConstraintsPerLandmark);

}} //namespace

@@ -19,459 +19,459 @@ using namespace std;

namespace gtsam { namespace partition {

  /**
   * Note: Need to be able to handle a graph with factors that involve variables not in the given {keys}
   */
  list<vector<size_t> > findIslands(const GenericGraph2D& graph, const vector<size_t>& keys, WorkSpace& workspace,
      const int minNrConstraintsPerCamera, const int minNrConstraintsPerLandmark)
  {
    typedef pair<int, int> IntPair;
    typedef list<sharedGenericFactor2D> FactorList;
    typedef map<IntPair, FactorList::iterator> Connections;

    // create disjoint set forest
    DSFVector dsf(workspace.dsf, keys);

    FactorList factors(graph.begin(), graph.end());
    size_t nrFactors = factors.size();
    FactorList::iterator itEnd;
    workspace.prepareDictionary(keys);
    while (nrFactors) {
      Connections connections;
      bool succeed = false;
      itEnd = factors.end();
      list<FactorList::iterator> toErase;
      for (FactorList::iterator itFactor=factors.begin(); itFactor!=itEnd; itFactor++) {

        // remove invalid factors
        GenericNode2D key1 = (*itFactor)->key1, key2 = (*itFactor)->key2;
        if (workspace.dictionary[key1.index]==-1 || workspace.dictionary[key2.index]==-1) {
          toErase.push_back(itFactor); nrFactors--; continue;
        }

        size_t label1 = dsf.findSet(key1.index);
        size_t label2 = dsf.findSet(key2.index);
        if (label1 == label2) { toErase.push_back(itFactor); nrFactors--; continue; }

        // merge two trees if the connection is strong enough, otherwise cache it
        // an odometry factor always merges two islands
        if (key1.type == NODE_POSE_2D && key2.type == NODE_POSE_2D) {
          toErase.push_back(itFactor); nrFactors--;
          dsf.makeUnionInPlace(label1, label2);
          succeed = true;
          break;
        }

        // a single-landmark island needs only one measurement
        if ((dsf.isSingleton(label1)==1 && key1.type == NODE_LANDMARK_2D) ||
            (dsf.isSingleton(label2)==1 && key2.type == NODE_LANDMARK_2D)) {
          toErase.push_back(itFactor); nrFactors--;
          dsf.makeUnionInPlace(label1, label2);
          succeed = true;
          break;
        }

        // stack the current factor with the cached constraint
        IntPair labels = (label1 < label2) ? make_pair(label1, label2) : make_pair(label2, label1);
        Connections::iterator itCached = connections.find(labels);
        if (itCached == connections.end()) {
          connections.insert(make_pair(labels, itFactor));
          continue;
        } else {
          GenericNode2D key21 = (*itCached->second)->key1, key22 = (*itCached->second)->key2;
          // if the two factors observe the same landmark, we cannot merge; abandon the current factor
          if ((key1.index == key21.index && key1.type == NODE_LANDMARK_2D) ||
              (key1.index == key22.index && key1.type == NODE_LANDMARK_2D) ||
              (key2.index == key21.index && key2.type == NODE_LANDMARK_2D) ||
              (key2.index == key22.index && key2.type == NODE_LANDMARK_2D)) {
            toErase.push_back(itFactor); nrFactors--;
            continue;
          } else {
            toErase.push_back(itFactor); nrFactors--;
            toErase.push_back(itCached->second); nrFactors--;
            dsf.makeUnionInPlace(label1, label2);
            connections.erase(itCached);
            succeed = true;
            break;
          }
        }
      }

      // erase unused factors
      BOOST_FOREACH(const FactorList::iterator& it, toErase)
        factors.erase(it);

      if (!succeed) break;
    }

    list<vector<size_t> > islands;
    map<size_t, vector<size_t> > arrays = dsf.arrays();
    size_t key; vector<size_t> array;
    BOOST_FOREACH(boost::tie(key, array), arrays)
      islands.push_back(array);
    return islands;
  }

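  // Usage sketch (illustrative only; the constraint thresholds are made up):
  //
  //   list<vector<size_t> > islands = findIslands(graph2d, keys, workspace, 3, 2);
  //   BOOST_FOREACH(const vector<size_t>& island, islands)
  //     cout << "island with " << island.size() << " keys" << endl;
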
  /* ************************************************************************* */
  void print(const GenericGraph2D& graph, const std::string name) {
    cout << name << endl;
    BOOST_FOREACH(const sharedGenericFactor2D& factor_, graph)
      cout << factor_->key1.index << " " << factor_->key2.index << endl;
  }

  /* ************************************************************************* */
  void print(const GenericGraph3D& graph, const std::string name) {
    cout << name << endl;
    BOOST_FOREACH(const sharedGenericFactor3D& factor_, graph)
      cout << factor_->key1.index << " " << factor_->key2.index << " (" <<
          factor_->key1.type << ", " << factor_->key2.type <<")" << endl;
  }

  /* ************************************************************************* */
  // create disjoint set forest
  DSFVector createDSF(const GenericGraph3D& graph, const vector<size_t>& keys, const WorkSpace& workspace) {
    DSFVector dsf(workspace.dsf, keys);
    typedef list<sharedGenericFactor3D> FactorList;

    FactorList factors(graph.begin(), graph.end());
    size_t nrFactors = factors.size();
    FactorList::iterator itEnd;
    while (nrFactors) {

      bool succeed = false;
      itEnd = factors.end();
      list<FactorList::iterator> toErase;
      for (FactorList::iterator itFactor=factors.begin(); itFactor!=itEnd; itFactor++) {

        // remove invalid factors
        if (graph.size() == 178765) cout << "kai21" << endl;
        GenericNode3D key1 = (*itFactor)->key1, key2 = (*itFactor)->key2;
        if (graph.size() == 178765) cout << "kai21: " << key1.index << " " << key2.index << endl;
        if (workspace.dictionary[key1.index]==-1 || workspace.dictionary[key2.index]==-1) {
          toErase.push_back(itFactor); nrFactors--; continue;
        }

if (graph.size() == 178765) cout << "kai22" << endl;
|
||||
size_t label1 = dsf.findSet(key1.index);
|
||||
size_t label2 = dsf.findSet(key2.index);
|
||||
if (label1 == label2) { toErase.push_back(itFactor); nrFactors--; continue; }
|
||||
if (graph.size() == 178765) cout << "kai22" << endl;
|
||||
size_t label1 = dsf.findSet(key1.index);
|
||||
size_t label2 = dsf.findSet(key2.index);
|
||||
if (label1 == label2) { toErase.push_back(itFactor); nrFactors--; continue; }
|
||||
|
||||
if (graph.size() == 178765) cout << "kai23" << endl;
|
||||
// merge two trees if the connection is strong enough, otherwise cache it
|
||||
// an odometry factor always merges two islands
|
||||
if ((key1.type == NODE_POSE_3D && key2.type == NODE_LANDMARK_3D) ||
|
||||
(key1.type == NODE_POSE_3D && key2.type == NODE_POSE_3D)) {
|
||||
toErase.push_back(itFactor); nrFactors--;
|
||||
dsf.makeUnionInPlace(label1, label2);
|
||||
succeed = true;
|
||||
break;
|
||||
}
|
||||
if (graph.size() == 178765) cout << "kai23" << endl;
|
||||
// merge two trees if the connection is strong enough, otherwise cache it
|
||||
// an odometry factor always merges two islands
|
||||
if ((key1.type == NODE_POSE_3D && key2.type == NODE_LANDMARK_3D) ||
|
||||
(key1.type == NODE_POSE_3D && key2.type == NODE_POSE_3D)) {
|
||||
toErase.push_back(itFactor); nrFactors--;
|
||||
dsf.makeUnionInPlace(label1, label2);
|
||||
succeed = true;
|
||||
break;
|
||||
}
|
||||
|
||||
if (graph.size() == 178765) cout << "kai24" << endl;
|
||||
if (graph.size() == 178765) cout << "kai24" << endl;
|
||||
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
      // erase unused factors
      BOOST_FOREACH(const FactorList::iterator& it, toErase)
        factors.erase(it);

      if (!succeed) break;
    }
    return dsf;
  }

  /* ************************************************************************* */
  // first check the type of the key (pose or landmark), and then check whether it is singular
  inline bool isSingular(const set<size_t>& singularCameras, const set<size_t>& singularLandmarks, const GenericNode3D& node) {
    switch(node.type) {
    case NODE_POSE_3D:
      return singularCameras.find(node.index) != singularCameras.end(); break;
    case NODE_LANDMARK_3D:
      return singularLandmarks.find(node.index) != singularLandmarks.end(); break;
    default:
      throw runtime_error("unrecognized key type!");
    }
  }

  /* ************************************************************************* */
  void findSingularCamerasLandmarks(const GenericGraph3D& graph, const WorkSpace& workspace,
      const vector<bool>& isCamera, const vector<bool>& isLandmark,
      set<size_t>& singularCameras, set<size_t>& singularLandmarks, vector<int>& nrConstraints,
      bool& foundSingularCamera, bool& foundSingularLandmark,
      const int minNrConstraintsPerCamera, const int minNrConstraintsPerLandmark) {

    // compute the constraint number per camera
    std::fill(nrConstraints.begin(), nrConstraints.end(), 0);
    BOOST_FOREACH(const sharedGenericFactor3D& factor_, graph) {
      const int& key1 = factor_->key1.index;
      const int& key2 = factor_->key2.index;
      if (workspace.dictionary[key1] != -1 && workspace.dictionary[key2] != -1 &&
          !isSingular(singularCameras, singularLandmarks, factor_->key1) &&
          !isSingular(singularCameras, singularLandmarks, factor_->key2)) {
        nrConstraints[key1]++;
        nrConstraints[key2]++;

        // a single pose constraint is sufficient for stereo, so we add 2 to the counter
        // for a total of 3, i.e. the same as 3 landmarks fully constraining the camera
        if (factor_->key1.type == NODE_POSE_3D && factor_->key2.type == NODE_POSE_3D) {
          nrConstraints[key1]+=2;
          nrConstraints[key2]+=2;
        }
      }
    }

    // find singular cameras and landmarks
    foundSingularCamera = false;
    foundSingularLandmark = false;
    for (size_t i=0; i<nrConstraints.size(); i++) {
      if (isCamera[i] && nrConstraints[i] < minNrConstraintsPerCamera &&
          singularCameras.find(i) == singularCameras.end()) {
        singularCameras.insert(i);
        foundSingularCamera = true;
      }
      if (isLandmark[i] && nrConstraints[i] < minNrConstraintsPerLandmark &&
          singularLandmarks.find(i) == singularLandmarks.end()) {
        singularLandmarks.insert(i);
        foundSingularLandmark = true;
      }
    }
  }

  /* ************************************************************************* */
  list<vector<size_t> > findIslands(const GenericGraph3D& graph, const vector<size_t>& keys, WorkSpace& workspace,
      const size_t minNrConstraintsPerCamera, const size_t minNrConstraintsPerLandmark) {

    // create disjoint set forest
    workspace.prepareDictionary(keys);
    DSFVector dsf = createDSF(graph, keys, workspace);

    const bool verbose = false;
    bool foundSingularCamera = true;
    bool foundSingularLandmark = true;

    list<vector<size_t> > islands;
    set<size_t> singularCameras, singularLandmarks;
    vector<bool> isCamera(workspace.dictionary.size(), false);
    vector<bool> isLandmark(workspace.dictionary.size(), false);

    // check the constraint number of every variable
    // find the camera and landmark keys
    BOOST_FOREACH(const sharedGenericFactor3D& factor_, graph) {
      //assert(factor_->key2.type == NODE_LANDMARK_3D); // only VisualSLAM should come here, not StereoSLAM
      if (workspace.dictionary[factor_->key1.index] != -1) {
        if (factor_->key1.type == NODE_POSE_3D)
          isCamera[factor_->key1.index] = true;
        else
          isLandmark[factor_->key1.index] = true;
      }
      if (workspace.dictionary[factor_->key2.index] != -1) {
        if (factor_->key2.type == NODE_POSE_3D)
          isCamera[factor_->key2.index] = true;
        else
          isLandmark[factor_->key2.index] = true;
      }
    }

    vector<int> nrConstraints(workspace.dictionary.size(), 0);
    // iterate until all singular variables have been removed. Removing a singular variable
    // can cause another to become singular, so this will probably run several times
    while (foundSingularCamera || foundSingularLandmark) {
      findSingularCamerasLandmarks(graph, workspace, isCamera, isLandmark,  // input
          singularCameras, singularLandmarks, nrConstraints,                // output
          foundSingularCamera, foundSingularLandmark,                       // output
          minNrConstraintsPerCamera, minNrConstraintsPerLandmark);          // input
    }

    // add singular variables directly as islands
    if (!singularCameras.empty()) {
      if (verbose) cout << "singular cameras:";
      BOOST_FOREACH(const size_t i, singularCameras) {
        islands.push_back(vector<size_t>(1, i));
        if (verbose) cout << i << " ";
      }
      if (verbose) cout << endl;
    }
    if (!singularLandmarks.empty()) {
      if (verbose) cout << "singular landmarks:";
      BOOST_FOREACH(const size_t i, singularLandmarks) {
        islands.push_back(vector<size_t>(1, i));
        if (verbose) cout << i << " ";
      }
      if (verbose) cout << endl;
    }

    // regenerate the islands
    map<size_t, vector<size_t> > labelIslands = dsf.arrays();
    size_t label; vector<size_t> island;
    BOOST_FOREACH(boost::tie(label, island), labelIslands) {
      vector<size_t> filteredIsland; // remove singular cameras from the array
      filteredIsland.reserve(island.size());
      BOOST_FOREACH(const size_t key, island) {
        if ((isCamera[key] && singularCameras.find(key) == singularCameras.end()) ||       // not singular
            (isLandmark[key] && singularLandmarks.find(key) == singularLandmarks.end()) || // not singular
            (!isCamera[key] && !isLandmark[key])) { // the key is not involved in any factor, so its type is undetermined
          filteredIsland.push_back(key);
        }
      }
      islands.push_back(filteredIsland);
    }

    // sanity check
    size_t nrKeys = 0;
    BOOST_FOREACH(const vector<size_t>& island, islands)
      nrKeys += island.size();
    if (nrKeys != keys.size()) {
      cout << nrKeys << " vs " << keys.size() << endl;
      throw runtime_error("findIslands: the number of keys is inconsistent!");
    }

if (verbose) cout << "found " << islands.size() << " islands!" << endl;
|
||||
return islands;
|
||||
}
|
||||
if (verbose) cout << "found " << islands.size() << " islands!" << endl;
|
||||
return islands;
|
||||
}
|
||||
|
||||
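  // Behavior sketch (illustrative only; keys are made up): because singular
  // variables are removed iteratively, dropping one variable can make a
  // neighbor singular on the next pass, e.g. with minNrConstraintsPerLandmark = 2:
  //
  //   x1 -- l1 -- x2   l1 has two constraints and survives;
  //         l2 -- x2   l2 sees only x2, so it is singular and is dropped,
  //                    which may in turn leave x2 under-constrained.
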
  /* ************************************************************************* */
  // return the size of the intersection of two **sorted** landmark vectors
  inline int getNrCommonLandmarks(const vector<size_t>& landmarks1, const vector<size_t>& landmarks2){
    size_t i1 = 0, i2 = 0;
    int nrCommonLandmarks = 0;
    while (i1 < landmarks1.size() && i2 < landmarks2.size()) {
      if (landmarks1[i1] < landmarks2[i2])
        i1 ++;
      else if (landmarks1[i1] > landmarks2[i2])
        i2 ++;
      else {
        i1++; i2++;
        nrCommonLandmarks ++;
      }
    }
    return nrCommonLandmarks;
  }

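  // Worked example (values made up): landmarks1 = {2, 5, 9}, landmarks2 = {5, 9, 11}
  //   2 < 5   -> advance i1
  //   5 == 5  -> count = 1, advance both
  //   9 == 9  -> count = 2, advance both
  //   i1 exhausted -> return 2
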
  /* ************************************************************************* */
  void reduceGenericGraph(const GenericGraph3D& graph, const std::vector<size_t>& cameraKeys, const std::vector<size_t>& landmarkKeys,
      const std::vector<int>& dictionary, GenericGraph3D& reducedGraph) {

    typedef size_t CameraKey;
    typedef pair<CameraKey, CameraKey> CameraPair;
    typedef size_t LandmarkKey;
    // get a mapping from each camera to its connected landmarks
    vector<vector<LandmarkKey> > cameraToLandmarks(dictionary.size());
    // for odometry xi-xj with i<j, we always store cameraToCamera[i] = j; the entry stays -1 if there is no odometry
    vector<int> cameraToCamera(dictionary.size(), -1);
    size_t key_i, key_j;
    BOOST_FOREACH(const sharedGenericFactor3D& factor_, graph) {
      if (factor_->key1.type == NODE_POSE_3D) {
        if (factor_->key2.type == NODE_LANDMARK_3D) { // projection factor
          cameraToLandmarks[factor_->key1.index].push_back(factor_->key2.index);
        }
        else { // odometry factor
          if (factor_->key1.index < factor_->key2.index) {
            key_i = factor_->key1.index;
            key_j = factor_->key2.index;
          } else {
            key_i = factor_->key2.index;
            key_j = factor_->key1.index;
          }
          cameraToCamera[key_i] = key_j;
        }
      }
    }

    // sort the landmark keys for the later getNrCommonLandmarks call
    BOOST_FOREACH(vector<LandmarkKey> &landmarks, cameraToLandmarks){
      if (!landmarks.empty())
        std::sort(landmarks.begin(), landmarks.end());
    }

    // generate the reduced graph
    reducedGraph.clear();
    int factorIndex = 0;
    int camera1, camera2, nrTotalConstraints;
    bool hasOdometry;
    for (size_t i1=0; i1<cameraKeys.size()-1; ++i1) {
      for (size_t i2=i1+1; i2<cameraKeys.size(); ++i2) {
        camera1 = cameraKeys[i1];
        camera2 = cameraKeys[i2];
        int nrCommonLandmarks = getNrCommonLandmarks(cameraToLandmarks[camera1], cameraToLandmarks[camera2]);
        hasOdometry = cameraToCamera[camera1] == camera2;
        if (nrCommonLandmarks > 0 || hasOdometry) {
          nrTotalConstraints = 2 * nrCommonLandmarks + (hasOdometry ? 6 : 0);
          reducedGraph.push_back(boost::make_shared<GenericFactor3D>(camera1, camera2,
              factorIndex++, NODE_POSE_3D, NODE_POSE_3D, nrTotalConstraints));
        }
      }
    }
  }

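  // Weight arithmetic sketch (values made up): each common landmark contributes
  // a 2-DOF projection constraint and an odometry link contributes a full
  // 6-DOF pose constraint, so two cameras sharing 5 landmarks plus odometry get
  //
  //   nrTotalConstraints = 2 * 5 + 6 = 16
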
  /* ************************************************************************* */
  void checkSingularity(const GenericGraph3D& graph, const std::vector<size_t>& frontals,
      WorkSpace& workspace, const size_t minNrConstraintsPerCamera, const size_t minNrConstraintsPerLandmark) {
    workspace.prepareDictionary(frontals);
    vector<size_t> nrConstraints(workspace.dictionary.size(), 0);

    // summarize the constraint number
    const vector<int>& dictionary = workspace.dictionary;
    vector<bool> isValidCamera(workspace.dictionary.size(), false);
    vector<bool> isValidLandmark(workspace.dictionary.size(), false);
    BOOST_FOREACH(const sharedGenericFactor3D& factor_, graph) {
      assert(factor_->key1.type == NODE_POSE_3D);
      //assert(factor_->key2.type == NODE_LANDMARK_3D);
      const size_t& key1 = factor_->key1.index;
      const size_t& key2 = factor_->key2.index;
      if (dictionary[key1] == -1 || dictionary[key2] == -1)
        continue;

      isValidCamera[key1] = true;
      if (factor_->key2.type == NODE_LANDMARK_3D)
        isValidLandmark[key2] = true;
      else
        isValidCamera[key2] = true;

      nrConstraints[key1]++;
      nrConstraints[key2]++;

      // a single pose constraint is sufficient for stereo, so we add 2 to the counter
      // for a total of 3, i.e. the same as 3 landmarks fully constraining the camera
      if (factor_->key1.type == NODE_POSE_3D && factor_->key2.type == NODE_POSE_3D) {
        nrConstraints[key1]+=2;
        nrConstraints[key2]+=2;
      }
    }

    // find the minimum constraint for cameras and landmarks
    size_t minFoundConstraintsPerCamera = 10000;
    size_t minFoundConstraintsPerLandmark = 10000;

    for (size_t i=0; i<isValidCamera.size(); i++) {
      if (isValidCamera[i]) {
        minFoundConstraintsPerCamera = std::min(nrConstraints[i], minFoundConstraintsPerCamera);
        if (nrConstraints[i] < minNrConstraintsPerCamera)
          cout << "!!!!!!!!!!!!!!!!!!! camera with " << nrConstraints[i] << " constraint: " << i << endl;
      }
    }
    for (size_t j=0; j<isValidLandmark.size(); j++) {
      if (isValidLandmark[j]) {
        minFoundConstraintsPerLandmark = std::min(nrConstraints[j], minFoundConstraintsPerLandmark);
        if (nrConstraints[j] < minNrConstraintsPerLandmark)
          cout << "!!!!!!!!!!!!!!!!!!! landmark with " << nrConstraints[j] << " constraint: " << j << endl;
      }
    }

    // debug info
    BOOST_FOREACH(const size_t key, frontals) {
      if (isValidCamera[key] && nrConstraints[key] < minNrConstraintsPerCamera)
        cout << "singular camera:" << key << " with " << nrConstraints[key] << " constraints" << endl;
    }

    if (minFoundConstraintsPerCamera < minNrConstraintsPerCamera)
      throw runtime_error("checkSingularity:minConstraintsPerCamera < " + boost::lexical_cast<string>(minFoundConstraintsPerCamera));
    if (minFoundConstraintsPerLandmark < minNrConstraintsPerLandmark)
      throw runtime_error("checkSingularity:minConstraintsPerLandmark < " + boost::lexical_cast<string>(minFoundConstraintsPerLandmark));
  }

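  // Usage sketch (illustrative only; the thresholds are made up): run after
  // partitioning to assert that every frontal variable is sufficiently
  // constrained, throwing if any camera has fewer than 7 or any landmark
  // fewer than 2 constraints:
  //
  //   checkSingularity(graph3d, frontals, workspace, 7, 2);
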
}} // namespace

@@ -17,133 +17,133 @@

namespace gtsam { namespace partition {

  /***************************************************
   * 2D generic factors and their factor graph
   ***************************************************/
  enum GenericNode2DType { NODE_POSE_2D, NODE_LANDMARK_2D };

  /** the index of the node and the type of the node */
  struct GenericNode2D {
    std::size_t index;
    GenericNode2DType type;
    GenericNode2D (const std::size_t& index_in, const GenericNode2DType& type_in) : index(index_in), type(type_in) {}
  };

  /** a factor always involves two nodes/variables for now */
  struct GenericFactor2D {
    GenericNode2D key1;
    GenericNode2D key2;
    int index;  // the factor index in the original nonlinear factor graph
    int weight; // the weight of the edge
    GenericFactor2D(const size_t index1, const GenericNode2DType type1, const size_t index2, const GenericNode2DType type2, const int index_ = -1, const int weight_ = 1)
      : key1(index1, type1), key2(index2, type2), index(index_), weight(weight_) {}
    GenericFactor2D(const size_t index1, const char type1, const size_t index2, const char type2, const int index_ = -1, const int weight_ = 1)
      : key1(index1, type1 == 'x' ? NODE_POSE_2D : NODE_LANDMARK_2D),
        key2(index2, type2 == 'x' ? NODE_POSE_2D : NODE_LANDMARK_2D), index(index_), weight(weight_) {}
  };

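  // Construction sketch (illustrative only; the indices are made up): a factor
  // between pose x1 and landmark l3 can be built with either constructor:
  //
  //   GenericFactor2D f1(1, NODE_POSE_2D, 3, NODE_LANDMARK_2D);
  //   GenericFactor2D f2(1, 'x', 3, 'l'); // any char other than 'x' means landmark
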
  /** graph is a collection of factors */
  typedef boost::shared_ptr<GenericFactor2D> sharedGenericFactor2D;
  typedef std::vector<sharedGenericFactor2D> GenericGraph2D;

  /** merge nodes in DSF using constraints captured by the given graph */
  std::list<std::vector<size_t> > findIslands(const GenericGraph2D& graph, const std::vector<size_t>& keys, WorkSpace& workspace,
      const int minNrConstraintsPerCamera, const int minNrConstraintsPerLandmark);

  /** eliminate the landmarks from the generic graph (not implemented for 2D) */
  inline void reduceGenericGraph(const GenericGraph2D& graph, const std::vector<size_t>& cameraKeys, const std::vector<size_t>& landmarkKeys,
      const std::vector<int>& dictionary, GenericGraph2D& reducedGraph) {
    throw std::runtime_error("reduceGenericGraph 2d not implemented");
  }

  /** check whether the 2D graph is singular (under-constrained); dummy function for 2D */
  inline void checkSingularity(const GenericGraph2D& graph, const std::vector<size_t>& frontals,
      WorkSpace& workspace, const int minNrConstraintsPerCamera, const int minNrConstraintsPerLandmark) { return; }

  /** print the graph */
  void print(const GenericGraph2D& graph, const std::string name = "GenericGraph2D");

  /***************************************************
   * 3D generic factors and their factor graph
   ***************************************************/
  enum GenericNode3DType { NODE_POSE_3D, NODE_LANDMARK_3D };

//  const int minNrConstraintsPerCamera = 7;
//  const int minNrConstraintsPerLandmark = 2;

  /** the index of the node and the type of the node */
  struct GenericNode3D {
    std::size_t index;
    GenericNode3DType type;
    GenericNode3D(const std::size_t& index_in, const GenericNode3DType& type_in) : index(index_in), type(type_in) {}
  };

  /** a factor always involves two nodes/variables for now */
  struct GenericFactor3D {
    GenericNode3D key1;
    GenericNode3D key2;
    int index;  // the index in the entire graph, 0-based
    int weight; // the weight of the edge
    GenericFactor3D() : key1(-1, NODE_POSE_3D), key2(-1, NODE_LANDMARK_3D), index(-1), weight(1) {}
    GenericFactor3D(const size_t index1, const size_t index2, const int index_ = -1,
        const GenericNode3DType type1 = NODE_POSE_3D, const GenericNode3DType type2 = NODE_LANDMARK_3D, const int weight_ = 1)
      : key1(index1, type1), key2(index2, type2), index(index_), weight(weight_) {}
  };

  /** graph is a collection of factors */
  typedef boost::shared_ptr<GenericFactor3D> sharedGenericFactor3D;
  typedef std::vector<sharedGenericFactor3D> GenericGraph3D;

  /** merge nodes in DSF using constraints captured by the given graph */
  std::list<std::vector<size_t> > findIslands(const GenericGraph3D& graph, const std::vector<size_t>& keys, WorkSpace& workspace,
      const size_t minNrConstraintsPerCamera, const size_t minNrConstraintsPerLandmark);

  /** eliminate the sensors from generic graph */
  void reduceGenericGraph(const GenericGraph3D& graph, const std::vector<size_t>& cameraKeys, const std::vector<size_t>& landmarkKeys,
      const std::vector<int>& dictionary, GenericGraph3D& reducedGraph);

  /** check whether the 3D graph is singular (under-constrained) */
  void checkSingularity(const GenericGraph3D& graph, const std::vector<size_t>& frontals,
      WorkSpace& workspace, const size_t minNrConstraintsPerCamera, const size_t minNrConstraintsPerLandmark);

  /** print the graph */
  void print(const GenericGraph3D& graph, const std::string name = "GenericGraph3D");

  /***************************************************
   * unary generic factors and their factor graph
   ***************************************************/
  /** a factor involves a single variable */
  struct GenericUnaryFactor {
    GenericNode2D key;
    int index; // the factor index in the original nonlinear factor graph
    GenericUnaryFactor(const size_t key_, const GenericNode2DType type_, const int index_ = -1)
      : key(key_, type_), index(index_) {}
    GenericUnaryFactor(const size_t key_, const char type_, const int index_ = -1)
      : key(key_, type_ == 'x' ? NODE_POSE_2D : NODE_LANDMARK_2D), index(index_) {}
  };

  /** graph is a collection of factors */
  typedef boost::shared_ptr<GenericUnaryFactor> sharedGenericUnaryFactor;
  typedef std::vector<sharedGenericUnaryFactor> GenericUnaryGraph;

  /***************************************************
   * utility functions
   ***************************************************/
  /** check whether two sorted camera sets share at least one camera */
  inline bool hasCommonCamera(const std::set<size_t>& cameras1, const std::set<size_t>& cameras2) {
    if (cameras1.empty() || cameras2.empty())
      throw std::invalid_argument("hasCommonCamera: the input camera set is empty!");
    // walk the two ordered sets in lockstep, advancing the smaller iterator
    std::set<size_t>::const_iterator it1 = cameras1.begin();
    std::set<size_t>::const_iterator it2 = cameras2.begin();
    while (it1 != cameras1.end() && it2 != cameras2.end()) {
      if (*it1 == *it2)
        return true;
      else if (*it1 < *it2)
        it1++;
      else
        it2++;
    }
    return false;
  }

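  // A minimal usage sketch (not part of the original header): hasCommonCamera
  // is an ordered set-intersection test, so two landmarks can be checked for a
  // shared observing camera in O(|cameras1| + |cameras2|) time. The example
  // sets below are hypothetical.
  //
  //   std::set<size_t> camerasOfL1; camerasOfL1.insert(1); camerasOfL1.insert(2);
  //   std::set<size_t> camerasOfL2; camerasOfL2.insert(2); camerasOfL2.insert(3);
  //   bool shared = hasCommonCamera(camerasOfL1, camerasOfL2); // true: camera 2 is shared
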
}} // namespace

@ -16,236 +16,236 @@

namespace gtsam { namespace partition {
  /* ************************************************************************* */
  template <class NLG, class SubNLG, class GenericGraph>
  NestedDissection<NLG, SubNLG, GenericGraph>::NestedDissection(
      const NLG& fg, const Ordering& ordering, const int numNodeStopPartition, const int minNodesPerMap, const bool verbose) :
      fg_(fg), ordering_(ordering) {
    GenericUnaryGraph unaryFactors;
    GenericGraph gfg;
    boost::tie(unaryFactors, gfg) = fg.createGenericGraph(ordering);

    // build reverse mapping from integer to symbol
    int numNodes = ordering.size();
    int2symbol_.resize(numNodes);
    Ordering::const_iterator it = ordering.begin(), itLast = ordering.end();
    while(it != itLast)
      int2symbol_[it->second] = (it++)->first;

    vector<size_t> keys;
    keys.reserve(numNodes);
    for(int i=0; i<ordering.size(); ++i)
      keys.push_back(i);

    WorkSpace workspace(numNodes);
    root_ = recursivePartition(gfg, unaryFactors, keys, vector<size_t>(), numNodeStopPartition, minNodesPerMap, boost::shared_ptr<SubNLG>(), workspace, verbose);
  }

  /* ************************************************************************* */
  template <class NLG, class SubNLG, class GenericGraph>
  NestedDissection<NLG, SubNLG, GenericGraph>::NestedDissection(
      const NLG& fg, const Ordering& ordering, const boost::shared_ptr<Cuts>& cuts, const bool verbose) : fg_(fg), ordering_(ordering) {
    GenericUnaryGraph unaryFactors;
    GenericGraph gfg;
    boost::tie(unaryFactors, gfg) = fg.createGenericGraph(ordering);

    // build reverse mapping from integer to symbol
    int numNodes = ordering.size();
    int2symbol_.resize(numNodes);
    Ordering::const_iterator it = ordering.begin(), itLast = ordering.end();
    while(it != itLast)
      int2symbol_[it->second] = (it++)->first;

    vector<size_t> keys;
    keys.reserve(numNodes);
    for(int i=0; i<ordering.size(); ++i)
      keys.push_back(i);

    WorkSpace workspace(numNodes);
    root_ = recursivePartition(gfg, unaryFactors, keys, vector<size_t>(), cuts, boost::shared_ptr<SubNLG>(), workspace, verbose);
  }

  /* ************************************************************************* */
  template <class NLG, class SubNLG, class GenericGraph>
  boost::shared_ptr<SubNLG> NestedDissection<NLG, SubNLG, GenericGraph>::makeSubNLG(
      const NLG& fg, const vector<size_t>& frontals, const vector<size_t>& sep, const boost::shared_ptr<SubNLG>& parent) const {
    OrderedSymbols frontalKeys;
    BOOST_FOREACH(const size_t index, frontals)
      frontalKeys.push_back(int2symbol_[index]);

    UnorderedSymbols sepKeys;
    BOOST_FOREACH(const size_t index, sep)
      sepKeys.insert(int2symbol_[index]);

    return boost::make_shared<SubNLG>(fg, frontalKeys, sepKeys, parent);
  }

  /* ************************************************************************* */
  template <class NLG, class SubNLG, class GenericGraph>
  void NestedDissection<NLG, SubNLG, GenericGraph>::processFactor(
      const typename GenericGraph::value_type& factor, const std::vector<int>& partitionTable, // input
      vector<GenericGraph>& frontalFactors, NLG& sepFactors, vector<set<size_t> >& childSeps,  // output factor graphs
      typename SubNLG::Weeklinks& weeklinks) const {                                           // the links between child cliques
    list<size_t> sep_; // the separator variables involved in the current factor
    int partition1 = partitionTable[factor->key1.index];
    int partition2 = partitionTable[factor->key2.index];
    if (partition1 <= 0 && partition2 <= 0) { // is a factor in the current clique
      sepFactors.push_back(fg_[factor->index]);
    }
    else if (partition1 > 0 && partition2 > 0 && partition1 != partition2) { // is a weeklink (factor between two child cliques)
      weeklinks.push_back(fg_[factor->index]);
    }
    else if (partition1 > 0 && partition2 > 0 && partition1 == partition2) { // is a local factor in one of the child cliques
      frontalFactors[partition1 - 1].push_back(factor);
    }
    else { // is a joint factor in the child clique (involving variables in the current clique)
      if (partition1 > 0 && partition2 <= 0) {
        frontalFactors[partition1 - 1].push_back(factor);
        childSeps[partition1 - 1].insert(factor->key2.index);
      } else if (partition1 <= 0 && partition2 > 0) {
        frontalFactors[partition2 - 1].push_back(factor);
        childSeps[partition2 - 1].insert(factor->key1.index);
      } else
        throw runtime_error("processFactor: unexpected entries in the partition table!");
    }
  }

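  // Illustration (assumed values, not from the original source): with
  // partitionTable = {0, 1, 1, 2}, a factor on keys {1,2} is local to child 1,
  // a factor on keys {2,3} is a weeklink between children 1 and 2, and a
  // factor on keys {0,1} is a joint factor: it goes to child 1 and key 0 is
  // added to child 1's separator set.
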
  /* ************************************************************************* */
  /**
   * Given a factor graph and its partition {nodeMap}, split the factors between the child cliques ({frontalFactors})
   * and the current clique ({sepFactors}). Also split the variables between the child cliques ({childFrontals})
   * and the current clique ({localFrontals}). The separator variables involved in {frontalFactors} are put into
   * the corresponding ordering in {childSeps}.
   */
  // TODO: frontalFactors and localFrontals should be generated in findSeparator
  template <class NLG, class SubNLG, class GenericGraph>
  void NestedDissection<NLG, SubNLG, GenericGraph>::partitionFactorsAndVariables(
      const GenericGraph& fg, const GenericUnaryGraph& unaryFactors, const std::vector<size_t>& keys,            // input
      const std::vector<int>& partitionTable, const int numSubmaps,                                              // input
      vector<GenericGraph>& frontalFactors, vector<GenericUnaryGraph>& frontalUnaryFactors, NLG& sepFactors,     // output factor graphs
      vector<vector<size_t> >& childFrontals, vector<vector<size_t> >& childSeps, vector<size_t>& localFrontals, // output sub-orderings
      typename SubNLG::Weeklinks& weeklinks) const {                                                             // the links between child cliques

    // make three lists of variables A, B, and C
    int partition;
    childFrontals.resize(numSubmaps);
    BOOST_FOREACH(const size_t key, keys){
      partition = partitionTable[key];
      switch (partition) {
        case -1: break;                                     // the separator of the separator variables
        case 0:  localFrontals.push_back(key); break;       // the separator variables
        default: childFrontals[partition-1].push_back(key); // the frontal variables
      }
    }

    // group the factors into {frontalFactors} and {sepFactors}, and find the joint variables
    vector<set<size_t> > childSeps_;
    childSeps_.resize(numSubmaps);
    childSeps.reserve(numSubmaps);
    frontalFactors.resize(numSubmaps);
    frontalUnaryFactors.resize(numSubmaps);
    BOOST_FOREACH(typename GenericGraph::value_type factor, fg)
      processFactor(factor, partitionTable, frontalFactors, sepFactors, childSeps_, weeklinks);
    BOOST_FOREACH(const set<size_t>& childSep, childSeps_)
      childSeps.push_back(vector<size_t>(childSep.begin(), childSep.end()));

    // add each unary factor to the current cluster or pass it to one of the child clusters
    BOOST_FOREACH(const sharedGenericUnaryFactor& unaryFactor_, unaryFactors) {
      partition = partitionTable[unaryFactor_->key.index];
      if (!partition) sepFactors.push_back(fg_[unaryFactor_->index]);
      else frontalUnaryFactors[partition-1].push_back(unaryFactor_);
    }
  }

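  // Worked example (assumed values): with numSubmaps = 2 and
  // partitionTable = {-1, 0, 1, 1, 2}, key 0 is dropped (separator of the
  // separator), key 1 becomes a local frontal of the current clique, keys 2
  // and 3 become frontals of child 1, and key 4 a frontal of child 2; the
  // factors themselves are then routed by processFactor above.
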
  /* ************************************************************************* */
  template <class NLG, class SubNLG, class GenericGraph>
  NLG NestedDissection<NLG, SubNLG, GenericGraph>::collectOriginalFactors(
      const GenericGraph& gfg, const GenericUnaryGraph& unaryFactors) const {
    NLG sepFactors;
    typename GenericGraph::const_iterator it = gfg.begin(), itLast = gfg.end();
    while(it!=itLast) sepFactors.push_back(fg_[(*it++)->index]);
    BOOST_FOREACH(const sharedGenericUnaryFactor& unaryFactor_, unaryFactors)
      sepFactors.push_back(fg_[unaryFactor_->index]);
    return sepFactors;
  }

  /* ************************************************************************* */
  template <class NLG, class SubNLG, class GenericGraph>
  boost::shared_ptr<SubNLG> NestedDissection<NLG, SubNLG, GenericGraph>::recursivePartition(
      const GenericGraph& gfg, const GenericUnaryGraph& unaryFactors, const vector<size_t>& frontals, const vector<size_t>& sep,
      const int numNodeStopPartition, const int minNodesPerMap, const boost::shared_ptr<SubNLG>& parent, WorkSpace& workspace, const bool verbose) const {

    // if no split is needed
    NLG sepFactors; // factors that should remain in the current cluster
    if (frontals.size() <= numNodeStopPartition || gfg.size() <= numNodeStopPartition) {
      sepFactors = collectOriginalFactors(gfg, unaryFactors);
      return makeSubNLG(sepFactors, frontals, sep, parent);
    }

    // find the nested dissection separator
    int numSubmaps = findSeparator(gfg, frontals, minNodesPerMap, workspace, verbose, int2symbol_, NLG::reduceGraph(),
        NLG::minNrConstraintsPerCamera(), NLG::minNrConstraintsPerLandmark());
    partition::PartitionTable& partitionTable = workspace.partitionTable;
    if (numSubmaps == 0) throw runtime_error("recursivePartition: get zero submap after ND!");

    // split the factors between the child cliques and the current clique
    vector<GenericGraph> frontalFactors; vector<GenericUnaryGraph> frontalUnaryFactors; typename SubNLG::Weeklinks weeklinks;
    vector<size_t> localFrontals; vector<vector<size_t> > childFrontals, childSeps;
    partitionFactorsAndVariables(gfg, unaryFactors, frontals, partitionTable, numSubmaps,
        frontalFactors, frontalUnaryFactors, sepFactors, childFrontals, childSeps, localFrontals, weeklinks);

    // make a new cluster
    boost::shared_ptr<SubNLG> current = makeSubNLG(sepFactors, localFrontals, sep, parent);
    current->setWeeklinks(weeklinks);

    // check whether all the submaps are fully constrained
    for (int i=0; i<numSubmaps; i++) {
      checkSingularity(frontalFactors[i], childFrontals[i], workspace, NLG::minNrConstraintsPerCamera(), NLG::minNrConstraintsPerLandmark());
    }

    // create the child clusters
    for (int i=0; i<numSubmaps; i++) {
      boost::shared_ptr<SubNLG> child = recursivePartition(frontalFactors[i], frontalUnaryFactors[i], childFrontals[i], childSeps[i],
          numNodeStopPartition, minNodesPerMap, current, workspace, verbose);
      current->addChild(child);
    }

    return current;
  }

  /* ************************************************************************* */
  template <class NLG, class SubNLG, class GenericGraph>
  boost::shared_ptr<SubNLG> NestedDissection<NLG, SubNLG, GenericGraph>::recursivePartition(
      const GenericGraph& gfg, const GenericUnaryGraph& unaryFactors, const vector<size_t>& frontals, const vector<size_t>& sep,
      const boost::shared_ptr<Cuts>& cuts, const boost::shared_ptr<SubNLG>& parent, WorkSpace& workspace, const bool verbose) const {

    // if there is no need to cut any more
    NLG sepFactors; // factors that should remain in the current cluster
    if (!cuts.get()) {
      sepFactors = collectOriginalFactors(gfg, unaryFactors);
      return makeSubNLG(sepFactors, frontals, sep, parent);
    }

    // retrieve the current partitioning info
    int numSubmaps = 2;
    partition::PartitionTable& partitionTable = cuts->partitionTable;

    // split the factors between the child cliques and the current clique
    vector<GenericGraph> frontalFactors; vector<GenericUnaryGraph> frontalUnaryFactors; typename SubNLG::Weeklinks weeklinks;
    vector<size_t> localFrontals; vector<vector<size_t> > childFrontals, childSeps;
    partitionFactorsAndVariables(gfg, unaryFactors, frontals, partitionTable, numSubmaps,
        frontalFactors, frontalUnaryFactors, sepFactors, childFrontals, childSeps, localFrontals, weeklinks);

    // make a new cluster
    boost::shared_ptr<SubNLG> current = makeSubNLG(sepFactors, localFrontals, sep, parent);
    current->setWeeklinks(weeklinks);

    // create the child clusters
    for (int i=0; i<2; i++) {
      boost::shared_ptr<SubNLG> child = recursivePartition(frontalFactors[i], frontalUnaryFactors[i], childFrontals[i], childSeps[i],
          cuts->children.empty() ? boost::shared_ptr<Cuts>() : cuts->children[i], current, workspace, verbose);
      current->addChild(child);
    }
    return current;
  }

}} //namespace

@ -14,56 +14,56 @@

namespace gtsam { namespace partition {
  /**
   * Apply nested dissection algorithm to nonlinear factor graphs
   */
  template <class NLG, class SubNLG, class GenericGraph>
  class NestedDissection {
  public:
    typedef boost::shared_ptr<SubNLG> sharedSubNLG;

  private:
    NLG fg_;                         // the original nonlinear factor graph
    Ordering ordering_;              // the variable ordering in the nonlinear factor graph
    std::vector<Symbol> int2symbol_; // the mapping from integer key to symbol
    sharedSubNLG root_;              // the root of the generated cluster tree

  public:
    sharedSubNLG root() const { return root_; }

  public:
    /* constructor with post-determined partitioning */
    NestedDissection(const NLG& fg, const Ordering& ordering, const int numNodeStopPartition, const int minNodesPerMap, const bool verbose = false);

    /* constructor with pre-determined cuts */
    NestedDissection(const NLG& fg, const Ordering& ordering, const boost::shared_ptr<Cuts>& cuts, const bool verbose = false);

  private:
    /* convert generic subgraph to nonlinear subgraph */
    sharedSubNLG makeSubNLG(const NLG& fg, const std::vector<size_t>& frontals, const std::vector<size_t>& sep, const boost::shared_ptr<SubNLG>& parent) const;

    /* assign one factor to the current clique, a child clique, or the weak links */
    void processFactor(const typename GenericGraph::value_type& factor, const std::vector<int>& partitionTable, // input
        std::vector<GenericGraph>& frontalFactors, NLG& sepFactors, std::vector<std::set<size_t> >& childSeps,  // output factor graphs
        typename SubNLG::Weeklinks& weeklinks) const;

    /* split the factors and variables between the child cliques and the current clique */
    void partitionFactorsAndVariables(
        const GenericGraph& fg, const GenericUnaryGraph& unaryFactors,
        const std::vector<size_t>& keys, const std::vector<int>& partitionTable, const int numSubmaps,              // input
        std::vector<GenericGraph>& frontalFactors, vector<GenericUnaryGraph>& frontalUnaryFactors, NLG& sepFactors, // output factor graphs
        std::vector<std::vector<size_t> >& childFrontals, std::vector<std::vector<size_t> >& childSeps, std::vector<size_t>& localFrontals, // output sub-orderings
        typename SubNLG::Weeklinks& weeklinks) const;

    NLG collectOriginalFactors(const GenericGraph& gfg, const GenericUnaryGraph& unaryFactors) const;

    /* recursively partition the generic graph with a stopping criterion */
    sharedSubNLG recursivePartition(const GenericGraph& gfg, const GenericUnaryGraph& unaryFactors, const std::vector<size_t>& frontals, const std::vector<size_t>& sep,
        const int numNodeStopPartition, const int minNodesPerMap, const boost::shared_ptr<SubNLG>& parent, WorkSpace& workspace, const bool verbose) const;

    /* recursively partition the generic graph with pre-determined cuts */
    sharedSubNLG recursivePartition(const GenericGraph& gfg, const GenericUnaryGraph& unaryFactors, const std::vector<size_t>& frontals, const std::vector<size_t>& sep,
        const boost::shared_ptr<Cuts>& cuts, const boost::shared_ptr<SubNLG>& parent, WorkSpace& workspace, const bool verbose) const;
  };

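  // A usage sketch (hypothetical setup; assumes an NLG type that provides
  // createGenericGraph/reduceGraph/minNrConstraintsPerCamera as used above):
  //
  //   NLG graph = ...;          // nonlinear factor graph to partition
  //   Ordering ordering = ...;  // maps each Symbol to an integer key
  //   NestedDissection<NLG, SubNLG, GenericGraph2D> nd(graph, ordering,
  //       100 /* numNodeStopPartition */, 10 /* minNodesPerMap */);
  //   boost::shared_ptr<SubNLG> root = nd.root(); // root of the cluster tree
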
}} //namespace

@ -13,32 +13,32 @@

namespace gtsam { namespace partition {
  typedef std::vector<int> PartitionTable;

  // the work space: preallocated memory
  struct WorkSpace {
    std::vector<int> dictionary;                 // a mapping from the integer key in the original graph to a 0-based index in the subgraph, useful when handling a subset of keys and graphs
    boost::shared_ptr<std::vector<size_t> > dsf; // a block of memory pre-allocated for DSFVector
    PartitionTable partitionTable;               // a mapping from a key to the submap index; 0 means the separator, i means the i-th submap

    // constructor
    WorkSpace(const size_t numNodes) : dictionary(numNodes,0),
        dsf(new std::vector<size_t>(numNodes, 0)), partitionTable(numNodes, -1) { }

    // set up the dictionary: -1 means no such key; a non-negative value is the corresponding 0-based index
    inline void prepareDictionary(const std::vector<size_t>& keys) {
      int index = 0;
      std::fill(dictionary.begin(), dictionary.end(), -1);
      std::vector<size_t>::const_iterator it=keys.begin(), itLast=keys.end();
      while(it!=itLast) dictionary[*(it++)] = index++;
    }
  };

  // manually defined cuts
  struct Cuts {
    PartitionTable partitionTable;
    std::vector<boost::shared_ptr<Cuts> > children;
  };

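  // Sketch of prepareDictionary (assumed key values): for keys = {3, 7}, key 3
  // maps to subgraph index 0 and key 7 to subgraph index 1; every other key
  // maps to -1.
  //
  //   WorkSpace ws(10);
  //   std::vector<size_t> keys; keys.push_back(3); keys.push_back(7);
  //   ws.prepareDictionary(keys); // ws.dictionary[3] == 0, ws.dictionary[7] == 1
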
}} // namespace

@ -185,45 +185,45 @@ TEST ( Partition, findSeparator2 )

// x25 x26 x27 x28
TEST ( Partition, findSeparator3_with_reduced_camera )
{
  GenericGraph3D graph;
  for (int j=1; j<=8; j++)
    graph.push_back(boost::make_shared<GenericFactor3D>(25, j));
  for (int j=1; j<=16; j++)
    graph.push_back(boost::make_shared<GenericFactor3D>(26, j));
  for (int j=9; j<=24; j++)
    graph.push_back(boost::make_shared<GenericFactor3D>(27, j));
  for (int j=17; j<=24; j++)
    graph.push_back(boost::make_shared<GenericFactor3D>(28, j));

  std::vector<size_t> keys;
  for(int i=1; i<=28; i++)
    keys.push_back(i);

  vector<Symbol> int2symbol;
  int2symbol.push_back(Symbol('x',0)); // dummy
  for(int i=1; i<=24; i++)
    int2symbol.push_back(Symbol('l',i));
  int2symbol.push_back(Symbol('x',25));
  int2symbol.push_back(Symbol('x',26));
  int2symbol.push_back(Symbol('x',27));
  int2symbol.push_back(Symbol('x',28));

  WorkSpace workspace(29);
  bool reduceGraph = true;
  int numIsland = findSeparator(graph, keys, 3, workspace, false, int2symbol, reduceGraph, 0, 0);
  LONGS_EQUAL(2, numIsland);

  partition::PartitionTable& partitionTable = workspace.partitionTable;
  for (int j=1; j<=8; j++)
    LONGS_EQUAL(1, partitionTable[j]);
  for (int j=9; j<=16; j++)
    LONGS_EQUAL(0, partitionTable[j]);
  for (int j=17; j<=24; j++)
    LONGS_EQUAL(2, partitionTable[j]);
  LONGS_EQUAL(1, partitionTable[25]);
  LONGS_EQUAL(1, partitionTable[26]);
  LONGS_EQUAL(2, partitionTable[27]);
  LONGS_EQUAL(2, partitionTable[28]);
}

/* ************************************************************************* */

@ -29,29 +29,29 @@ using namespace gtsam::partition;

 */
TEST ( GenerciGraph, findIslands )
{
  GenericGraph2D graph;
  graph.push_back(boost::make_shared<GenericFactor2D>(1, NODE_POSE_2D, 7, NODE_LANDMARK_2D));
  graph.push_back(boost::make_shared<GenericFactor2D>(2, NODE_POSE_2D, 7, NODE_LANDMARK_2D));
  graph.push_back(boost::make_shared<GenericFactor2D>(3, NODE_POSE_2D, 7, NODE_LANDMARK_2D));
  graph.push_back(boost::make_shared<GenericFactor2D>(3, NODE_POSE_2D, 8, NODE_LANDMARK_2D));
  graph.push_back(boost::make_shared<GenericFactor2D>(4, NODE_POSE_2D, 8, NODE_LANDMARK_2D));
  graph.push_back(boost::make_shared<GenericFactor2D>(4, NODE_POSE_2D, 9, NODE_LANDMARK_2D));
  graph.push_back(boost::make_shared<GenericFactor2D>(5, NODE_POSE_2D, 9, NODE_LANDMARK_2D));
  graph.push_back(boost::make_shared<GenericFactor2D>(6, NODE_POSE_2D, 9, NODE_LANDMARK_2D));

  graph.push_back(boost::make_shared<GenericFactor2D>(1, NODE_POSE_2D, 2, NODE_POSE_2D));
  graph.push_back(boost::make_shared<GenericFactor2D>(2, NODE_POSE_2D, 3, NODE_POSE_2D));
  graph.push_back(boost::make_shared<GenericFactor2D>(4, NODE_POSE_2D, 5, NODE_POSE_2D));
  graph.push_back(boost::make_shared<GenericFactor2D>(5, NODE_POSE_2D, 6, NODE_POSE_2D));
  std::vector<size_t> keys; keys += 1, 2, 3, 4, 5, 6, 7, 8, 9;

  WorkSpace workspace(10); // from 0 to 9
  list<vector<size_t> > islands = findIslands(graph, keys, workspace, 7, 2);
  LONGS_EQUAL(2, islands.size());
  vector<size_t> island1; island1 += 1, 2, 3, 7, 8;
  vector<size_t> island2; island2 += 4, 5, 6, 9;
  CHECK(island1 == islands.front());
  CHECK(island2 == islands.back());
}

/* ************************************************************************* */

@ -62,27 +62,27 @@ TEST ( GenerciGraph, findIslands )

 */
TEST( GenerciGraph, findIslands2 )
{
  GenericGraph2D graph;
  graph.push_back(boost::make_shared<GenericFactor2D>(1, NODE_POSE_2D, 7, NODE_LANDMARK_2D));
  graph.push_back(boost::make_shared<GenericFactor2D>(2, NODE_POSE_2D, 7, NODE_LANDMARK_2D));
  graph.push_back(boost::make_shared<GenericFactor2D>(3, NODE_POSE_2D, 7, NODE_LANDMARK_2D));
  graph.push_back(boost::make_shared<GenericFactor2D>(3, NODE_POSE_2D, 8, NODE_LANDMARK_2D));
  graph.push_back(boost::make_shared<GenericFactor2D>(4, NODE_POSE_2D, 7, NODE_LANDMARK_2D));
  graph.push_back(boost::make_shared<GenericFactor2D>(4, NODE_POSE_2D, 8, NODE_LANDMARK_2D));
  graph.push_back(boost::make_shared<GenericFactor2D>(5, NODE_POSE_2D, 8, NODE_LANDMARK_2D));
  graph.push_back(boost::make_shared<GenericFactor2D>(6, NODE_POSE_2D, 8, NODE_LANDMARK_2D));

  graph.push_back(boost::make_shared<GenericFactor2D>(1, NODE_POSE_2D, 2, NODE_POSE_2D));
  graph.push_back(boost::make_shared<GenericFactor2D>(2, NODE_POSE_2D, 3, NODE_POSE_2D));
  graph.push_back(boost::make_shared<GenericFactor2D>(4, NODE_POSE_2D, 5, NODE_POSE_2D));
  graph.push_back(boost::make_shared<GenericFactor2D>(5, NODE_POSE_2D, 6, NODE_POSE_2D));
  std::vector<size_t> keys; keys += 1, 2, 3, 4, 5, 6, 7, 8;

  WorkSpace workspace(15); // from 0 to 8, but testing over-allocation here
  list<vector<size_t> > islands = findIslands(graph, keys, workspace, 7, 2);
  LONGS_EQUAL(1, islands.size());
  vector<size_t> island1; island1 += 1, 2, 3, 4, 5, 6, 7, 8;
  CHECK(island1 == islands.front());
}

/* ************************************************************************* */

@ -92,21 +92,21 @@ TEST( GenerciGraph, findIslands2 )

 */
TEST ( GenerciGraph, findIslands3 )
{
  GenericGraph2D graph;
  graph.push_back(boost::make_shared<GenericFactor2D>(1, NODE_POSE_2D, 5, NODE_LANDMARK_2D));
  graph.push_back(boost::make_shared<GenericFactor2D>(4, NODE_POSE_2D, 6, NODE_LANDMARK_2D));

  graph.push_back(boost::make_shared<GenericFactor2D>(2, NODE_POSE_2D, 3, NODE_POSE_2D));
  graph.push_back(boost::make_shared<GenericFactor2D>(3, NODE_POSE_2D, 4, NODE_POSE_2D));
  std::vector<size_t> keys; keys += 1, 2, 3, 4, 5, 6;

  WorkSpace workspace(7); // from 0 to 6
  list<vector<size_t> > islands = findIslands(graph, keys, workspace, 7, 2);
  LONGS_EQUAL(2, islands.size());
  vector<size_t> island1; island1 += 1, 5;
  vector<size_t> island2; island2 += 2, 3, 4, 6;
  CHECK(island1 == islands.front());
  CHECK(island2 == islands.back());
}

/* ************************************************************************* */

@ -115,18 +115,18 @@ TEST ( GenerciGraph, findIslands3 )

 */
TEST ( GenerciGraph, findIslands4 )
{
  GenericGraph2D graph;
  graph.push_back(boost::make_shared<GenericFactor2D>(3, NODE_POSE_2D, 4, NODE_LANDMARK_2D));
  graph.push_back(boost::make_shared<GenericFactor2D>(7, NODE_POSE_2D, 7, NODE_LANDMARK_2D));
  std::vector<size_t> keys; keys += 3, 4, 7;

  WorkSpace workspace(8); // from 0 to 7
  list<vector<size_t> > islands = findIslands(graph, keys, workspace, 7, 2);
  LONGS_EQUAL(2, islands.size());
  vector<size_t> island1; island1 += 3, 4;
  vector<size_t> island2; island2 += 7;
  CHECK(island1 == islands.front());
  CHECK(island2 == islands.back());
}

/* ************************************************************************* */
|
||||
|
@ -137,24 +137,24 @@ TEST ( GenerciGraph, findIslands4 )
 */
TEST ( GenerciGraph, findIslands5 )
{
  GenericGraph2D graph;
  graph.push_back(boost::make_shared<GenericFactor2D>(1, NODE_POSE_2D, 5, NODE_LANDMARK_2D));
  graph.push_back(boost::make_shared<GenericFactor2D>(2, NODE_POSE_2D, 5, NODE_LANDMARK_2D));
  graph.push_back(boost::make_shared<GenericFactor2D>(3, NODE_POSE_2D, 5, NODE_LANDMARK_2D));
  graph.push_back(boost::make_shared<GenericFactor2D>(4, NODE_POSE_2D, 5, NODE_LANDMARK_2D));

  graph.push_back(boost::make_shared<GenericFactor2D>(1, NODE_POSE_2D, 3, NODE_POSE_2D));
  graph.push_back(boost::make_shared<GenericFactor2D>(2, NODE_POSE_2D, 4, NODE_POSE_2D));

  std::vector<size_t> keys; keys += 1, 2, 3, 4, 5;

  WorkSpace workspace(6); // from 0 to 5
  list<vector<size_t> > islands = findIslands(graph, keys, workspace, 7, 2);
  LONGS_EQUAL(2, islands.size());
  vector<size_t> island1; island1 += 1, 3, 5;
  vector<size_t> island2; island2 += 2, 4;
  CHECK(island1 == islands.front());
  CHECK(island2 == islands.back());
}

/* ************************************************************************* */
@ -165,31 +165,31 @@ TEST ( GenerciGraph, findIslands5 )
 */
TEST ( GenerciGraph, reduceGenericGraph )
{
  GenericGraph3D graph;
  graph.push_back(boost::make_shared<GenericFactor3D>(1, 3));
  graph.push_back(boost::make_shared<GenericFactor3D>(1, 4));
  graph.push_back(boost::make_shared<GenericFactor3D>(1, 5));
  graph.push_back(boost::make_shared<GenericFactor3D>(2, 5));
  graph.push_back(boost::make_shared<GenericFactor3D>(2, 6));

  std::vector<size_t> cameraKeys, landmarkKeys;
  cameraKeys.push_back(1);
  cameraKeys.push_back(2);
  landmarkKeys.push_back(3);
  landmarkKeys.push_back(4);
  landmarkKeys.push_back(5);
  landmarkKeys.push_back(6);

  std::vector<int> dictionary;
  dictionary.resize(7, -1); // from 0 to 6
  dictionary[1] = 0;
  dictionary[2] = 1;

  GenericGraph3D reduced;
  std::map<size_t, vector<size_t> > cameraToLandmarks;
  reduceGenericGraph(graph, cameraKeys, landmarkKeys, dictionary, reduced);
  LONGS_EQUAL(1, reduced.size());
  LONGS_EQUAL(1, reduced[0]->key1.index); LONGS_EQUAL(2, reduced[0]->key2.index);
}

/* ************************************************************************* */
@ -200,53 +200,53 @@ TEST ( GenerciGraph, reduceGenericGraph )
 */
TEST ( GenericGraph, reduceGenericGraph2 )
{
  GenericGraph3D graph;
  graph.push_back(boost::make_shared<GenericFactor3D>(1, 3, 0, NODE_POSE_3D, NODE_LANDMARK_3D));
  graph.push_back(boost::make_shared<GenericFactor3D>(1, 4, 1, NODE_POSE_3D, NODE_LANDMARK_3D));
  graph.push_back(boost::make_shared<GenericFactor3D>(1, 5, 2, NODE_POSE_3D, NODE_LANDMARK_3D));
  graph.push_back(boost::make_shared<GenericFactor3D>(2, 5, 3, NODE_POSE_3D, NODE_LANDMARK_3D));
  graph.push_back(boost::make_shared<GenericFactor3D>(2, 6, 4, NODE_POSE_3D, NODE_LANDMARK_3D));
  graph.push_back(boost::make_shared<GenericFactor3D>(2, 7, 5, NODE_POSE_3D, NODE_POSE_3D));

  std::vector<size_t> cameraKeys, landmarkKeys;
  cameraKeys.push_back(1);
  cameraKeys.push_back(2);
  cameraKeys.push_back(7);
  landmarkKeys.push_back(3);
  landmarkKeys.push_back(4);
  landmarkKeys.push_back(5);
  landmarkKeys.push_back(6);

  std::vector<int> dictionary;
  dictionary.resize(8, -1); // from 0 to 7
  dictionary[1] = 0;
  dictionary[2] = 1;
  dictionary[7] = 6;

  GenericGraph3D reduced;
  std::map<size_t, vector<size_t> > cameraToLandmarks;
  reduceGenericGraph(graph, cameraKeys, landmarkKeys, dictionary, reduced);
  LONGS_EQUAL(2, reduced.size());
  LONGS_EQUAL(1, reduced[0]->key1.index); LONGS_EQUAL(2, reduced[0]->key2.index);
  LONGS_EQUAL(2, reduced[1]->key1.index); LONGS_EQUAL(7, reduced[1]->key2.index);
}

/* ************************************************************************* */
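The two reduceGenericGraph tests above are consistent with the reduction collapsing camera-landmark factors into camera-camera factors: two cameras become connected in the reduced graph when they observe a common landmark (cameras 1 and 2 share landmark 5), while camera-camera factors such as (2, 7) pass through directly. A minimal sketch of that reduction under this reading; the pair-set representation and function name are made up for illustration:

#include <cstddef>
#include <map>
#include <set>
#include <utility>
#include <vector>

// Reduce camera-landmark edges to camera-camera edges: cameras that see a
// common landmark get connected in the reduced graph.
std::set<std::pair<size_t, size_t> > reduceToCameraGraph(
    const std::vector<std::pair<size_t, size_t> >& cameraToLandmark) {
  std::map<size_t, std::vector<size_t> > camerasOfLandmark;
  for (size_t i = 0; i < cameraToLandmark.size(); ++i)
    camerasOfLandmark[cameraToLandmark[i].second].push_back(cameraToLandmark[i].first);
  std::set<std::pair<size_t, size_t> > reduced;
  std::map<size_t, std::vector<size_t> >::const_iterator it;
  for (it = camerasOfLandmark.begin(); it != camerasOfLandmark.end(); ++it)
    for (size_t a = 0; a < it->second.size(); ++a)
      for (size_t b = a + 1; b < it->second.size(); ++b)
        reduced.insert(std::make_pair(it->second[a], it->second[b]));
  return reduced;
}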
TEST ( GenerciGraph, hasCommonCamera )
{
  std::set<size_t> cameras1; cameras1 += 1, 2, 3, 4, 5;
  std::set<size_t> cameras2; cameras2 += 8, 7, 6, 5;
  bool actual = hasCommonCamera(cameras1, cameras2);
  CHECK(actual);
}

/* ************************************************************************* */
TEST ( GenerciGraph, hasCommonCamera2 )
{
  std::set<size_t> cameras1; cameras1 += 1, 3, 5, 7;
  std::set<size_t> cameras2; cameras2 += 2, 4, 6, 8, 10;
  bool actual = hasCommonCamera(cameras1, cameras2);
  CHECK(!actual);
}

/* ************************************************************************* */
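These two cases pin down hasCommonCamera as a set-intersection test. Since std::set iterates in sorted order, a single linear merge suffices; a sketch of that idea (illustrative, not necessarily the library's code):

#include <cstddef>
#include <set>

bool hasCommonCameraSketch(const std::set<size_t>& s1, const std::set<size_t>& s2) {
  std::set<size_t>::const_iterator i1 = s1.begin(), i2 = s2.begin();
  while (i1 != s1.end() && i2 != s2.end()) {
    if (*i1 == *i2) return true;    // found a shared camera
    if (*i1 < *i2) ++i1; else ++i2; // advance the smaller element
  }
  return false;
}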
@ -32,22 +32,22 @@ using namespace gtsam::partition;
// l1
TEST ( NestedDissection, oneIsland )
{
  using namespace submapPlanarSLAM;
  typedef TSAM2D::SubNLG SubNLG;
  Graph fg;
  fg.addOdometry(1, 2, Pose2(), odoNoise);
  fg.addBearingRange(1, 1, Rot2(), 0., bearingRangeNoise);
  fg.addBearingRange(2, 1, Rot2(), 0., bearingRangeNoise);
  fg.addPoseConstraint(1, Pose2());

  Ordering ordering; ordering += x1, x2, l1;

  int numNodeStopPartition = 1e3;
  int minNodesPerMap = 1e3;
  NestedDissection<Graph, SubNLG, GenericGraph2D> nd(fg, ordering, numNodeStopPartition, minNodesPerMap);
  LONGS_EQUAL(4, nd.root()->size());
  LONGS_EQUAL(3, nd.root()->frontal().size());
  LONGS_EQUAL(0, nd.root()->children().size());
}

/* ************************************************************************* */
@ -56,35 +56,35 @@ TEST ( NestedDissection, oneIsland )
// x2/ \x5
TEST ( NestedDissection, TwoIslands )
{
  using namespace submapPlanarSLAM;
  typedef TSAM2D::SubNLG SubNLG;
  Graph fg;
  fg.addOdometry(1, 2, Pose2(), odoNoise);
  fg.addOdometry(1, 3, Pose2(), odoNoise);
  fg.addOdometry(2, 3, Pose2(), odoNoise);
  fg.addOdometry(3, 4, Pose2(), odoNoise);
  fg.addOdometry(4, 5, Pose2(), odoNoise);
  fg.addOdometry(3, 5, Pose2(), odoNoise);
  fg.addPoseConstraint(1, Pose2());
  fg.addPoseConstraint(4, Pose2());
  Ordering ordering; ordering += x1, x2, x3, x4, x5;

  int numNodeStopPartition = 2;
  int minNodesPerMap = 1;
  NestedDissection<Graph, SubNLG, GenericGraph2D> nd(fg, ordering, numNodeStopPartition, minNodesPerMap);
  // root submap
  LONGS_EQUAL(0, nd.root()->size());
  LONGS_EQUAL(1, nd.root()->frontal().size());
  LONGS_EQUAL(0, nd.root()->separator().size());
  LONGS_EQUAL(2, nd.root()->children().size()); // 2 leaf submaps

  // the 1st submap
  LONGS_EQUAL(2, nd.root()->children()[0]->frontal().size());
  LONGS_EQUAL(4, nd.root()->children()[0]->size());

  // the 2nd submap
  LONGS_EQUAL(2, nd.root()->children()[1]->frontal().size());
  LONGS_EQUAL(4, nd.root()->children()[1]->size());
}

/* ************************************************************************* */
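For orientation, the assertions in these tests read naturally against a submap tree of roughly the following shape. This struct is only a reading aid for what size()/frontal()/separator()/children() return; it is not the actual TSAM2D type:

#include <cstddef>
#include <vector>

struct SubMapSketch {
  size_t nrFactors;                    // size(): factors assigned to this node
  std::vector<size_t> frontals;        // frontal(): variables eliminated here
  std::vector<size_t> separator;       // separator(): variables shared with the parent
  std::vector<SubMapSketch*> children; // children(): nested submaps
};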
@ -93,40 +93,40 @@ TEST ( NestedDissection, TwoIslands )
// x2/ \x5
TEST ( NestedDissection, FourIslands )
{
  using namespace submapPlanarSLAM;
  typedef TSAM2D::SubNLG SubNLG;
  Graph fg;
  fg.addOdometry(1, 3, Pose2(), odoNoise);
  fg.addOdometry(2, 3, Pose2(), odoNoise);
  fg.addOdometry(3, 4, Pose2(), odoNoise);
  fg.addOdometry(3, 5, Pose2(), odoNoise);
  fg.addPoseConstraint(1, Pose2());
  fg.addPoseConstraint(4, Pose2());
  Ordering ordering; ordering += x1, x2, x3, x4, x5;

  int numNodeStopPartition = 2;
  int minNodesPerMap = 1;
  NestedDissection<Graph, SubNLG, GenericGraph2D> nd(fg, ordering, numNodeStopPartition, minNodesPerMap);
  LONGS_EQUAL(0, nd.root()->size());
  LONGS_EQUAL(1, nd.root()->frontal().size());
  LONGS_EQUAL(0, nd.root()->separator().size());
  LONGS_EQUAL(4, nd.root()->children().size()); // 4 leaf submaps

  // the 1st submap
  LONGS_EQUAL(1, nd.root()->children()[0]->frontal().size());
  LONGS_EQUAL(2, nd.root()->children()[0]->size());

  // the 2nd submap
  LONGS_EQUAL(1, nd.root()->children()[1]->frontal().size());
  LONGS_EQUAL(2, nd.root()->children()[1]->size());

  // the 3rd submap
  LONGS_EQUAL(1, nd.root()->children()[2]->frontal().size());
  LONGS_EQUAL(1, nd.root()->children()[2]->size());

  // the 4th submap
  LONGS_EQUAL(1, nd.root()->children()[3]->frontal().size());
  LONGS_EQUAL(1, nd.root()->children()[3]->size());
}

/* ************************************************************************* */
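Comparing oneIsland (thresholds of 1e3 keep everything in the root) with the two tests above (thresholds of 2 and 1 force a split per island) suggests the two integers act as stopping criteria: recursion stops once a submap holds no more than numNodeStopPartition nodes, and no child smaller than minNodesPerMap is created. This reading is inferred from the assertions, not from the partitioner's documentation.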
@ -137,41 +137,41 @@ TEST ( NestedDissection, FourIslands )
// x5
TEST ( NestedDissection, weekLinks )
{
  using namespace submapPlanarSLAM;
  typedef TSAM2D::SubNLG SubNLG;
  Graph fg;
  fg.addOdometry(1, 2, Pose2(), odoNoise);
  fg.addOdometry(2, 3, Pose2(), odoNoise);
  fg.addOdometry(2, 4, Pose2(), odoNoise);
  fg.addOdometry(3, 4, Pose2(), odoNoise);
  fg.addBearingRange(1, 6, Rot2(), 0., bearingRangeNoise);
  fg.addBearingRange(2, 6, Rot2(), 0., bearingRangeNoise);
  fg.addBearingRange(5, 6, Rot2(), 0., bearingRangeNoise);
  fg.addPoseConstraint(1, Pose2());
  fg.addPoseConstraint(4, Pose2());
  fg.addPoseConstraint(5, Pose2());
  Ordering ordering; ordering += x1, x2, x3, x4, x5, l6;

  int numNodeStopPartition = 2;
  int minNodesPerMap = 1;
  NestedDissection<Graph, SubNLG, GenericGraph2D> nd(fg, ordering, numNodeStopPartition, minNodesPerMap);
  LONGS_EQUAL(0, nd.root()->size()); // one weeklink
  LONGS_EQUAL(1, nd.root()->frontal().size());
  LONGS_EQUAL(0, nd.root()->separator().size());
  LONGS_EQUAL(3, nd.root()->children().size()); // 3 leaf submaps
  LONGS_EQUAL(1, nd.root()->weeklinks().size());

  // the 1st submap
  LONGS_EQUAL(2, nd.root()->children()[0]->frontal().size()); // x3 and x4
  LONGS_EQUAL(4, nd.root()->children()[0]->size());

  // the 2nd submap
  LONGS_EQUAL(2, nd.root()->children()[1]->frontal().size()); // x1 and l6
  LONGS_EQUAL(4, nd.root()->children()[1]->size());

  // the 3rd submap
  LONGS_EQUAL(1, nd.root()->children()[2]->frontal().size()); // x5
  LONGS_EQUAL(1, nd.root()->children()[2]->size());
}

/* ************************************************************************* */
@ -184,86 +184,86 @@ TEST ( NestedDissection, weekLinks )
 */
TEST ( NestedDissection, manual_cuts )
{
  using namespace submapPlanarSLAM;
  typedef partition::Cuts Cuts;
  typedef TSAM2D::SubNLG SubNLG;
  typedef partition::PartitionTable PartitionTable;
  Graph fg;
  fg.addOdometry(x0, x1, Pose2(1.0, 0, 0), odoNoise);
  fg.addOdometry(x1, x2, Pose2(1.0, 0, 0), odoNoise);

  fg.addBearingRange(x0, l1, Rot2::fromAngle( M_PI_2), 1, bearingRangeNoise);
  fg.addBearingRange(x0, l4, Rot2::fromAngle(-M_PI_2), 1, bearingRangeNoise);
  fg.addBearingRange(x0, l2, Rot2::fromAngle( M_PI_4), sqrt(2), bearingRangeNoise);
  fg.addBearingRange(x0, l5, Rot2::fromAngle(-M_PI_4), sqrt(2), bearingRangeNoise);

  fg.addBearingRange(x1, l1, Rot2::fromAngle( M_PI_4 * 3), sqrt(2), bearingRangeNoise);
  fg.addBearingRange(x1, l2, Rot2::fromAngle( M_PI_2), 1, bearingRangeNoise);
  fg.addBearingRange(x1, l3, Rot2::fromAngle( M_PI_4), sqrt(2), bearingRangeNoise);
  fg.addBearingRange(x1, l4, Rot2::fromAngle(-M_PI_4 * 3), sqrt(2), bearingRangeNoise);
  fg.addBearingRange(x1, l5, Rot2::fromAngle(-M_PI_2), 1, bearingRangeNoise);
  fg.addBearingRange(x1, l6, Rot2::fromAngle(-M_PI_4), sqrt(2), bearingRangeNoise);

  fg.addBearingRange(x2, l2, Rot2::fromAngle( M_PI_4 * 3), sqrt(2), bearingRangeNoise);
  fg.addBearingRange(x2, l5, Rot2::fromAngle(-M_PI_4 * 3), sqrt(2), bearingRangeNoise);
  fg.addBearingRange(x2, l3, Rot2::fromAngle( M_PI_2), 1, bearingRangeNoise);
  fg.addBearingRange(x2, l6, Rot2::fromAngle(-M_PI_2), 1, bearingRangeNoise);

  fg.addPrior(x0, Pose2(0.1, 0, 0), priorNoise);

  // generate ordering
  Ordering ordering; ordering += x0, x1, x2, l1, l2, l3, l4, l5, l6;

  // define cuts
  boost::shared_ptr<Cuts> cuts(new Cuts());
  cuts->partitionTable = PartitionTable(9, -1); PartitionTable* p = &cuts->partitionTable;
  //x0 x1 x2 l1 l2 l3 l4 l5 l6
  (*p)[0]=1; (*p)[1]=0; (*p)[2]=2; (*p)[3]=1; (*p)[4]=0; (*p)[5]=2; (*p)[6]=1; (*p)[7]=0; (*p)[8]=2;

  cuts->children.push_back(boost::shared_ptr<Cuts>(new Cuts()));
  cuts->children[0]->partitionTable = PartitionTable(9, -1); p = &cuts->children[0]->partitionTable;
  //x0 x1 x2 l1 l2 l3 l4 l5 l6
  (*p)[0]=0; (*p)[1]=-1; (*p)[2]=-1; (*p)[3]=1; (*p)[4]=-1; (*p)[5]=-1; (*p)[6]=2; (*p)[7]=-1; (*p)[8]=-1;

  cuts->children.push_back(boost::shared_ptr<Cuts>(new Cuts()));
  cuts->children[1]->partitionTable = PartitionTable(9, -1); p = &cuts->children[1]->partitionTable;
  //x0 x1 x2 l1 l2 l3 l4 l5 l6
  (*p)[0]=-1; (*p)[1]=-1; (*p)[2]=0; (*p)[3]=-1; (*p)[4]=-1; (*p)[5]=1; (*p)[6]=-1; (*p)[7]=-1; (*p)[8]=2;

  // nested dissection
  NestedDissection<Graph, SubNLG, GenericGraph2D> nd(fg, ordering, cuts);
  LONGS_EQUAL(2, nd.root()->size());
  LONGS_EQUAL(3, nd.root()->frontal().size());
  LONGS_EQUAL(0, nd.root()->separator().size());
  LONGS_EQUAL(2, nd.root()->children().size()); // 2 leaf submaps
  LONGS_EQUAL(0, nd.root()->weeklinks().size());

  // the 1st submap
  LONGS_EQUAL(1, nd.root()->children()[0]->frontal().size()); // x0
  LONGS_EQUAL(4, nd.root()->children()[0]->size());
  LONGS_EQUAL(2, nd.root()->children()[0]->children().size());

  // the 1-1st submap
  LONGS_EQUAL(1, nd.root()->children()[0]->children()[0]->frontal().size()); // l1
  LONGS_EQUAL(2, nd.root()->children()[0]->children()[0]->size());

  // the 1-2nd submap
  LONGS_EQUAL(1, nd.root()->children()[0]->children()[1]->frontal().size()); // l4
  LONGS_EQUAL(2, nd.root()->children()[0]->children()[1]->size());

  // the 2nd submap
  LONGS_EQUAL(1, nd.root()->children()[1]->frontal().size()); // x2
  LONGS_EQUAL(3, nd.root()->children()[1]->size());
  LONGS_EQUAL(2, nd.root()->children()[1]->children().size());

  // the 2-1st submap
  LONGS_EQUAL(1, nd.root()->children()[1]->children()[0]->frontal().size()); // l3
  LONGS_EQUAL(2, nd.root()->children()[1]->children()[0]->size());

  // the 2-2nd submap
  LONGS_EQUAL(1, nd.root()->children()[1]->children()[1]->frontal().size()); // l6
  LONGS_EQUAL(2, nd.root()->children()[1]->children()[1]->size());

}
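The partition tables above are easiest to read with their apparent convention spelled out: indexed by position in the elimination ordering, the root table sends x1, l2, l5 to partition 0, which matches the three frontal variables the root keeps, while values 1 and 2 name the two child submaps and -1 marks variables absent from that level. A tiny helper under this (inferred) convention:

#include <cstddef>
#include <map>
#include <vector>

// Split ordering positions by partition id: 0 = kept at this node as
// separator/frontal variables, k > 0 = k-th child, -1 = not present here.
std::map<int, std::vector<size_t> > splitByPartition(const std::vector<int>& table) {
  std::map<int, std::vector<size_t> > groups;
  for (size_t i = 0; i < table.size(); ++i)
    if (table[i] >= 0) groups[table[i]].push_back(i);
  return groups;
}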
@ -272,65 +272,65 @@ TEST ( NestedDissection, manual_cuts )
// / | / \ | \
// x0 x1 x2 x3
TEST( NestedDissection, Graph3D) {
  using namespace gtsam::submapVisualSLAM;
  typedef TSAM3D::SubNLG SubNLG;
  typedef partition::PartitionTable PartitionTable;
  vector<GeneralCamera> cameras;
  cameras.push_back(GeneralCamera(Pose3(Rot3(), Point3(-2., 0., 0.))));
  cameras.push_back(GeneralCamera(Pose3(Rot3(), Point3(-1., 0., 0.))));
  cameras.push_back(GeneralCamera(Pose3(Rot3(), Point3( 1., 0., 0.))));
  cameras.push_back(GeneralCamera(Pose3(Rot3(), Point3( 2., 0., 0.))));

  vector<Point3> points;
  for (int cube_index = 0; cube_index <= 3; cube_index++) {
    Point3 center((cube_index-1) * 3, 0.5, 10.);
    points.push_back(center + Point3(-0.5, -0.5, -0.5));
    points.push_back(center + Point3(-0.5,  0.5, -0.5));
    points.push_back(center + Point3( 0.5,  0.5, -0.5));
    points.push_back(center + Point3( 0.5, -0.5, -0.5));
    points.push_back(center + Point3(-0.5, -0.5,  0.5));
    points.push_back(center + Point3(-0.5,  0.5,  0.5));
    points.push_back(center + Point3( 0.5,  0.5,  0.5));
    points.push_back(center + Point3( 0.5, -0.5,  0.5)); // the 8th cube corner
  }

  Graph graph;
  SharedDiagonal measurementNoise(gtsam::Vector_(2, 1., 1.));
  SharedDiagonal measurementZeroNoise(gtsam::Vector_(2, 0., 0.));
  for (int j=1; j<=8; j++)
    graph.addMeasurement(0, j, cameras[0].project(points[j-1]).expmap(measurementZeroNoise->sample()), measurementNoise);
  for (int j=1; j<=16; j++)
    graph.addMeasurement(1, j, cameras[1].project(points[j-1]).expmap(measurementZeroNoise->sample()), measurementNoise);
  for (int j=9; j<=24; j++)
    graph.addMeasurement(2, j, cameras[2].project(points[j-1]).expmap(measurementZeroNoise->sample()), measurementNoise);
  for (int j=17; j<=24; j++)
    graph.addMeasurement(3, j, cameras[3].project(points[j-1]).expmap(measurementZeroNoise->sample()), measurementNoise);

  // make an easy ordering
  Ordering ordering; ordering += x0, x1, x2, x3;
  for (int j=1; j<=24; j++)
    ordering += Symbol('l', j);

  // nested dissection
  const int numNodeStopPartition = 10;
  const int minNodesPerMap = 5;
  NestedDissection<Graph, SubNLG, GenericGraph3D> nd(graph, ordering, numNodeStopPartition, minNodesPerMap);

  LONGS_EQUAL(0, nd.root()->size());
  LONGS_EQUAL(8, nd.root()->frontal().size()); // l9-l16
  LONGS_EQUAL(0, nd.root()->separator().size());
  LONGS_EQUAL(2, nd.root()->children().size()); // 2 leaf submaps
  LONGS_EQUAL(0, nd.root()->weeklinks().size());

  // the 1st submap
  LONGS_EQUAL(10, nd.root()->children()[0]->frontal().size()); // x0, x1, l1-l8
  LONGS_EQUAL(24, nd.root()->children()[0]->size()); // 8 + 16
  LONGS_EQUAL(0, nd.root()->children()[0]->children().size());

  // the 2nd submap
  LONGS_EQUAL(10, nd.root()->children()[1]->frontal().size()); // x2, x3, l17-l24
  LONGS_EQUAL(24, nd.root()->children()[1]->size()); // 16 + 8
  LONGS_EQUAL(0, nd.root()->children()[1]->children().size());
}

@ -295,87 +295,87 @@ namespace gtsam {

  /* ************************************************************************* */
  SharedGaussian get_model_inlier() const {
    return model_inlier_;
  }

  /* ************************************************************************* */
  SharedGaussian get_model_outlier() const {
    return model_outlier_;
  }

  /* ************************************************************************* */
  Matrix get_model_inlier_cov() const {
    return (model_inlier_->R().transpose()*model_inlier_->R()).inverse();
  }

  /* ************************************************************************* */
  Matrix get_model_outlier_cov() const {
    return (model_outlier_->R().transpose()*model_outlier_->R()).inverse();
  }

  /* ************************************************************************* */
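The two _cov getters rely on R being the noise model's whitening matrix, so that R'R is the information matrix and (R'R)^-1 recovers the covariance. A self-contained Eigen check of that identity, with made-up numbers:

#include <Eigen/Dense>
#include <iostream>

int main() {
  // A 2-D measurement covariance (illustrative values), its information
  // matrix, and an upper-triangular whitening factor R with R'R = Info.
  Eigen::Matrix2d Sigma;
  Sigma << 0.25, 0.05,
           0.05, 0.16;
  Eigen::Matrix2d Info = Sigma.inverse();
  Eigen::Matrix2d R = Info.llt().matrixU();               // Cholesky: R'R = Info
  // Recover Sigma the same way get_model_inlier_cov() does:
  Eigen::Matrix2d Sigma2 = (R.transpose() * R).inverse();
  std::cout << (Sigma - Sigma2).norm() << std::endl;      // prints ~0
  return 0;
}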
  void updateNoiseModels(const gtsam::Values& values, const gtsam::NonlinearFactorGraph& graph){
    /* Update model_inlier_ and model_outlier_ to account for uncertainty in robot trajectories
     * (note these are given in the E step, where indicator probabilities are calculated).
     *
     * Principle: R += [H1 H2] * joint_cov12 * [H1 H2]', where H1, H2 are Jacobians of the
     * unwhitened error w.r.t. states, and R is the measurement covariance (inlier or outlier modes).
     *
     * TODO: improve efficiency (info form)
     */

    // get joint covariance of the involved states
    std::vector<gtsam::Key> Keys;
    Keys.push_back(key1_);
    Keys.push_back(key2_);
    Marginals marginals( graph, values, Marginals::QR );
    JointMarginal joint_marginal12 = marginals.jointMarginalCovariance(Keys);
    Matrix cov1 = joint_marginal12(key1_, key1_);
    Matrix cov2 = joint_marginal12(key2_, key2_);
    Matrix cov12 = joint_marginal12(key1_, key2_);

    updateNoiseModels_givenCovs(values, cov1, cov2, cov12);
  }

  /* ************************************************************************* */
  void updateNoiseModels_givenCovs(const gtsam::Values& values, const Matrix& cov1, const Matrix& cov2, const Matrix& cov12){
    /* Update model_inlier_ and model_outlier_ to account for uncertainty in robot trajectories
     * (note these are given in the E step, where indicator probabilities are calculated).
     *
     * Principle: R += [H1 H2] * joint_cov12 * [H1 H2]', where H1, H2 are Jacobians of the
     * unwhitened error w.r.t. states, and R is the measurement covariance (inlier or outlier modes).
     *
     * TODO: improve efficiency (info form)
     */

    const T& p1 = values.at<T>(key1_);
    const T& p2 = values.at<T>(key2_);

    Matrix H1, H2;
    T hx = p1.between(p2, H1, H2); // h(x)

    Matrix H;
    H.resize(H1.rows(), H1.cols()+H2.cols());
    H << H1, H2; // H = [H1 H2]

    Matrix joint_cov;
    joint_cov.resize(cov1.rows()+cov2.rows(), cov1.cols()+cov2.cols());
    joint_cov << cov1, cov12,
                 cov12.transpose(), cov2;

    Matrix cov_state = H*joint_cov*H.transpose();

    // model_inlier_->print("before:");

    // update inlier and outlier noise models
    Matrix covRinlier = (model_inlier_->R().transpose()*model_inlier_->R()).inverse();
    model_inlier_ = gtsam::noiseModel::Gaussian::Covariance(covRinlier + cov_state);

    Matrix covRoutlier = (model_outlier_->R().transpose()*model_outlier_->R()).inverse();
    model_outlier_ = gtsam::noiseModel::Gaussian::Covariance(covRoutlier + cov_state);

    // model_inlier_->print("after:");
    // std::cout<<"covRinlier + cov_state: "<<covRinlier + cov_state<<std::endl;
  }

  /* ************************************************************************* */
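Both update paths implement the comment's principle R_new = R_old + H * joint_cov * H' with H = [H1 H2]. A minimal stand-alone Eigen sketch of that composition for planar poses; the function name and fixed 3x3 block dimensions are illustrative, not the factor's actual interface:

#include <Eigen/Dense>

// R_new = R_old + H * joint_cov * H', with H = [H1 H2] (all 3x3 blocks here).
Eigen::Matrix3d inflateMeasurementCov(
    const Eigen::Matrix3d& covR,
    const Eigen::Matrix3d& H1, const Eigen::Matrix3d& H2,
    const Eigen::Matrix3d& cov1, const Eigen::Matrix3d& cov2,
    const Eigen::Matrix3d& cov12) {
  Eigen::Matrix<double, 3, 6> H;
  H << H1, H2;                        // stack the two Jacobians side by side
  Eigen::Matrix<double, 6, 6> joint;  // joint covariance of the two states
  joint << cov1, cov12,
           cov12.transpose(), cov2;
  return covR + H * joint * H.transpose();
}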
@ -28,9 +28,9 @@ namespace gtsam {

/*
 * - The 1st order GaussMarkov factor relates two keys of the same type. This relation is given via
 *   key_2 = exp(-1/tau*delta_t) * key_1 + w_d
 *   where tau is the time constant and delta_t is the time difference between the two keys.
 *   w_d is the equivalent discrete noise, whose covariance is calculated from the continuous noise model and delta_t.
 * - w_d is approximated as a Gaussian noise.
 * - In the multi-dimensional case, tau is a vector, and the above equation is applied on each element
 *   in the state (represented by keys), using the appropriate time constant in the vector tau.
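One common way to obtain the covariance of w_d from a continuous first-order Gauss-Markov model is the standard discretization below. This is a generic textbook sketch, assuming the continuous process is driven by white noise of spectral density q_c; it is not necessarily the exact formula this factor uses.

#include <cmath>

// x_{k+1} = exp(-delta_t/tau) * x_k + w_d, with
// Var(w_d) = q_c * tau / 2 * (1 - exp(-2 * delta_t / tau))
double gaussMarkovDiscreteVariance(double tau, double delta_t, double q_c) {
  const double phi = std::exp(-delta_t / tau);
  return q_c * tau / 2.0 * (1.0 - phi * phi);
}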
@ -80,7 +80,7 @@ public:

  /** equals */
  virtual bool equals(const NonlinearFactor& expected, double tol=1e-9) const {
    const This *e = dynamic_cast<const This*> (&expected);
    return e != NULL && Base::equals(*e, tol);
  }

@ -256,43 +256,43 @@ TEST( BetweenFactorEM, CaseStudy)

///* ************************************************************************** */
TEST (BetweenFactorEM, updateNoiseModel ) {
  gtsam::Key key1(1);
  gtsam::Key key2(2);

  gtsam::Pose2 p1(10.0, 15.0, 0.1);
  gtsam::Pose2 p2(15.0, 15.0, 0.3);
  gtsam::Pose2 noise(0.5, 0.4, 0.01);
  gtsam::Pose2 rel_pose_ideal = p1.between(p2);
  gtsam::Pose2 rel_pose_msr = rel_pose_ideal.compose(noise);

  SharedGaussian model_inlier(noiseModel::Diagonal::Sigmas( (gtsam::Vector(3) << 1.5, 2.5, 4.05)));
  SharedGaussian model_outlier(noiseModel::Diagonal::Sigmas( (gtsam::Vector(3) << 50.0, 50.0, 10.0)));

  gtsam::Values values;
  values.insert(key1, p1);
  values.insert(key2, p2);

  double prior_outlier = 0.0;
  double prior_inlier = 1.0;

  BetweenFactorEM<gtsam::Pose2> f(key1, key2, rel_pose_msr, model_inlier, model_outlier,
      prior_inlier, prior_outlier);

  SharedGaussian model = SharedGaussian(noiseModel::Isotropic::Sigma(3, 1e2));

  NonlinearFactorGraph graph;
  graph.push_back(gtsam::PriorFactor<Pose2>(key1, p1, model));
  graph.push_back(gtsam::PriorFactor<Pose2>(key2, p2, model));

  f.updateNoiseModels(values, graph);

  SharedGaussian model_inlier_new = f.get_model_inlier();
  SharedGaussian model_outlier_new = f.get_model_outlier();

  model_inlier->print("model_inlier:");
  model_outlier->print("model_outlier:");
  model_inlier_new->print("model_inlier_new:");
  model_outlier_new->print("model_outlier_new:");
}