commit 0d4b0d76d9
@@ -39,181 +39,276 @@ using namespace std;
 using namespace gtsam;
 using namespace boost::algorithm;
 
+using symbol_shorthand::L;
 using symbol_shorthand::M;
 using symbol_shorthand::X;
 
-// Testing params
-const size_t max_loop_count = 2000;  // 2000; // 200 //2000 //8000
-
-noiseModel::Diagonal::shared_ptr prior_noise_model =
-    noiseModel::Diagonal::Sigmas(
-        (Vector(3) << 0.0001, 0.0001, 0.0001).finished());
-
-noiseModel::Diagonal::shared_ptr pose_noise_model =
-    noiseModel::Diagonal::Sigmas(
-        (Vector(3) << 1.0 / 30.0, 1.0 / 30.0, 1.0 / 100.0).finished());
-
-/**
- * @brief Write the results of optimization to filename.
- *
- * @param results The Values object with the final results.
- * @param num_poses The number of poses to write to the file.
- * @param filename The file name to save the results to.
- */
-void write_results(const Values& results, size_t num_poses,
-                   const std::string& filename = "ISAM2_city10000.txt") {
-  ofstream outfile;
-  outfile.open(filename);
-
-  for (size_t i = 0; i < num_poses; ++i) {
-    Pose2 out_pose = results.at<Pose2>(X(i));
-    outfile << out_pose.x() << " " << out_pose.y() << " " << out_pose.theta()
-            << std::endl;
-  }
-  outfile.close();
-  std::cout << "output written to " << filename << std::endl;
-}
-
-void SmootherUpdate(HybridSmoother& smoother, HybridNonlinearFactorGraph& graph,
-                    const Values& initial, size_t maxNrHypotheses,
-                    Values* results) {
-  HybridGaussianFactorGraph linearized = *graph.linearize(initial);
-  // std::cout << "index: " << index << std::endl;
-  smoother.update(linearized, maxNrHypotheses);
-  graph.resize(0);
-  HybridValues delta = smoother.hybridBayesNet().optimize();
-  results->insert_or_assign(initial.retract(delta.continuous()));
-}
-
-/* ************************************************************************* */
-int main(int argc, char* argv[]) {
-  ifstream in(findExampleDataFile("T1_city10000_04.txt"));
-  // ifstream in("../data/mh_T1_city10000_04.txt"); //Type #1 only
-  // ifstream in("../data/mh_T3b_city10000_10.txt"); //Type #3 only
-  // ifstream in("../data/mh_T1_T3_city10000_04.txt"); //Type #1 + Type #3
-  // ifstream in("../data/mh_All_city10000_groundtruth.txt");
-
-  size_t discrete_count = 0, index = 0;
-
-  std::list<double> time_list;
-
-  HybridSmoother smoother(0.99);
-  HybridNonlinearFactorGraph graph;
-  Values init_values;
-  Values results;
-
-  size_t maxNrHypotheses = 3;
-
-  double x = 0.0;
-  double y = 0.0;
-  double rad = 0.0;
-
-  Pose2 prior_pose(x, y, rad);
-  init_values.insert(X(0), prior_pose);
-  graph.push_back(PriorFactor<Pose2>(X(0), prior_pose, prior_noise_model));
-
-  SmootherUpdate(smoother, graph, init_values, maxNrHypotheses, &results);
-
-  size_t key_s, key_t{0};
-
-  clock_t start_time = clock();
-  std::string str;
-  while (getline(in, str) && index < max_loop_count) {
-    vector<string> parts;
-    split(parts, str, is_any_of(" "));
-
-    key_s = stoi(parts[1]);
-    key_t = stoi(parts[3]);
-
-    int num_measurements = stoi(parts[5]);
-    vector<Pose2> pose_array(num_measurements);
-    for (int i = 0; i < num_measurements; ++i) {
-      x = stod(parts[6 + 3 * i]);
-      y = stod(parts[7 + 3 * i]);
-      rad = stod(parts[8 + 3 * i]);
-      pose_array[i] = Pose2(x, y, rad);
-    }
-
-    // Take the first one as the initial estimate
-    Pose2 odom_pose = pose_array[0];
-    if (key_s == key_t - 1) {  // new X(key)
-      init_values.insert(X(key_t), init_values.at<Pose2>(X(key_s)) * odom_pose);
-    } else {  // loop
-      // index++;
-    }
-
-    // Flag if we should run smoother update
-    bool smoother_update = false;
-
-    if (num_measurements == 2) {
-      // Add hybrid factor which considers both measurements
-      DiscreteKey m(M(discrete_count), num_measurements);
-      discrete_count++;
-
-      graph.push_back(DecisionTreeFactor(m, "0.6 0.4"));
-
-      auto f0 = std::make_shared<BetweenFactor<Pose2>>(
-          X(key_s), X(key_t), pose_array[0], pose_noise_model);
-      auto f1 = std::make_shared<BetweenFactor<Pose2>>(
-          X(key_s), X(key_t), pose_array[1], pose_noise_model);
-      std::vector<NonlinearFactorValuePair> factors{{f0, 0.0}, {f1, 0.0}};
-      // HybridNonlinearFactor mixtureFactor(m, factors);
-      HybridNonlinearFactor mixtureFactor(m, {f0, f1});
-      graph.push_back(mixtureFactor);
-
-      smoother_update = true;
-    } else {
-      graph.add(BetweenFactor<Pose2>(X(key_s), X(key_t), odom_pose,
-                                     pose_noise_model));
-    }
-
-    if (smoother_update) {
-      SmootherUpdate(smoother, graph, init_values, maxNrHypotheses, &results);
-    }
-
-    // Print loop index and time taken in processor clock ticks
-    // if (index % 50 == 0 && key_s != key_t - 1) {
-    if (index % 100 == 0) {
-      std::cout << "index: " << index << std::endl;
-      std::cout << "acc_time: " << time_list.back() / CLOCKS_PER_SEC << std::endl;
-      // delta.discrete().print("The Discrete Assignment");
-      tictoc_finishedIteration_();
-      tictoc_print_();
-    }
-
-    if (key_s == key_t - 1) {
-      clock_t cur_time = clock();
-      time_list.push_back(cur_time - start_time);
-    }
-
-    index += 1;
-  }
-
-  SmootherUpdate(smoother, graph, init_values, maxNrHypotheses, &results);
-
-  clock_t end_time = clock();
-  clock_t total_time = end_time - start_time;
-  cout << "total_time: " << total_time / CLOCKS_PER_SEC << " seconds" << endl;
-
-  /// Write results to file
-  write_results(results, (key_t + 1), "HybridISAM_city10000.txt");
-
-  ofstream outfile_time;
-  std::string time_file_name = "HybridISAM_city10000_time.txt";
-  outfile_time.open(time_file_name);
-  for (auto acc_time : time_list) {
-    outfile_time << acc_time << std::endl;
-  }
-  outfile_time.close();
-  cout << "output " << time_file_name << " file." << endl;
+const size_t kMaxLoopCount = 2000;  // Example default value
+const size_t kMaxNrHypotheses = 10;
+
+auto kOpenLoopModel = noiseModel::Diagonal::Sigmas(Vector3::Ones() * 10);
+
+auto kPriorNoiseModel = noiseModel::Diagonal::Sigmas(
+    (Vector(3) << 0.0001, 0.0001, 0.0001).finished());
+
+auto kPoseNoiseModel = noiseModel::Diagonal::Sigmas(
+    (Vector(3) << 1.0 / 30.0, 1.0 / 30.0, 1.0 / 100.0).finished());
+
+// Experiment Class
+class Experiment {
+ private:
+  std::string filename_;
+  HybridSmoother smoother_;
+  HybridNonlinearFactorGraph graph_;
+  Values initial_;
+  Values result_;
+
+  /**
+   * @brief Write the result of optimization to file.
+   *
+   * @param result The Values object with the final result.
+   * @param num_poses The number of poses to write to the file.
+   * @param filename The file name to save the result to.
+   */
+  void writeResult(const Values& result, size_t numPoses,
+                   const std::string& filename = "Hybrid_city10000.txt") {
+    ofstream outfile;
+    outfile.open(filename);
+
+    for (size_t i = 0; i < numPoses; ++i) {
+      Pose2 outPose = result.at<Pose2>(X(i));
+      outfile << outPose.x() << " " << outPose.y() << " " << outPose.theta()
+              << std::endl;
+    }
+    outfile.close();
+    std::cout << "Output written to " << filename << std::endl;
+  }
+
+  /**
+   * @brief Create a hybrid loop closure factor where
+   * 0 - loose noise model and 1 - loop noise model.
+   */
+  HybridNonlinearFactor hybridLoopClosureFactor(size_t loopCounter, size_t keyS,
+                                                size_t keyT,
+                                                const Pose2& measurement) {
+    DiscreteKey l(L(loopCounter), 2);
+
+    auto f0 = std::make_shared<BetweenFactor<Pose2>>(
+        X(keyS), X(keyT), measurement, kOpenLoopModel);
+    auto f1 = std::make_shared<BetweenFactor<Pose2>>(
+        X(keyS), X(keyT), measurement, kPoseNoiseModel);
+
+    std::vector<NonlinearFactorValuePair> factors{
+        {f0, kOpenLoopModel->negLogConstant()},
+        {f1, kPoseNoiseModel->negLogConstant()}};
+    HybridNonlinearFactor mixtureFactor(l, factors);
+    return mixtureFactor;
+  }
+
+  /// @brief Create hybrid odometry factor with discrete measurement choices.
+  HybridNonlinearFactor hybridOdometryFactor(
+      size_t numMeasurements, size_t keyS, size_t keyT, const DiscreteKey& m,
+      const std::vector<Pose2>& poseArray,
+      const SharedNoiseModel& poseNoiseModel) {
+    auto f0 = std::make_shared<BetweenFactor<Pose2>>(
+        X(keyS), X(keyT), poseArray[0], poseNoiseModel);
+    auto f1 = std::make_shared<BetweenFactor<Pose2>>(
+        X(keyS), X(keyT), poseArray[1], poseNoiseModel);
+
+    std::vector<NonlinearFactorValuePair> factors{{f0, 0.0}, {f1, 0.0}};
+    HybridNonlinearFactor mixtureFactor(m, factors);
+    return mixtureFactor;
+  }
+
+  /// @brief Perform smoother update and optimize the graph.
+  void smootherUpdate(HybridSmoother& smoother,
+                      HybridNonlinearFactorGraph& graph, const Values& initial,
+                      size_t kMaxNrHypotheses, Values* result) {
+    HybridGaussianFactorGraph linearized = *graph.linearize(initial);
+    smoother.update(linearized, kMaxNrHypotheses);
+    // throw if x0 not in hybridBayesNet_:
+    const KeySet& keys = smoother.hybridBayesNet().keys();
+    if (keys.find(X(0)) == keys.end()) {
+      throw std::runtime_error("x0 not in hybridBayesNet_");
+    }
+    graph.resize(0);
+    // HybridValues delta = smoother.hybridBayesNet().optimize();
+    // result->insert_or_assign(initial.retract(delta.continuous()));
+  }
+
+ public:
+  /// Construct with filename of experiment to run
+  explicit Experiment(const std::string& filename)
+      : filename_(filename), smoother_(0.99) {}
+
+  /// @brief Run the main experiment with a given maxLoopCount.
+  void run(size_t maxLoopCount) {
+    // Prepare reading
+    ifstream in(filename_);
+    if (!in.is_open()) {
+      cerr << "Failed to open file: " << filename_ << endl;
+      return;
+    }
+
+    // Initialize local variables
+    size_t discreteCount = 0, index = 0;
+    size_t loopCount = 0;
+
+    std::list<double> timeList;
+
+    // Set up initial prior
+    double x = 0.0;
+    double y = 0.0;
+    double rad = 0.0;
+
+    Pose2 priorPose(x, y, rad);
+    initial_.insert(X(0), priorPose);
+    graph_.push_back(PriorFactor<Pose2>(X(0), priorPose, kPriorNoiseModel));
+
+    // Initial update
+    clock_t beforeUpdate = clock();
+    smootherUpdate(smoother_, graph_, initial_, kMaxNrHypotheses, &result_);
+    clock_t afterUpdate = clock();
+    std::vector<std::pair<size_t, double>> smootherUpdateTimes;
+    smootherUpdateTimes.push_back({index, afterUpdate - beforeUpdate});
+
+    // Start main loop
+    size_t keyS = 0, keyT = 0;
+    clock_t startTime = clock();
+    std::string line;
+    while (getline(in, line) && index < maxLoopCount) {
+      std::vector<std::string> parts;
+      split(parts, line, is_any_of(" "));
+
+      keyS = stoi(parts[1]);
+      keyT = stoi(parts[3]);
+
+      int numMeasurements = stoi(parts[5]);
+      std::vector<Pose2> poseArray(numMeasurements);
+      for (int i = 0; i < numMeasurements; ++i) {
+        x = stod(parts[6 + 3 * i]);
+        y = stod(parts[7 + 3 * i]);
+        rad = stod(parts[8 + 3 * i]);
+        poseArray[i] = Pose2(x, y, rad);
+      }
+
+      // Flag to decide whether to run smoother update
+      bool doSmootherUpdate = false;
+
+      // Take the first one as the initial estimate
+      Pose2 odomPose = poseArray[0];
+      if (keyS == keyT - 1) {
+        // Odometry factor
+        if (numMeasurements > 1) {
+          // Add hybrid factor
+          DiscreteKey m(M(discreteCount), numMeasurements);
+          HybridNonlinearFactor mixtureFactor = hybridOdometryFactor(
+              numMeasurements, keyS, keyT, m, poseArray, kPoseNoiseModel);
+          graph_.push_back(mixtureFactor);
+          discreteCount++;
+          doSmootherUpdate = true;
+          std::cout << "mixtureFactor: " << keyS << " " << keyT << std::endl;
+        } else {
+          graph_.add(BetweenFactor<Pose2>(X(keyS), X(keyT), odomPose,
+                                          kPoseNoiseModel));
+        }
+        // Insert next pose initial guess
+        initial_.insert(X(keyT), initial_.at<Pose2>(X(keyS)) * odomPose);
+      } else {
+        // Loop closure
+        HybridNonlinearFactor loopFactor =
+            hybridLoopClosureFactor(loopCount, keyS, keyT, odomPose);
+        // print loop closure event keys:
+        std::cout << "Loop closure: " << keyS << " " << keyT << std::endl;
+        graph_.add(loopFactor);
+        doSmootherUpdate = true;
+        loopCount++;
+      }
+
+      if (doSmootherUpdate) {
+        gttic_(SmootherUpdate);
+        beforeUpdate = clock();
+        smootherUpdate(smoother_, graph_, initial_, kMaxNrHypotheses, &result_);
+        afterUpdate = clock();
+        smootherUpdateTimes.push_back({index, afterUpdate - beforeUpdate});
+        gttoc_(SmootherUpdate);
+        doSmootherUpdate = false;
+      }
+
+      // Record timing for odometry edges only
+      if (keyS == keyT - 1) {
+        clock_t curTime = clock();
+        timeList.push_back(curTime - startTime);
+      }
+
+      // Print some status every 100 steps
+      if (index % 100 == 0) {
+        std::cout << "Index: " << index << std::endl;
+        if (!timeList.empty()) {
+          std::cout << "Acc_time: " << timeList.back() / CLOCKS_PER_SEC
+                    << " seconds" << std::endl;
+          // delta.discrete().print("The Discrete Assignment");
+          tictoc_finishedIteration_();
+          tictoc_print_();
+        }
+      }
+
+      index++;
+    }
+
+    // Final update
+    beforeUpdate = clock();
+    smootherUpdate(smoother_, graph_, initial_, kMaxNrHypotheses, &result_);
+    afterUpdate = clock();
+    smootherUpdateTimes.push_back({index, afterUpdate - beforeUpdate});
+
+    // Final optimize
+    gttic_(HybridSmootherOptimize);
+    HybridValues delta = smoother_.optimize();
+    gttoc_(HybridSmootherOptimize);
+
+    result_.insert_or_assign(initial_.retract(delta.continuous()));
+
+    std::cout << "Final error: " << smoother_.hybridBayesNet().error(delta)
+              << std::endl;
+
+    clock_t endTime = clock();
+    clock_t totalTime = endTime - startTime;
+    std::cout << "Total time: " << totalTime / CLOCKS_PER_SEC << " seconds"
+              << std::endl;
+
+    // Write results to file
+    writeResult(result_, keyT + 1, "Hybrid_City10000.txt");
+
+    // TODO Write to file
+    // for (size_t i = 0; i < smoother_update_times.size(); i++) {
+    //   auto p = smoother_update_times.at(i);
+    //   std::cout << p.first << ", " << p.second / CLOCKS_PER_SEC <<
+    //   std::endl;
+    // }
+
+    // Write timing info to file
+    ofstream outfileTime;
+    std::string timeFileName = "Hybrid_City10000_time.txt";
+    outfileTime.open(timeFileName);
+    for (auto accTime : timeList) {
+      outfileTime << accTime << std::endl;
+    }
+    outfileTime.close();
+    std::cout << "Output " << timeFileName << " file." << std::endl;
+  }
+};
 
+/* ************************************************************************* */
+int main() {
+  Experiment experiment(findExampleDataFile("T1_city10000_04.txt"));
+  // Experiment experiment("../data/mh_T1_city10000_04.txt"); //Type #1 only
+  // Experiment experiment("../data/mh_T3b_city10000_10.txt"); //Type #3 only
+  // Experiment experiment("../data/mh_T1_T3_city10000_04.txt"); //Type #1 +
+  // Type #3
+
+  // Run the experiment
+  experiment.run(kMaxLoopCount);
+
   return 0;
 }
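Editorial note on the hunk above: each component of the loop-closure mixture is paired with its noise model's `negLogConstant()`, so the loose "open loop" model (sigma = 10) and the tight loop model are compared on a normalized footing. A minimal standalone sketch of why that term matters, using a hypothetical 1-D Gaussian stand-in rather than GTSAM types:

```cpp
#include <cmath>
#include <cstdio>

// -log of the 1-D Gaussian normalization constant 1/(sigma*sqrt(2*pi)).
double negLogConstant(double sigma) {
  const double kTwoPi = 6.283185307179586;
  return std::log(sigma * std::sqrt(kTwoPi));
}

// Full negative log-density: weighted residual plus normalization term.
double negLogDensity(double residual, double sigma) {
  return 0.5 * (residual / sigma) * (residual / sigma) + negLogConstant(sigma);
}

int main() {
  const double r = 0.1;  // small residual: both hypotheses fit the data
  // Without the normalization term both residual costs are near zero and the
  // wide "open loop" model could never lose; with it, the tight model wins.
  std::printf("open loop, sigma=10  : %f\n", negLogDensity(r, 10.0));
  std::printf("loop, sigma=1/30     : %f\n", negLogDensity(r, 1.0 / 30.0));
  return 0;
}
```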
@@ -165,7 +165,8 @@ int main(int argc, char* argv[]) {
     // Print loop index and time taken in processor clock ticks
     if (index % 50 == 0 && key_s != key_t - 1) {
       std::cout << "index: " << index << std::endl;
-      std::cout << "acc_time: " << time_list.back() << std::endl;
+      std::cout << "acc_time: " << time_list.back() / CLOCKS_PER_SEC
+                << std::endl;
     }
 
     if (key_s == key_t - 1) {
@@ -190,7 +191,7 @@ int main(int argc, char* argv[]) {
 
   clock_t end_time = clock();
   clock_t total_time = end_time - start_time;
-  cout << "total_time: " << total_time << endl;
+  cout << "total_time: " << total_time / CLOCKS_PER_SEC << endl;
 
   /// Write results to file
   write_results(results, (key_t + 1));
@@ -164,6 +164,12 @@ namespace gtsam {
     virtual DiscreteFactor::shared_ptr multiply(
         const DiscreteFactor::shared_ptr& f) const override;
 
+    /// multiply with a scalar
+    DiscreteFactor::shared_ptr operator*(double s) const override {
+      return std::make_shared<DecisionTreeFactor>(
+          apply([s](const double& a) { return Ring::mul(a, s); }));
+    }
+
     /// multiply two factors
     DecisionTreeFactor operator*(const DecisionTreeFactor& f) const override {
       return apply(f, Ring::mul);
@@ -201,6 +207,9 @@ namespace gtsam {
       return combine(keys, Ring::add);
     }
 
+    /// Find the maximum value in the factor.
+    double max() const override { return ADT::max(); };
+
     /// Create new factor by maximizing over all values with the same separator.
     DiscreteFactor::shared_ptr max(size_t nrFrontals) const override {
       return combine(nrFrontals, Ring::max);
@@ -89,7 +89,7 @@ DiscreteBayesNet DiscreteBayesNet::prune(
   DiscreteValues deadModesValues;
   // If we have a dead mode threshold and discrete variables left after pruning,
   // then we run dead mode removal.
-  if (marginalThreshold.has_value() && pruned.keys().size() > 0) {
+  if (marginalThreshold && pruned.keys().size() > 0) {
     DiscreteMarginals marginals(DiscreteFactorGraph{pruned});
     for (auto dkey : pruned.discreteKeys()) {
       const Vector probabilities = marginals.marginalProbabilities(dkey);
@@ -73,10 +73,7 @@ AlgebraicDecisionTree<Key> DiscreteFactor::errorTree() const {
 
 /* ************************************************************************ */
 DiscreteFactor::shared_ptr DiscreteFactor::scale() const {
-  // Max over all the potentials by pretending all keys are frontal:
-  shared_ptr denominator = this->max(this->size());
-  // Normalize the product factor to prevent underflow.
-  return this->operator/(denominator);
+  return this->operator*(1.0 / max());
 }
 
 }  // namespace gtsam
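A standalone illustration of what the simplified `scale()` above computes: multiplying every potential by `1.0 / max()` pins the largest potential at exactly 1, which is what prevents underflow when long chains of factors are multiplied. Plain C++, not the GTSAM implementation:

```cpp
#include <algorithm>
#include <cassert>
#include <vector>

// Divide every potential by the maximum, i.e. multiply by 1.0/max.
std::vector<double> scalePotentials(std::vector<double> p) {
  assert(!p.empty());
  const double mx = *std::max_element(p.begin(), p.end());
  for (auto& v : p) v /= mx;  // the largest entry becomes exactly 1.0
  return p;
}
```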
@@ -126,6 +126,9 @@ class GTSAM_EXPORT DiscreteFactor : public Factor {
   /// Compute error for each assignment and return as a tree
   virtual AlgebraicDecisionTree<Key> errorTree() const;
 
+  /// Multiply with a scalar
+  virtual DiscreteFactor::shared_ptr operator*(double s) const = 0;
+
   /// Multiply in a DecisionTreeFactor and return the result as
   /// DecisionTreeFactor
   virtual DecisionTreeFactor operator*(const DecisionTreeFactor&) const = 0;
@@ -152,6 +155,9 @@ class GTSAM_EXPORT DiscreteFactor : public Factor {
   /// Create new factor by summing all values with the same separator values
   virtual DiscreteFactor::shared_ptr sum(const Ordering& keys) const = 0;
 
+  /// Find the maximum value in the factor.
+  virtual double max() const = 0;
+
   /// Create new factor by maximizing over all values with the same separator.
   virtual DiscreteFactor::shared_ptr max(size_t nrFrontals) const = 0;
@@ -65,15 +65,10 @@ namespace gtsam {
 
   /* ************************************************************************ */
   DiscreteFactor::shared_ptr DiscreteFactorGraph::product() const {
-    DiscreteFactor::shared_ptr result;
-    for (auto it = this->begin(); it != this->end(); ++it) {
-      if (*it) {
-        if (result) {
-          result = result->multiply(*it);
-        } else {
-          // Assign to the first non-null factor
-          result = *it;
-        }
+    DiscreteFactor::shared_ptr result = nullptr;
+    for (const auto& factor : *this) {
+      if (factor) {
+        result = result ? result->multiply(factor) : factor;
       }
     }
     return result;
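The rewritten `product()` above is a null-safe fold: the first non-null factor seeds the result and later factors are multiplied in. The same pattern extracted into a generic sketch (the `combine` callback is a hypothetical stand-in for `DiscreteFactor::multiply`):

```cpp
#include <memory>
#include <vector>

// Null-safe fold: the first non-null element seeds the result, every later
// non-null element is combined in; null entries are skipped entirely.
template <typename T, typename Combine>
std::shared_ptr<T> foldNonNull(const std::vector<std::shared_ptr<T>>& xs,
                               Combine combine) {
  std::shared_ptr<T> result = nullptr;
  for (const auto& x : xs) {
    if (x) result = result ? combine(result, x) : x;
  }
  return result;
}
```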
@@ -120,15 +115,7 @@ namespace gtsam {
 
   /* ************************************************************************ */
   DiscreteFactor::shared_ptr DiscreteFactorGraph::scaledProduct() const {
-    // PRODUCT: multiply all factors
-    gttic(product);
-    DiscreteFactor::shared_ptr product = this->product();
-    gttoc(product);
-
-    // Normalize the product factor to prevent underflow.
-    product = product->scale();
-
-    return product;
+    return product()->scale();
   }
 
   /* ************************************************************************ */
@@ -216,7 +203,7 @@ namespace gtsam {
       const Ordering& frontalKeys) {
     gttic(product);
     // `product` is scaled later to prevent underflow.
-    DiscreteFactor::shared_ptr product = factors.product();
+    DiscreteFactor::shared_ptr product = factors.scaledProduct();
     gttoc(product);
 
     // sum out frontals, this is the factor on the separator
@@ -224,16 +211,6 @@ namespace gtsam {
     DiscreteFactor::shared_ptr sum = product->sum(frontalKeys);
     gttoc(sum);
 
-    // Normalize/scale to prevent underflow.
-    // We divide both `product` and `sum` by `max(sum)`
-    // since it is faster to compute and when the conditional
-    // is formed by `product/sum`, the scaling term cancels out.
-    gttic(scale);
-    DiscreteFactor::shared_ptr denominator = sum->max(sum->size());
-    product = product->operator/(denominator);
-    sum = sum->operator/(denominator);
-    gttoc(scale);
-
     // Ordering keys for the conditional so that frontalKeys are really in front
     Ordering orderedKeys;
     orderedKeys.insert(orderedKeys.end(), frontalKeys.begin(),
@@ -110,6 +110,11 @@ DiscreteFactor::shared_ptr TableDistribution::max(const Ordering& keys) const {
   return table_.max(keys);
 }
 
+/* ****************************************************************************/
+DiscreteFactor::shared_ptr TableDistribution::operator*(double s) const {
+  return table_ * s;
+}
+
 /* ****************************************************************************/
 DiscreteFactor::shared_ptr TableDistribution::operator/(
     const DiscreteFactor::shared_ptr& f) const {
@@ -116,12 +116,19 @@ class GTSAM_EXPORT TableDistribution : public DiscreteConditional {
   /// Create new factor by summing all values with the same separator values
   DiscreteFactor::shared_ptr sum(const Ordering& keys) const override;
 
+  /// Find the maximum value in the factor.
+  double max() const override { return table_.max(); }
+
   /// Create new factor by maximizing over all values with the same separator.
   DiscreteFactor::shared_ptr max(size_t nrFrontals) const override;
 
   /// Create new factor by maximizing over all values with the same separator.
   DiscreteFactor::shared_ptr max(const Ordering& keys) const override;
 
+  /// Multiply by scalar s
+  DiscreteFactor::shared_ptr operator*(double s) const override;
+
   /// divide by DiscreteFactor::shared_ptr f (safely)
   DiscreteFactor::shared_ptr operator/(
       const DiscreteFactor::shared_ptr& f) const override;
@@ -389,6 +389,36 @@ void TableFactor::print(const string& s, const KeyFormatter& formatter) const {
   cout << "number of nnzs: " << sparse_table_.nonZeros() << endl;
 }
 
+/* ************************************************************************ */
+DiscreteFactor::shared_ptr TableFactor::sum(size_t nrFrontals) const {
+  return combine(nrFrontals, Ring::add);
+}
+
+/* ************************************************************************ */
+DiscreteFactor::shared_ptr TableFactor::sum(const Ordering& keys) const {
+  return combine(keys, Ring::add);
+}
+
+/* ************************************************************************ */
+double TableFactor::max() const {
+  double max_value = std::numeric_limits<double>::lowest();
+  for (Eigen::SparseVector<double>::InnerIterator it(sparse_table_); it; ++it) {
+    max_value = std::max(max_value, it.value());
+  }
+  return max_value;
+}
+
+/* ************************************************************************ */
+DiscreteFactor::shared_ptr TableFactor::max(size_t nrFrontals) const {
+  return combine(nrFrontals, Ring::max);
+}
+
+/* ************************************************************************ */
+DiscreteFactor::shared_ptr TableFactor::max(const Ordering& keys) const {
+  return combine(keys, Ring::max);
+}
+
 /* ************************************************************************ */
 TableFactor TableFactor::apply(Unary op) const {
   // Initialize new factor.
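The new `TableFactor::max()` above walks only the stored entries of the sparse table. A self-contained version of the same loop, assuming only Eigen (note it ignores implicit zeros, exactly like the implementation above):

```cpp
#include <Eigen/SparseCore>
#include <algorithm>
#include <limits>

double sparseMax(const Eigen::SparseVector<double>& v) {
  double mx = std::numeric_limits<double>::lowest();
  // InnerIterator visits stored entries only, so implicit zeros are skipped,
  // matching the TableFactor implementation above.
  for (Eigen::SparseVector<double>::InnerIterator it(v); it; ++it) {
    mx = std::max(mx, it.value());
  }
  return mx;
}
```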
@@ -171,6 +171,12 @@ class GTSAM_EXPORT TableFactor : public DiscreteFactor {
   /// Calculate error for DiscreteValues `x`, is -log(probability).
   double error(const DiscreteValues& values) const override;
 
+  /// multiply with a scalar
+  DiscreteFactor::shared_ptr operator*(double s) const override {
+    return std::make_shared<TableFactor>(
+        apply([s](const double& a) { return Ring::mul(a, s); }));
+  }
+
   /// multiply two TableFactors
   TableFactor operator*(const TableFactor& f) const {
     return apply(f, Ring::mul);
@@ -215,24 +221,19 @@ class GTSAM_EXPORT TableFactor : public DiscreteFactor {
       DiscreteKeys parent_keys) const;
 
   /// Create new factor by summing all values with the same separator values
-  DiscreteFactor::shared_ptr sum(size_t nrFrontals) const override {
-    return combine(nrFrontals, Ring::add);
-  }
+  DiscreteFactor::shared_ptr sum(size_t nrFrontals) const override;
 
   /// Create new factor by summing all values with the same separator values
-  DiscreteFactor::shared_ptr sum(const Ordering& keys) const override {
-    return combine(keys, Ring::add);
-  }
+  DiscreteFactor::shared_ptr sum(const Ordering& keys) const override;
+
+  /// Find the maximum value in the factor.
+  double max() const override;
 
   /// Create new factor by maximizing over all values with the same separator.
-  DiscreteFactor::shared_ptr max(size_t nrFrontals) const override {
-    return combine(nrFrontals, Ring::max);
-  }
+  DiscreteFactor::shared_ptr max(size_t nrFrontals) const override;
 
   /// Create new factor by maximizing over all values with the same separator.
-  DiscreteFactor::shared_ptr max(const Ordering& keys) const override {
-    return combine(keys, Ring::max);
-  }
+  DiscreteFactor::shared_ptr max(const Ordering& keys) const override;
 
   /// @}
   /// @name Advanced Interface
@@ -191,11 +191,19 @@ size_t HybridGaussianConditional::nrComponents() const {
 /* *******************************************************************************/
 GaussianConditional::shared_ptr HybridGaussianConditional::choose(
     const DiscreteValues &discreteValues) const {
-  auto &[factor, _] = factors()(discreteValues);
-  if (!factor) return nullptr;
+  try {
+    auto &[factor, _] = factors()(discreteValues);
+    if (!factor) return nullptr;
 
-  auto conditional = checkConditional(factor);
-  return conditional;
+    auto conditional = checkConditional(factor);
+    return conditional;
+  } catch (const std::out_of_range &e) {
+    GTSAM_PRINT(*this);
+    GTSAM_PRINT(discreteValues);
+    throw std::runtime_error(
+        "HybridGaussianConditional::choose: discreteValues does not contain "
+        "all discrete parents.");
+  }
 }
 
 /* *******************************************************************************/
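Context for the try/catch added to `choose()`: indexing a decision tree with an assignment that is missing one of the discrete parents surfaces as `std::out_of_range`. The same failure mode on a plain `std::map`, for illustration only:

```cpp
#include <iostream>
#include <map>
#include <stdexcept>

int main() {
  std::map<int, double> modeToValue{{0, 1.0}, {1, 2.0}};
  try {
    std::cout << modeToValue.at(2) << "\n";  // key 2 was never assigned
  } catch (const std::out_of_range& e) {
    // choose() now catches this case, prints the offending inputs,
    // and rethrows with an explanatory message instead.
    std::cout << "missing assignment: " << e.what() << "\n";
  }
  return 0;
}
```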
@@ -313,18 +321,21 @@ HybridGaussianConditional::shared_ptr HybridGaussianConditional::prune(
   std::set_difference(theirs.begin(), theirs.end(), mine.begin(), mine.end(),
                       std::back_inserter(diff));
 
-  // Find maximum probability value for every combination of our keys.
-  Ordering keys(diff);
-  auto max = discreteProbs.max(keys);
+  // Find maximum probability value for every combination of *our* keys.
+  Ordering ordering(diff);
+  auto max = discreteProbs.max(ordering);
 
   // Check the max value for every combination of our keys.
   // If the max value is 0.0, we can prune the corresponding conditional.
+  bool allPruned = true;
   auto pruner =
       [&](const Assignment<Key> &choices,
           const GaussianFactorValuePair &pair) -> GaussianFactorValuePair {
-    if (max->evaluate(choices) == 0.0)
+    // If this choice is zero probability or Gaussian is null, return infinity
+    if (!pair.first || max->evaluate(choices) == 0.0) {
       return {nullptr, std::numeric_limits<double>::infinity()};
-    else {
+    } else {
+      allPruned = false;
       // Add negLogConstant_ back so that the minimum negLogConstant in the
       // HybridGaussianConditional is set correctly.
       return {pair.first, pair.second + negLogConstant_};
@@ -332,6 +343,7 @@ HybridGaussianConditional::shared_ptr HybridGaussianConditional::prune(
   };
 
   FactorValuePairs prunedConditionals = factors().apply(pruner);
+  if (allPruned) return nullptr;
   return std::make_shared<HybridGaussianConditional>(discreteKeys(),
                                                      prunedConditionals, true);
 }
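The `allPruned` flag added above matters because a conditional whose every component was pruned carries no information, so returning `nullptr` lets the caller drop it entirely. A small stand-in showing the same pattern on plain pairs (no GTSAM types; `Component` is hypothetical):

```cpp
#include <limits>
#include <memory>
#include <utility>
#include <vector>

// Stand-in for GaussianFactorValuePair: (component, extra negative log weight).
using Component = std::pair<std::shared_ptr<double>, double>;

// Prune zero-probability or null components; report whether anything survived.
std::vector<Component> pruneComponents(const std::vector<Component>& in,
                                       const std::vector<double>& maxProb,
                                       bool* anySurvived) {
  *anySurvived = false;
  std::vector<Component> out;
  for (size_t i = 0; i < in.size(); ++i) {
    if (!in[i].first || maxProb[i] == 0.0) {
      out.push_back({nullptr, std::numeric_limits<double>::infinity()});
    } else {
      *anySurvived = true;
      out.push_back(in[i]);
    }
  }
  // A caller mirroring prune() above would return nullptr when !*anySurvived.
  return out;
}
```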
@@ -50,6 +50,8 @@
 #include <utility>
 #include <vector>
 
+#define GTSAM_HYBRID_WITH_TABLEFACTOR 1
+
 namespace gtsam {
 
 /// Specialize EliminateableFactorGraph for HybridGaussianFactorGraph:
@@ -253,7 +255,11 @@ static DiscreteFactor::shared_ptr DiscreteFactorFromErrors(
   double min_log = errors.min();
   AlgebraicDecisionTree<Key> potentials(
       errors, [&min_log](const double x) { return exp(-(x - min_log)); });
+#if GTSAM_HYBRID_WITH_TABLEFACTOR
   return std::make_shared<TableFactor>(discreteKeys, potentials);
+#else
+  return std::make_shared<DecisionTreeFactor>(discreteKeys, potentials);
+#endif
 }
 
 /* ************************************************************************ */
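`DiscreteFactorFromErrors` (the hunk above) turns errors into potentials via `exp(-(x - min))`; shifting by the minimum error first keeps the best hypothesis at exp(0) = 1. The arithmetic in isolation:

```cpp
#include <algorithm>
#include <cmath>
#include <vector>

std::vector<double> errorsToPotentials(const std::vector<double>& errors) {
  const double minLog = *std::min_element(errors.begin(), errors.end());
  std::vector<double> potentials;
  potentials.reserve(errors.size());
  // Shifting by the minimum keeps the best hypothesis at exp(0) = 1 and only
  // attenuates the rest, so large errors cannot underflow the winner.
  for (double e : errors) potentials.push_back(std::exp(-(e - minLog)));
  return potentials;
}
```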
@@ -290,9 +296,13 @@ static DiscreteFactorGraph CollectDiscreteFactors(
         /// Get the underlying TableFactor
         dfg.push_back(dtc->table());
       } else {
+#if GTSAM_HYBRID_WITH_TABLEFACTOR
         // Convert DiscreteConditional to TableFactor
         auto tdc = std::make_shared<TableFactor>(*dc);
         dfg.push_back(tdc);
+#else
+        dfg.push_back(dc);
+#endif
       }
 #if GTSAM_HYBRID_TIMING
       gttoc_(ConvertConditionalToTableFactor);
@@ -309,11 +319,18 @@ static DiscreteFactorGraph CollectDiscreteFactors(
 static std::pair<HybridConditional::shared_ptr, std::shared_ptr<Factor>>
 discreteElimination(const HybridGaussianFactorGraph &factors,
                     const Ordering &frontalKeys) {
+#if GTSAM_HYBRID_TIMING
+  gttic_(CollectDiscreteFactors);
+#endif
   DiscreteFactorGraph dfg = CollectDiscreteFactors(factors);
+#if GTSAM_HYBRID_TIMING
+  gttoc_(CollectDiscreteFactors);
+#endif
 
 #if GTSAM_HYBRID_TIMING
   gttic_(EliminateDiscrete);
 #endif
+#if GTSAM_HYBRID_WITH_TABLEFACTOR
   // Check if separator is empty.
   // This is the same as checking if the number of frontal variables
   // is the same as the number of variables in the DiscreteFactorGraph.
@@ -323,9 +340,6 @@ discreteElimination(const HybridGaussianFactorGraph &factors,
     // Get product factor
     DiscreteFactor::shared_ptr product = dfg.scaledProduct();
 
-#if GTSAM_HYBRID_TIMING
-    gttic_(EliminateDiscreteFormDiscreteConditional);
-#endif
     // Check type of product, and get as TableFactor for efficiency.
     // Use object instead of pointer since we need it
     // for the TableDistribution constructor.
@@ -337,19 +351,18 @@ discreteElimination(const HybridGaussianFactorGraph &factors,
     }
     auto conditional = std::make_shared<TableDistribution>(p);
 
-#if GTSAM_HYBRID_TIMING
-    gttoc_(EliminateDiscreteFormDiscreteConditional);
-#endif
-
     DiscreteFactor::shared_ptr sum = p.sum(frontalKeys);
 
     return {std::make_shared<HybridConditional>(conditional), sum};
 
   } else {
+#endif
     // Perform sum-product.
     auto result = EliminateDiscrete(dfg, frontalKeys);
     return {std::make_shared<HybridConditional>(result.first), result.second};
+#if GTSAM_HYBRID_WITH_TABLEFACTOR
   }
+#endif
 #if GTSAM_HYBRID_TIMING
   gttoc_(EliminateDiscrete);
 #endif
@@ -411,8 +424,14 @@ static std::shared_ptr<Factor> createHybridGaussianFactor(
       throw std::runtime_error("createHybridGaussianFactors has mixed NULLs");
     }
   };
+#if GTSAM_HYBRID_TIMING
+  gttic_(HybridCreateGaussianFactor);
+#endif
   DecisionTree<Key, GaussianFactorValuePair> newFactors(eliminationResults,
                                                         correct);
+#if GTSAM_HYBRID_TIMING
+  gttoc_(HybridCreateGaussianFactor);
+#endif
 
   return std::make_shared<HybridGaussianFactor>(discreteSeparator, newFactors);
 }
@@ -21,55 +21,70 @@
 #include <algorithm>
 #include <unordered_set>
 
+// #define DEBUG_SMOOTHER
 namespace gtsam {
 
 /* ************************************************************************* */
 Ordering HybridSmoother::getOrdering(const HybridGaussianFactorGraph &factors,
-                                     const KeySet &newFactorKeys) {
+                                     const KeySet &lastKeysToEliminate) {
   // Get all the discrete keys from the factors
   KeySet allDiscrete = factors.discreteKeySet();
 
   // Create KeyVector with continuous keys followed by discrete keys.
-  KeyVector newKeysDiscreteLast;
+  KeyVector lastKeys;
 
   // Insert continuous keys first.
-  for (auto &k : newFactorKeys) {
+  for (auto &k : lastKeysToEliminate) {
     if (!allDiscrete.exists(k)) {
-      newKeysDiscreteLast.push_back(k);
+      lastKeys.push_back(k);
     }
   }
 
   // Insert discrete keys at the end
   std::copy(allDiscrete.begin(), allDiscrete.end(),
-            std::back_inserter(newKeysDiscreteLast));
-
-  const VariableIndex index(factors);
+            std::back_inserter(lastKeys));
 
   // Get an ordering where the new keys are eliminated last
   Ordering ordering = Ordering::ColamdConstrainedLast(
-      index, KeyVector(newKeysDiscreteLast.begin(), newKeysDiscreteLast.end()),
-      true);
+      factors, KeyVector(lastKeys.begin(), lastKeys.end()), true);
   return ordering;
 }
 
 /* ************************************************************************* */
-void HybridSmoother::update(const HybridGaussianFactorGraph &graph,
+void HybridSmoother::update(const HybridGaussianFactorGraph &newFactors,
                             std::optional<size_t> maxNrLeaves,
                             const std::optional<Ordering> given_ordering) {
+  const KeySet originalNewFactorKeys = newFactors.keys();
+#ifdef DEBUG_SMOOTHER
+  std::cout << "hybridBayesNet_ size before: " << hybridBayesNet_.size()
+            << std::endl;
+  std::cout << "newFactors size: " << newFactors.size() << std::endl;
+#endif
   HybridGaussianFactorGraph updatedGraph;
   // Add the necessary conditionals from the previous timestep(s).
   std::tie(updatedGraph, hybridBayesNet_) =
-      addConditionals(graph, hybridBayesNet_);
+      addConditionals(newFactors, hybridBayesNet_);
+#ifdef DEBUG_SMOOTHER
+  // print size of newFactors, updatedGraph, hybridBayesNet_
+  std::cout << "updatedGraph size: " << updatedGraph.size() << std::endl;
+  std::cout << "hybridBayesNet_ size after: " << hybridBayesNet_.size()
+            << std::endl;
+  std::cout << "total size: " << updatedGraph.size() + hybridBayesNet_.size()
+            << std::endl;
+#endif
 
   Ordering ordering;
   // If no ordering provided, then we compute one
   if (!given_ordering.has_value()) {
     // Get the keys from the new factors
-    const KeySet newFactorKeys = graph.keys();
+    KeySet continuousKeysToInclude;  // Scheme 1: empty, 15sec/2000, 64sec/3000 (69s without TF)
+    // continuousKeysToInclude = newFactors.keys(); // Scheme 2: all, 8sec/2000, 160sec/3000
+    // continuousKeysToInclude = updatedGraph.keys(); // Scheme 3: all, stopped after 80sec/2000
 
     // Since updatedGraph now has all the connected conditionals,
     // we can get the correct ordering.
-    ordering = this->getOrdering(updatedGraph, newFactorKeys);
+    ordering = this->getOrdering(updatedGraph, continuousKeysToInclude);
   } else {
     ordering = *given_ordering;
   }
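Note on the ordering change above: the constrained group puts every discrete key at the back, so discrete modes are always eliminated last and the resulting Bayes net has its discrete conditionals on top. A hedged sketch of the same call, using only GTSAM API that appears in the hunk:

```cpp
#include <gtsam/hybrid/HybridGaussianFactorGraph.h>
#include <gtsam/inference/Ordering.h>

using namespace gtsam;

// Mirrors HybridSmoother::getOrdering() above: constrain all discrete keys
// to the back of the elimination order so they are eliminated last.
Ordering discreteLastOrdering(const HybridGaussianFactorGraph& factors) {
  KeySet allDiscrete = factors.discreteKeySet();
  KeyVector lastKeys(allDiscrete.begin(), allDiscrete.end());
  return Ordering::ColamdConstrainedLast(factors, lastKeys, true);
}
```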
@@ -77,25 +92,64 @@ void HybridSmoother::update(const HybridGaussianFactorGraph &graph,
   // Eliminate.
   HybridBayesNet bayesNetFragment = *updatedGraph.eliminateSequential(ordering);
 
+#ifdef DEBUG_SMOOTHER_DETAIL
+  for (auto conditional : bayesNetFragment) {
+    auto e = std::dynamic_pointer_cast<HybridConditional::BaseConditional>(
+        conditional);
+    GTSAM_PRINT(*e);
+  }
+#endif
+
+#ifdef DEBUG_SMOOTHER
+  // Print discrete keys in the bayesNetFragment:
+  std::cout << "Discrete keys in bayesNetFragment: ";
+  for (auto &key : HybridFactorGraph(bayesNetFragment).discreteKeySet()) {
+    std::cout << DefaultKeyFormatter(key) << " ";
+  }
+#endif
+
   /// Prune
   if (maxNrLeaves) {
     // `pruneBayesNet` sets the leaves with 0 in discreteFactor to nullptr in
     // all the conditionals with the same keys in bayesNetFragment.
-    bayesNetFragment = bayesNetFragment.prune(*maxNrLeaves, marginalThreshold_);
+    DiscreteValues newlyFixedValues;
+    bayesNetFragment = bayesNetFragment.prune(*maxNrLeaves, marginalThreshold_,
+                                              &newlyFixedValues);
+    fixedValues_.insert(newlyFixedValues);
   }
 
+#ifdef DEBUG_SMOOTHER
+  // Print discrete keys in the bayesNetFragment:
+  std::cout << "\nAfter pruning: ";
+  for (auto &key : HybridFactorGraph(bayesNetFragment).discreteKeySet()) {
+    std::cout << DefaultKeyFormatter(key) << " ";
+  }
+  std::cout << std::endl << std::endl;
+#endif
+
+#ifdef DEBUG_SMOOTHER_DETAIL
+  for (auto conditional : bayesNetFragment) {
+    auto c = std::dynamic_pointer_cast<HybridConditional::BaseConditional>(
+        conditional);
+    GTSAM_PRINT(*c);
+  }
+#endif
+
   // Add the partial bayes net to the posterior bayes net.
   hybridBayesNet_.add(bayesNetFragment);
 }
 
 /* ************************************************************************* */
 std::pair<HybridGaussianFactorGraph, HybridBayesNet>
-HybridSmoother::addConditionals(const HybridGaussianFactorGraph &originalGraph,
+HybridSmoother::addConditionals(const HybridGaussianFactorGraph &newFactors,
                                 const HybridBayesNet &hybridBayesNet) const {
-  HybridGaussianFactorGraph graph(originalGraph);
+  HybridGaussianFactorGraph graph(newFactors);
   HybridBayesNet updatedHybridBayesNet(hybridBayesNet);
 
-  KeySet factorKeys = graph.keys();
+  KeySet involvedKeys = newFactors.keys();
+  auto involved = [&involvedKeys](const Key &key) {
+    return involvedKeys.find(key) != involvedKeys.end();
+  };
 
   // If hybridBayesNet is not empty,
   // it means we have conditionals to add to the factor graph.
@@ -108,21 +162,36 @@ HybridSmoother::addConditionals(const HybridGaussianFactorGraph &originalGraph,
 
     // NOTE(Varun) Using a for-range loop doesn't work since some of the
     // conditionals are invalid pointers
+
+    // First get all the keys involved.
+    // We do this by iterating over all conditionals, and checking if their
+    // frontals are involved in the factor graph. If yes, then also make the
+    // parent keys involved in the factor graph.
     for (size_t i = 0; i < hybridBayesNet.size(); i++) {
       auto conditional = hybridBayesNet.at(i);
 
       for (auto &key : conditional->frontals()) {
-        if (std::find(factorKeys.begin(), factorKeys.end(), key) !=
-            factorKeys.end()) {
-          newConditionals.push_back(conditional);
-
-          // Add the conditional parents to factorKeys
+        if (involved(key)) {
+          // Add the conditional parents to involvedKeys
           // so we add those conditionals too.
-          // NOTE: This assumes we have a structure where
-          // variables depend on those in the future.
           for (auto &&parentKey : conditional->parents()) {
-            factorKeys.insert(parentKey);
+            involvedKeys.insert(parentKey);
           }
+          // Break so we don't add parents twice.
+          break;
+        }
+      }
+    }
+#ifdef DEBUG_SMOOTHER
+    PrintKeySet(involvedKeys);
+#endif
+
+    for (size_t i = 0; i < hybridBayesNet.size(); i++) {
+      auto conditional = hybridBayesNet.at(i);
+
+      for (auto &key : conditional->frontals()) {
+        if (involved(key)) {
+          newConditionals.push_back(conditional);
 
           // Remove the conditional from the updated Bayes net
           auto it = find(updatedHybridBayesNet.begin(),
@@ -151,4 +220,21 @@ const HybridBayesNet &HybridSmoother::hybridBayesNet() const {
   return hybridBayesNet_;
 }
 
+/* ************************************************************************* */
+HybridValues HybridSmoother::optimize() const {
+  // Solve for the MPE
+  DiscreteValues mpe = hybridBayesNet_.mpe();
+
+  // Add fixed values to the MPE.
+  mpe.insert(fixedValues_);
+
+  // Given the MPE, compute the optimal continuous values.
+  GaussianBayesNet gbn = hybridBayesNet_.choose(mpe);
+  const VectorValues continuous = gbn.optimize();
+  if (std::find(gbn.begin(), gbn.end(), nullptr) != gbn.end()) {
+    throw std::runtime_error("At least one nullptr factor in hybridBayesNet_");
+  }
+  return HybridValues(continuous, mpe);
+}
+
 }  // namespace gtsam
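Usage of the new `HybridSmoother::optimize()`, as a sketch only (it assumes `update()` has been called at least once, following the Experiment::run() flow in the example hunk above):

```cpp
#include <gtsam/hybrid/HybridSmoother.h>

using namespace gtsam;

void solveAfterUpdates(const HybridSmoother& smoother) {
  // optimize() first solves for the most probable discrete assignment (MPE),
  // re-inserts any modes fixed during pruning, then back-substitutes the
  // Gaussian Bayes net selected by that assignment.
  const HybridValues solution = smoother.optimize();
  const DiscreteValues& mpe = solution.discrete();         // most probable modes
  const VectorValues& continuous = solution.continuous();  // conditioned on MPE
  (void)mpe;
  (void)continuous;
}
```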
@@ -106,6 +106,9 @@ class GTSAM_EXPORT HybridSmoother {
 
   /// Return the Bayes Net posterior.
   const HybridBayesNet& hybridBayesNet() const;
+
+  /// Optimize the hybrid Bayes Net, taking into account fixed values.
+  HybridValues optimize() const;
 };
 
 }  // namespace gtsam
@@ -28,6 +28,28 @@ using symbol_shorthand::M;
 using symbol_shorthand::X;
 using symbol_shorthand::Z;
 
+/* ****************************************************************************/
+// Test the HybridConditional constructor.
+TEST(HybridConditional, Constructor) {
+  // Create a HybridGaussianConditional.
+  const KeyVector continuousKeys{X(0), X(1)};
+  const DiscreteKeys discreteKeys{{M(0), 2}};
+  const size_t nFrontals = 1;
+  const HybridConditional hc(continuousKeys, discreteKeys, nFrontals);
+
+  // Check Frontals:
+  EXPECT_LONGS_EQUAL(1, hc.nrFrontals());
+  const auto frontals = hc.frontals();
+  EXPECT_LONGS_EQUAL(1, frontals.size());
+  EXPECT_LONGS_EQUAL(X(0), *frontals.begin());
+
+  // Check parents:
+  const auto parents = hc.parents();
+  EXPECT_LONGS_EQUAL(2, parents.size());
+  EXPECT_LONGS_EQUAL(X(1), *parents.begin());
+  EXPECT_LONGS_EQUAL(M(0), *(parents.begin() + 1));
+}
+
 /* ****************************************************************************/
 // Check invariants for all conditionals in a tiny Bayes net.
 TEST(HybridConditional, Invariants) {
@@ -43,6 +65,12 @@ TEST(HybridConditional, Invariants) {
   auto hc0 = bn.at(0);
   CHECK(hc0->isHybrid());
 
+  // Check parents:
+  const auto parents = hc0->parents();
+  EXPECT_LONGS_EQUAL(2, parents.size());
+  EXPECT_LONGS_EQUAL(X(0), *parents.begin());
+  EXPECT_LONGS_EQUAL(M(0), *(parents.begin() + 1));
+
   // Check invariants as a HybridGaussianConditional.
   const auto conditional = hc0->asHybrid();
   EXPECT(HybridGaussianConditional::CheckInvariants(*conditional, values));
@@ -87,6 +87,16 @@ class GTSAM_UNSTABLE_EXPORT Constraint : public DiscreteFactor {
         this->operator*(df->toDecisionTreeFactor()));
   }
 
+  /// Multiply by a scalar
+  virtual DiscreteFactor::shared_ptr operator*(double s) const override {
+    return this->toDecisionTreeFactor() * s;
+  }
+
+  /// Multiply by a DecisionTreeFactor and return a DecisionTreeFactor
+  DecisionTreeFactor operator*(const DecisionTreeFactor& dtf) const override {
+    return this->toDecisionTreeFactor() * dtf;
+  }
+
   /// divide by DiscreteFactor::shared_ptr f (safely)
   DiscreteFactor::shared_ptr operator/(
       const DiscreteFactor::shared_ptr& df) const override {
@@ -104,6 +114,9 @@ class GTSAM_UNSTABLE_EXPORT Constraint : public DiscreteFactor {
     return toDecisionTreeFactor().sum(keys);
   }
 
+  /// Find the max value
+  double max() const override { return toDecisionTreeFactor().max(); }
+
   DiscreteFactor::shared_ptr max(size_t nrFrontals) const override {
     return toDecisionTreeFactor().max(nrFrontals);
   }