Merge pull request #1989 from borglab/city10000

release/4.3a0
Varun Agrawal 2025-01-26 12:15:18 -05:00 committed by GitHub
commit 131f51a30c
8 changed files with 31153 additions and 6 deletions

File diff suppressed because it is too large

File diff suppressed because it is too large

examples/Hybrid_City10000.cpp

@@ -0,0 +1,219 @@
/* ----------------------------------------------------------------------------
* GTSAM Copyright 2010-2020, Georgia Tech Research Corporation,
* Atlanta, Georgia 30332-0415
* All Rights Reserved
* Authors: Frank Dellaert, et al. (see THANKS for the full author list)
* See LICENSE for the license information
* -------------------------------------------------------------------------- */
/**
* @file Hybrid_City10000.cpp
* @brief Example of using hybrid estimation
* with multiple odometry measurements.
* @author Varun Agrawal
* @date January 22, 2025
*/
#include <gtsam/geometry/Pose2.h>
#include <gtsam/hybrid/HybridNonlinearFactor.h>
#include <gtsam/hybrid/HybridNonlinearFactorGraph.h>
#include <gtsam/hybrid/HybridSmoother.h>
#include <gtsam/hybrid/HybridValues.h>
#include <gtsam/inference/Symbol.h>
#include <gtsam/nonlinear/Values.h>
#include <gtsam/slam/BetweenFactor.h>
#include <gtsam/slam/PriorFactor.h>
#include <gtsam/slam/dataset.h>
#include <gtsam/base/timing.h>  // tictoc_finishedIteration_ / tictoc_print_
#include <time.h>
#include <boost/algorithm/string/classification.hpp>
#include <boost/algorithm/string/split.hpp>
#include <fstream>
#include <string>
#include <vector>
using namespace std;
using namespace gtsam;
using namespace boost::algorithm;
using symbol_shorthand::M;
using symbol_shorthand::X;
// Testing params
const size_t max_loop_count = 2000;  // try 200, 2000, or 8000
noiseModel::Diagonal::shared_ptr prior_noise_model =
noiseModel::Diagonal::Sigmas(
(Vector(3) << 0.0001, 0.0001, 0.0001).finished());
noiseModel::Diagonal::shared_ptr pose_noise_model =
noiseModel::Diagonal::Sigmas(
(Vector(3) << 1.0 / 30.0, 1.0 / 30.0, 1.0 / 100.0).finished());
/**
* @brief Write the results of optimization to filename.
*
* @param results The Values object with the final results.
* @param num_poses The number of poses to write to the file.
* @param filename The file name to save the results to.
*/
void write_results(const Values& results, size_t num_poses,
const std::string& filename = "ISAM2_city10000.txt") {
ofstream outfile;
outfile.open(filename);
for (size_t i = 0; i < num_poses; ++i) {
Pose2 out_pose = results.at<Pose2>(X(i));
outfile << out_pose.x() << " " << out_pose.y() << " " << out_pose.theta()
<< std::endl;
}
outfile.close();
std::cout << "output written to " << filename << std::endl;
}
void SmootherUpdate(HybridSmoother& smoother, HybridNonlinearFactorGraph& graph,
const Values& initial, size_t maxNrHypotheses,
Values* results) {
HybridGaussianFactorGraph linearized = *graph.linearize(initial);
// Update the smoother, pruning the posterior to at most maxNrHypotheses
// discrete hypotheses.
smoother.update(linearized, maxNrHypotheses);
graph.resize(0);
HybridValues delta = smoother.hybridBayesNet().optimize();
results->insert_or_assign(initial.retract(delta.continuous()));
}
/* ************************************************************************* */
int main(int argc, char* argv[]) {
ifstream in(findExampleDataFile("T1_city10000_04.txt"));
// ifstream in("../data/mh_T1_city10000_04.txt"); //Type #1 only
// ifstream in("../data/mh_T3b_city10000_10.txt"); //Type #3 only
// ifstream in("../data/mh_T1_T3_city10000_04.txt"); //Type #1 + Type #3
// ifstream in("../data/mh_All_city10000_groundtruth.txt");
size_t discrete_count = 0, index = 0;
std::list<double> time_list;
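// Hybrid smoother; the 0.99 argument is presumably the marginal (dead-mode)
// threshold used when pruning discrete modes after each update.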
HybridSmoother smoother(0.99);
HybridNonlinearFactorGraph graph;
Values init_values;
Values results;
size_t maxNrHypotheses = 3;
double x = 0.0;
double y = 0.0;
double rad = 0.0;
Pose2 prior_pose(x, y, rad);
init_values.insert(X(0), prior_pose);
graph.push_back(PriorFactor<Pose2>(X(0), prior_pose, prior_noise_model));
SmootherUpdate(smoother, graph, init_values, maxNrHypotheses, &results);
size_t key_s, key_t;
clock_t start_time = clock();
std::string str;
while (getline(in, str) && index < max_loop_count) {
vector<string> parts;
split(parts, str, is_any_of(" "));
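// Assumed layout of each dataset line, inferred from the indices parsed below:
//   <tag> <key_s> <tag> <key_t> <tag> <num_measurements> x1 y1 theta1 [x2 y2 theta2 ...]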
key_s = stoi(parts[1]);
key_t = stoi(parts[3]);
int num_measurements = stoi(parts[5]);
vector<Pose2> pose_array(num_measurements);
for (int i = 0; i < num_measurements; ++i) {
x = stod(parts[6 + 3 * i]);
y = stod(parts[7 + 3 * i]);
rad = stod(parts[8 + 3 * i]);
pose_array[i] = Pose2(x, y, rad);
}
// Take the first one as the initial estimate
Pose2 odom_pose = pose_array[0];
if (key_s == key_t - 1) {  // odometry: initialize the new pose X(key_t)
init_values.insert(X(key_t), init_values.at<Pose2>(X(key_s)) * odom_pose);
}  // otherwise this is a loop closure between existing poses
// Flag if we should run smoother update
bool smoother_update = false;
if (num_measurements == 2) {
// Add hybrid factor which considers both measurements
DiscreteKey m(M(discrete_count), num_measurements);
discrete_count++;
graph.push_back(DecisionTreeFactor(m, "0.6 0.4"));
auto f0 = std::make_shared<BetweenFactor<Pose2>>(
X(key_s), X(key_t), pose_array[0], pose_noise_model);
auto f1 = std::make_shared<BetweenFactor<Pose2>>(
X(key_s), X(key_t), pose_array[1], pose_noise_model);
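// The discrete mode m selects which of the two relative-pose measurements
// (f0 or f1) explains the odometry between X(key_s) and X(key_t).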
HybridNonlinearFactor mixtureFactor(m, {f0, f1});
graph.push_back(mixtureFactor);
smoother_update = true;
} else {
graph.add(BetweenFactor<Pose2>(X(key_s), X(key_t), odom_pose,
pose_noise_model));
}
if (smoother_update) {
SmootherUpdate(smoother, graph, init_values, maxNrHypotheses, &results);
}
// Print the loop index and the accumulated time (in seconds) so far
if (index % 100 == 0 && !time_list.empty()) {
std::cout << "index: " << index << std::endl;
std::cout << "acc_time: " << time_list.back() / CLOCKS_PER_SEC << std::endl;
tictoc_finishedIteration_();
tictoc_print_();
}
if (key_s == key_t - 1) {
clock_t cur_time = clock();
time_list.push_back(cur_time - start_time);
}
index += 1;
}
SmootherUpdate(smoother, graph, init_values, maxNrHypotheses, &results);
clock_t end_time = clock();
clock_t total_time = end_time - start_time;
cout << "total_time: " << static_cast<double>(total_time) / CLOCKS_PER_SEC << " seconds" << endl;
/// Write results to file
write_results(results, (key_t + 1), "HybridISAM_city10000.txt");
ofstream outfile_time;
std::string time_file_name = "HybridISAM_city10000_time.txt";
outfile_time.open(time_file_name);
for (auto acc_time : time_list) {
outfile_time << acc_time << std::endl;
}
outfile_time.close();
cout << "output written to " << time_file_name << endl;
return 0;
}

examples/ISAM2_City10000.cpp

@@ -0,0 +1,208 @@
/* ----------------------------------------------------------------------------
* GTSAM Copyright 2010-2020, Georgia Tech Research Corporation,
* Atlanta, Georgia 30332-0415
* All Rights Reserved
* Authors: Frank Dellaert, et al. (see THANKS for the full author list)
* See LICENSE for the license information
* -------------------------------------------------------------------------- */
/**
* @file ISAM2_City10000.cpp
* @brief Example of using ISAM2 estimation
* with multiple odometry measurements.
* @author Varun Agrawal
* @date January 22, 2025
*/
#include <gtsam/geometry/Pose2.h>
#include <gtsam/inference/Symbol.h>
#include <gtsam/nonlinear/ISAM2.h>
#include <gtsam/nonlinear/NonlinearFactorGraph.h>
#include <gtsam/nonlinear/Values.h>
#include <gtsam/slam/BetweenFactor.h>
#include <gtsam/slam/dataset.h>
#include <time.h>
#include <boost/algorithm/string/classification.hpp>
#include <boost/algorithm/string/split.hpp>
#include <fstream>
#include <string>
#include <vector>
using namespace std;
using namespace gtsam;
using namespace boost::algorithm;
using symbol_shorthand::X;
// Testing params
const size_t max_loop_count = 2000; // 200 //2000 //8000
const bool is_with_ambiguity = false; // run original iSAM2 without ambiguities
// const bool is_with_ambiguity = true; // run original iSAM2 with ambiguities
noiseModel::Diagonal::shared_ptr prior_noise_model =
noiseModel::Diagonal::Sigmas(
(Vector(3) << 0.0001, 0.0001, 0.0001).finished());
noiseModel::Diagonal::shared_ptr pose_noise_model =
noiseModel::Diagonal::Sigmas(
(Vector(3) << 1.0 / 30.0, 1.0 / 30.0, 1.0 / 100.0).finished());
/**
* @brief Write the results of optimization to filename.
*
* @param results The Values object with the final results.
* @param num_poses The number of poses to write to the file.
* @param filename The file name to save the results to.
*/
void write_results(const Values& results, size_t num_poses,
const std::string& filename = "ISAM2_city10000.txt") {
ofstream outfile;
outfile.open(filename);
for (size_t i = 0; i < num_poses; ++i) {
Pose2 out_pose = results.at<Pose2>(X(i));
outfile << out_pose.x() << " " << out_pose.y() << " " << out_pose.theta()
<< std::endl;
}
outfile.close();
std::cout << "output written to " << filename << std::endl;
}
/* ************************************************************************* */
int main(int argc, char* argv[]) {
ifstream in(findExampleDataFile("T1_city10000_04.txt"));
// ifstream in("../data/mh_T1_city10000_04.txt"); //Type #1 only
// ifstream in("../data/mh_T3b_city10000_10.txt"); //Type #3 only
// ifstream in("../data/mh_T1_T3_city10000_04.txt"); //Type #1 + Type #3
// ifstream in("../data/mh_All_city10000_groundtruth.txt");
size_t pose_count = 0;
size_t index = 0;
std::list<double> time_list;
ISAM2Params parameters;
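// The 0.0 below is the Gauss-Newton wildfire threshold, so deltas are
// recomputed for all variables at every update (rather than only where the
// change exceeds a threshold).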
parameters.optimizationParams = gtsam::ISAM2GaussNewtonParams(0.0);
parameters.relinearizeThreshold = 0.01;
parameters.relinearizeSkip = 1;
ISAM2* isam2 = new ISAM2(parameters);
NonlinearFactorGraph* graph = new NonlinearFactorGraph();
Values init_values;
Values results;
double x = 0.0;
double y = 0.0;
double rad = 0.0;
Pose2 prior_pose(x, y, rad);
init_values.insert(X(0), prior_pose);
pose_count++;
graph->addPrior<Pose2>(X(0), prior_pose, prior_noise_model);
isam2->update(*graph, init_values);
graph->resize(0);
init_values.clear();
results = isam2->calculateBestEstimate();
size_t key_s = 0;
size_t key_t = 0;
clock_t start_time = clock();
string str;
while (getline(in, str) && index < max_loop_count) {
// cout << str << endl;
vector<string> parts;
split(parts, str, is_any_of(" "));
key_s = stoi(parts[1]);
key_t = stoi(parts[3]);
int num_measurements = stoi(parts[5]);
vector<Pose2> pose_array(num_measurements);
for (int i = 0; i < num_measurements; ++i) {
x = stod(parts[6 + 3 * i]);
y = stod(parts[7 + 3 * i]);
rad = stod(parts[8 + 3 * i]);
pose_array[i] = Pose2(x, y, rad);
}
Pose2 odom_pose;
if (is_with_ambiguity) {
// Intentionally pick a (possibly wrong) measurement to simulate ambiguity
int id = index % num_measurements;
odom_pose = Pose2(pose_array[id]);
} else {
odom_pose = pose_array[0];
}
if (key_s == key_t - 1) { // new X(key)
init_values.insert(X(key_t), results.at<Pose2>(X(key_s)) * odom_pose);
pose_count++;
} else { // loop
index++;
}
graph->add(
BetweenFactor<Pose2>(X(key_s), X(key_t), odom_pose, pose_noise_model));
isam2->update(*graph, init_values);
graph->resize(0);
init_values.clear();
results = isam2->calculateBestEstimate();
// Print loop index and time taken in processor clock ticks
if (index % 50 == 0 && key_s != key_t - 1) {
std::cout << "index: " << index << std::endl;
std::cout << "acc_time: " << time_list.back() << std::endl;
}
if (key_s == key_t - 1) {
clock_t cur_time = clock();
time_list.push_back(cur_time - start_time);
}
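// Periodically dump intermediate estimates; this assumes a step_files/
// directory already exists in the working directory.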
if (time_list.size() % 100 == 0 && (key_s == key_t - 1)) {
string step_file_idx = std::to_string(100000 + time_list.size());
ofstream step_outfile;
string step_file_name = "step_files/ISAM2_city10000_S" + step_file_idx;
step_outfile.open(step_file_name + ".txt");
for (size_t i = 0; i < (key_t + 1); ++i) {
Pose2 out_pose = results.at<Pose2>(X(i));
step_outfile << out_pose.x() << " " << out_pose.y() << " "
<< out_pose.theta() << endl;
}
step_outfile.close();
}
}
clock_t end_time = clock();
clock_t total_time = end_time - start_time;
cout << "total_time: " << total_time << " clock ticks" << endl;
/// Write results to file
write_results(results, (key_t + 1));
ofstream outfile_time;
std::string time_file_name = "ISAM2_city10000_time.txt";
outfile_time.open(time_file_name);
for (auto acc_time : time_list) {
outfile_time << acc_time << std::endl;
}
outfile_time.close();
cout << "output written to " << time_file_name << endl;
return 0;
}

examples/plot_city10000.m

@@ -0,0 +1,28 @@
clear;
gt = dlmread('Data/ISAM2_GT_city10000.txt');
eh_poses = dlmread('../build/examples/ISAM2_city10000.txt');
h_poses = dlmread('../build/examples/HybridISAM_city10000.txt');
% Plot the same number of GT poses as estimated ones
% gt = gt(1:size(eh_poses, 1), :);
gt = gt(1:size(h_poses, 1), :);
eh_poses = eh_poses(1:size(h_poses, 1), :);
figure(1)
hold on;
axis equal;
axis([-65 65 -75 60])
plot(gt(:,1), gt(:,2), '--', 'LineWidth', 4, 'color', [0.1 0.7 0.1 0.5]);
% hold off;
% figure(2)
% hold on;
% axis equal;
% axis([-65 65 -75 60])
plot(eh_poses(:,1), eh_poses(:,2), '-', 'LineWidth', 2, 'color', [0.9 0.1 0. 0.4]);
plot(h_poses(:,1), h_poses(:,2), '-', 'LineWidth', 2, 'color', [0.1 0.1 0.9 0.4]);
hold off;

gtsam/discrete/DecisionTreeFactor.cpp

@@ -247,7 +247,7 @@ namespace gtsam {
/* ************************************************************************ */
std::vector<double> DecisionTreeFactor::probabilities() const {
// Set of all keys
- std::set<Key> allKeys(keys().begin(), keys().end());
+ KeySet allKeys(keys().begin(), keys().end());
std::vector<double> probs;
@@ -260,7 +260,7 @@
*/
auto op = [&](const Assignment<Key>& a, double p) {
// Get all the keys in the current assignment
- std::set<Key> assignment_keys;
+ KeySet assignment_keys;
for (auto&& [k, _] : a) {
assignment_keys.insert(k);
}
@@ -458,13 +458,13 @@ namespace gtsam {
auto op = [&](const Assignment<Key>& a, double p) {
// Get all the keys in the current assignment
- std::set<Key> assignment_keys;
+ KeySet assignment_keys;
for (auto&& [k, _] : a) {
assignment_keys.insert(k);
}
// Find the keys missing in the assignment
- std::vector<Key> diff;
+ KeyVector diff;
std::set_difference(allKeys.begin(), allKeys.end(),
assignment_keys.begin(), assignment_keys.end(),
std::back_inserter(diff));

gtsam/hybrid/HybridBayesNet.cpp

@@ -66,7 +66,9 @@ HybridBayesNet HybridBayesNet::prune(
pruned.prune(maxNrLeaves);
DiscreteValues deadModesValues;
- if (deadModeThreshold.has_value()) {
+ // If we have a dead mode threshold and discrete variables left after pruning,
+ // then we run dead mode removal.
+ if (deadModeThreshold.has_value() && pruned.keys().size() > 0) {
DiscreteMarginals marginals(DiscreteFactorGraph{pruned});
for (auto dkey : pruned.discreteKeys()) {
Vector probabilities = marginals.marginalProbabilities(dkey);

gtsam/hybrid/HybridGaussianFactorGraph.cpp

@@ -327,6 +327,8 @@ discreteElimination(const HybridGaussianFactorGraph &factors,
gttic_(EliminateDiscreteFormDiscreteConditional);
#endif
// Check type of product, and get as TableFactor for efficiency.
+ // Use object instead of pointer since we need it
+ // for the TableDistribution constructor.
TableFactor p;
if (auto tf = std::dynamic_pointer_cast<TableFactor>(product)) {
p = *tf;
@@ -334,11 +336,12 @@ discreteElimination(const HybridGaussianFactorGraph &factors,
p = TableFactor(product->toDecisionTreeFactor());
}
auto conditional = std::make_shared<TableDistribution>(p);
#if GTSAM_HYBRID_TIMING
gttoc_(EliminateDiscreteFormDiscreteConditional);
#endif
- DiscreteFactor::shared_ptr sum = product->sum(frontalKeys);
+ DiscreteFactor::shared_ptr sum = p.sum(frontalKeys);
return {std::make_shared<HybridConditional>(conditional), sum};