commit 0d4b0d76d9

@@ -39,181 +39,276 @@ using namespace std;
 using namespace gtsam;
 using namespace boost::algorithm;
 
 using symbol_shorthand::L;
 using symbol_shorthand::M;
 using symbol_shorthand::X;
 
 // Testing params
-const size_t max_loop_count = 2000;  // 2000; // 200 //2000 //8000
+const size_t kMaxLoopCount = 2000;  // Example default value
+const size_t kMaxNrHypotheses = 10;
 
-noiseModel::Diagonal::shared_ptr prior_noise_model =
-    noiseModel::Diagonal::Sigmas(
-        (Vector(3) << 0.0001, 0.0001, 0.0001).finished());
+auto kOpenLoopModel = noiseModel::Diagonal::Sigmas(Vector3::Ones() * 10);
 
-noiseModel::Diagonal::shared_ptr pose_noise_model =
-    noiseModel::Diagonal::Sigmas(
-        (Vector(3) << 1.0 / 30.0, 1.0 / 30.0, 1.0 / 100.0).finished());
+auto kPriorNoiseModel = noiseModel::Diagonal::Sigmas(
+    (Vector(3) << 0.0001, 0.0001, 0.0001).finished());
 
-/**
- * @brief Write the results of optimization to filename.
- *
- * @param results The Values object with the final results.
- * @param num_poses The number of poses to write to the file.
- * @param filename The file name to save the results to.
- */
-void write_results(const Values& results, size_t num_poses,
-                   const std::string& filename = "ISAM2_city10000.txt") {
-  ofstream outfile;
-  outfile.open(filename);
+auto kPoseNoiseModel = noiseModel::Diagonal::Sigmas(
+    (Vector(3) << 1.0 / 30.0, 1.0 / 30.0, 1.0 / 100.0).finished());
 
-  for (size_t i = 0; i < num_poses; ++i) {
-    Pose2 out_pose = results.at<Pose2>(X(i));
+// Experiment Class
+class Experiment {
+ private:
+  std::string filename_;
+  HybridSmoother smoother_;
+  HybridNonlinearFactorGraph graph_;
+  Values initial_;
+  Values result_;
 
-    outfile << out_pose.x() << " " << out_pose.y() << " " << out_pose.theta()
-            << std::endl;
+  /**
+   * @brief Write the result of optimization to file.
+   *
+   * @param result The Values object with the final result.
+   * @param num_poses The number of poses to write to the file.
+   * @param filename The file name to save the result to.
+   */
+  void writeResult(const Values& result, size_t numPoses,
+                   const std::string& filename = "Hybrid_city10000.txt") {
+    ofstream outfile;
+    outfile.open(filename);
+
+    for (size_t i = 0; i < numPoses; ++i) {
+      Pose2 outPose = result.at<Pose2>(X(i));
+      outfile << outPose.x() << " " << outPose.y() << " " << outPose.theta()
+              << std::endl;
+    }
+    outfile.close();
+    std::cout << "Output written to " << filename << std::endl;
+  }
-  }
-  outfile.close();
-  std::cout << "output written to " << filename << std::endl;
-}
 
-void SmootherUpdate(HybridSmoother& smoother, HybridNonlinearFactorGraph& graph,
-                    const Values& initial, size_t maxNrHypotheses,
-                    Values* results) {
-  HybridGaussianFactorGraph linearized = *graph.linearize(initial);
-  // std::cout << "index: " << index << std::endl;
-  smoother.update(linearized, maxNrHypotheses);
-  graph.resize(0);
-  HybridValues delta = smoother.hybridBayesNet().optimize();
-  results->insert_or_assign(initial.retract(delta.continuous()));
-}
+  /**
+   * @brief Create a hybrid loop closure factor where
+   * 0 - loose noise model and 1 - loop noise model.
+   */
+  HybridNonlinearFactor hybridLoopClosureFactor(size_t loopCounter, size_t keyS,
+                                                size_t keyT,
+                                                const Pose2& measurement) {
+    DiscreteKey l(L(loopCounter), 2);
+
+    auto f0 = std::make_shared<BetweenFactor<Pose2>>(
+        X(keyS), X(keyT), measurement, kOpenLoopModel);
+    auto f1 = std::make_shared<BetweenFactor<Pose2>>(
+        X(keyS), X(keyT), measurement, kPoseNoiseModel);
+
+    std::vector<NonlinearFactorValuePair> factors{
+        {f0, kOpenLoopModel->negLogConstant()},
+        {f1, kPoseNoiseModel->negLogConstant()}};
+    HybridNonlinearFactor mixtureFactor(l, factors);
+    return mixtureFactor;
+  }
 
+  /// @brief Create hybrid odometry factor with discrete measurement choices.
+  HybridNonlinearFactor hybridOdometryFactor(
+      size_t numMeasurements, size_t keyS, size_t keyT, const DiscreteKey& m,
+      const std::vector<Pose2>& poseArray,
+      const SharedNoiseModel& poseNoiseModel) {
+    auto f0 = std::make_shared<BetweenFactor<Pose2>>(
+        X(keyS), X(keyT), poseArray[0], poseNoiseModel);
+    auto f1 = std::make_shared<BetweenFactor<Pose2>>(
+        X(keyS), X(keyT), poseArray[1], poseNoiseModel);
+
+    std::vector<NonlinearFactorValuePair> factors{{f0, 0.0}, {f1, 0.0}};
+    HybridNonlinearFactor mixtureFactor(m, factors);
+    return mixtureFactor;
+  }
+
+  /// @brief Perform smoother update and optimize the graph.
+  void smootherUpdate(HybridSmoother& smoother,
+                      HybridNonlinearFactorGraph& graph, const Values& initial,
+                      size_t kMaxNrHypotheses, Values* result) {
+    HybridGaussianFactorGraph linearized = *graph.linearize(initial);
+    smoother.update(linearized, kMaxNrHypotheses);
+    // throw if x0 not in hybridBayesNet_:
+    const KeySet& keys = smoother.hybridBayesNet().keys();
+    if (keys.find(X(0)) == keys.end()) {
+      throw std::runtime_error("x0 not in hybridBayesNet_");
+    }
+    graph.resize(0);
+    // HybridValues delta = smoother.hybridBayesNet().optimize();
+    // result->insert_or_assign(initial.retract(delta.continuous()));
+  }
 
+ public:
+  /// Construct with filename of experiment to run
+  explicit Experiment(const std::string& filename)
+      : filename_(filename), smoother_(0.99) {}
+
+  /// @brief Run the main experiment with a given maxLoopCount.
+  void run(size_t maxLoopCount) {
+    // Prepare reading
+    ifstream in(filename_);
+    if (!in.is_open()) {
+      cerr << "Failed to open file: " << filename_ << endl;
+      return;
+    }
+
+    // Initialize local variables
+    size_t discreteCount = 0, index = 0;
+    size_t loopCount = 0;
+
+    std::list<double> timeList;
+
+    // Set up initial prior
+    double x = 0.0;
+    double y = 0.0;
+    double rad = 0.0;
+
+    Pose2 priorPose(x, y, rad);
+    initial_.insert(X(0), priorPose);
+    graph_.push_back(PriorFactor<Pose2>(X(0), priorPose, kPriorNoiseModel));
+
+    // Initial update
+    clock_t beforeUpdate = clock();
+    smootherUpdate(smoother_, graph_, initial_, kMaxNrHypotheses, &result_);
+    clock_t afterUpdate = clock();
+    std::vector<std::pair<size_t, double>> smootherUpdateTimes;
+    smootherUpdateTimes.push_back({index, afterUpdate - beforeUpdate});
+
+    // Start main loop
+    size_t keyS = 0, keyT = 0;
+    clock_t startTime = clock();
+    std::string line;
+    while (getline(in, line) && index < maxLoopCount) {
+      std::vector<std::string> parts;
+      split(parts, line, is_any_of(" "));
+
+      keyS = stoi(parts[1]);
+      keyT = stoi(parts[3]);
+
+      int numMeasurements = stoi(parts[5]);
+      std::vector<Pose2> poseArray(numMeasurements);
+      for (int i = 0; i < numMeasurements; ++i) {
+        x = stod(parts[6 + 3 * i]);
+        y = stod(parts[7 + 3 * i]);
+        rad = stod(parts[8 + 3 * i]);
+        poseArray[i] = Pose2(x, y, rad);
+      }
+
+      // Flag to decide whether to run smoother update
+      bool doSmootherUpdate = false;
+
+      // Take the first one as the initial estimate
+      Pose2 odomPose = poseArray[0];
+      if (keyS == keyT - 1) {
+        // Odometry factor
+        if (numMeasurements > 1) {
+          // Add hybrid factor
+          DiscreteKey m(M(discreteCount), numMeasurements);
+          HybridNonlinearFactor mixtureFactor = hybridOdometryFactor(
+              numMeasurements, keyS, keyT, m, poseArray, kPoseNoiseModel);
+          graph_.push_back(mixtureFactor);
+          discreteCount++;
+          doSmootherUpdate = true;
+          std::cout << "mixtureFactor: " << keyS << " " << keyT << std::endl;
+        } else {
+          graph_.add(BetweenFactor<Pose2>(X(keyS), X(keyT), odomPose,
+                                          kPoseNoiseModel));
+        }
+        // Insert next pose initial guess
+        initial_.insert(X(keyT), initial_.at<Pose2>(X(keyS)) * odomPose);
+      } else {
+        // Loop closure
+        HybridNonlinearFactor loopFactor =
+            hybridLoopClosureFactor(loopCount, keyS, keyT, odomPose);
+        // print loop closure event keys:
+        std::cout << "Loop closure: " << keyS << " " << keyT << std::endl;
+        graph_.add(loopFactor);
+        doSmootherUpdate = true;
+        loopCount++;
+      }
+
+      if (doSmootherUpdate) {
+        gttic_(SmootherUpdate);
+        beforeUpdate = clock();
+        smootherUpdate(smoother_, graph_, initial_, kMaxNrHypotheses,
+                       &result_);
+        afterUpdate = clock();
+        smootherUpdateTimes.push_back({index, afterUpdate - beforeUpdate});
+        gttoc_(SmootherUpdate);
+        doSmootherUpdate = false;
+      }
+
+      // Record timing for odometry edges only
+      if (keyS == keyT - 1) {
+        clock_t curTime = clock();
+        timeList.push_back(curTime - startTime);
+      }
+
+      // Print some status every 100 steps
+      if (index % 100 == 0) {
+        std::cout << "Index: " << index << std::endl;
+        if (!timeList.empty()) {
+          std::cout << "Acc_time: " << timeList.back() / CLOCKS_PER_SEC
+                    << " seconds" << std::endl;
+          // delta.discrete().print("The Discrete Assignment");
+          tictoc_finishedIteration_();
+          tictoc_print_();
+        }
+      }
+
+      index++;
+    }
+
+    // Final update
+    beforeUpdate = clock();
+    smootherUpdate(smoother_, graph_, initial_, kMaxNrHypotheses, &result_);
+    afterUpdate = clock();
+    smootherUpdateTimes.push_back({index, afterUpdate - beforeUpdate});
+
+    // Final optimize
+    gttic_(HybridSmootherOptimize);
+    HybridValues delta = smoother_.optimize();
+    gttoc_(HybridSmootherOptimize);
+
+    result_.insert_or_assign(initial_.retract(delta.continuous()));
+
+    std::cout << "Final error: " << smoother_.hybridBayesNet().error(delta)
+              << std::endl;
+
+    clock_t endTime = clock();
+    clock_t totalTime = endTime - startTime;
+    std::cout << "Total time: " << totalTime / CLOCKS_PER_SEC << " seconds"
+              << std::endl;
+
+    // Write results to file
+    writeResult(result_, keyT + 1, "Hybrid_City10000.txt");
+
+    // TODO Write to file
+    // for (size_t i = 0; i < smoother_update_times.size(); i++) {
+    //   auto p = smoother_update_times.at(i);
+    //   std::cout << p.first << ", " << p.second / CLOCKS_PER_SEC <<
+    //   std::endl;
+    // }
+
+    // Write timing info to file
+    ofstream outfileTime;
+    std::string timeFileName = "Hybrid_City10000_time.txt";
+    outfileTime.open(timeFileName);
+    for (auto accTime : timeList) {
+      outfileTime << accTime << std::endl;
+    }
+    outfileTime.close();
+    std::cout << "Output " << timeFileName << " file." << std::endl;
+  }
+};
 
 /* ************************************************************************* */
-int main(int argc, char* argv[]) {
-  ifstream in(findExampleDataFile("T1_city10000_04.txt"));
-  // ifstream in("../data/mh_T1_city10000_04.txt"); //Type #1 only
-  // ifstream in("../data/mh_T3b_city10000_10.txt"); //Type #3 only
-  // ifstream in("../data/mh_T1_T3_city10000_04.txt"); //Type #1 + Type #3
+int main() {
+  Experiment experiment(findExampleDataFile("T1_city10000_04.txt"));
+  // Experiment experiment("../data/mh_T1_city10000_04.txt"); //Type #1 only
+  // Experiment experiment("../data/mh_T3b_city10000_10.txt"); //Type #3 only
+  // Experiment experiment("../data/mh_T1_T3_city10000_04.txt"); //Type #1 +
+  // Type #3
 
-  // ifstream in("../data/mh_All_city10000_groundtruth.txt");
-
-  size_t discrete_count = 0, index = 0;
-
-  std::list<double> time_list;
-
-  HybridSmoother smoother(0.99);
-
-  HybridNonlinearFactorGraph graph;
-
-  Values init_values;
-  Values results;
-
-  size_t maxNrHypotheses = 3;
-
-  double x = 0.0;
-  double y = 0.0;
-  double rad = 0.0;
-
-  Pose2 prior_pose(x, y, rad);
-
-  init_values.insert(X(0), prior_pose);
-
-  graph.push_back(PriorFactor<Pose2>(X(0), prior_pose, prior_noise_model));
-
-  SmootherUpdate(smoother, graph, init_values, maxNrHypotheses, &results);
-
-  size_t key_s, key_t{0};
-
-  clock_t start_time = clock();
-  std::string str;
-  while (getline(in, str) && index < max_loop_count) {
-    vector<string> parts;
-    split(parts, str, is_any_of(" "));
-
-    key_s = stoi(parts[1]);
-    key_t = stoi(parts[3]);
-
-    int num_measurements = stoi(parts[5]);
-    vector<Pose2> pose_array(num_measurements);
-    for (int i = 0; i < num_measurements; ++i) {
-      x = stod(parts[6 + 3 * i]);
-      y = stod(parts[7 + 3 * i]);
-      rad = stod(parts[8 + 3 * i]);
-      pose_array[i] = Pose2(x, y, rad);
-    }
-
-    // Take the first one as the initial estimate
-    Pose2 odom_pose = pose_array[0];
-    if (key_s == key_t - 1) {  // new X(key)
-      init_values.insert(X(key_t), init_values.at<Pose2>(X(key_s)) * odom_pose);
-
-    } else {  // loop
-      // index++;
-    }
-
-    // Flag if we should run smoother update
-    bool smoother_update = false;
-
-    if (num_measurements == 2) {
-      // Add hybrid factor which considers both measurements
-      DiscreteKey m(M(discrete_count), num_measurements);
-      discrete_count++;
-
-      graph.push_back(DecisionTreeFactor(m, "0.6 0.4"));
-
-      auto f0 = std::make_shared<BetweenFactor<Pose2>>(
-          X(key_s), X(key_t), pose_array[0], pose_noise_model);
-      auto f1 = std::make_shared<BetweenFactor<Pose2>>(
-          X(key_s), X(key_t), pose_array[1], pose_noise_model);
-      std::vector<NonlinearFactorValuePair> factors{{f0, 0.0}, {f1, 0.0}};
-      // HybridNonlinearFactor mixtureFactor(m, factors);
-      HybridNonlinearFactor mixtureFactor(m, {f0, f1});
-      graph.push_back(mixtureFactor);
-
-      smoother_update = true;
-
-    } else {
-      graph.add(BetweenFactor<Pose2>(X(key_s), X(key_t), odom_pose,
-                                     pose_noise_model));
-    }
-
-    if (smoother_update) {
-      SmootherUpdate(smoother, graph, init_values, maxNrHypotheses, &results);
-    }
-
-    // Print loop index and time taken in processor clock ticks
-    // if (index % 50 == 0 && key_s != key_t - 1) {
-    if (index % 100 == 0) {
-      std::cout << "index: " << index << std::endl;
-      std::cout << "acc_time: " << time_list.back() / CLOCKS_PER_SEC
-                << std::endl;
-      // delta.discrete().print("The Discrete Assignment");
-      tictoc_finishedIteration_();
-      tictoc_print_();
-    }
-
-    if (key_s == key_t - 1) {
-      clock_t cur_time = clock();
-      time_list.push_back(cur_time - start_time);
-    }
-
-    index += 1;
-  }
-
-  SmootherUpdate(smoother, graph, init_values, maxNrHypotheses, &results);
-
-  clock_t end_time = clock();
-  clock_t total_time = end_time - start_time;
-  cout << "total_time: " << total_time / CLOCKS_PER_SEC << " seconds" << endl;
-
-  /// Write results to file
-  write_results(results, (key_t + 1), "HybridISAM_city10000.txt");
-
-  ofstream outfile_time;
-  std::string time_file_name = "HybridISAM_city10000_time.txt";
-  outfile_time.open(time_file_name);
-  for (auto acc_time : time_list) {
-    outfile_time << acc_time << std::endl;
-  }
-  outfile_time.close();
-  cout << "output " << time_file_name << " file." << endl;
+  // Run the experiment
+  experiment.run(kMaxLoopCount);
 
   return 0;
 }
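
Note: in `hybridLoopClosureFactor` above, each component is paired with its noise model's `negLogConstant()`. That scalar restores the normalization constant that differs between the loose open-loop model and the tight loop model, so the discrete hypothesis comparison is not biased toward the wider Gaussian. A minimal standalone sketch of the same construction (the measurement and keys here are made up for illustration; only the GTSAM headers shown are assumed):

#include <gtsam/geometry/Pose2.h>
#include <gtsam/hybrid/HybridNonlinearFactor.h>
#include <gtsam/inference/Symbol.h>
#include <gtsam/slam/BetweenFactor.h>

using namespace gtsam;
using symbol_shorthand::L;
using symbol_shorthand::X;

int main() {
  auto openLoopModel = noiseModel::Diagonal::Sigmas(Vector3::Ones() * 10);
  auto loopModel = noiseModel::Diagonal::Sigmas(
      (Vector(3) << 1.0 / 30.0, 1.0 / 30.0, 1.0 / 100.0).finished());

  // Hypothesis 0: spurious loop (loose model); hypothesis 1: true loop.
  const Pose2 measurement(1.0, 0.0, 0.0);
  auto f0 = std::make_shared<BetweenFactor<Pose2>>(X(0), X(5), measurement,
                                                   openLoopModel);
  auto f1 = std::make_shared<BetweenFactor<Pose2>>(X(0), X(5), measurement,
                                                   loopModel);

  // The scalar offsets make the two component likelihoods commensurable.
  DiscreteKey l(L(0), 2);
  HybridNonlinearFactor loopFactor(
      l, {{f0, openLoopModel->negLogConstant()},
          {f1, loopModel->negLogConstant()}});
  loopFactor.print("loop factor: ");
  return 0;
}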
@@ -165,7 +165,8 @@ int main(int argc, char* argv[]) {
     // Print loop index and time taken in processor clock ticks
     if (index % 50 == 0 && key_s != key_t - 1) {
       std::cout << "index: " << index << std::endl;
-      std::cout << "acc_time: " << time_list.back() << std::endl;
+      std::cout << "acc_time: " << time_list.back() / CLOCKS_PER_SEC
+                << std::endl;
     }
 
     if (key_s == key_t - 1) {
@@ -190,7 +191,7 @@ int main(int argc, char* argv[]) {
 
   clock_t end_time = clock();
   clock_t total_time = end_time - start_time;
-  cout << "total_time: " << total_time << endl;
+  cout << "total_time: " << total_time / CLOCKS_PER_SEC << endl;
 
   /// Write results to file
   write_results(results, (key_t + 1));
@@ -164,6 +164,12 @@ namespace gtsam {
     virtual DiscreteFactor::shared_ptr multiply(
         const DiscreteFactor::shared_ptr& f) const override;
 
+    /// multiply with a scalar
+    DiscreteFactor::shared_ptr operator*(double s) const override {
+      return std::make_shared<DecisionTreeFactor>(
+          apply([s](const double& a) { return Ring::mul(a, s); }));
+    }
+
     /// multiply two factors
     DecisionTreeFactor operator*(const DecisionTreeFactor& f) const override {
       return apply(f, Ring::mul);
@@ -201,6 +207,9 @@ namespace gtsam {
       return combine(keys, Ring::add);
     }
 
+    /// Find the maximum value in the factor.
+    double max() const override { return ADT::max(); };
+
     /// Create new factor by maximizing over all values with the same separator.
     DiscreteFactor::shared_ptr max(size_t nrFrontals) const override {
       return combine(nrFrontals, Ring::max);
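
Note: the scalar overload added here returns a `DiscreteFactor::shared_ptr`, unlike the factor-by-factor `operator*`, which returns a concrete `DecisionTreeFactor`. A minimal usage sketch of both new members (the "0.8 0.2" potentials are illustrative only):

#include <gtsam/discrete/DecisionTreeFactor.h>
#include <iostream>

using namespace gtsam;

int main() {
  DiscreteKey m(0, 2);  // one binary variable
  DecisionTreeFactor f(m, "0.8 0.2");

  // Scale every potential by 0.5; the result is a DiscreteFactor::shared_ptr.
  DiscreteFactor::shared_ptr half = f * 0.5;

  // max() over all assignments: 0.8 for f, hence 0.4 for half.
  std::cout << f.max() << " " << half->max() << std::endl;
  return 0;
}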
@@ -89,7 +89,7 @@ DiscreteBayesNet DiscreteBayesNet::prune(
   DiscreteValues deadModesValues;
   // If we have a dead mode threshold and discrete variables left after
   // pruning, then we run dead mode removal.
-  if (marginalThreshold.has_value() && pruned.keys().size() > 0) {
+  if (marginalThreshold && pruned.keys().size() > 0) {
     DiscreteMarginals marginals(DiscreteFactorGraph{pruned});
     for (auto dkey : pruned.discreteKeys()) {
       const Vector probabilities = marginals.marginalProbabilities(dkey);
@@ -73,10 +73,7 @@ AlgebraicDecisionTree<Key> DiscreteFactor::errorTree() const {
 
 /* ************************************************************************ */
 DiscreteFactor::shared_ptr DiscreteFactor::scale() const {
-  // Max over all the potentials by pretending all keys are frontal:
-  shared_ptr denominator = this->max(this->size());
-  // Normalize the product factor to prevent underflow.
-  return this->operator/(denominator);
+  return this->operator*(1.0 / max());
 }
 
 }  // namespace gtsam
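
Note: the one-liner is equivalent to the deleted version: dividing by a constant factor that holds `max` everywhere is the same as multiplying by `1.0 / max()`, and the new scalar `max()` avoids building an intermediate denominator factor. After scaling, the largest potential is 1, which is what prevents underflow in long products. A small sketch (toy potentials, only the header shown assumed):

#include <gtsam/discrete/DecisionTreeFactor.h>
#include <cassert>
#include <cmath>

using namespace gtsam;

int main() {
  DiscreteKey m(0, 2);
  DecisionTreeFactor f(m, "1e-9 4e-9");  // tiny potentials prone to underflow
  auto scaled = f * (1.0 / f.max());     // what scale() now does internally
  assert(std::abs(scaled->max() - 1.0) < 1e-12);  // top potential is ~1
  return 0;
}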
@@ -126,6 +126,9 @@ class GTSAM_EXPORT DiscreteFactor : public Factor {
   /// Compute error for each assignment and return as a tree
   virtual AlgebraicDecisionTree<Key> errorTree() const;
 
+  /// Multiply with a scalar
+  virtual DiscreteFactor::shared_ptr operator*(double s) const = 0;
+
   /// Multiply in a DecisionTreeFactor and return the result as
   /// DecisionTreeFactor
   virtual DecisionTreeFactor operator*(const DecisionTreeFactor&) const = 0;
@@ -152,6 +155,9 @@ class GTSAM_EXPORT DiscreteFactor : public Factor {
   /// Create new factor by summing all values with the same separator values
   virtual DiscreteFactor::shared_ptr sum(const Ordering& keys) const = 0;
 
+  /// Find the maximum value in the factor.
+  virtual double max() const = 0;
+
   /// Create new factor by maximizing over all values with the same separator.
   virtual DiscreteFactor::shared_ptr max(size_t nrFrontals) const = 0;
 
@@ -65,15 +65,10 @@ namespace gtsam {
 
 /* ************************************************************************ */
 DiscreteFactor::shared_ptr DiscreteFactorGraph::product() const {
-  DiscreteFactor::shared_ptr result;
-  for (auto it = this->begin(); it != this->end(); ++it) {
-    if (*it) {
-      if (result) {
-        result = result->multiply(*it);
-      } else {
-        // Assign to the first non-null factor
-        result = *it;
-      }
+  DiscreteFactor::shared_ptr result = nullptr;
+  for (const auto& factor : *this) {
+    if (factor) {
+      result = result ? result->multiply(factor) : factor;
     }
   }
   return result;
@@ -120,15 +115,7 @@ namespace gtsam {
 
 /* ************************************************************************ */
 DiscreteFactor::shared_ptr DiscreteFactorGraph::scaledProduct() const {
-  // PRODUCT: multiply all factors
-  gttic(product);
-  DiscreteFactor::shared_ptr product = this->product();
-  gttoc(product);
-
-  // Normalize the product factor to prevent underflow.
-  product = product->scale();
-
-  return product;
+  return product()->scale();
 }
 
 /* ************************************************************************ */
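
Note: `scaledProduct()` now reads as its specification: multiply all factors, then rescale so the maximum potential is 1. The sketch below shows the intent on a hypothetical three-factor graph; with many more small potentials, the unscaled product would drift toward zero:

#include <gtsam/discrete/DecisionTreeFactor.h>
#include <gtsam/discrete/DiscreteFactorGraph.h>
#include <iostream>

using namespace gtsam;

int main() {
  DiscreteKey a(0, 2), b(1, 2);
  DiscreteFactorGraph graph;
  graph.add(DecisionTreeFactor(a, "1e-4 2e-4"));
  graph.add(DecisionTreeFactor(b, "3e-4 1e-4"));
  graph.add(DecisionTreeFactor({a, b}, "1 2 3 4"));

  auto raw = graph.product();         // max potential here is tiny (~2e-7)
  auto safe = graph.scaledProduct();  // max potential rescaled to 1.0
  std::cout << raw->max() << " vs " << safe->max() << std::endl;
  return 0;
}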
@@ -216,7 +203,7 @@ namespace gtsam {
                                             const Ordering& frontalKeys) {
   gttic(product);
-  DiscreteFactor::shared_ptr product = factors.product();
+  // `product` is scaled later to prevent underflow.
+  DiscreteFactor::shared_ptr product = factors.scaledProduct();
   gttoc(product);
 
   // sum out frontals, this is the factor on the separator
@@ -224,16 +211,6 @@ namespace gtsam {
   DiscreteFactor::shared_ptr sum = product->sum(frontalKeys);
   gttoc(sum);
 
-  // Normalize/scale to prevent underflow.
-  // We divide both `product` and `sum` by `max(sum)`
-  // since it is faster to compute and when the conditional
-  // is formed by `product/sum`, the scaling term cancels out.
-  gttic(scale);
-  DiscreteFactor::shared_ptr denominator = sum->max(sum->size());
-  product = product->operator/(denominator);
-  sum = sum->operator/(denominator);
-  gttoc(scale);
-
   // Ordering keys for the conditional so that frontalKeys are really in front
   Ordering orderedKeys;
   orderedKeys.insert(orderedKeys.end(), frontalKeys.begin(),
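
Note: the deleted rescaling is redundant rather than wrong: the discrete conditional formed downstream is `product / sum`, so dividing both by the same `max(sum)` cancels out, and the single rescale in `scaledProduct()` already guards against underflow. A toy check of that invariance (illustrative numbers):

#include <gtsam/discrete/DecisionTreeFactor.h>
#include <iostream>

using namespace gtsam;

int main() {
  DiscreteKey a(0, 2), b(1, 2);
  DecisionTreeFactor product({a, b}, "0.1 0.2 0.3 0.4");
  auto sum = product.sum(1);  // marginalize out the frontal variable

  DiscreteValues v{{0, 1}, {1, 0}};  // a = 1, b = 0
  double before = product(v) / (*sum)(v);

  // Rescale numerator and denominator by the same constant: ratio unchanged.
  auto scaledNum = product * 1e-3;
  auto scaledDen = *sum * 1e-3;
  double after = (*scaledNum)(v) / (*scaledDen)(v);

  std::cout << before << " == " << after << std::endl;
  return 0;
}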
@@ -110,6 +110,11 @@ DiscreteFactor::shared_ptr TableDistribution::max(const Ordering& keys) const {
   return table_.max(keys);
 }
 
+/* ****************************************************************************/
+DiscreteFactor::shared_ptr TableDistribution::operator*(double s) const {
+  return table_ * s;
+}
+
 /* ****************************************************************************/
 DiscreteFactor::shared_ptr TableDistribution::operator/(
     const DiscreteFactor::shared_ptr& f) const {
@@ -116,12 +116,19 @@ class GTSAM_EXPORT TableDistribution : public DiscreteConditional {
   /// Create new factor by summing all values with the same separator values
   DiscreteFactor::shared_ptr sum(const Ordering& keys) const override;
 
+  /// Find the maximum value in the factor.
+  double max() const override { return table_.max(); }
+
   /// Create new factor by maximizing over all values with the same separator.
   DiscreteFactor::shared_ptr max(size_t nrFrontals) const override;
 
   /// Create new factor by maximizing over all values with the same separator.
   DiscreteFactor::shared_ptr max(const Ordering& keys) const override;
 
+  /// Multiply by scalar s
+  DiscreteFactor::shared_ptr operator*(double s) const override;
+
   /// divide by DiscreteFactor::shared_ptr f (safely)
   DiscreteFactor::shared_ptr operator/(
       const DiscreteFactor::shared_ptr& f) const override;
@@ -389,6 +389,36 @@ void TableFactor::print(const string& s, const KeyFormatter& formatter) const {
   cout << "number of nnzs: " << sparse_table_.nonZeros() << endl;
 }
 
+/* ************************************************************************ */
+DiscreteFactor::shared_ptr TableFactor::sum(size_t nrFrontals) const {
+  return combine(nrFrontals, Ring::add);
+}
+
+/* ************************************************************************ */
+DiscreteFactor::shared_ptr TableFactor::sum(const Ordering& keys) const {
+  return combine(keys, Ring::add);
+}
+
+/* ************************************************************************ */
+double TableFactor::max() const {
+  double max_value = std::numeric_limits<double>::lowest();
+  for (Eigen::SparseVector<double>::InnerIterator it(sparse_table_); it; ++it) {
+    max_value = std::max(max_value, it.value());
+  }
+  return max_value;
+}
+
+/* ************************************************************************ */
+DiscreteFactor::shared_ptr TableFactor::max(size_t nrFrontals) const {
+  return combine(nrFrontals, Ring::max);
+}
+
+/* ************************************************************************ */
+DiscreteFactor::shared_ptr TableFactor::max(const Ordering& keys) const {
+  return combine(keys, Ring::max);
+}
+
 /* ************************************************************************ */
 TableFactor TableFactor::apply(Unary op) const {
   // Initialize new factor.
@@ -171,6 +171,12 @@ class GTSAM_EXPORT TableFactor : public DiscreteFactor {
   /// Calculate error for DiscreteValues `x`, is -log(probability).
   double error(const DiscreteValues& values) const override;
 
+  /// multiply with a scalar
+  DiscreteFactor::shared_ptr operator*(double s) const override {
+    return std::make_shared<TableFactor>(
+        apply([s](const double& a) { return Ring::mul(a, s); }));
+  }
+
   /// multiply two TableFactors
   TableFactor operator*(const TableFactor& f) const {
     return apply(f, Ring::mul);
@@ -215,24 +221,19 @@ class GTSAM_EXPORT TableFactor : public DiscreteFactor {
       DiscreteKeys parent_keys) const;
 
   /// Create new factor by summing all values with the same separator values
-  DiscreteFactor::shared_ptr sum(size_t nrFrontals) const override {
-    return combine(nrFrontals, Ring::add);
-  }
+  DiscreteFactor::shared_ptr sum(size_t nrFrontals) const override;
 
   /// Create new factor by summing all values with the same separator values
-  DiscreteFactor::shared_ptr sum(const Ordering& keys) const override {
-    return combine(keys, Ring::add);
-  }
+  DiscreteFactor::shared_ptr sum(const Ordering& keys) const override;
 
+  /// Find the maximum value in the factor.
+  double max() const override;
+
   /// Create new factor by maximizing over all values with the same separator.
-  DiscreteFactor::shared_ptr max(size_t nrFrontals) const override {
-    return combine(nrFrontals, Ring::max);
-  }
+  DiscreteFactor::shared_ptr max(size_t nrFrontals) const override;
 
   /// Create new factor by maximizing over all values with the same separator.
-  DiscreteFactor::shared_ptr max(const Ordering& keys) const override {
-    return combine(keys, Ring::max);
-  }
+  DiscreteFactor::shared_ptr max(const Ordering& keys) const override;
 
   /// @}
   /// @name Advanced Interface
@@ -191,11 +191,19 @@ size_t HybridGaussianConditional::nrComponents() const {
 /* *******************************************************************************/
 GaussianConditional::shared_ptr HybridGaussianConditional::choose(
     const DiscreteValues &discreteValues) const {
-  auto &[factor, _] = factors()(discreteValues);
-  if (!factor) return nullptr;
+  try {
+    auto &[factor, _] = factors()(discreteValues);
+    if (!factor) return nullptr;
 
-  auto conditional = checkConditional(factor);
-  return conditional;
+    auto conditional = checkConditional(factor);
+    return conditional;
+  } catch (const std::out_of_range &e) {
+    GTSAM_PRINT(*this);
+    GTSAM_PRINT(discreteValues);
+    throw std::runtime_error(
+        "HybridGaussianConditional::choose: discreteValues does not contain "
+        "all discrete parents.");
+  }
 }
 
 /* *******************************************************************************/
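
Note: `choose()` now fails loudly when the assignment is missing a discrete parent, instead of letting `std::out_of_range` escape from the decision-tree lookup. A happy-path sketch (the two Gaussian modes are invented for illustration):

#include <gtsam/hybrid/HybridGaussianConditional.h>
#include <gtsam/inference/Symbol.h>
#include <gtsam/linear/GaussianConditional.h>

using namespace gtsam;
using symbol_shorthand::M;
using symbol_shorthand::X;

int main() {
  // Two Gaussian modes for P(x0 | m0).
  const DiscreteKey mode(M(0), 2);
  const auto c0 =
      GaussianConditional::sharedMeanAndStddev(X(0), Vector1(0.0), 0.5);
  const auto c1 =
      GaussianConditional::sharedMeanAndStddev(X(0), Vector1(5.0), 2.0);
  const HybridGaussianConditional hgc(mode, {c0, c1});

  // A complete assignment of the discrete parents selects one mode.
  DiscreteValues dv{{M(0), 1}};
  GaussianConditional::shared_ptr chosen = hgc.choose(dv);
  chosen->print("mode 1: ");
  return 0;
}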
@@ -313,18 +321,21 @@ HybridGaussianConditional::shared_ptr HybridGaussianConditional::prune(
   std::set_difference(theirs.begin(), theirs.end(), mine.begin(), mine.end(),
                       std::back_inserter(diff));
 
-  // Find maximum probability value for every combination of our keys.
-  Ordering keys(diff);
-  auto max = discreteProbs.max(keys);
+  // Find maximum probability value for every combination of *our* keys.
+  Ordering ordering(diff);
+  auto max = discreteProbs.max(ordering);
 
   // Check the max value for every combination of our keys.
   // If the max value is 0.0, we can prune the corresponding conditional.
+  bool allPruned = true;
   auto pruner =
       [&](const Assignment<Key> &choices,
           const GaussianFactorValuePair &pair) -> GaussianFactorValuePair {
-    if (max->evaluate(choices) == 0.0)
+    // If this choice is zero probability or Gaussian is null, return infinity
+    if (!pair.first || max->evaluate(choices) == 0.0) {
       return {nullptr, std::numeric_limits<double>::infinity()};
-    else {
+    } else {
+      allPruned = false;
       // Add negLogConstant_ back so that the minimum negLogConstant in the
       // HybridGaussianConditional is set correctly.
       return {pair.first, pair.second + negLogConstant_};
@@ -332,6 +343,7 @@ HybridGaussianConditional::shared_ptr HybridGaussianConditional::prune(
   };
 
   FactorValuePairs prunedConditionals = factors().apply(pruner);
+  if (allPruned) return nullptr;
   return std::make_shared<HybridGaussianConditional>(discreteKeys(),
                                                      prunedConditionals, true);
 }
@@ -50,6 +50,8 @@
 #include <utility>
 #include <vector>
 
+#define GTSAM_HYBRID_WITH_TABLEFACTOR 1
+
 namespace gtsam {
 
 /// Specialize EliminateableFactorGraph for HybridGaussianFactorGraph:
@@ -253,7 +255,11 @@ static DiscreteFactor::shared_ptr DiscreteFactorFromErrors(
   double min_log = errors.min();
   AlgebraicDecisionTree<Key> potentials(
       errors, [&min_log](const double x) { return exp(-(x - min_log)); });
+#if GTSAM_HYBRID_WITH_TABLEFACTOR
   return std::make_shared<TableFactor>(discreteKeys, potentials);
+#else
+  return std::make_shared<DecisionTreeFactor>(discreteKeys, potentials);
+#endif
 }
 
 /* ************************************************************************ */
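
Note: `DiscreteFactorFromErrors` maps negative log-likelihoods to potentials via potential = exp(-(error - min_error)); subtracting the minimum first gives the best hypothesis a potential of exactly 1 no matter how large the raw errors are. The same transform in isolation (illustrative numbers):

#include <algorithm>
#include <cmath>
#include <iostream>
#include <vector>

int main() {
  std::vector<double> errors{100.2, 100.9, 103.0};  // negative log-likelihoods
  const double minError = *std::min_element(errors.begin(), errors.end());
  for (double e : errors) {
    // Without the shift, all potentials would be vanishingly small (~1e-44).
    std::cout << std::exp(-(e - minError)) << " ";
  }
  std::cout << std::endl;  // prints 1 exp(-0.7) exp(-2.8)
  return 0;
}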
@@ -290,9 +296,13 @@ static DiscreteFactorGraph CollectDiscreteFactors(
       /// Get the underlying TableFactor
       dfg.push_back(dtc->table());
     } else {
+#if GTSAM_HYBRID_WITH_TABLEFACTOR
       // Convert DiscreteConditional to TableFactor
       auto tdc = std::make_shared<TableFactor>(*dc);
       dfg.push_back(tdc);
+#else
+      dfg.push_back(dc);
+#endif
     }
 #if GTSAM_HYBRID_TIMING
     gttoc_(ConvertConditionalToTableFactor);
@@ -309,11 +319,18 @@ static DiscreteFactorGraph CollectDiscreteFactors(
 static std::pair<HybridConditional::shared_ptr, std::shared_ptr<Factor>>
 discreteElimination(const HybridGaussianFactorGraph &factors,
                     const Ordering &frontalKeys) {
+#if GTSAM_HYBRID_TIMING
+  gttic_(CollectDiscreteFactors);
+#endif
   DiscreteFactorGraph dfg = CollectDiscreteFactors(factors);
+#if GTSAM_HYBRID_TIMING
+  gttoc_(CollectDiscreteFactors);
+#endif
 
 #if GTSAM_HYBRID_TIMING
   gttic_(EliminateDiscrete);
 #endif
+#if GTSAM_HYBRID_WITH_TABLEFACTOR
   // Check if separator is empty.
   // This is the same as checking if the number of frontal variables
   // is the same as the number of variables in the DiscreteFactorGraph.
@@ -323,9 +340,6 @@ discreteElimination(const HybridGaussianFactorGraph &factors,
     // Get product factor
     DiscreteFactor::shared_ptr product = dfg.scaledProduct();
 
-#if GTSAM_HYBRID_TIMING
-    gttic_(EliminateDiscreteFormDiscreteConditional);
-#endif
     // Check type of product, and get as TableFactor for efficiency.
     // Use object instead of pointer since we need it
     // for the TableDistribution constructor.
@@ -337,19 +351,18 @@ discreteElimination(const HybridGaussianFactorGraph &factors,
     }
     auto conditional = std::make_shared<TableDistribution>(p);
 
-#if GTSAM_HYBRID_TIMING
-    gttoc_(EliminateDiscreteFormDiscreteConditional);
-#endif
-
     DiscreteFactor::shared_ptr sum = p.sum(frontalKeys);
 
     return {std::make_shared<HybridConditional>(conditional), sum};
-
   } else {
+#endif
     // Perform sum-product.
     auto result = EliminateDiscrete(dfg, frontalKeys);
     return {std::make_shared<HybridConditional>(result.first), result.second};
+#if GTSAM_HYBRID_WITH_TABLEFACTOR
   }
+#endif
 #if GTSAM_HYBRID_TIMING
   gttoc_(EliminateDiscrete);
 #endif
@@ -411,8 +424,14 @@ static std::shared_ptr<Factor> createHybridGaussianFactor(
       throw std::runtime_error("createHybridGaussianFactors has mixed NULLs");
     }
   };
+#if GTSAM_HYBRID_TIMING
+  gttic_(HybridCreateGaussianFactor);
+#endif
   DecisionTree<Key, GaussianFactorValuePair> newFactors(eliminationResults,
                                                         correct);
+#if GTSAM_HYBRID_TIMING
+  gttoc_(HybridCreateGaussianFactor);
+#endif
 
   return std::make_shared<HybridGaussianFactor>(discreteSeparator, newFactors);
 }
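
Note: the gttic_/gttoc_ pairs added throughout feed GTSAM's built-in tictoc profiler; nothing is printed until the results are requested. A minimal sketch of how those timers are surfaced (the label is arbitrary):

#include <gtsam/base/timing.h>

int main() {
  for (int i = 0; i < 3; ++i) {
    gttic_(MyExpensiveStep);
    // ... work to be timed goes here ...
    gttoc_(MyExpensiveStep);
    tictoc_finishedIteration_();  // close out this iteration's samples
  }
  tictoc_print_();  // print the accumulated timing tree
  return 0;
}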
@@ -21,55 +21,70 @@
 #include <algorithm>
 #include <unordered_set>
 
+// #define DEBUG_SMOOTHER
 namespace gtsam {
 
 /* ************************************************************************* */
 Ordering HybridSmoother::getOrdering(const HybridGaussianFactorGraph &factors,
-                                     const KeySet &newFactorKeys) {
+                                     const KeySet &lastKeysToEliminate) {
   // Get all the discrete keys from the factors
   KeySet allDiscrete = factors.discreteKeySet();
 
   // Create KeyVector with continuous keys followed by discrete keys.
-  KeyVector newKeysDiscreteLast;
+  KeyVector lastKeys;
 
   // Insert continuous keys first.
-  for (auto &k : newFactorKeys) {
+  for (auto &k : lastKeysToEliminate) {
     if (!allDiscrete.exists(k)) {
-      newKeysDiscreteLast.push_back(k);
+      lastKeys.push_back(k);
     }
   }
 
   // Insert discrete keys at the end
   std::copy(allDiscrete.begin(), allDiscrete.end(),
-            std::back_inserter(newKeysDiscreteLast));
-
-  const VariableIndex index(factors);
+            std::back_inserter(lastKeys));
 
   // Get an ordering where the new keys are eliminated last
   Ordering ordering = Ordering::ColamdConstrainedLast(
-      index, KeyVector(newKeysDiscreteLast.begin(), newKeysDiscreteLast.end()),
-      true);
+      factors, KeyVector(lastKeys.begin(), lastKeys.end()), true);
 
   return ordering;
 }
 
 /* ************************************************************************* */
-void HybridSmoother::update(const HybridGaussianFactorGraph &graph,
+void HybridSmoother::update(const HybridGaussianFactorGraph &newFactors,
                             std::optional<size_t> maxNrLeaves,
                             const std::optional<Ordering> given_ordering) {
+  const KeySet originalNewFactorKeys = newFactors.keys();
+#ifdef DEBUG_SMOOTHER
+  std::cout << "hybridBayesNet_ size before: " << hybridBayesNet_.size()
+            << std::endl;
+  std::cout << "newFactors size: " << newFactors.size() << std::endl;
+#endif
   HybridGaussianFactorGraph updatedGraph;
   // Add the necessary conditionals from the previous timestep(s).
   std::tie(updatedGraph, hybridBayesNet_) =
-      addConditionals(graph, hybridBayesNet_);
+      addConditionals(newFactors, hybridBayesNet_);
+#ifdef DEBUG_SMOOTHER
+  // print size of newFactors, updatedGraph, hybridBayesNet_
+  std::cout << "updatedGraph size: " << updatedGraph.size() << std::endl;
+  std::cout << "hybridBayesNet_ size after: " << hybridBayesNet_.size()
+            << std::endl;
+  std::cout << "total size: " << updatedGraph.size() + hybridBayesNet_.size()
+            << std::endl;
+#endif
 
   Ordering ordering;
   // If no ordering provided, then we compute one
   if (!given_ordering.has_value()) {
-    // Get the keys from the new factors
-    const KeySet newFactorKeys = graph.keys();
+    KeySet continuousKeysToInclude;  // Scheme 1: empty, 15sec/2000, 64sec/3000 (69s without TF)
+    // continuousKeysToInclude = newFactors.keys();  // Scheme 2: all, 8sec/2000, 160sec/3000
+    // continuousKeysToInclude = updatedGraph.keys();  // Scheme 3: all, stopped after 80sec/2000
 
     // Since updatedGraph now has all the connected conditionals,
     // we can get the correct ordering.
-    ordering = this->getOrdering(updatedGraph, newFactorKeys);
+    ordering = this->getOrdering(updatedGraph, continuousKeysToInclude);
   } else {
     ordering = *given_ordering;
   }
@@ -77,25 +92,64 @@ void HybridSmoother::update(const HybridGaussianFactorGraph &newFactors,
   // Eliminate.
   HybridBayesNet bayesNetFragment = *updatedGraph.eliminateSequential(ordering);
 
+#ifdef DEBUG_SMOOTHER_DETAIL
+  for (auto conditional : bayesNetFragment) {
+    auto e = std::dynamic_pointer_cast<HybridConditional::BaseConditional>(
+        conditional);
+    GTSAM_PRINT(*e);
+  }
+#endif
+
+#ifdef DEBUG_SMOOTHER
+  // Print discrete keys in the bayesNetFragment:
+  std::cout << "Discrete keys in bayesNetFragment: ";
+  for (auto &key : HybridFactorGraph(bayesNetFragment).discreteKeySet()) {
+    std::cout << DefaultKeyFormatter(key) << " ";
+  }
+#endif
+
   /// Prune
   if (maxNrLeaves) {
     // `pruneBayesNet` sets the leaves with 0 in discreteFactor to nullptr in
     // all the conditionals with the same keys in bayesNetFragment.
-    bayesNetFragment = bayesNetFragment.prune(*maxNrLeaves, marginalThreshold_);
+    DiscreteValues newlyFixedValues;
+    bayesNetFragment = bayesNetFragment.prune(*maxNrLeaves, marginalThreshold_,
+                                              &newlyFixedValues);
+    fixedValues_.insert(newlyFixedValues);
   }
 
+#ifdef DEBUG_SMOOTHER
+  // Print discrete keys in the bayesNetFragment:
+  std::cout << "\nAfter pruning: ";
+  for (auto &key : HybridFactorGraph(bayesNetFragment).discreteKeySet()) {
+    std::cout << DefaultKeyFormatter(key) << " ";
+  }
+  std::cout << std::endl << std::endl;
+#endif
+
+#ifdef DEBUG_SMOOTHER_DETAIL
+  for (auto conditional : bayesNetFragment) {
+    auto c = std::dynamic_pointer_cast<HybridConditional::BaseConditional>(
+        conditional);
+    GTSAM_PRINT(*c);
+  }
+#endif
+
   // Add the partial bayes net to the posterior bayes net.
   hybridBayesNet_.add(bayesNetFragment);
 }
 
 /* ************************************************************************* */
 std::pair<HybridGaussianFactorGraph, HybridBayesNet>
-HybridSmoother::addConditionals(const HybridGaussianFactorGraph &originalGraph,
+HybridSmoother::addConditionals(const HybridGaussianFactorGraph &newFactors,
                                 const HybridBayesNet &hybridBayesNet) const {
-  HybridGaussianFactorGraph graph(originalGraph);
+  HybridGaussianFactorGraph graph(newFactors);
   HybridBayesNet updatedHybridBayesNet(hybridBayesNet);
 
-  KeySet factorKeys = graph.keys();
+  KeySet involvedKeys = newFactors.keys();
+  auto involved = [&involvedKeys](const Key &key) {
+    return involvedKeys.find(key) != involvedKeys.end();
+  };
 
   // If hybridBayesNet is not empty,
   // it means we have conditionals to add to the factor graph.
@@ -108,21 +162,36 @@ HybridSmoother::addConditionals(const HybridGaussianFactorGraph &newFactors,
 
   // NOTE(Varun) Using a for-range loop doesn't work since some of the
   // conditionals are invalid pointers
+
+  // First get all the keys involved.
+  // We do this by iterating over all conditionals, and checking if their
+  // frontals are involved in the factor graph. If yes, then also make the
+  // parent keys involved in the factor graph.
   for (size_t i = 0; i < hybridBayesNet.size(); i++) {
     auto conditional = hybridBayesNet.at(i);
 
     for (auto &key : conditional->frontals()) {
-      if (std::find(factorKeys.begin(), factorKeys.end(), key) !=
-          factorKeys.end()) {
-        newConditionals.push_back(conditional);
-
-        // Add the conditional parents to factorKeys
+      if (involved(key)) {
+        // Add the conditional parents to involvedKeys
+        // so we add those conditionals too.
+        // NOTE: This assumes we have a structure where
+        // variables depend on those in the future.
         for (auto &&parentKey : conditional->parents()) {
-          factorKeys.insert(parentKey);
+          involvedKeys.insert(parentKey);
         }
+        // Break so we don't add parents twice.
+        break;
       }
     }
   }
+#ifdef DEBUG_SMOOTHER
+  PrintKeySet(involvedKeys);
+#endif
+
+  for (size_t i = 0; i < hybridBayesNet.size(); i++) {
+    auto conditional = hybridBayesNet.at(i);
+
+    for (auto &key : conditional->frontals()) {
+      if (involved(key)) {
+        newConditionals.push_back(conditional);
+
+        // Remove the conditional from the updated Bayes net
+        auto it = find(updatedHybridBayesNet.begin(),
@@ -151,4 +220,21 @@ const HybridBayesNet &HybridSmoother::hybridBayesNet() const {
   return hybridBayesNet_;
 }
 
+/* ************************************************************************* */
+HybridValues HybridSmoother::optimize() const {
+  // Solve for the MPE
+  DiscreteValues mpe = hybridBayesNet_.mpe();
+
+  // Add fixed values to the MPE.
+  mpe.insert(fixedValues_);
+
+  // Given the MPE, compute the optimal continuous values.
+  GaussianBayesNet gbn = hybridBayesNet_.choose(mpe);
+  const VectorValues continuous = gbn.optimize();
+  if (std::find(gbn.begin(), gbn.end(), nullptr) != gbn.end()) {
+    throw std::runtime_error("At least one nullptr factor in hybridBayesNet_");
+  }
+  return HybridValues(continuous, mpe);
+}
+
 }  // namespace gtsam
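
Note: together with `fixedValues_`, the new `optimize()` replaces calling `hybridBayesNet().optimize()` directly, as the City10000 example above now does. One smoother step then looks like this sketch (it mirrors `smootherUpdate()` in the example; `graph` and `initial` are assumed to be filled by the caller):

#include <gtsam/hybrid/HybridNonlinearFactorGraph.h>
#include <gtsam/hybrid/HybridSmoother.h>
#include <gtsam/nonlinear/Values.h>

using namespace gtsam;

// Linearize, update with a hypothesis cap, then solve for the MPE.
void step(HybridSmoother& smoother, HybridNonlinearFactorGraph& graph,
          const Values& initial, size_t maxNrHypotheses, Values* result) {
  HybridGaussianFactorGraph linearized = *graph.linearize(initial);
  smoother.update(linearized, maxNrHypotheses);
  graph.resize(0);  // the factors are now absorbed into the smoother

  // optimize() finds the MPE discrete assignment (including any values fixed
  // during pruning) and back-substitutes for the continuous solution.
  HybridValues delta = smoother.optimize();
  result->insert_or_assign(initial.retract(delta.continuous()));
}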
@@ -106,6 +106,9 @@ class GTSAM_EXPORT HybridSmoother {
 
   /// Return the Bayes Net posterior.
   const HybridBayesNet& hybridBayesNet() const;
+
+  /// Optimize the hybrid Bayes Net, taking into account fixed values.
+  HybridValues optimize() const;
 };
 
 }  // namespace gtsam
@@ -28,6 +28,28 @@ using symbol_shorthand::M;
 using symbol_shorthand::X;
 using symbol_shorthand::Z;
 
+/* ****************************************************************************/
+// Test the HybridConditional constructor.
+TEST(HybridConditional, Constructor) {
+  // Create a HybridGaussianConditional.
+  const KeyVector continuousKeys{X(0), X(1)};
+  const DiscreteKeys discreteKeys{{M(0), 2}};
+  const size_t nFrontals = 1;
+  const HybridConditional hc(continuousKeys, discreteKeys, nFrontals);
+
+  // Check Frontals:
+  EXPECT_LONGS_EQUAL(1, hc.nrFrontals());
+  const auto frontals = hc.frontals();
+  EXPECT_LONGS_EQUAL(1, frontals.size());
+  EXPECT_LONGS_EQUAL(X(0), *frontals.begin());
+
+  // Check parents:
+  const auto parents = hc.parents();
+  EXPECT_LONGS_EQUAL(2, parents.size());
+  EXPECT_LONGS_EQUAL(X(1), *parents.begin());
+  EXPECT_LONGS_EQUAL(M(0), *(parents.begin() + 1));
+}
+
 /* ****************************************************************************/
 // Check invariants for all conditionals in a tiny Bayes net.
 TEST(HybridConditional, Invariants) {
@@ -43,6 +65,12 @@ TEST(HybridConditional, Invariants) {
   auto hc0 = bn.at(0);
   CHECK(hc0->isHybrid());
 
+  // Check parents:
+  const auto parents = hc0->parents();
+  EXPECT_LONGS_EQUAL(2, parents.size());
+  EXPECT_LONGS_EQUAL(X(0), *parents.begin());
+  EXPECT_LONGS_EQUAL(M(0), *(parents.begin() + 1));
+
   // Check invariants as a HybridGaussianConditional.
   const auto conditional = hc0->asHybrid();
   EXPECT(HybridGaussianConditional::CheckInvariants(*conditional, values));
@@ -87,6 +87,16 @@ class GTSAM_UNSTABLE_EXPORT Constraint : public DiscreteFactor {
         this->operator*(df->toDecisionTreeFactor()));
   }
 
+  /// Multiply by a scalar
+  virtual DiscreteFactor::shared_ptr operator*(double s) const override {
+    return this->toDecisionTreeFactor() * s;
+  }
+
+  /// Multiply by a DecisionTreeFactor and return a DecisionTreeFactor
+  DecisionTreeFactor operator*(const DecisionTreeFactor& dtf) const override {
+    return this->toDecisionTreeFactor() * dtf;
+  }
+
   /// divide by DiscreteFactor::shared_ptr f (safely)
   DiscreteFactor::shared_ptr operator/(
       const DiscreteFactor::shared_ptr& df) const override {
@@ -104,6 +114,9 @@ class GTSAM_UNSTABLE_EXPORT Constraint : public DiscreteFactor {
     return toDecisionTreeFactor().sum(keys);
   }
 
+  /// Find the max value
+  double max() const override { return toDecisionTreeFactor().max(); }
+
   DiscreteFactor::shared_ptr max(size_t nrFrontals) const override {
     return toDecisionTreeFactor().max(nrFrontals);
  }