Formatted and fixed discrete examples

release/4.3a0
Frank Dellaert 2020-07-09 20:46:12 -04:00
parent f280aec428
commit 3dcff34b46
4 changed files with 98 additions and 117 deletions

examples/CMakeLists.txt

@@ -1,7 +1,4 @@
 set (excluded_examples
-    DiscreteBayesNet_FG.cpp
-    UGM_chain.cpp
-    UGM_small.cpp
     elaboratePoint2KalmanFilter.cpp
 )

examples/DiscreteBayesNet_FG.cpp

@@ -10,34 +10,43 @@
  * -------------------------------------------------------------------------- */
 /**
- * @file DiscreteBayesNet_FG.cpp
+ * @file DiscreteBayesNet_graph.cpp
  * @brief Discrete Bayes Net example using Factor Graphs
  * @author Abhijit
  * @date Jun 4, 2012
  *
- * We use the famous Rain/Cloudy/Sprinkler Example of [Russell & Norvig, 2009, p529]
- * You may be familiar with other graphical model packages like BNT (available
- * at http://bnt.googlecode.com/svn/trunk/docs/usage.html) where this is used as an
- * example. The following demo is same as that in the above link, except that
- * everything is using GTSAM.
+ * We use the famous Rain/Cloudy/Sprinkler Example of [Russell & Norvig, 2009,
+ * p529] You may be familiar with other graphical model packages like BNT
+ * (available at http://bnt.googlecode.com/svn/trunk/docs/usage.html) where this
+ * is used as an example. The following demo is same as that in the above link,
+ * except that everything is using GTSAM.
  */
 #include <gtsam/discrete/DiscreteFactorGraph.h>
-#include <gtsam/discrete/DiscreteSequentialSolver.h>
+#include <gtsam/discrete/DiscreteMarginals.h>
 #include <iomanip>
 using namespace std;
 using namespace gtsam;
 int main(int argc, char **argv) {
+  // Define keys and a print function
+  Key C(1), S(2), R(3), W(4);
+  auto print = [=](DiscreteFactor::sharedValues values) {
+    cout << boolalpha << "Cloudy = " << static_cast<bool>((*values)[C])
+         << " Sprinkler = " << static_cast<bool>((*values)[S])
+         << " Rain = " << boolalpha << static_cast<bool>((*values)[R])
+         << " WetGrass = " << static_cast<bool>((*values)[W]) << endl;
+  };
   // We assume binary state variables
   // we have 0 == "False" and 1 == "True"
   const size_t nrStates = 2;
   // define variables
-  DiscreteKey Cloudy(1, nrStates), Sprinkler(2, nrStates), Rain(3, nrStates),
-      WetGrass(4, nrStates);
+  DiscreteKey Cloudy(C, nrStates), Sprinkler(S, nrStates), Rain(R, nrStates),
+      WetGrass(W, nrStates);
   // create Factor Graph of the bayes net
   DiscreteFactorGraph graph;
@@ -49,8 +58,9 @@ int main(int argc, char **argv) {
   graph.add(Sprinkler & Rain & WetGrass,
             "1 0 0.1 0.9 0.1 0.9 0.001 0.99");  // P(WetGrass | Sprinkler, Rain)
-  // Alternatively we can also create a DiscreteBayesNet, add DiscreteConditional
-  // factors and create a FactorGraph from it. (See testDiscreteBayesNet.cpp)
+  // Alternatively we can also create a DiscreteBayesNet, add
+  // DiscreteConditional factors and create a FactorGraph from it. (See
+  // testDiscreteBayesNet.cpp)
   // Since this is a relatively small distribution, we can as well print
   // the whole distribution..
@@ -63,57 +73,48 @@ int main(int argc, char **argv) {
       for (size_t h = 0; h < nrStates; h++)
         for (size_t c = 0; c < nrStates; c++) {
           DiscreteFactor::Values values;
-          values[Cloudy.first] = c;
-          values[Sprinkler.first] = h;
-          values[Rain.first] = m;
-          values[WetGrass.first] = a;
+          values[C] = c;
+          values[S] = h;
+          values[R] = m;
+          values[W] = a;
           double prodPot = graph(values);
-          cout << boolalpha << setw(8) << (bool) c << setw(14)
-              << (bool) h << setw(12) << (bool) m << setw(13)
-              << (bool) a << setw(16) << prodPot << endl;
+          cout << setw(8) << static_cast<bool>(c) << setw(14)
+               << static_cast<bool>(h) << setw(12) << static_cast<bool>(m)
+               << setw(13) << static_cast<bool>(a) << setw(16) << prodPot
+               << endl;
         }
   // "Most Probable Explanation", i.e., configuration with largest value
-  DiscreteSequentialSolver solver(graph);
-  DiscreteFactor::sharedValues optimalDecoding = solver.optimize();
+  DiscreteFactor::sharedValues mpe = graph.eliminateSequential()->optimize();
   cout << "\nMost Probable Explanation (MPE):" << endl;
-  cout << boolalpha << "Cloudy = " << (bool)(*optimalDecoding)[Cloudy.first]
-      << " Sprinkler = " << (bool)(*optimalDecoding)[Sprinkler.first]
-      << " Rain = " << boolalpha << (bool)(*optimalDecoding)[Rain.first]
-      << " WetGrass = " << (bool)(*optimalDecoding)[WetGrass.first] << endl;
+  print(mpe);
-  // "Inference" We show an inference query like: probability that the Sprinkler was on;
-  // given that the grass is wet i.e. P( S | W=1) =?
-  cout << "\nInference Query: Probability of Sprinkler being on given Grass is Wet" << endl;
-  // Method 1: we can compute the joint marginal P(S,W) and from that we can compute
-  // P(S | W=1) = P(S,W=1)/P(W=1) We do this in following three steps..
-  //Step1: Compute P(S,W)
-  DiscreteFactorGraph jointFG;
-  jointFG = *solver.jointFactorGraph(DiscreteKeys(Sprinkler & WetGrass).indices());
-  DecisionTreeFactor probSW = jointFG.product();
-  //Step2: Compute P(W)
-  DiscreteFactor::shared_ptr probW = solver.marginalFactor(WetGrass.first);
-  //Step3: Computer P(S | W=1) = P(S,W=1)/P(W=1)
-  DiscreteFactor::Values values;
-  values[WetGrass.first] = 1;
-  //print P(S=0|W=1)
-  values[Sprinkler.first] = 0;
-  cout << "P(S=0|W=1) = " << probSW(values)/(*probW)(values) << endl;
-  //print P(S=1|W=1)
-  values[Sprinkler.first] = 1;
-  cout << "P(S=1|W=1) = " << probSW(values)/(*probW)(values) << endl;
-  // TODO: Method 2 : One way is to modify the factor graph to
-  // incorporate the evidence node and compute the marginal
-  // TODO: graph.addEvidence(Cloudy,0);
+  // "Inference" We show an inference query like: probability that the Sprinkler
+  // was on; given that the grass is wet i.e. P( S | C=0) = ?
+  // add evidence that it is not Cloudy
+  graph.add(Cloudy, "1 0");
+  // solve again, now with evidence
+  DiscreteBayesNet::shared_ptr chordal = graph.eliminateSequential();
+  DiscreteFactor::sharedValues mpe_with_evidence = chordal->optimize();
+  cout << "\nMPE given C=0:" << endl;
+  print(mpe_with_evidence);
+  // we can also calculate arbitrary marginals:
+  DiscreteMarginals marginals(graph);
+  cout << "\nP(S=1|C=0):" << marginals.marginalProbabilities(Sprinkler)[1]
+       << endl;
+  cout << "\nP(R=0|C=0):" << marginals.marginalProbabilities(Rain)[0] << endl;
+  cout << "\nP(W=1|C=0):" << marginals.marginalProbabilities(WetGrass)[1]
+       << endl;
+  // We can also sample from it
+  cout << "\n10 samples:" << endl;
+  for (size_t i = 0; i < 10; i++) {
+    DiscreteFactor::sharedValues sample = chordal->sample();
+    print(sample);
+  }
   return 0;
 }
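
The refactor above replaces DiscreteSequentialSolver with graph.eliminateSequential() plus DiscreteMarginals. For reference, here is a minimal standalone sketch of that post-refactor pattern, using only calls that appear in the diff; the two-variable toy model (keys A and B and their probability tables) is a made-up illustration, not part of the commit.

// Minimal sketch (not from the commit): MPE, marginals, and sampling with the
// new discrete API. The toy variables and tables below are hypothetical.
#include <gtsam/discrete/DiscreteFactorGraph.h>
#include <gtsam/discrete/DiscreteMarginals.h>

#include <iostream>

using namespace std;
using namespace gtsam;

int main() {
  // Two hypothetical binary variables: A (cause) and B (effect).
  Key A(1), B(2);
  DiscreteKey VarA(A, 2), VarB(B, 2);

  // Build the factor graph directly from probability tables.
  DiscreteFactorGraph graph;
  graph.add(VarA, "0.4 0.6");                 // P(A)
  graph.add(VarA & VarB, "0.9 0.1 0.2 0.8");  // P(B | A)

  // MPE: eliminate into a Bayes net, then optimize.
  DiscreteBayesNet::shared_ptr chordal = graph.eliminateSequential();
  DiscreteFactor::sharedValues mpe = chordal->optimize();
  mpe->print("MPE");

  // Marginals via the Bayes-tree based DiscreteMarginals helper.
  DiscreteMarginals marginals(graph);
  cout << "P(B=1) = " << marginals.marginalProbabilities(VarB)[1] << endl;

  // Ancestral sampling from the eliminated Bayes net.
  DiscreteFactor::sharedValues sample = chordal->sample();
  sample->print("sample");
  return 0;
}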

examples/UGM_chain.cpp

@@ -10,7 +10,7 @@
  * -------------------------------------------------------------------------- */
 /**
- * @file small.cpp
+ * @file UGM_chain.cpp
  * @brief UGM (undirected graphical model) examples: chain
  * @author Frank Dellaert
  * @author Abhijit Kundu
@@ -19,10 +19,9 @@
  * for more explanation. This code demos the same example using GTSAM.
  */
-#include <gtsam/discrete/DiscreteFactorGraph.h>
-#include <gtsam/discrete/DiscreteSequentialSolver.h>
-#include <gtsam/discrete/DiscreteMarginals.h>
 #include <gtsam/base/timing.h>
+#include <gtsam/discrete/DiscreteFactorGraph.h>
+#include <gtsam/discrete/DiscreteMarginals.h>
 #include <iomanip>
@@ -30,7 +29,6 @@ using namespace std;
 using namespace gtsam;
 int main(int argc, char** argv) {
   // Set Number of Nodes in the Graph
   const int nrNodes = 60;
@@ -51,10 +49,10 @@ int main(int argc, char** argv) {
   // add node potentials
   graph.add(nodes[0], ".3 .6 .1 0 0 0 0");
-  for (int i = 1; i < nrNodes; i++)
-    graph.add(nodes[i], "1 1 1 1 1 1 1");
-  const std::string edgePotential = ".08 .9 .01 0 0 0 .01 "
+  for (int i = 1; i < nrNodes; i++) graph.add(nodes[i], "1 1 1 1 1 1 1");
+  const std::string edgePotential =
+      ".08 .9 .01 0 0 0 .01 "
       ".03 .95 .01 0 0 0 .01 "
       ".06 .06 .75 .05 .05 .02 .01 "
       "0 0 0 .3 .6 .09 .01 "
@@ -71,39 +69,24 @@ int main(int argc, char** argv) {
   // "Decoding", i.e., configuration with largest value
   // We use sequential variable elimination
-  DiscreteSequentialSolver solver(graph);
-  DiscreteFactor::sharedValues optimalDecoding = solver.optimize();
+  DiscreteBayesNet::shared_ptr chordal = graph.eliminateSequential();
+  DiscreteFactor::sharedValues optimalDecoding = chordal->optimize();
   optimalDecoding->print("\nMost Probable Explanation (optimalDecoding)\n");
   // "Inference" Computing marginals for each node
-  cout << "\nComputing Node Marginals ..(Sequential Elimination)" << endl;
-  gttic_(Sequential);
-  for (vector<DiscreteKey>::iterator itr = nodes.begin(); itr != nodes.end();
-      ++itr) {
-    //Compute the marginal
-    Vector margProbs = solver.marginalProbabilities(*itr);
-    //Print the marginals
-    cout << "Node#" << setw(4) << itr->first << " : ";
-    print(margProbs);
-  }
-  gttoc_(Sequential);
   // Here we'll make use of DiscreteMarginals class, which makes use of
   // bayes-tree based shortcut evaluation of marginals
   DiscreteMarginals marginals(graph);
   cout << "\nComputing Node Marginals ..(BayesTree based)" << endl;
   gttic_(Multifrontal);
-  for (vector<DiscreteKey>::iterator itr = nodes.begin(); itr != nodes.end();
-      ++itr) {
+  for (vector<DiscreteKey>::iterator it = nodes.begin(); it != nodes.end();
+       ++it) {
     // Compute the marginal
-    Vector margProbs = marginals.marginalProbabilities(*itr);
+    Vector margProbs = marginals.marginalProbabilities(*it);
     // Print the marginals
-    cout << "Node#" << setw(4) << itr->first << " : ";
+    cout << "Node#" << setw(4) << it->first << " : ";
     print(margProbs);
   }
   gttoc_(Multifrontal);
@@ -111,4 +94,3 @@ int main(int argc, char** argv) {
   tictoc_print_();
   return 0;
 }
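
UGM_chain.cpp keeps the gttic_/gttoc_/tictoc_print_ timing macros from gtsam/base/timing.h around the marginals loop. A minimal standalone sketch of that timing pattern follows; the timer label and the dummy workload are hypothetical and not part of the commit.

// Minimal sketch (not from the commit) of the timing macros used in UGM_chain.cpp.
#include <gtsam/base/timing.h>

int main() {
  gttic_(Marginals);  // start a named timer (hypothetical label)
  double sum = 0.0;
  for (int i = 0; i < 1000000; i++) sum += 1e-9 * i;  // stand-in workload
  (void)sum;          // keep the compiler from flagging an unused result
  gttoc_(Marginals);  // stop the timer
  tictoc_print_();    // print the timing table, as UGM_chain.cpp does
  return 0;
}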

examples/UGM_small.cpp

@@ -10,15 +10,16 @@
  * -------------------------------------------------------------------------- */
 /**
- * @file small.cpp
+ * @file UGM_small.cpp
  * @brief UGM (undirected graphical model) examples: small
  * @author Frank Dellaert
  *
  * See http://www.di.ens.fr/~mschmidt/Software/UGM/small.html
  */
+#include <gtsam/base/Vector.h>
 #include <gtsam/discrete/DiscreteFactorGraph.h>
-#include <gtsam/discrete/DiscreteSequentialSolver.h>
+#include <gtsam/discrete/DiscreteMarginals.h>
 using namespace std;
 using namespace gtsam;
@@ -61,24 +62,24 @@ int main(int argc, char** argv) {
   // "Decoding", i.e., configuration with largest value (MPE)
   // We use sequential variable elimination
-  DiscreteSequentialSolver solver(graph);
-  DiscreteFactor::sharedValues optimalDecoding = solver.optimize();
+  DiscreteBayesNet::shared_ptr chordal = graph.eliminateSequential();
+  DiscreteFactor::sharedValues optimalDecoding = chordal->optimize();
   optimalDecoding->print("\noptimalDecoding");
   // "Inference" Computing marginals
   cout << "\nComputing Node Marginals .." << endl;
-  Vector margProbs;
-  margProbs = solver.marginalProbabilities(Cathy);
+  DiscreteMarginals marginals(graph);
+  Vector margProbs = marginals.marginalProbabilities(Cathy);
   print(margProbs, "Cathy's Node Marginal:");
-  margProbs = solver.marginalProbabilities(Heather);
+  margProbs = marginals.marginalProbabilities(Heather);
   print(margProbs, "Heather's Node Marginal");
-  margProbs = solver.marginalProbabilities(Mark);
+  margProbs = marginals.marginalProbabilities(Mark);
   print(margProbs, "Mark's Node Marginal");
-  margProbs = solver.marginalProbabilities(Allison);
+  margProbs = marginals.marginalProbabilities(Allison);
   print(margProbs, "Allison's Node Marginal");
   return 0;