From 3dcff34b468988b97d47aefe3a0c0b26d78d82f3 Mon Sep 17 00:00:00 2001
From: Frank Dellaert
Date: Thu, 9 Jul 2020 20:46:12 -0400
Subject: [PATCH] Formatted and fixed discrete examples

---
 examples/CMakeLists.txt          |   3 -
 examples/DiscreteBayesNet_FG.cpp | 119 ++++++++++++++++---------------
 examples/UGM_chain.cpp           |  74 ++++++++-----------
 examples/UGM_small.cpp           |  19 ++---
 4 files changed, 98 insertions(+), 117 deletions(-)

diff --git a/examples/CMakeLists.txt b/examples/CMakeLists.txt
index 7251c2b6f..476f4ae21 100644
--- a/examples/CMakeLists.txt
+++ b/examples/CMakeLists.txt
@@ -1,7 +1,4 @@
 set (excluded_examples
-  DiscreteBayesNet_FG.cpp
-  UGM_chain.cpp
-  UGM_small.cpp
   elaboratePoint2KalmanFilter.cpp
 )

diff --git a/examples/DiscreteBayesNet_FG.cpp b/examples/DiscreteBayesNet_FG.cpp
index 6eb08c12e..9802b5984 100644
--- a/examples/DiscreteBayesNet_FG.cpp
+++ b/examples/DiscreteBayesNet_FG.cpp
@@ -10,110 +10,111 @@
  * -------------------------------------------------------------------------- */

 /**
- * @file DiscreteBayesNet_FG.cpp
+ * @file DiscreteBayesNet_graph.cpp
  * @brief Discrete Bayes Net example using Factor Graphs
  * @author Abhijit
  * @date Jun 4, 2012
  *
- * We use the famous Rain/Cloudy/Sprinkler Example of [Russell & Norvig, 2009, p529]
- * You may be familiar with other graphical model packages like BNT (available
- * at http://bnt.googlecode.com/svn/trunk/docs/usage.html) where this is used as an
- * example. The following demo is same as that in the above link, except that
- * everything is using GTSAM.
+ * We use the famous Rain/Cloudy/Sprinkler example of [Russell & Norvig, 2009,
+ * p529]. You may be familiar with other graphical model packages like BNT
+ * (available at http://bnt.googlecode.com/svn/trunk/docs/usage.html), where
+ * this is used as an example. The following demo is the same as the one in
+ * the above link, except that everything uses GTSAM.
  */

 #include <gtsam/discrete/DiscreteFactorGraph.h>
-#include <gtsam/discrete/DiscreteSequentialSolver.h>
+#include <gtsam/discrete/DiscreteMarginals.h>
+
 #include <iomanip>

 using namespace std;
 using namespace gtsam;

 int main(int argc, char **argv) {
+  // Define keys and a print function
+  Key C(1), S(2), R(3), W(4);
+  auto print = [=](DiscreteFactor::sharedValues values) {
+    cout << boolalpha << "Cloudy = " << static_cast<bool>((*values)[C])
+         << "  Sprinkler = " << static_cast<bool>((*values)[S])
+         << "  Rain = " << boolalpha << static_cast<bool>((*values)[R])
+         << "  WetGrass = " << static_cast<bool>((*values)[W]) << endl;
+  };

   // We assume binary state variables
   // we have 0 == "False" and 1 == "True"
   const size_t nrStates = 2;

   // define variables
-  DiscreteKey Cloudy(1, nrStates), Sprinkler(2, nrStates), Rain(3, nrStates),
-      WetGrass(4, nrStates);
+  DiscreteKey Cloudy(C, nrStates), Sprinkler(S, nrStates), Rain(R, nrStates),
+      WetGrass(W, nrStates);

   // create Factor Graph of the bayes net
   DiscreteFactorGraph graph;

   // add factors
-  graph.add(Cloudy, "0.5 0.5");                     //P(Cloudy)
-  graph.add(Cloudy & Sprinkler, "0.5 0.5 0.9 0.1"); //P(Sprinkler | Cloudy)
-  graph.add(Cloudy & Rain, "0.8 0.2 0.2 0.8");      //P(Rain | Cloudy)
+  graph.add(Cloudy, "0.5 0.5");                      // P(Cloudy)
+  graph.add(Cloudy & Sprinkler, "0.5 0.5 0.9 0.1");  // P(Sprinkler | Cloudy)
+  graph.add(Cloudy & Rain, "0.8 0.2 0.2 0.8");       // P(Rain | Cloudy)
   graph.add(Sprinkler & Rain & WetGrass,
-            "1 0 0.1 0.9 0.1 0.9 0.001 0.99"); //P(WetGrass | Sprinkler, Rain)
+            "1 0 0.1 0.9 0.1 0.9 0.001 0.99");  // P(WetGrass | Sprinkler, Rain)

-  // Alternatively we can also create a DiscreteBayesNet, add DiscreteConditional
-  // factors and create a FactorGraph from it. (See testDiscreteBayesNet.cpp)
+  // Alternatively we can also create a DiscreteBayesNet, add
+  // DiscreteConditional factors and create a FactorGraph from it. (See
+  // testDiscreteBayesNet.cpp)

   // Since this is a relatively small distribution, we can as well print
   // the whole distribution..
   cout << "Distribution of Example: " << endl;
   cout << setw(11) << "Cloudy(C)" << setw(14) << "Sprinkler(S)" << setw(10)
-      << "Rain(R)" << setw(14) << "WetGrass(W)" << setw(15) << "P(C,S,R,W)"
-      << endl;
+       << "Rain(R)" << setw(14) << "WetGrass(W)" << setw(15) << "P(C,S,R,W)"
+       << endl;
   for (size_t a = 0; a < nrStates; a++)
     for (size_t m = 0; m < nrStates; m++)
       for (size_t h = 0; h < nrStates; h++)
         for (size_t c = 0; c < nrStates; c++) {
           DiscreteFactor::Values values;
-          values[Cloudy.first] = c;
-          values[Sprinkler.first] = h;
-          values[Rain.first] = m;
-          values[WetGrass.first] = a;
+          values[C] = c;
+          values[S] = h;
+          values[R] = m;
+          values[W] = a;
           double prodPot = graph(values);
-          cout << boolalpha << setw(8) << (bool) c << setw(14)
-              << (bool) h << setw(12) << (bool) m << setw(13)
-              << (bool) a << setw(16) << prodPot << endl;
+          cout << setw(8) << static_cast<bool>(c) << setw(14)
+               << static_cast<bool>(h) << setw(12) << static_cast<bool>(m)
+               << setw(13) << static_cast<bool>(a) << setw(16) << prodPot
+               << endl;
         }

   // "Most Probable Explanation", i.e., configuration with largest value
-  DiscreteSequentialSolver solver(graph);
-  DiscreteFactor::sharedValues optimalDecoding = solver.optimize();
-  cout << "\nMost Probable Explanation (MPE):" << endl;
-  cout << boolalpha << "Cloudy = " << (bool)(*optimalDecoding)[Cloudy.first]
-       << "  Sprinkler = " << (bool)(*optimalDecoding)[Sprinkler.first]
-       << "  Rain = " << boolalpha << (bool)(*optimalDecoding)[Rain.first]
-       << "  WetGrass = " << (bool)(*optimalDecoding)[WetGrass.first] << endl;
+  DiscreteFactor::sharedValues mpe = graph.eliminateSequential()->optimize();
+  cout << "\nMost Probable Explanation (MPE):" << endl;
+  print(mpe);

+  // "Inference" We show an inference query like: probability that the
+  // Sprinkler was on, given that it is not cloudy, i.e., P(S | C=0) = ?

-  // "Inference" We show an inference query like: probability that the Sprinkler was on;
-  // given that the grass is wet i.e. P( S | W=1) =?
-  cout << "\nInference Query: Probability of Sprinkler being on given Grass is Wet" << endl;
+  // add evidence that it is not Cloudy
+  graph.add(Cloudy, "1 0");

-  // Method 1: we can compute the joint marginal P(S,W) and from that we can compute
-  // P(S | W=1) = P(S,W=1)/P(W=1) We do this in following three steps..
+  // solve again, now with evidence
+  DiscreteBayesNet::shared_ptr chordal = graph.eliminateSequential();
+  DiscreteFactor::sharedValues mpe_with_evidence = chordal->optimize();

-  //Step1: Compute P(S,W)
-  DiscreteFactorGraph jointFG;
-  jointFG = *solver.jointFactorGraph(DiscreteKeys(Sprinkler & WetGrass).indices());
-  DecisionTreeFactor probSW = jointFG.product();
+  cout << "\nMPE given C=0:" << endl;
+  print(mpe_with_evidence);

-  //Step2: Compute P(W)
-  DiscreteFactor::shared_ptr probW = solver.marginalFactor(WetGrass.first);
-
-  //Step3: Computer P(S | W=1) = P(S,W=1)/P(W=1)
-  DiscreteFactor::Values values;
-  values[WetGrass.first] = 1;
-
-  //print P(S=0|W=1)
-  values[Sprinkler.first] = 0;
-  cout << "P(S=0|W=1) = " << probSW(values)/(*probW)(values) << endl;
-
-  //print P(S=1|W=1)
-  values[Sprinkler.first] = 1;
-  cout << "P(S=1|W=1) = " << probSW(values)/(*probW)(values) << endl;
-
-  // TODO: Method 2 : One way is to modify the factor graph to
-  // incorporate the evidence node and compute the marginal
-  // TODO: graph.addEvidence(Cloudy,0);
+  // we can also calculate arbitrary marginals:
+  DiscreteMarginals marginals(graph);
+  cout << "\nP(S=1|C=0):" << marginals.marginalProbabilities(Sprinkler)[1]
+       << endl;
+  cout << "\nP(R=0|C=0):" << marginals.marginalProbabilities(Rain)[0] << endl;
+  cout << "\nP(W=1|C=0):" << marginals.marginalProbabilities(WetGrass)[1]
+       << endl;

+  // We can also sample from it
+  cout << "\n10 samples:" << endl;
+  for (size_t i = 0; i < 10; i++) {
+    DiscreteFactor::sharedValues sample = chordal->sample();
+    print(sample);
+  }
   return 0;
 }
diff --git a/examples/UGM_chain.cpp b/examples/UGM_chain.cpp
index 4ce4e7af4..3a885a844 100644
--- a/examples/UGM_chain.cpp
+++ b/examples/UGM_chain.cpp
@@ -10,7 +10,7 @@
  * -------------------------------------------------------------------------- */

 /**
- * @file small.cpp
+ * @file UGM_chain.cpp
  * @brief UGM (undirected graphical model) examples: chain
  * @author Frank Dellaert
  * @author Abhijit Kundu
@@ -19,10 +19,9 @@
  * for more explanation. This code demos the same example using GTSAM.
  */

-#include <gtsam/discrete/DiscreteFactorGraph.h>
-#include <gtsam/discrete/DiscreteSequentialSolver.h>
-#include <gtsam/discrete/DiscreteMarginals.h>
 #include <gtsam/base/timing.h>
+#include <gtsam/discrete/DiscreteFactorGraph.h>
+#include <gtsam/discrete/DiscreteMarginals.h>

 #include <iostream>

@@ -30,9 +29,8 @@ using namespace std;
 using namespace gtsam;

 int main(int argc, char** argv) {
-
-  // Set Number of Nodes in the Graph
-  const int nrNodes = 60;
+  // Set Number of Nodes in the Graph
+  const int nrNodes = 60;

   // Each node takes 1 of 7 possible states denoted by 0-6 in following order:
   // ["VideoGames" "Industry" "GradSchool" "VideoGames(with PhD)"
@@ -40,70 +38,55 @@ int main(int argc, char** argv) {
   const size_t nrStates = 7;

   // define variables
-  vector<DiscreteKey> nodes;
-  for (int i = 0; i < nrNodes; i++){
-    DiscreteKey dk(i, nrStates);
-    nodes.push_back(dk);
-  }
+  vector<DiscreteKey> nodes;
+  for (int i = 0; i < nrNodes; i++) {
+    DiscreteKey dk(i, nrStates);
+    nodes.push_back(dk);
+  }

   // create graph
   DiscreteFactorGraph graph;

   // add node potentials
   graph.add(nodes[0], ".3 .6 .1 0 0 0 0");
-  for (int i = 1; i < nrNodes; i++)
-    graph.add(nodes[i], "1 1 1 1 1 1 1");
+  for (int i = 1; i < nrNodes; i++) graph.add(nodes[i], "1 1 1 1 1 1 1");

-  const std::string edgePotential = ".08 .9 .01 0 0 0 .01 "
-                                    ".03 .95 .01 0 0 0 .01 "
-                                    ".06 .06 .75 .05 .05 .02 .01 "
-                                    "0 0 0 .3 .6 .09 .01 "
-                                    "0 0 0 .02 .95 .02 .01 "
-                                    "0 0 0 .01 .01 .97 .01 "
-                                    "0 0 0 0 0 0 1";
+  const std::string edgePotential =
+      ".08 .9 .01 0 0 0 .01 "
+      ".03 .95 .01 0 0 0 .01 "
+      ".06 .06 .75 .05 .05 .02 .01 "
+      "0 0 0 .3 .6 .09 .01 "
+      "0 0 0 .02 .95 .02 .01 "
+      "0 0 0 .01 .01 .97 .01 "
+      "0 0 0 0 0 0 1";

   // add edge potentials
   for (int i = 0; i < nrNodes - 1; i++)
     graph.add(nodes[i] & nodes[i + 1], edgePotential);

   cout << "Created Factor Graph with " << nrNodes << " variable nodes and "
-      << graph.size() << " factors (Unary+Edge).";
+       << graph.size() << " factors (Unary+Edge).";

   // "Decoding", i.e., configuration with largest value
   // We use sequential variable elimination
-  DiscreteSequentialSolver solver(graph);
-  DiscreteFactor::sharedValues optimalDecoding = solver.optimize();
+  DiscreteBayesNet::shared_ptr chordal = graph.eliminateSequential();
+  DiscreteFactor::sharedValues optimalDecoding = chordal->optimize();
   optimalDecoding->print("\nMost Probable Explanation (optimalDecoding)\n");

   // "Inference" Computing marginals for each node
-
-
-  cout << "\nComputing Node Marginals ..(Sequential Elimination)" << endl;
-  gttic_(Sequential);
-  for (vector<DiscreteKey>::iterator itr = nodes.begin(); itr != nodes.end();
-      ++itr) {
-    //Compute the marginal
-    Vector margProbs = solver.marginalProbabilities(*itr);
-
-    //Print the marginals
-    cout << "Node#" << setw(4) << itr->first << " : ";
-    print(margProbs);
-  }
-  gttoc_(Sequential);
-
   // Here we'll make use of DiscreteMarginals class, which makes use of
   // bayes-tree based shortcut evaluation of marginals
   DiscreteMarginals marginals(graph);

   cout << "\nComputing Node Marginals ..(BayesTree based)" << endl;
   gttic_(Multifrontal);
-  for (vector<DiscreteKey>::iterator itr = nodes.begin(); itr != nodes.end();
-      ++itr) {
-    //Compute the marginal
-    Vector margProbs = marginals.marginalProbabilities(*itr);
+  for (vector<DiscreteKey>::iterator it = nodes.begin(); it != nodes.end();
+       ++it) {
+    // Compute the marginal
+    Vector margProbs = marginals.marginalProbabilities(*it);

-    //Print the marginals
-    cout << "Node#" << setw(4) << itr->first << " : ";
+    // Print the marginals
+    cout << "Node#" << setw(4) << it->first << " : ";
     print(margProbs);
   }
   gttoc_(Multifrontal);
@@ -111,4 +94,3 @@ int main(int argc, char** argv) {
   tictoc_print_();
   return 0;
 }
-
diff --git a/examples/UGM_small.cpp b/examples/UGM_small.cpp
index f5338bc67..27a6205a3 100644
--- a/examples/UGM_small.cpp
+++ b/examples/UGM_small.cpp
@@ -10,15 +10,16 @@
  * -------------------------------------------------------------------------- */

 /**
- * @file small.cpp
+ * @file UGM_small.cpp
  * @brief UGM (undirected graphical model) examples: small
  * @author Frank Dellaert
  *
  * See http://www.di.ens.fr/~mschmidt/Software/UGM/small.html
  */

+#include <gtsam/base/Vector.h>
 #include <gtsam/discrete/DiscreteFactorGraph.h>
-#include <gtsam/discrete/DiscreteSequentialSolver.h>
+#include <gtsam/discrete/DiscreteMarginals.h>

 using namespace std;
 using namespace gtsam;

@@ -61,24 +62,24 @@ int main(int argc, char** argv) {

   // "Decoding", i.e., configuration with largest value (MPE)
   // We use sequential variable elimination
-  DiscreteSequentialSolver solver(graph);
-  DiscreteFactor::sharedValues optimalDecoding = solver.optimize();
+  DiscreteBayesNet::shared_ptr chordal = graph.eliminateSequential();
+  DiscreteFactor::sharedValues optimalDecoding = chordal->optimize();
   optimalDecoding->print("\noptimalDecoding");

   // "Inference" Computing marginals
   cout << "\nComputing Node Marginals .." << endl;
-  Vector margProbs;
+  DiscreteMarginals marginals(graph);

-  margProbs = solver.marginalProbabilities(Cathy);
+  Vector margProbs = marginals.marginalProbabilities(Cathy);
   print(margProbs, "Cathy's Node Marginal:");

-  margProbs = solver.marginalProbabilities(Heather);
+  margProbs = marginals.marginalProbabilities(Heather);
   print(margProbs, "Heather's Node Marginal");

-  margProbs = solver.marginalProbabilities(Mark);
+  margProbs = marginals.marginalProbabilities(Mark);
   print(margProbs, "Mark's Node Marginal");

-  margProbs = solver.marginalProbabilities(Allison);
+  margProbs = marginals.marginalProbabilities(Allison);
   print(margProbs, "Allison's Node Marginal");

   return 0;
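
For reference, the snippet below condenses the API migration this patch applies in all three examples: the removed DiscreteSequentialSolver is replaced by DiscreteFactorGraph::eliminateSequential() for MPE and sampling, and by DiscreteMarginals for marginal queries. This is a minimal standalone sketch, assuming the same GTSAM 4.x headers used above; the keys A and B and all potential values are invented for illustration and are not taken from the examples.

#include <gtsam/discrete/DiscreteFactorGraph.h>
#include <gtsam/discrete/DiscreteMarginals.h>

#include <iostream>

using namespace gtsam;

int main() {
  // Two binary variables with an (invented) prior on A and a joint
  // potential tying B to A.
  DiscreteKey A(0, 2), B(1, 2);
  DiscreteFactorGraph graph;
  graph.add(A, "0.9 0.1");              // P(A)
  graph.add(A & B, "0.9 0.1 0.2 0.8");  // P(B | A)

  // MPE: eliminate the factor graph into a Bayes net, then optimize,
  // instead of constructing a DiscreteSequentialSolver.
  DiscreteBayesNet::shared_ptr chordal = graph.eliminateSequential();
  DiscreteFactor::sharedValues mpe = chordal->optimize();
  std::cout << "MPE: A=" << (*mpe)[0] << " B=" << (*mpe)[1] << std::endl;

  // Marginals: DiscreteMarginals replaces solver.marginalProbabilities().
  DiscreteMarginals marginals(graph);
  Vector pB = marginals.marginalProbabilities(B);
  std::cout << "P(B=1) = " << pB[1] << std::endl;

  // Sampling from the eliminated Bayes net, as in DiscreteBayesNet_FG.cpp.
  DiscreteFactor::sharedValues sample = chordal->sample();
  std::cout << "sample: A=" << (*sample)[0] << " B=" << (*sample)[1]
            << std::endl;
  return 0;
}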