Fix examples

parent e3c98b0faf
commit 99a97da5f7
@@ -53,10 +53,9 @@ int main(int argc, char **argv) {
   // Create solver and eliminate
   Ordering ordering;
   ordering += Key(0), Key(1), Key(2), Key(3), Key(4), Key(5), Key(6), Key(7);
-  DiscreteBayesNet::shared_ptr chordal = fg.eliminateSequential(ordering);

   // solve
-  auto mpe = chordal->optimize();
+  auto mpe = fg.optimize();
   GTSAM_PRINT(mpe);

   // We can also build a Bayes tree (directed junction tree).
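
For readers skimming the diff: the change above relies on DiscreteFactorGraph::optimize() computing the MPE by max-product on the factor graph itself, so explicit sequential elimination is only needed when the Bayes net is wanted for something else. A minimal standalone sketch of the new call pattern, using made-up keys and factor tables rather than the example's actual network:

#include <gtsam/base/Testable.h>
#include <gtsam/discrete/DiscreteFactorGraph.h>

using namespace gtsam;

int main() {
  // Two binary variables: a prior on A and a factor that makes A and B agree.
  DiscreteKey A(0, 2), B(1, 2);
  DiscreteFactorGraph fg;
  fg.add(A, "3 1");          // unnormalized P(A)
  fg.add(A & B, "9 1 1 9");  // strong preference for A == B

  // New style: the MPE comes straight from the factor graph via max-product.
  auto mpe = fg.optimize();
  GTSAM_PRINT(mpe);  // expect A = 0, B = 0 for the tables above
  return 0;
}
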
@@ -69,14 +68,14 @@ int main(int argc, char **argv) {
   fg.add(Dyspnea, "0 1");

   // solve again, now with evidence
-  DiscreteBayesNet::shared_ptr chordal2 = fg.eliminateSequential(ordering);
-  auto mpe2 = chordal2->optimize();
+  auto mpe2 = fg.optimize();
   GTSAM_PRINT(mpe2);

   // We can also sample from it
+  DiscreteBayesNet::shared_ptr chordal = fg.eliminateSequential(ordering);
   cout << "\n10 samples:" << endl;
   for (size_t i = 0; i < 10; i++) {
-    auto sample = chordal2->sample();
+    auto sample = chordal->sample();
     GTSAM_PRINT(sample);
   }
   return 0;
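
This hunk keeps a single eliminateSequential call because ancestral sampling still needs a DiscreteBayesNet; only the MPE moves to the factor graph. A hedged, self-contained sketch of that split, with a toy two-variable graph standing in for the example's network:

#include <gtsam/base/Testable.h>
#include <gtsam/discrete/DiscreteBayesNet.h>
#include <gtsam/discrete/DiscreteFactorGraph.h>

using namespace gtsam;

int main() {
  DiscreteKey A(0, 2), B(1, 2);
  DiscreteFactorGraph fg;
  fg.add(A, "3 1");
  fg.add(A & B, "9 1 1 9");

  // Evidence is just one more unary factor: this one clamps B to 1.
  fg.add(B, "0 1");

  // MPE with the evidence folded in, no explicit elimination needed.
  auto mpe2 = fg.optimize();
  GTSAM_PRINT(mpe2);

  // Sampling still goes through an eliminated Bayes net (default ordering here).
  DiscreteBayesNet::shared_ptr chordal = fg.eliminateSequential();
  for (size_t i = 0; i < 10; i++) {
    auto sample = chordal->sample();
    GTSAM_PRINT(sample);
  }
  return 0;
}
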
@@ -85,7 +85,7 @@ int main(int argc, char **argv) {
   }

   // "Most Probable Explanation", i.e., configuration with largest value
-  auto mpe = graph.eliminateSequential()->optimize();
+  auto mpe = graph.optimize();
   cout << "\nMost Probable Explanation (MPE):" << endl;
   print(mpe);

@@ -96,8 +96,7 @@ int main(int argc, char **argv) {
   graph.add(Cloudy, "1 0");

   // solve again, now with evidence
-  DiscreteBayesNet::shared_ptr chordal = graph.eliminateSequential();
-  auto mpe_with_evidence = chordal->optimize();
+  auto mpe_with_evidence = graph.optimize();

   cout << "\nMPE given C=0:" << endl;
   print(mpe_with_evidence);
@@ -110,7 +109,8 @@ int main(int argc, char **argv) {
   cout << "\nP(W=1|C=0):" << marginals.marginalProbabilities(WetGrass)[1]
        << endl;

-  // We can also sample from it
+  // We can also sample from the eliminated graph
+  DiscreteBayesNet::shared_ptr chordal = graph.eliminateSequential();
   cout << "\n10 samples:" << endl;
   for (size_t i = 0; i < 10; i++) {
     auto sample = chordal->sample();
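
Since this hunk also shows the marginals printout in context, here is a compact sketch of the sum-product side: DiscreteMarginals over a graph that includes an evidence factor. The two-variable network and its tables are placeholders, not the sprinkler example's actual CPTs:

#include <gtsam/discrete/DiscreteFactorGraph.h>
#include <gtsam/discrete/DiscreteMarginals.h>

#include <iostream>

using namespace gtsam;
using namespace std;

int main() {
  DiscreteKey Cloudy(0, 2), WetGrass(1, 2);
  DiscreteFactorGraph graph;
  graph.add(Cloudy, "5 5");                 // flat prior on Cloudy
  graph.add(Cloudy & WetGrass, "8 2 3 7");  // made-up link between C and W
  graph.add(Cloudy, "1 0");                 // evidence: Cloudy = 0

  // Sum-product marginals, conditioned on the evidence factor above.
  DiscreteMarginals marginals(graph);
  cout << "P(W=1|C=0): " << marginals.marginalProbabilities(WetGrass)[1] << endl;
  return 0;
}
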
@@ -59,16 +59,16 @@ int main(int argc, char **argv) {
   // Convert to factor graph
   DiscreteFactorGraph factorGraph(hmm);

+  // Do max-product
+  auto mpe = factorGraph.optimize();
+  GTSAM_PRINT(mpe);
+
   // Create solver and eliminate
   // This will create a DAG ordered with arrow of time reversed
   DiscreteBayesNet::shared_ptr chordal =
       factorGraph.eliminateSequential(ordering);
   chordal->print("Eliminated");

-  // solve
-  auto mpe = chordal->optimize();
-  GTSAM_PRINT(mpe);
-
   // We can also sample from it
   cout << "\n10 samples:" << endl;
   for (size_t k = 0; k < 10; k++) {
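
The HMM hunk now takes the MPE from max-product and keeps elimination only to build a DAG that can be printed and sampled. A small two-variable sketch of that flow; the chain, tables, and ordering below are illustrative only, not the example's HMM:

#include <gtsam/base/Testable.h>
#include <gtsam/discrete/DiscreteBayesNet.h>
#include <gtsam/discrete/DiscreteFactorGraph.h>
#include <gtsam/inference/Ordering.h>

using namespace gtsam;

int main() {
  // A two-step chain standing in for the HMM: prior on X0, transition to X1.
  DiscreteKey X0(0, 2), X1(1, 2);
  DiscreteFactorGraph factorGraph;
  factorGraph.add(X0, "7 3");
  factorGraph.add(X0 & X1, "9 1 1 9");

  // Do max-product: MPE without building the Bayes net.
  auto mpe = factorGraph.optimize();
  GTSAM_PRINT(mpe);

  // Eliminate with an explicit ordering to get a DAG we can print or sample.
  Ordering ordering;
  ordering.push_back(Key(0));
  ordering.push_back(Key(1));
  DiscreteBayesNet::shared_ptr chordal =
      factorGraph.eliminateSequential(ordering);
  chordal->print("Eliminated");
  return 0;
}
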
@@ -68,9 +68,8 @@ int main(int argc, char** argv) {
            << graph.size() << " factors (Unary+Edge).";

   // "Decoding", i.e., configuration with largest value
-  // We use sequential variable elimination
-  DiscreteBayesNet::shared_ptr chordal = graph.eliminateSequential();
-  auto optimalDecoding = chordal->optimize();
+  // Uses max-product.
+  auto optimalDecoding = graph.optimize();
   optimalDecoding.print("\nMost Probable Explanation (optimalDecoding)\n");

   // "Inference" Computing marginals for each node
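
Both UGM hunks swap the eliminate-then-optimize pair for a single graph.optimize() call. A toy chain MRF in the same unary-plus-edge style, decoded with max-product; the node count and potentials are invented for illustration:

#include <gtsam/discrete/DiscreteFactorGraph.h>

using namespace gtsam;

int main() {
  // Three binary nodes with unary "data" factors and smoothing edge factors.
  DiscreteKey x0(0, 2), x1(1, 2), x2(2, 2);
  DiscreteFactorGraph graph;
  graph.add(x0, "7 3");
  graph.add(x1, "4 6");
  graph.add(x2, "8 2");
  graph.add(x0 & x1, "9 1 1 9");
  graph.add(x1 & x2, "9 1 1 9");

  // "Decoding" is the MPE, computed by max-product directly on the graph.
  auto optimalDecoding = graph.optimize();
  optimalDecoding.print("\nMost Probable Explanation (optimalDecoding)\n");
  return 0;
}
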
@@ -61,9 +61,8 @@ int main(int argc, char** argv) {
   }

   // "Decoding", i.e., configuration with largest value (MPE)
-  // We use sequential variable elimination
-  DiscreteBayesNet::shared_ptr chordal = graph.eliminateSequential();
-  auto optimalDecoding = chordal->optimize();
+  // Uses max-product
+  auto optimalDecoding = graph.optimize();
   GTSAM_PRINT(optimalDecoding);

   // "Inference" Computing marginals
@@ -165,11 +165,11 @@ void solveStaged(size_t addMutex = 2) {
     root->print(""/*scheduler.studentName(s)*/);

     // solve root node only
-    DiscreteValues values;
-    size_t bestSlot = root->solve(values);
+    size_t bestSlot = root->argmax();

     // get corresponding count
     DiscreteKey dkey = scheduler.studentKey(6 - s);
+    DiscreteValues values;
     values[dkey.first] = bestSlot;
     size_t count = (*root)(values);

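
In the scheduler hunks, root->argmax() now returns the best slot index directly, and the DiscreteValues assignment is only built afterwards to read the count back out of the root factor. The type of root is not shown in this diff, so the sketch below exercises only the lookup half of the pattern on a hand-built DecisionTreeFactor; the key, table, and chosen slot are all invented:

#include <gtsam/discrete/DecisionTreeFactor.h>

#include <iostream>

using namespace gtsam;
using namespace std;

int main() {
  // A stand-in for the root factor: counts for a 3-valued "slot" variable.
  DiscreteKey slot(0, 3);
  DecisionTreeFactor root(slot, "2 7 1");

  // Pretend the maximizing slot is 1 (it has the largest count above).
  size_t bestSlot = 1;

  // Read the corresponding count back out of the factor.
  DiscreteValues values;
  values[slot.first] = bestSlot;
  double count = root(values);
  cout << "bestSlot = " << bestSlot << ", count = " << count << endl;
  return 0;
}
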
@@ -319,11 +319,11 @@ void accomodateStudent() {
   // GTSAM_PRINT(*chordal);

   // solve root node only
-  DiscreteValues values;
-  size_t bestSlot = root->solve(values);
+  size_t bestSlot = root->argmax();

   // get corresponding count
   DiscreteKey dkey = scheduler.studentKey(0);
+  DiscreteValues values;
   values[dkey.first] = bestSlot;
   size_t count = (*root)(values);
   cout << boost::format("%s = %d (%d), count = %d") % scheduler.studentName(0)
@@ -190,11 +190,11 @@ void solveStaged(size_t addMutex = 2) {
     root->print(""/*scheduler.studentName(s)*/);

     // solve root node only
-    DiscreteValues values;
-    size_t bestSlot = root->solve(values);
+    size_t bestSlot = root->argmax();

     // get corresponding count
     DiscreteKey dkey = scheduler.studentKey(NRSTUDENTS - 1 - s);
+    DiscreteValues values;
     values[dkey.first] = bestSlot;
     size_t count = (*root)(values);

@@ -212,11 +212,11 @@ void solveStaged(size_t addMutex = 2) {
     root->print(""/*scheduler.studentName(s)*/);

     // solve root node only
-    DiscreteValues values;
-    size_t bestSlot = root->solve(values);
+    size_t bestSlot = root->argmax();

     // get corresponding count
     DiscreteKey dkey = scheduler.studentKey(NRSTUDENTS - 1 - s);
+    DiscreteValues values;
     values[dkey.first] = bestSlot;
     double count = (*root)(values);
