Fixed all examples

release/4.3a0
Frank Dellaert 2021-11-20 16:34:53 -05:00
parent 8206d8d09d
commit 371fe3e865
8 changed files with 29 additions and 29 deletions

View File

@@ -56,8 +56,8 @@ int main(int argc, char **argv) {
   DiscreteBayesNet::shared_ptr chordal = fg.eliminateSequential(ordering);
   // solve
-  autompe = chordal->optimize();
-  GTSAM_PRINT(*mpe);
+  auto mpe = chordal->optimize();
+  GTSAM_PRINT(mpe);
   // We can also build a Bayes tree (directed junction tree).
   // The elimination order above will do fine:
@@ -70,14 +70,14 @@ int main(int argc, char **argv) {
   // solve again, now with evidence
   DiscreteBayesNet::shared_ptr chordal2 = fg.eliminateSequential(ordering);
-  autompe2 = chordal2->optimize();
-  GTSAM_PRINT(*mpe2);
+  auto mpe2 = chordal2->optimize();
+  GTSAM_PRINT(mpe2);
   // We can also sample from it
   cout << "\n10 samples:" << endl;
   for (size_t i = 0; i < 10; i++) {
-    autosample = chordal2->sample();
-    GTSAM_PRINT(*sample);
+    auto sample = chordal2->sample();
+    GTSAM_PRINT(sample);
   }
   return 0;
 }
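Note: the pattern in this file recurs throughout the commit. optimize() and sample() on the eliminated DiscreteBayesNet now return the assignment by value, so the broken identifiers on the old side (autompe, autosample), apparently left over from an earlier search-and-replace, become auto mpe / auto sample, and the dereference in GTSAM_PRINT(*mpe) is dropped. Below is a minimal, self-contained sketch of the post-fix usage; the two-variable factor graph and key numbering are illustrative stand-ins, not taken from the diff.

// Minimal sketch only; the graph and keys below are illustrative.
#include <gtsam/base/Testable.h>  // GTSAM_PRINT
#include <gtsam/discrete/DiscreteBayesNet.h>
#include <gtsam/discrete/DiscreteFactorGraph.h>

using namespace gtsam;

int main() {
  DiscreteKey A(0, 2), B(1, 2);  // two binary variables (hypothetical keys)

  DiscreteFactorGraph fg;
  fg.add(A, "1 2");          // unary factor on A
  fg.add(A & B, "1 2 3 4");  // pairwise factor on A, B

  // Eliminate into a Bayes net, then solve and sample, both by value.
  DiscreteBayesNet::shared_ptr chordal = fg.eliminateSequential();
  auto mpe = chordal->optimize();   // returns the assignment directly
  GTSAM_PRINT(mpe);                 // no dereference, unlike the old *mpe

  auto sample = chordal->sample();  // sampling also returns by value
  GTSAM_PRINT(sample);
  return 0;
}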

View File

@@ -34,10 +34,10 @@ int main(int argc, char **argv) {
   // Define keys and a print function
   Key C(1), S(2), R(3), W(4);
   auto print = [=](const DiscreteFactor::Values& values) {
-    cout << boolalpha << "Cloudy = " << static_cast<bool>(values[C])
-         << " Sprinkler = " << static_cast<bool>(values[S])
-         << " Rain = " << boolalpha << static_cast<bool>(values[R])
-         << " WetGrass = " << static_cast<bool>(values[W]) << endl;
+    cout << boolalpha << "Cloudy = " << static_cast<bool>(values.at(C))
+         << " Sprinkler = " << static_cast<bool>(values.at(S))
+         << " Rain = " << boolalpha << static_cast<bool>(values.at(R))
+         << " WetGrass = " << static_cast<bool>(values.at(W)) << endl;
   };
   // We assume binary state variables
@@ -85,7 +85,7 @@ int main(int argc, char **argv) {
   }
   // "Most Probable Explanation", i.e., configuration with largest value
-  autompe = graph.eliminateSequential()->optimize();
+  auto mpe = graph.eliminateSequential()->optimize();
   cout << "\nMost Probable Explanation (MPE):" << endl;
   print(mpe);
@@ -97,7 +97,7 @@ int main(int argc, char **argv) {
   // solve again, now with evidence
   DiscreteBayesNet::shared_ptr chordal = graph.eliminateSequential();
-  autompe_with_evidence = chordal->optimize();
+  auto mpe_with_evidence = chordal->optimize();
   cout << "\nMPE given C=0:" << endl;
   print(mpe_with_evidence);
@@ -113,7 +113,7 @@ int main(int argc, char **argv) {
   // We can also sample from it
   cout << "\n10 samples:" << endl;
   for (size_t i = 0; i < 10; i++) {
-    autosample = chordal->sample();
+    auto sample = chordal->sample();
     print(sample);
   }
   return 0;
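The values[C] to values.at(C) change in the print lambda earlier in this file is a const-correctness fix: the lambda takes the assignment by const reference, and on a std::map-style container operator[] is non-const (it may insert a missing key), while at() is const and throws if the key is absent. A small standard-library sketch of the same point, using a plain std::map as a stand-in for the GTSAM assignment type (an assumption for illustration only):

#include <cstddef>
#include <cstdint>
#include <iostream>
#include <map>

using Key = std::uint64_t;
using Values = std::map<Key, std::size_t>;  // stand-in for the assignment type

void printCloudy(const Values& values) {
  const Key C = 1;
  // values[C] would not compile here: std::map::operator[] is non-const.
  std::cout << std::boolalpha << "Cloudy = " << static_cast<bool>(values.at(C))
            << std::endl;
}

int main() {
  Values values{{1, 1}};  // Cloudy = true
  printCloudy(values);
  return 0;
}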

View File

@@ -66,14 +66,14 @@ int main(int argc, char **argv) {
   chordal->print("Eliminated");
   // solve
-  autompe = chordal->optimize();
-  GTSAM_PRINT(*mpe);
+  auto mpe = chordal->optimize();
+  GTSAM_PRINT(mpe);
   // We can also sample from it
   cout << "\n10 samples:" << endl;
   for (size_t k = 0; k < 10; k++) {
-    autosample = chordal->sample();
-    GTSAM_PRINT(*sample);
+    auto sample = chordal->sample();
+    GTSAM_PRINT(sample);
   }
   // Or compute the marginals. This re-eliminates the FG into a Bayes tree

View File

@@ -70,8 +70,8 @@ int main(int argc, char** argv) {
   // "Decoding", i.e., configuration with largest value
   // We use sequential variable elimination
   DiscreteBayesNet::shared_ptr chordal = graph.eliminateSequential();
-  autooptimalDecoding = chordal->optimize();
-  optimalDecoding->print("\nMost Probable Explanation (optimalDecoding)\n");
+  auto optimalDecoding = chordal->optimize();
+  optimalDecoding.print("\nMost Probable Explanation (optimalDecoding)\n");
   // "Inference" Computing marginals for each node
   // Here we'll make use of DiscreteMarginals class, which makes use of

View File

@@ -63,8 +63,8 @@ int main(int argc, char** argv) {
   // "Decoding", i.e., configuration with largest value (MPE)
   // We use sequential variable elimination
   DiscreteBayesNet::shared_ptr chordal = graph.eliminateSequential();
-  autooptimalDecoding = chordal->optimize();
-  optimalDecoding->print("\noptimalDecoding");
+  auto optimalDecoding = chordal->optimize();
+  GTSAM_PRINT(optimalDecoding);
   // "Inference" Computing marginals
   cout << "\nComputing Node Marginals .." << endl;

View File

@@ -122,7 +122,7 @@ void runLargeExample() {
   // SETDEBUG("timing-verbose", true);
   SETDEBUG("DiscreteConditional::DiscreteConditional", true);
   gttic(large);
-  autoMPE = scheduler.optimalAssignment();
+  auto MPE = scheduler.optimalAssignment();
   gttoc(large);
   tictoc_finishedIteration();
   tictoc_print();
@@ -225,7 +225,7 @@ void sampleSolutions() {
   // now, sample schedules
   for (size_t n = 0; n < 500; n++) {
     vector<size_t> stats(19, 0);
-    vector<Scheduler::sharedValues> samples;
+    vector<Scheduler::Values> samples;
     for (size_t i = 0; i < 7; i++) {
       samples.push_back(samplers[i]->sample());
       schedulers[i].accumulateStats(samples[i], stats);
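The scheduler hunks apply the same convention to stored samples: the buffer changes from vector<Scheduler::sharedValues> (presumably shared pointers) to vector<Scheduler::Values> (assignments held by value). A generic sketch of that container change, with SampleValues and drawSample() as hypothetical stand-ins rather than the actual Scheduler API:

#include <cstddef>
#include <map>
#include <vector>

using SampleValues = std::map<std::size_t, std::size_t>;    // hypothetical stand-in

SampleValues drawSample() { return SampleValues{{0, 1}}; }  // placeholder sampler

int main() {
  // Before: std::vector<std::shared_ptr<SampleValues>> samples;
  std::vector<SampleValues> samples;  // after: samples stored by value
  for (std::size_t i = 0; i < 7; i++) samples.push_back(drawSample());
  return 0;
}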

View File

@@ -129,7 +129,7 @@ void runLargeExample() {
   tictoc_finishedIteration();
   tictoc_print();
   for (size_t i=0;i<100;i++) {
-    autoassignment = chordal->sample();
+    auto assignment = chordal->sample();
     vector<size_t> stats(scheduler.nrFaculty());
     scheduler.accumulateStats(assignment, stats);
     size_t max = *max_element(stats.begin(), stats.end());
@@ -143,7 +143,7 @@ void runLargeExample() {
   }
 #else
   gttic(large);
-  autoMPE = scheduler.optimalAssignment();
+  auto MPE = scheduler.optimalAssignment();
   gttoc(large);
   tictoc_finishedIteration();
   tictoc_print();
@@ -234,7 +234,7 @@ void sampleSolutions() {
   // now, sample schedules
   for (size_t n = 0; n < 500; n++) {
     vector<size_t> stats(19, 0);
-    vector<Scheduler::sharedValues> samples;
+    vector<Scheduler::Values> samples;
    for (size_t i = 0; i < NRSTUDENTS; i++) {
       samples.push_back(samplers[i]->sample());
       schedulers[i].accumulateStats(samples[i], stats);

View File

@@ -153,7 +153,7 @@ void runLargeExample() {
   tictoc_finishedIteration();
   tictoc_print();
   for (size_t i=0;i<100;i++) {
-    autoassignment = sample(*chordal);
+    auto assignment = sample(*chordal);
     vector<size_t> stats(scheduler.nrFaculty());
     scheduler.accumulateStats(assignment, stats);
     size_t max = *max_element(stats.begin(), stats.end());
@@ -167,7 +167,7 @@ void runLargeExample() {
   }
 #else
   gttic(large);
-  autoMPE = scheduler.optimalAssignment();
+  auto MPE = scheduler.optimalAssignment();
   gttoc(large);
   tictoc_finishedIteration();
   tictoc_print();
@@ -259,7 +259,7 @@ void sampleSolutions() {
   // now, sample schedules
   for (size_t n = 0; n < 10000; n++) {
     vector<size_t> stats(nrFaculty, 0);
-    vector<Scheduler::sharedValues> samples;
+    vector<Scheduler::Values> samples;
     for (size_t i = 0; i < NRSTUDENTS; i++) {
       samples.push_back(samplers[i]->sample());
       schedulers[i].accumulateStats(samples[i], stats);