Fixed all examples

release/4.3a0
Frank Dellaert 2021-11-20 16:34:53 -05:00
parent 8206d8d09d
commit 371fe3e865
8 changed files with 29 additions and 29 deletions

View File

@@ -57,7 +57,7 @@ int main(int argc, char **argv) {
// solve
auto mpe = chordal->optimize();
GTSAM_PRINT(*mpe);
GTSAM_PRINT(mpe);
// We can also build a Bayes tree (directed junction tree).
// The elimination order above will do fine:
@@ -71,13 +71,13 @@ int main(int argc, char **argv) {
// solve again, now with evidence
DiscreteBayesNet::shared_ptr chordal2 = fg.eliminateSequential(ordering);
auto mpe2 = chordal2->optimize();
GTSAM_PRINT(*mpe2);
GTSAM_PRINT(mpe2);
// We can also sample from it
cout << "\n10 samples:" << endl;
for (size_t i = 0; i < 10; i++) {
auto sample = chordal2->sample();
GTSAM_PRINT(*sample);
GTSAM_PRINT(sample);
}
return 0;
}

View File

@@ -34,10 +34,10 @@ int main(int argc, char **argv) {
// Define keys and a print function
Key C(1), S(2), R(3), W(4);
auto print = [=](const DiscreteFactor::Values& values) {
cout << boolalpha << "Cloudy = " << static_cast<bool>(values[C])
<< " Sprinkler = " << static_cast<bool>(values[S])
<< " Rain = " << boolalpha << static_cast<bool>(values[R])
<< " WetGrass = " << static_cast<bool>(values[W]) << endl;
cout << boolalpha << "Cloudy = " << static_cast<bool>(values.at(C))
<< " Sprinkler = " << static_cast<bool>(values.at(S))
<< " Rain = " << boolalpha << static_cast<bool>(values.at(R))
<< " WetGrass = " << static_cast<bool>(values.at(W)) << endl;
};
// We assume binary state variables

View File

@@ -67,13 +67,13 @@ int main(int argc, char **argv) {
// solve
auto mpe = chordal->optimize();
GTSAM_PRINT(*mpe);
GTSAM_PRINT(mpe);
// We can also sample from it
cout << "\n10 samples:" << endl;
for (size_t k = 0; k < 10; k++) {
auto sample = chordal->sample();
GTSAM_PRINT(*sample);
GTSAM_PRINT(sample);
}
// Or compute the marginals. This re-eliminates the FG into a Bayes tree

View File

@@ -71,7 +71,7 @@ int main(int argc, char** argv) {
// We use sequential variable elimination
DiscreteBayesNet::shared_ptr chordal = graph.eliminateSequential();
auto optimalDecoding = chordal->optimize();
optimalDecoding->print("\nMost Probable Explanation (optimalDecoding)\n");
optimalDecoding.print("\nMost Probable Explanation (optimalDecoding)\n");
// "Inference" Computing marginals for each node
// Here we'll make use of DiscreteMarginals class, which makes use of

View File

@@ -64,7 +64,7 @@ int main(int argc, char** argv) {
// We use sequential variable elimination
DiscreteBayesNet::shared_ptr chordal = graph.eliminateSequential();
auto optimalDecoding = chordal->optimize();
optimalDecoding->print("\noptimalDecoding");
GTSAM_PRINT(optimalDecoding);
// "Inference" Computing marginals
cout << "\nComputing Node Marginals .." << endl;

View File

@@ -225,7 +225,7 @@ void sampleSolutions() {
// now, sample schedules
for (size_t n = 0; n < 500; n++) {
vector<size_t> stats(19, 0);
vector<Scheduler::sharedValues> samples;
vector<Scheduler::Values> samples;
for (size_t i = 0; i < 7; i++) {
samples.push_back(samplers[i]->sample());
schedulers[i].accumulateStats(samples[i], stats);

View File

@@ -234,7 +234,7 @@ void sampleSolutions() {
// now, sample schedules
for (size_t n = 0; n < 500; n++) {
vector<size_t> stats(19, 0);
vector<Scheduler::sharedValues> samples;
vector<Scheduler::Values> samples;
for (size_t i = 0; i < NRSTUDENTS; i++) {
samples.push_back(samplers[i]->sample());
schedulers[i].accumulateStats(samples[i], stats);

View File

@@ -259,7 +259,7 @@ void sampleSolutions() {
// now, sample schedules
for (size_t n = 0; n < 10000; n++) {
vector<size_t> stats(nrFaculty, 0);
vector<Scheduler::sharedValues> samples;
vector<Scheduler::Values> samples;
for (size_t i = 0; i < NRSTUDENTS; i++) {
samples.push_back(samplers[i]->sample());
schedulers[i].accumulateStats(samples[i], stats);