From 9e676b215e8879086660402a8405530e73909a82 Mon Sep 17 00:00:00 2001
From: mkielo3
Date: Thu, 20 Feb 2025 21:19:18 +0000
Subject: [PATCH] Updated elaboratePoint2KalmanFilter.cpp example and easyPoint2KF bugfix

---
 examples/CMakeLists.txt                  |   4 -
 examples/easyPoint2KalmanFilter.cpp      |   2 +-
 examples/elaboratePoint2KalmanFilter.cpp | 213 +++++++++++++----------
 3 files changed, 123 insertions(+), 96 deletions(-)

diff --git a/examples/CMakeLists.txt b/examples/CMakeLists.txt
index 280b82265..8da95722c 100644
--- a/examples/CMakeLists.txt
+++ b/examples/CMakeLists.txt
@@ -1,7 +1,3 @@
-set (excluded_examples
-    "elaboratePoint2KalmanFilter.cpp"
-)
-
 # if GTSAM_ENABLE_BOOST_SERIALIZATION is not set then SolverComparer.cpp will not compile
 if (NOT GTSAM_ENABLE_BOOST_SERIALIZATION)
   list (APPEND excluded_examples
diff --git a/examples/easyPoint2KalmanFilter.cpp b/examples/easyPoint2KalmanFilter.cpp
index 7693fa4e4..d2dc6d241 100644
--- a/examples/easyPoint2KalmanFilter.cpp
+++ b/examples/easyPoint2KalmanFilter.cpp
@@ -100,7 +100,7 @@ int main() {
   Symbol x2('x',2);
   difference = Point2(1,0);
   BetweenFactor<Point2> factor3(x1, x2, difference, Q);
-  Point2 x2_predict = ekf.predict(factor1);
+  Point2 x2_predict = ekf.predict(factor3);
   traits<Point2>::Print(x2_predict, "X2 Predict");
 
   // Update
diff --git a/examples/elaboratePoint2KalmanFilter.cpp b/examples/elaboratePoint2KalmanFilter.cpp
index bc4e6e340..426280599 100644
--- a/examples/elaboratePoint2KalmanFilter.cpp
+++ b/examples/elaboratePoint2KalmanFilter.cpp
@@ -22,12 +22,12 @@
 #include <gtsam/nonlinear/PriorFactor.h>
 #include <gtsam/slam/BetweenFactor.h>
-//#include <gtsam/nonlinear/Ordering.h>
 #include <gtsam/inference/Symbol.h>
 #include <gtsam/linear/GaussianBayesNet.h>
 #include <gtsam/linear/GaussianFactorGraph.h>
 #include <gtsam/linear/NoiseModel.h>
 #include <gtsam/geometry/Point2.h>
+#include <gtsam/linear/JacobianFactor.h>
 
 #include <cassert>
@@ -55,17 +55,21 @@ int main() {
 
   // Create new state variable
   Symbol x0('x',0);
-  ordering->insert(x0, 0);
+  ordering->push_back(x0);
 
   // Initialize state x0 (2D point) at origin by adding a prior factor, i.e., Bayes net P(x0)
   // This is equivalent to x_0 and P_0
   Point2 x_initial(0,0);
-  SharedDiagonal P_initial = noiseModel::Diagonal::Sigmas((Vec(2) << 0.1, 0.1));
-  PriorFactor<Point2> factor1(x0, x_initial, P_initial);
+  SharedDiagonal P_initial = noiseModel::Diagonal::Sigmas((gtsam::Vector2() << 0.1, 0.1).finished());
+
+  // Create a JacobianFactor directly - this represents the prior constraint on x0
+  JacobianFactor::shared_ptr factor1(
+      new JacobianFactor(x0, P_initial->R(), Vector::Zero(2),
+                         noiseModel::Unit::Create(2)));
+
+  // Linearize the factor and add it to the linear factor graph
   linearizationPoints.insert(x0, x_initial);
-  linearFactorGraph->push_back(factor1.linearize(linearizationPoints, *ordering));
-
+  linearFactorGraph->push_back(factor1);
 
   // Now predict the state at t=1, i.e.
   // argmax_{x1} P(x1) = P(x1|x0) P(x0)
   // In Kalman Filter notation, this is x_{t+1|t} and P_{t+1|t}
@@ -88,14 +92,14 @@ int main() {
   //   = (F - I)*x_{t} + B*u_{t}
   //   = B*u_{t} (for our example)
   Symbol x1('x',1);
-  ordering->insert(x1, 1);
+  ordering->push_back(x1);
 
   Point2 difference(1,0);
-  SharedDiagonal Q = noiseModel::Diagonal::Sigmas((Vec(2) << 0.1, 0.1));
+  SharedDiagonal Q = noiseModel::Diagonal::Sigmas((gtsam::Vector2() << 0.1, 0.1).finished());
   BetweenFactor<Point2> factor2(x0, x1, difference, Q);
 
   // Linearize the factor and add it to the linear factor graph
   linearizationPoints.insert(x1, x_initial);
-  linearFactorGraph->push_back(factor2.linearize(linearizationPoints, *ordering));
+  linearFactorGraph->push_back(factor2.linearize(linearizationPoints));
 
   // We have now made the small factor graph f1-(x0)-f2-(x1)
   // where factor f1 is just the prior from time t0, P(x0)
@@ -110,13 +114,13 @@ int main() {
   // system, the initial estimate is not important.
 
   // Solve the linear factor graph, converting it into a linear Bayes Network ( P(x0,x1) = P(x0|x1)*P(x1) )
-  GaussianSequentialSolver solver0(*linearFactorGraph);
-  GaussianBayesNet::shared_ptr linearBayesNet = solver0.eliminate();
+  GaussianBayesNet::shared_ptr bayesNet = linearFactorGraph->eliminateSequential(*ordering);
+  const GaussianConditional::shared_ptr& x1Conditional = bayesNet->back(); // This should be P(x1)
 
   // Extract the current estimate of x1,P1 from the Bayes Network
-  VectorValues result = optimize(*linearBayesNet);
-  Point2 x1_predict = linearizationPoints.at<Point2>(x1).retract(result[ordering->at(x1)]);
-  x1_predict.print("X1 Predict");
+  VectorValues result = bayesNet->optimize();
+  Point2 x1_predict = linearizationPoints.at<Point2>(x1) + Point2(result[x1]);
+  traits<Point2>::Print(x1_predict, "X1 Predict");
 
   // Update the new linearization point to the new estimate
   linearizationPoints.update(x1, x1_predict);
@@ -139,14 +143,24 @@ int main() {
   //      -> b'' = b' - F(dx1' - dx1'')
   //       = || F*dx1'' - (b' - F(dx1' - dx1'')) ||^2
   //       = || F*dx1'' - (b' - F(x_predict - x_initial)) ||^2
-  const GaussianConditional::shared_ptr& cg0 = linearBayesNet->back();
-  assert(cg0->nrFrontals() == 1);
-  assert(cg0->nrParents() == 0);
-  linearFactorGraph->add(0, cg0->R(), cg0->d() - cg0->R()*result[ordering->at(x1)], noiseModel::Diagonal::Sigmas(cg0->get_sigmas(), true));
+  JacobianFactor::shared_ptr newPrior(new JacobianFactor(
+      x1,
+      x1Conditional->R(),
+      x1Conditional->d() - x1Conditional->R() * result[x1],
+      x1Conditional->get_model()));
 
-  // Create the desired ordering
+  // Ensure correct number of rows, that there is one variable, and that variable is x1
+  assert(newPrior->rows() == x1Conditional->R().rows());
+  assert(newPrior->size() == 1);
+  assert(*newPrior->begin() == x1);
+
+  // Create a new, empty graph and add the new prior
+  linearFactorGraph = GaussianFactorGraph::shared_ptr(new GaussianFactorGraph);
+  linearFactorGraph->push_back(newPrior);
+
+  // Reset ordering for the next step
   ordering = Ordering::shared_ptr(new Ordering);
-  ordering->insert(x1, 0);
+  ordering->push_back(x1);
 
   // Now, a measurement, z1, has been received, and the Kalman Filter should be "Updated"/"Corrected"
   // This is equivalent to saying P(x1|z1) ~ P(z1|x1)*P(x1) ~ f3(x1)*f4(x1;z1)
@@ -169,10 +183,10 @@ int main() {
   //    = (x_{t} - z_{t}) * R^-1 * (x_{t} - z_{t})^T
   // This can be modeled using the PriorFactor, where the mean is z_{t} and the covariance is R.
   Point2 z1(1.0, 0.0);
-  SharedDiagonal R1 = noiseModel::Diagonal::Sigmas((Vec(2) << 0.25, 0.25));
+  SharedDiagonal R1 = noiseModel::Diagonal::Sigmas((gtsam::Vector2() << 0.25, 0.25).finished());
   PriorFactor<Point2> factor4(x1, z1, R1);
 
   // Linearize the factor and add it to the linear factor graph
-  linearFactorGraph->push_back(factor4.linearize(linearizationPoints, *ordering));
+  linearFactorGraph->push_back(factor4.linearize(linearizationPoints));
 
   // We have now made the small factor graph f3-(x1)-f4
   // where factor f3 is the prior from previous time ( P(x1) )
@@ -182,13 +196,13 @@ int main() {
   // We solve as before...
 
   // Solve the linear factor graph, converting it into a linear Bayes Network ( P(x0,x1) = P(x0|x1)*P(x1) )
-  GaussianSequentialSolver solver1(*linearFactorGraph);
-  linearBayesNet = solver1.eliminate();
+  GaussianBayesNet::shared_ptr updatedBayesNet = linearFactorGraph->eliminateSequential(*ordering);
+  const GaussianConditional::shared_ptr& updatedConditional = updatedBayesNet->back();
 
   // Extract the current estimate of x1 from the Bayes Network
-  result = optimize(*linearBayesNet);
-  Point2 x1_update = linearizationPoints.at<Point2>(x1).retract(result[ordering->at(x1)]);
-  x1_update.print("X1 Update");
+  VectorValues updatedResult = updatedBayesNet->optimize();
+  Point2 x1_update = linearizationPoints.at<Point2>(x1) + Point2(updatedResult[x1]);
+  traits<Point2>::Print(x1_update, "X1 Update");
 
   // Update the linearization point to the new estimate
   linearizationPoints.update(x1, x1_update);
@@ -205,65 +219,76 @@ int main() {
   // Convert the root conditional, P(x1) in this case, into a Prior for the next step
   // The linearization point of this prior must be moved to the new estimate of x, and the key/index needs to be reset to 0,
   // the first key in the next iteration
-  const GaussianConditional::shared_ptr& cg1 = linearBayesNet->back();
-  assert(cg1->nrFrontals() == 1);
-  assert(cg1->nrParents() == 0);
-  JacobianFactor tmpPrior1 = JacobianFactor(*cg1);
-  linearFactorGraph->add(0, tmpPrior1.getA(tmpPrior1.begin()), tmpPrior1.getb() - tmpPrior1.getA(tmpPrior1.begin()) * result[ordering->at(x1)], tmpPrior1.get_model());
+  JacobianFactor::shared_ptr updatedPrior(new JacobianFactor(
+      x1,
+      updatedConditional->R(),
+      updatedConditional->d() - updatedConditional->R() * updatedResult[x1],
+      updatedConditional->get_model()));
+
+  // Ensure correct number of rows, that there is one variable, and that variable is x1
+  assert(updatedPrior->rows() == updatedConditional->R().rows());
+  assert(updatedPrior->size() == 1);
+  assert(*updatedPrior->begin() == x1);
+
+  linearFactorGraph->push_back(updatedPrior);
 
   // Create a key for the new state
   Symbol x2('x',2);
 
   // Create the desired ordering
   ordering = Ordering::shared_ptr(new Ordering);
-  ordering->insert(x1, 0);
-  ordering->insert(x2, 1);
-
-  // Create a nonlinear factor describing the motion model
-  difference = Point2(1,0);
-  Q = noiseModel::Diagonal::Sigmas((Vec(2) << 0.1, 0.1));
-  BetweenFactor<Point2> factor6(x1, x2, difference, Q);
+  ordering->push_back(x1);
+  ordering->push_back(x2);
+
+  // Create a nonlinear factor describing the motion model (moving right again)
+  Point2 difference2(1,0);
+  SharedDiagonal Q2 = noiseModel::Diagonal::Sigmas((gtsam::Vector2() << 0.1, 0.1).finished());
+  BetweenFactor<Point2> factor6(x1, x2, difference2, Q2);
 
   // Linearize the factor and add it to the linear factor graph
   linearizationPoints.insert(x2, x1_update);
-  linearFactorGraph->push_back(factor6.linearize(linearizationPoints, *ordering));
+  linearFactorGraph->push_back(factor6.linearize(linearizationPoints));
 
   // Solve the linear factor graph, converting it into a linear Bayes Network ( P(x1,x2) = P(x1|x2)*P(x2) )
-  GaussianSequentialSolver solver2(*linearFactorGraph);
-  linearBayesNet = solver2.eliminate();
-
-  // Extract the current estimate of x2 from the Bayes Network
-  result = optimize(*linearBayesNet);
-  Point2 x2_predict = linearizationPoints.at<Point2>(x2).retract(result[ordering->at(x2)]);
-  x2_predict.print("X2 Predict");
+  GaussianBayesNet::shared_ptr predictionBayesNet2 = linearFactorGraph->eliminateSequential(*ordering);
+  const GaussianConditional::shared_ptr& x2Conditional = predictionBayesNet2->back();
+
+  // Extract the predicted state
+  VectorValues prediction2Result = predictionBayesNet2->optimize();
+  Point2 x2_predict = linearizationPoints.at<Point2>(x2) + Point2(prediction2Result[x2]);
+  traits<Point2>::Print(x2_predict, "X2 Predict");
 
   // Update the linearization point to the new estimate
   linearizationPoints.update(x2, x2_predict);
-
-  // Now add the next measurement
 
   // Create a new, empty graph and add the prior from the previous step
   linearFactorGraph = GaussianFactorGraph::shared_ptr(new GaussianFactorGraph);
 
   // Convert the root conditional, P(x2) in this case, into a Prior for the next step
-  const GaussianConditional::shared_ptr& cg2 = linearBayesNet->back();
-  assert(cg2->nrFrontals() == 1);
-  assert(cg2->nrParents() == 0);
-  JacobianFactor tmpPrior2 = JacobianFactor(*cg2);
-  linearFactorGraph->add(0, tmpPrior2.getA(tmpPrior2.begin()), tmpPrior2.getb() - tmpPrior2.getA(tmpPrior2.begin()) * result[ordering->at(x2)], tmpPrior2.get_model());
+  JacobianFactor::shared_ptr prior2(new JacobianFactor(
+      x2,
+      x2Conditional->R(),
+      x2Conditional->d() - x2Conditional->R() * prediction2Result[x2],
+      x2Conditional->get_model()));
+
+  assert(prior2->rows() == x2Conditional->R().rows());
+  assert(prior2->size() == 1);
+  assert(*prior2->begin() == x2);
+
+  linearFactorGraph->push_back(prior2);
 
   // Create the desired ordering
   ordering = Ordering::shared_ptr(new Ordering);
-  ordering->insert(x2, 0);
+  ordering->push_back(x2);
 
   // And update using z2 ...
   Point2 z2(2.0, 0.0);
-  SharedDiagonal R2 = noiseModel::Diagonal::Sigmas((Vec(2) << 0.25, 0.25));
+  SharedDiagonal R2 = noiseModel::Diagonal::Sigmas((gtsam::Vector2() << 0.25, 0.25).finished());
   PriorFactor<Point2> factor8(x2, z2, R2);
 
   // Linearize the factor and add it to the linear factor graph
-  linearFactorGraph->push_back(factor8.linearize(linearizationPoints, *ordering));
+  linearFactorGraph->push_back(factor8.linearize(linearizationPoints));
 
   // We have now made the small factor graph f7-(x2)-f8
   // where factor f7 is the prior from previous time ( P(x2) )
@@ -273,13 +298,13 @@ int main() {
   // We solve as before...
 
   // Solve the linear factor graph, converting it into a linear Bayes Network ( P(x0,x1) = P(x0|x1)*P(x1) )
-  GaussianSequentialSolver solver3(*linearFactorGraph);
-  linearBayesNet = solver3.eliminate();
+  GaussianBayesNet::shared_ptr updatedBayesNet2 = linearFactorGraph->eliminateSequential(*ordering);
+  const GaussianConditional::shared_ptr& updatedConditional2 = updatedBayesNet2->back();
 
   // Extract the current estimate of x2 from the Bayes Network
-  result = optimize(*linearBayesNet);
-  Point2 x2_update = linearizationPoints.at<Point2>(x2).retract(result[ordering->at(x2)]);
-  x2_update.print("X2 Update");
+  VectorValues updatedResult2 = updatedBayesNet2->optimize();
+  Point2 x2_update = linearizationPoints.at<Point2>(x2) + Point2(updatedResult2[x2]);
+  traits<Point2>::Print(x2_update, "X2 Update");
 
   // Update the linearization point to the new estimate
   linearizationPoints.update(x2, x2_update);
@@ -294,37 +319,41 @@ int main() {
   linearFactorGraph = GaussianFactorGraph::shared_ptr(new GaussianFactorGraph);
 
   // Convert the root conditional, P(x2) in this case, into a Prior for the next step
-  const GaussianConditional::shared_ptr& cg3 = linearBayesNet->back();
-  assert(cg3->nrFrontals() == 1);
-  assert(cg3->nrParents() == 0);
-  JacobianFactor tmpPrior3 = JacobianFactor(*cg3);
-  linearFactorGraph->add(0, tmpPrior3.getA(tmpPrior3.begin()), tmpPrior3.getb() - tmpPrior3.getA(tmpPrior3.begin()) * result[ordering->at(x2)], tmpPrior3.get_model());
+  Matrix updatedR2 = updatedConditional2->R();
+  Vector updatedD2 = updatedConditional2->d() - updatedR2 * updatedResult2[x2];
+  JacobianFactor::shared_ptr updatedPrior2(new JacobianFactor(
+      x2,
+      updatedR2,
+      updatedD2,
+      updatedConditional2->get_model()));
+
+  linearFactorGraph->push_back(updatedPrior2);
 
   // Create a key for the new state
   Symbol x3('x',3);
 
   // Create the desired ordering
   ordering = Ordering::shared_ptr(new Ordering);
-  ordering->insert(x2, 0);
-  ordering->insert(x3, 1);
+  ordering->push_back(x2);
+  ordering->push_back(x3);
 
   // Create a nonlinear factor describing the motion model
-  difference = Point2(1,0);
-  Q = noiseModel::Diagonal::Sigmas((Vec(2) << 0.1, 0.1));
-  BetweenFactor<Point2> factor10(x2, x3, difference, Q);
+  Point2 difference3(1,0);
+  SharedDiagonal Q3 = noiseModel::Diagonal::Sigmas((gtsam::Vector2() << 0.1, 0.1).finished());
+  BetweenFactor<Point2> factor10(x2, x3, difference3, Q3);
 
   // Linearize the factor and add it to the linear factor graph
   linearizationPoints.insert(x3, x2_update);
-  linearFactorGraph->push_back(factor10.linearize(linearizationPoints, *ordering));
+  linearFactorGraph->push_back(factor10.linearize(linearizationPoints));
 
   // Solve the linear factor graph, converting it into a linear Bayes Network ( P(x1,x2) = P(x1|x2)*P(x2) )
-  GaussianSequentialSolver solver4(*linearFactorGraph);
-  linearBayesNet = solver4.eliminate();
+  GaussianBayesNet::shared_ptr predictionBayesNet3 = linearFactorGraph->eliminateSequential(*ordering);
+  const GaussianConditional::shared_ptr& x3Conditional = predictionBayesNet3->back();
 
   // Extract the current estimate of x3 from the Bayes Network
-  result = optimize(*linearBayesNet);
-  Point2 x3_predict = linearizationPoints.at<Point2>(x3).retract(result[ordering->at(x3)]);
-  x3_predict.print("X3 Predict");
+  VectorValues prediction3Result = predictionBayesNet3->optimize();
+  Point2 x3_predict = linearizationPoints.at<Point2>(x3) + Point2(prediction3Result[x3]);
+  traits<Point2>::Print(x3_predict, "X3 Predict");
 
   // Update the linearization point to the new estimate
   linearizationPoints.update(x3, x3_predict);
@@ -336,23 +365,25 @@ int main() {
   linearFactorGraph = GaussianFactorGraph::shared_ptr(new GaussianFactorGraph);
 
   // Convert the root conditional, P(x3) in this case, into a Prior for the next step
-  const GaussianConditional::shared_ptr& cg4 = linearBayesNet->back();
-  assert(cg4->nrFrontals() == 1);
-  assert(cg4->nrParents() == 0);
-  JacobianFactor tmpPrior4 = JacobianFactor(*cg4);
-  linearFactorGraph->add(0, tmpPrior4.getA(tmpPrior4.begin()), tmpPrior4.getb() - tmpPrior4.getA(tmpPrior4.begin()) * result[ordering->at(x3)], tmpPrior4.get_model());
+  JacobianFactor::shared_ptr prior3(new JacobianFactor(
+      x3,
+      x3Conditional->R(),
+      x3Conditional->d() - x3Conditional->R() * prediction3Result[x3],
+      x3Conditional->get_model()));
+
+  linearFactorGraph->push_back(prior3);
 
   // Create the desired ordering
   ordering = Ordering::shared_ptr(new Ordering);
-  ordering->insert(x3, 0);
+  ordering->push_back(x3);
 
   // And update using z3 ...
   Point2 z3(3.0, 0.0);
-  SharedDiagonal R3 = noiseModel::Diagonal::Sigmas((Vec(2) << 0.25, 0.25));
+  SharedDiagonal R3 = noiseModel::Diagonal::Sigmas((gtsam::Vector2() << 0.25, 0.25).finished());
   PriorFactor<Point2> factor12(x3, z3, R3);
 
   // Linearize the factor and add it to the linear factor graph
-  linearFactorGraph->push_back(factor12.linearize(linearizationPoints, *ordering));
+  linearFactorGraph->push_back(factor12.linearize(linearizationPoints));
 
   // We have now made the small factor graph f11-(x3)-f12
   // where factor f11 is the prior from previous time ( P(x3) )
@@ -362,13 +393,13 @@ int main() {
   // We solve as before...
 
   // Solve the linear factor graph, converting it into a linear Bayes Network ( P(x0,x1) = P(x0|x1)*P(x1) )
-  GaussianSequentialSolver solver5(*linearFactorGraph);
-  linearBayesNet = solver5.eliminate();
+  GaussianBayesNet::shared_ptr updatedBayesNet3 = linearFactorGraph->eliminateSequential(*ordering);
+  const GaussianConditional::shared_ptr& updatedConditional3 = updatedBayesNet3->back();
 
   // Extract the current estimate of x3 from the Bayes Network
-  result = optimize(*linearBayesNet);
-  Point2 x3_update = linearizationPoints.at<Point2>(x3).retract(result[ordering->at(x3)]);
-  x3_update.print("X3 Update");
+  VectorValues updatedResult3 = updatedBayesNet3->optimize();
+  Point2 x3_update = linearizationPoints.at<Point2>(x3) + Point2(updatedResult3[x3]);
+  traits<Point2>::Print(x3_update, "X3 Update");
 
   // Update the linearization point to the new estimate
   linearizationPoints.update(x3, x3_update);
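
Note on the pattern the patch repeats once per time step: eliminate the linear graph into a Bayes net, take the root conditional P(x_k) = ||R*dx_k - d||^2, and rebase it onto the updated linearization point by shifting the right-hand side to d - R*delta before pushing it into the next graph as a unary JacobianFactor. The code below is a minimal, self-contained sketch of one predict step of that recursion against the GTSAM 4.x API used above; it is not part of the patch, and the helper name conditionalToPrior is hypothetical, introduced here for illustration only.

#include <gtsam/geometry/Point2.h>
#include <gtsam/inference/Symbol.h>
#include <gtsam/linear/GaussianBayesNet.h>
#include <gtsam/linear/GaussianFactorGraph.h>
#include <gtsam/linear/JacobianFactor.h>
#include <gtsam/linear/NoiseModel.h>
#include <gtsam/nonlinear/Values.h>
#include <gtsam/slam/BetweenFactor.h>

using namespace gtsam;

// Hypothetical helper (illustration only, not from the patch or GTSAM):
// rebase the root conditional P(x) = ||R*dx - d||^2 onto the solved estimate.
// With dx = dx' + delta, the factor becomes ||R*dx' - (d - R*delta)||^2.
static JacobianFactor::shared_ptr conditionalToPrior(
    const GaussianConditional::shared_ptr& conditional,
    const VectorValues& delta, Key key) {
  return JacobianFactor::shared_ptr(new JacobianFactor(
      key, conditional->R(),
      conditional->d() - conditional->R() * delta[key],
      conditional->get_model()));
}

int main() {
  Symbol x1('x', 1), x2('x', 2);

  // Current estimate of x1 with prior sigma 0.1, as in the example.
  Values linearizationPoints;
  linearizationPoints.insert(x1, Point2(1.0, 0.0));
  SharedDiagonal P1 = noiseModel::Diagonal::Sigmas(Vector2(0.1, 0.1));

  GaussianFactorGraph graph;
  graph.push_back(JacobianFactor::shared_ptr(new JacobianFactor(
      x1, P1->R(), Vector::Zero(2), noiseModel::Unit::Create(2))));

  // Predict: motion model x2 = x1 + (1,0), linearized at the current estimate.
  BetweenFactor<Point2> motion(x1, x2, Point2(1, 0),
                               noiseModel::Diagonal::Sigmas(Vector2(0.1, 0.1)));
  linearizationPoints.insert(x2, linearizationPoints.at<Point2>(x1));
  graph.push_back(motion.linearize(linearizationPoints));

  // Eliminate x1 first so the root conditional of the Bayes net is P(x2).
  Ordering ordering;
  ordering.push_back(x1);
  ordering.push_back(x2);
  GaussianBayesNet::shared_ptr bayesNet = graph.eliminateSequential(ordering);
  VectorValues delta = bayesNet->optimize();

  Point2 x2_predict = linearizationPoints.at<Point2>(x2) + Point2(delta[x2]);
  traits<Point2>::Print(x2_predict, "X2 Predict");
  linearizationPoints.update(x2, x2_predict);

  // Carry P(x2) into the next cycle as a unary prior on x2.
  GaussianFactorGraph nextGraph;
  nextGraph.push_back(conditionalToPrior(bayesNet->back(), delta, x2));
  return 0;
}

Because the rebased factor keeps the conditional's noise model along with its square-root information matrix R, it carries the full marginal covariance of x2 forward, which is what makes this eliminate-and-rebase loop behave as a Kalman filter rather than just a least-squares solve.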