From 9e676b215e8879086660402a8405530e73909a82 Mon Sep 17 00:00:00 2001 From: mkielo3 Date: Thu, 20 Feb 2025 21:19:18 +0000 Subject: [PATCH 1/4] Updated elaboratePoint2KalmanFilter.cpp example and easyPoint2KF bugfix --- examples/CMakeLists.txt | 4 - examples/easyPoint2KalmanFilter.cpp | 2 +- examples/elaboratePoint2KalmanFilter.cpp | 213 +++++++++++++---------- 3 files changed, 123 insertions(+), 96 deletions(-) diff --git a/examples/CMakeLists.txt b/examples/CMakeLists.txt index 280b82265..8da95722c 100644 --- a/examples/CMakeLists.txt +++ b/examples/CMakeLists.txt @@ -1,7 +1,3 @@ -set (excluded_examples - "elaboratePoint2KalmanFilter.cpp" -) - # if GTSAM_ENABLE_BOOST_SERIALIZATION is not set then SolverComparer.cpp will not compile if (NOT GTSAM_ENABLE_BOOST_SERIALIZATION) list (APPEND excluded_examples diff --git a/examples/easyPoint2KalmanFilter.cpp b/examples/easyPoint2KalmanFilter.cpp index 7693fa4e4..d2dc6d241 100644 --- a/examples/easyPoint2KalmanFilter.cpp +++ b/examples/easyPoint2KalmanFilter.cpp @@ -100,7 +100,7 @@ int main() { Symbol x2('x',2); difference = Point2(1,0); BetweenFactor<Point2> factor3(x1, x2, difference, Q); - Point2 x2_predict = ekf.predict(factor1); + Point2 x2_predict = ekf.predict(factor3); traits<Point2>::Print(x2_predict, "X2 Predict"); // Update diff --git a/examples/elaboratePoint2KalmanFilter.cpp b/examples/elaboratePoint2KalmanFilter.cpp index bc4e6e340..426280599 100644 --- a/examples/elaboratePoint2KalmanFilter.cpp +++ b/examples/elaboratePoint2KalmanFilter.cpp @@ -22,12 +22,12 @@ #include #include -//#include #include #include #include #include #include +#include #include @@ -55,17 +55,21 @@ int main() { // Create new state variable Symbol x0('x',0); - ordering->insert(x0, 0); + ordering->push_back(x0); // Initialize state x0 (2D point) at origin by adding a prior factor, i.e., Bayes net P(x0) // This is equivalent to x_0 and P_0 Point2 x_initial(0,0); - SharedDiagonal P_initial = noiseModel::Diagonal::Sigmas((Vec(2) << 0.1, 0.1)); - PriorFactor<Point2> factor1(x0, x_initial, P_initial); + SharedDiagonal P_initial = noiseModel::Diagonal::Sigmas((gtsam::Vector2() << 0.1, 0.1).finished()); + + // Create a JacobianFactor directly - this represents the prior constraint on x0 + JacobianFactor::shared_ptr factor1( + new JacobianFactor(x0, P_initial->R(), Vector::Zero(2), + noiseModel::Unit::Create(2))); + + // Linearize the factor and add it to the linear factor graph linearizationPoints.insert(x0, x_initial); - linearFactorGraph->push_back(factor1.linearize(linearizationPoints, *ordering)); - + linearFactorGraph->push_back(factor1); // Now predict the state at t=1, i.e.
argmax_{x1} P(x1) = P(x1|x0) P(x0) // In Kalman Filter notation, this is x_{t+1|t} and P_{t+1|t} @@ -88,14 +92,14 @@ int main() { // = (F - I)*x_{t} + B*u_{t} // = B*u_{t} (for our example) Symbol x1('x',1); - ordering->insert(x1, 1); + ordering->push_back(x1); Point2 difference(1,0); - SharedDiagonal Q = noiseModel::Diagonal::Sigmas((Vec(2) << 0.1, 0.1)); + SharedDiagonal Q = noiseModel::Diagonal::Sigmas((gtsam::Vector2() << 0.1, 0.1).finished()); BetweenFactor<Point2> factor2(x0, x1, difference, Q); // Linearize the factor and add it to the linear factor graph linearizationPoints.insert(x1, x_initial); - linearFactorGraph->push_back(factor2.linearize(linearizationPoints, *ordering)); + linearFactorGraph->push_back(factor2.linearize(linearizationPoints)); // We have now made the small factor graph f1-(x0)-f2-(x1) // where factor f1 is just the prior from time t0, P(x0) @@ -110,13 +114,13 @@ // system, the initial estimate is not important. // Solve the linear factor graph, converting it into a linear Bayes Network ( P(x0,x1) = P(x0|x1)*P(x1) ) - GaussianSequentialSolver solver0(*linearFactorGraph); - GaussianBayesNet::shared_ptr linearBayesNet = solver0.eliminate(); + GaussianBayesNet::shared_ptr bayesNet = linearFactorGraph->eliminateSequential(*ordering); + const GaussianConditional::shared_ptr& x1Conditional = bayesNet->back(); // This should be P(x1) // Extract the current estimate of x1,P1 from the Bayes Network - VectorValues result = optimize(*linearBayesNet); - Point2 x1_predict = linearizationPoints.at(x1).retract(result[ordering->at(x1)]); - x1_predict.print("X1 Predict"); + VectorValues result = bayesNet->optimize(); + Point2 x1_predict = linearizationPoints.at<Point2>(x1) + Point2(result[x1]); + traits<Point2>::Print(x1_predict, "X1 Predict"); // Update the new linearization point to the new estimate linearizationPoints.update(x1, x1_predict); @@ -139,14 +143,24 @@ // -> b'' = b' - F(dx1' - dx1'') // = || F*dx1'' - (b' - F(dx1' - dx1'')) ||^2 // = || F*dx1'' - (b' - F(x_predict - x_initial)) ||^2 - const GaussianConditional::shared_ptr& cg0 = linearBayesNet->back(); - assert(cg0->nrFrontals() == 1); - assert(cg0->nrParents() == 0); - linearFactorGraph->add(0, cg0->R(), cg0->d() - cg0->R()*result[ordering->at(x1)], noiseModel::Diagonal::Sigmas(cg0->get_sigmas(), true)); + JacobianFactor::shared_ptr newPrior(new JacobianFactor( + x1, + x1Conditional->R(), + x1Conditional->d() - x1Conditional->R() * result[x1], + x1Conditional->get_model())); - // Create the desired ordering + // Ensure correct number of rows, that there is one variable, and that variable is x1 + assert(newPrior->rows() == x1Conditional->R().rows()); + assert(newPrior->size() == 1); + assert(*newPrior->begin() == x1); + + // Create a new, empty graph and add the new prior + linearFactorGraph = GaussianFactorGraph::shared_ptr(new GaussianFactorGraph); + linearFactorGraph->push_back(newPrior); + + // Reset ordering for the next step ordering = Ordering::shared_ptr(new Ordering); - ordering->insert(x1, 0); + ordering->push_back(x1); // Now, a measurement, z1, has been received, and the Kalman Filter should be "Updated"/"Corrected" // This is equivalent to saying P(x1|z1) ~ P(z1|x1)*P(x1) ~ f3(x1)*f4(x1;z1) @@ -169,10 +183,10 @@ // = (x_{t} - z_{t}) * R^-1 * (x_{t} - z_{t})^T // This can be modeled using the PriorFactor, where the mean is z_{t} and the covariance is R.
Point2 z1(1.0, 0.0); - SharedDiagonal R1 = noiseModel::Diagonal::Sigmas((Vec(2) << 0.25, 0.25)); + SharedDiagonal R1 = noiseModel::Diagonal::Sigmas((gtsam::Vector2() << 0.25, 0.25).finished()); PriorFactor<Point2> factor4(x1, z1, R1); // Linearize the factor and add it to the linear factor graph - linearFactorGraph->push_back(factor4.linearize(linearizationPoints, *ordering)); + linearFactorGraph->push_back(factor4.linearize(linearizationPoints)); // We have now made the small factor graph f3-(x1)-f4 // where factor f3 is the prior from previous time ( P(x1) ) @@ -182,13 +196,13 @@ // We solve as before... // Solve the linear factor graph, converting it into a linear Bayes Network ( P(x0,x1) = P(x0|x1)*P(x1) ) - GaussianSequentialSolver solver1(*linearFactorGraph); - linearBayesNet = solver1.eliminate(); + GaussianBayesNet::shared_ptr updatedBayesNet = linearFactorGraph->eliminateSequential(*ordering); + const GaussianConditional::shared_ptr& updatedConditional = updatedBayesNet->back(); // Extract the current estimate of x1 from the Bayes Network - result = optimize(*linearBayesNet); - Point2 x1_update = linearizationPoints.at(x1).retract(result[ordering->at(x1)]); - x1_update.print("X1 Update"); + VectorValues updatedResult = updatedBayesNet->optimize(); + Point2 x1_update = linearizationPoints.at<Point2>(x1) + Point2(updatedResult[x1]); + traits<Point2>::Print(x1_update, "X1 Update"); // Update the linearization point to the new estimate linearizationPoints.update(x1, x1_update); @@ -205,65 +219,76 @@ // Convert the root conditional, P(x1) in this case, into a Prior for the next step // The linearization point of this prior must be moved to the new estimate of x, and the key/index needs to be reset to 0, // the first key in the next iteration - const GaussianConditional::shared_ptr& cg1 = linearBayesNet->back(); - assert(cg1->nrFrontals() == 1); - assert(cg1->nrParents() == 0); - JacobianFactor tmpPrior1 = JacobianFactor(*cg1); - linearFactorGraph->add(0, tmpPrior1.getA(tmpPrior1.begin()), tmpPrior1.getb() - tmpPrior1.getA(tmpPrior1.begin()) * result[ordering->at(x1)], tmpPrior1.get_model()); + JacobianFactor::shared_ptr updatedPrior(new JacobianFactor( + x1, + updatedConditional->R(), + updatedConditional->d() - updatedConditional->R() * updatedResult[x1], + updatedConditional->get_model())); + + // Ensure correct number of rows, that there is one variable, and that variable is x1 + assert(updatedPrior->rows() == updatedConditional->R().rows()); + assert(updatedPrior->size() == 1); + assert(*updatedPrior->begin() == x1); + + linearFactorGraph->push_back(updatedPrior); // Create a key for the new state Symbol x2('x',2); // Create the desired ordering ordering = Ordering::shared_ptr(new Ordering); - ordering->insert(x1, 0); - ordering->insert(x2, 1); - - // Create a nonlinear factor describing the motion model - difference = Point2(1,0); - Q = noiseModel::Diagonal::Sigmas((Vec(2) << 0.1, 0.1)); - BetweenFactor<Point2> factor6(x1, x2, difference, Q); + ordering->push_back(x1); + ordering->push_back(x2); + + // Create a nonlinear factor describing the motion model (moving right again) + Point2 difference2(1,0); + SharedDiagonal Q2 = noiseModel::Diagonal::Sigmas((gtsam::Vector2() << 0.1, 0.1).finished()); + BetweenFactor<Point2> factor6(x1, x2, difference2, Q2); // Linearize the factor and add it to the linear factor graph linearizationPoints.insert(x2, x1_update); - linearFactorGraph->push_back(factor6.linearize(linearizationPoints, *ordering)); +
linearFactorGraph->push_back(factor6.linearize(linearizationPoints)); // Solve the linear factor graph, converting it into a linear Bayes Network ( P(x1,x2) = P(x1|x2)*P(x2) ) - GaussianSequentialSolver solver2(*linearFactorGraph); - linearBayesNet = solver2.eliminate(); - - // Extract the current estimate of x2 from the Bayes Network - result = optimize(*linearBayesNet); - Point2 x2_predict = linearizationPoints.at(x2).retract(result[ordering->at(x2)]); - x2_predict.print("X2 Predict"); + GaussianBayesNet::shared_ptr predictionBayesNet2 = linearFactorGraph->eliminateSequential(*ordering); + const GaussianConditional::shared_ptr& x2Conditional = predictionBayesNet2->back(); + + // Extract the predicted state + VectorValues prediction2Result = predictionBayesNet2->optimize(); + Point2 x2_predict = linearizationPoints.at<Point2>(x2) + Point2(prediction2Result[x2]); + traits<Point2>::Print(x2_predict, "X2 Predict"); // Update the linearization point to the new estimate linearizationPoints.update(x2, x2_predict); - - // Now add the next measurement // Create a new, empty graph and add the prior from the previous step linearFactorGraph = GaussianFactorGraph::shared_ptr(new GaussianFactorGraph); // Convert the root conditional, P(x1) in this case, into a Prior for the next step - const GaussianConditional::shared_ptr& cg2 = linearBayesNet->back(); - assert(cg2->nrFrontals() == 1); - assert(cg2->nrParents() == 0); - JacobianFactor tmpPrior2 = JacobianFactor(*cg2); - linearFactorGraph->add(0, tmpPrior2.getA(tmpPrior2.begin()), tmpPrior2.getb() - tmpPrior2.getA(tmpPrior2.begin()) * result[ordering->at(x2)], tmpPrior2.get_model()); + JacobianFactor::shared_ptr prior2(new JacobianFactor( + x2, + x2Conditional->R(), + x2Conditional->d() - x2Conditional->R() * prediction2Result[x2], + x2Conditional->get_model())); + + assert(prior2->rows() == x2Conditional->R().rows()); + assert(prior2->size() == 1); + assert(*prior2->begin() == x2); + + linearFactorGraph->push_back(prior2); // Create the desired ordering ordering = Ordering::shared_ptr(new Ordering); - ordering->insert(x2, 0); + ordering->push_back(x2); // And update using z2 ... Point2 z2(2.0, 0.0); - SharedDiagonal R2 = noiseModel::Diagonal::Sigmas((Vec(2) << 0.25, 0.25)); + SharedDiagonal R2 = noiseModel::Diagonal::Sigmas((gtsam::Vector2() << 0.25, 0.25).finished()); PriorFactor<Point2> factor8(x2, z2, R2); // Linearize the factor and add it to the linear factor graph - linearFactorGraph->push_back(factor8.linearize(linearizationPoints, *ordering)); + linearFactorGraph->push_back(factor8.linearize(linearizationPoints)); // We have now made the small factor graph f7-(x2)-f8 // where factor f7 is the prior from previous time ( P(x2) ) @@ -273,13 +298,13 @@ // We solve as before...
// Solve the linear factor graph, converting it into a linear Bayes Network ( P(x0,x1) = P(x0|x1)*P(x1) ) - GaussianSequentialSolver solver3(*linearFactorGraph); - linearBayesNet = solver3.eliminate(); + GaussianBayesNet::shared_ptr updatedBayesNet2 = linearFactorGraph->eliminateSequential(*ordering); + const GaussianConditional::shared_ptr& updatedConditional2 = updatedBayesNet2->back(); // Extract the current estimate of x2 from the Bayes Network - result = optimize(*linearBayesNet); - Point2 x2_update = linearizationPoints.at(x2).retract(result[ordering->at(x2)]); - x2_update.print("X2 Update"); + VectorValues updatedResult2 = updatedBayesNet2->optimize(); + Point2 x2_update = linearizationPoints.at<Point2>(x2) + Point2(updatedResult2[x2]); + traits<Point2>::Print(x2_update, "X2 Update"); // Update the linearization point to the new estimate linearizationPoints.update(x2, x2_update); @@ -294,37 +319,41 @@ linearFactorGraph = GaussianFactorGraph::shared_ptr(new GaussianFactorGraph); // Convert the root conditional, P(x1) in this case, into a Prior for the next step - const GaussianConditional::shared_ptr& cg3 = linearBayesNet->back(); - assert(cg3->nrFrontals() == 1); - assert(cg3->nrParents() == 0); - JacobianFactor tmpPrior3 = JacobianFactor(*cg3); - linearFactorGraph->add(0, tmpPrior3.getA(tmpPrior3.begin()), tmpPrior3.getb() - tmpPrior3.getA(tmpPrior3.begin()) * result[ordering->at(x2)], tmpPrior3.get_model()); + Matrix updatedR2 = updatedConditional2->R(); + Vector updatedD2 = updatedConditional2->d() - updatedR2 * updatedResult2[x2]; + JacobianFactor::shared_ptr updatedPrior2(new JacobianFactor( + x2, + updatedR2, + updatedD2, + updatedConditional2->get_model())); + + linearFactorGraph->push_back(updatedPrior2); // Create a key for the new state Symbol x3('x',3); // Create the desired ordering ordering = Ordering::shared_ptr(new Ordering); - ordering->insert(x2, 0); - ordering->insert(x3, 1); + ordering->push_back(x2); + ordering->push_back(x3); // Create a nonlinear factor describing the motion model - difference = Point2(1,0); - Q = noiseModel::Diagonal::Sigmas((Vec(2) << 0.1, 0.1)); - BetweenFactor<Point2> factor10(x2, x3, difference, Q); + Point2 difference3(1,0); + SharedDiagonal Q3 = noiseModel::Diagonal::Sigmas((gtsam::Vector2() << 0.1, 0.1).finished()); + BetweenFactor<Point2> factor10(x2, x3, difference3, Q3); // Linearize the factor and add it to the linear factor graph linearizationPoints.insert(x3, x2_update); - linearFactorGraph->push_back(factor10.linearize(linearizationPoints, *ordering)); + linearFactorGraph->push_back(factor10.linearize(linearizationPoints)); // Solve the linear factor graph, converting it into a linear Bayes Network ( P(x1,x2) = P(x1|x2)*P(x2) ) - GaussianSequentialSolver solver4(*linearFactorGraph); - linearBayesNet = solver4.eliminate(); + GaussianBayesNet::shared_ptr predictionBayesNet3 = linearFactorGraph->eliminateSequential(*ordering); + const GaussianConditional::shared_ptr& x3Conditional = predictionBayesNet3->back(); // Extract the current estimate of x3 from the Bayes Network - result = optimize(*linearBayesNet); - Point2 x3_predict = linearizationPoints.at(x3).retract(result[ordering->at(x3)]); - x3_predict.print("X3 Predict"); + VectorValues prediction3Result = predictionBayesNet3->optimize(); + Point2 x3_predict = linearizationPoints.at<Point2>(x3) + Point2(prediction3Result[x3]); + traits<Point2>::Print(x3_predict, "X3 Predict"); // Update the linearization point to the new estimate linearizationPoints.update(x3, x3_predict); @@ -336,23 +365,25 @@
linearFactorGraph = GaussianFactorGraph::shared_ptr(new GaussianFactorGraph); // Convert the root conditional, P(x1) in this case, into a Prior for the next step - const GaussianConditional::shared_ptr& cg4 = linearBayesNet->back(); - assert(cg4->nrFrontals() == 1); - assert(cg4->nrParents() == 0); - JacobianFactor tmpPrior4 = JacobianFactor(*cg4); - linearFactorGraph->add(0, tmpPrior4.getA(tmpPrior4.begin()), tmpPrior4.getb() - tmpPrior4.getA(tmpPrior4.begin()) * result[ordering->at(x3)], tmpPrior4.get_model()); + JacobianFactor::shared_ptr prior3(new JacobianFactor( + x3, + x3Conditional->R(), + x3Conditional->d() - x3Conditional->R() * prediction3Result[x3], + x3Conditional->get_model())); + linearFactorGraph->push_back(prior3); + // Create the desired ordering ordering = Ordering::shared_ptr(new Ordering); - ordering->insert(x3, 0); + ordering->push_back(x3); // And update using z3 ... Point2 z3(3.0, 0.0); - SharedDiagonal R3 = noiseModel::Diagonal::Sigmas((Vec(2) << 0.25, 0.25)); + SharedDiagonal R3 = noiseModel::Diagonal::Sigmas((gtsam::Vector2() << 0.25, 0.25).finished()); PriorFactor<Point2> factor12(x3, z3, R3); // Linearize the factor and add it to the linear factor graph - linearFactorGraph->push_back(factor12.linearize(linearizationPoints, *ordering)); + linearFactorGraph->push_back(factor12.linearize(linearizationPoints)); // We have now made the small factor graph f11-(x3)-f12 // where factor f11 is the prior from previous time ( P(x3) ) @@ -362,13 +393,13 @@ // We solve as before... // Solve the linear factor graph, converting it into a linear Bayes Network ( P(x0,x1) = P(x0|x1)*P(x1) ) - GaussianSequentialSolver solver5(*linearFactorGraph); - linearBayesNet = solver5.eliminate(); + GaussianBayesNet::shared_ptr updatedBayesNet3 = linearFactorGraph->eliminateSequential(*ordering); + const GaussianConditional::shared_ptr& updatedConditional3 = updatedBayesNet3->back(); // Extract the current estimate of x2 from the Bayes Network - result = optimize(*linearBayesNet); - Point2 x3_update = linearizationPoints.at(x3).retract(result[ordering->at(x3)]); - x3_update.print("X3 Update"); + VectorValues updatedResult3 = updatedBayesNet3->optimize(); + Point2 x3_update = linearizationPoints.at<Point2>(x3) + Point2(updatedResult3[x3]); + traits<Point2>::Print(x3_update, "X3 Update"); // Update the linearization point to the new estimate linearizationPoints.update(x3, x3_update); From 53b1ce388535270f2bd06135b9e1edc86e81329b Mon Sep 17 00:00:00 2001 From: mkielo3 Date: Thu, 20 Feb 2025 22:12:46 +0000 Subject: [PATCH 2/4] Addressed PR feedback: applied suggested improvements --- examples/elaboratePoint2KalmanFilter.cpp | 69 ++++++++++++------------ gtsam/nonlinear/nonlinear.i | 8 +++ 2 files changed, 44 insertions(+), 33 deletions(-) diff --git a/examples/elaboratePoint2KalmanFilter.cpp b/examples/elaboratePoint2KalmanFilter.cpp index 426280599..5fc73f38e 100644 --- a/examples/elaboratePoint2KalmanFilter.cpp +++ b/examples/elaboratePoint2KalmanFilter.cpp @@ -33,6 +33,7 @@ using namespace std; using namespace gtsam; +using symbol_shorthand::X; int main() { @@ -54,18 +55,20 @@ int main() { // i.e., we should get 0,0 0,1 0,2 if there is no noise // Create new state variable - Symbol x0('x',0); + Key x0 = X(0); ordering->push_back(x0); // Initialize state x0 (2D point) at origin by adding a prior factor, i.e., Bayes net P(x0) // This is equivalent to x_0 and P_0 Point2 x_initial(0,0); - SharedDiagonal P_initial = noiseModel::Diagonal::Sigmas((gtsam::Vector2() << 0.1, 0.1).finished()); + SharedDiagonal
P_initial = noiseModel::Isotropic::Sigma(2, 0.1); // Create a JacobianFactor directly - this represents the prior constraint on x0 - JacobianFactor::shared_ptr factor1( - new JacobianFactor(x0, P_initial->R(), Vector::Zero(2), - noiseModel::Unit::Create(2))); + auto factor1 = std::make_shared<JacobianFactor>( + x0, + P_initial->R(), + Vector::Zero(2), + noiseModel::Unit::Create(2)); // Linearize the factor and add it to the linear factor graph linearizationPoints.insert(x0, x_initial); @@ -91,11 +94,11 @@ // so, difference = x_{t+1} - x_{t} = F*x_{t} + B*u_{t} - I*x_{t} // = (F - I)*x_{t} + B*u_{t} // = B*u_{t} (for our example) - Symbol x1('x',1); + Key x1 = X(1); ordering->push_back(x1); Point2 difference(1,0); - SharedDiagonal Q = noiseModel::Diagonal::Sigmas((gtsam::Vector2() << 0.1, 0.1).finished()); + SharedDiagonal Q = noiseModel::Isotropic::Sigma(2, 0.1); BetweenFactor<Point2> factor2(x0, x1, difference, Q); // Linearize the factor and add it to the linear factor graph linearizationPoints.insert(x1, x_initial); @@ -119,7 +122,7 @@ // Extract the current estimate of x1,P1 from the Bayes Network VectorValues result = bayesNet->optimize(); - Point2 x1_predict = linearizationPoints.at<Point2>(x1) + Point2(result[x1]); + Point2 x1_predict = linearizationPoints.at<Point2>(x1) + result[x1]; traits<Point2>::Print(x1_predict, "X1 Predict"); // Update the new linearization point to the new estimate @@ -143,11 +146,11 @@ // -> b'' = b' - F(dx1' - dx1'') // = || F*dx1'' - (b' - F(dx1' - dx1'')) ||^2 // = || F*dx1'' - (b' - F(x_predict - x_initial)) ||^2 - JacobianFactor::shared_ptr newPrior(new JacobianFactor( + auto newPrior = std::make_shared<JacobianFactor>( x1, x1Conditional->R(), x1Conditional->d() - x1Conditional->R() * result[x1], - x1Conditional->get_model())); + x1Conditional->get_model()); // Ensure correct number of rows, that there is one variable, and that variable is x1 assert(newPrior->rows() == x1Conditional->R().rows()); assert(newPrior->size() == 1); assert(*newPrior->begin() == x1); @@ -183,7 +186,7 @@ // = (x_{t} - z_{t}) * R^-1 * (x_{t} - z_{t})^T // This can be modeled using the PriorFactor, where the mean is z_{t} and the covariance is R.
Point2 z1(1.0, 0.0); - SharedDiagonal R1 = noiseModel::Diagonal::Sigmas((gtsam::Vector2() << 0.25, 0.25).finished()); + SharedDiagonal R1 = noiseModel::Isotropic::Sigma(2, 0.25); PriorFactor<Point2> factor4(x1, z1, R1); // Linearize the factor and add it to the linear factor graph linearFactorGraph->push_back(factor4.linearize(linearizationPoints)); @@ -201,7 +204,7 @@ // Extract the current estimate of x1 from the Bayes Network VectorValues updatedResult = updatedBayesNet->optimize(); - Point2 x1_update = linearizationPoints.at<Point2>(x1) + Point2(updatedResult[x1]); + Point2 x1_update = linearizationPoints.at<Point2>(x1) + updatedResult[x1]; traits<Point2>::Print(x1_update, "X1 Update"); // Update the linearization point to the new estimate @@ -219,11 +222,11 @@ // Convert the root conditional, P(x1) in this case, into a Prior for the next step // The linearization point of this prior must be moved to the new estimate of x, and the key/index needs to be reset to 0, // the first key in the next iteration - JacobianFactor::shared_ptr updatedPrior(new JacobianFactor( + auto updatedPrior = std::make_shared<JacobianFactor>( x1, updatedConditional->R(), updatedConditional->d() - updatedConditional->R() * updatedResult[x1], - updatedConditional->get_model())); + updatedConditional->get_model()); // Ensure correct number of rows, that there is one variable, and that variable is x1 assert(updatedPrior->rows() == updatedConditional->R().rows()); @@ -233,7 +236,7 @@ linearFactorGraph->push_back(updatedPrior); // Create a key for the new state - Symbol x2('x',2); + Key x2 = X(2); // Create the desired ordering ordering = Ordering::shared_ptr(new Ordering); @@ -242,7 +245,7 @@ // Create a nonlinear factor describing the motion model (moving right again) Point2 difference2(1,0); - SharedDiagonal Q2 = noiseModel::Diagonal::Sigmas((gtsam::Vector2() << 0.1, 0.1).finished()); + SharedDiagonal Q2 = noiseModel::Isotropic::Sigma(2, 0.1); BetweenFactor<Point2> factor6(x1, x2, difference2, Q2); // Linearize the factor and add it to the linear factor graph @@ -255,7 +258,7 @@ // Extract the predicted state VectorValues prediction2Result = predictionBayesNet2->optimize(); - Point2 x2_predict = linearizationPoints.at<Point2>(x2) + Point2(prediction2Result[x2]); + Point2 x2_predict = linearizationPoints.at<Point2>(x2) + prediction2Result[x2]; traits<Point2>::Print(x2_predict, "X2 Predict"); // Update the linearization point to the new estimate @@ -266,11 +269,11 @@ linearFactorGraph = GaussianFactorGraph::shared_ptr(new GaussianFactorGraph); // Convert the root conditional, P(x1) in this case, into a Prior for the next step - JacobianFactor::shared_ptr prior2(new JacobianFactor( + auto prior2 = std::make_shared<JacobianFactor>( x2, x2Conditional->R(), - x2Conditional->d() - x2Conditional->R() * prediction2Result[x2], - x2Conditional->get_model())); + x2Conditional->d() - x2Conditional->R() * prediction2Result[x2], + x2Conditional->get_model()); assert(prior2->rows() == x2Conditional->R().rows()); assert(prior2->size() == 1); assert(*prior2->begin() == x2); @@ -303,7 +306,7 @@ // Extract the current estimate of x2 from the Bayes Network VectorValues updatedResult2 = updatedBayesNet2->optimize(); - Point2 x2_update = linearizationPoints.at<Point2>(x2) + Point2(updatedResult2[x2]); + Point2 x2_update = linearizationPoints.at<Point2>(x2) + updatedResult2[x2]; traits<Point2>::Print(x2_update, "X2 Update"); // Update the linearization point to the new estimate @@ -321,16 +324,16 @@ // Convert the root conditional, P(x1) in this case, into a Prior for the next step
Matrix updatedR2 = updatedConditional2->R(); Vector updatedD2 = updatedConditional2->d() - updatedR2 * updatedResult2[x2]; - JacobianFactor::shared_ptr updatedPrior2(new JacobianFactor( + auto updatedPrior2 = std::make_shared<JacobianFactor>( x2, updatedR2, updatedD2, - updatedConditional2->get_model())); + updatedConditional2->get_model()); linearFactorGraph->push_back(updatedPrior2); // Create a key for the new state - Symbol x3('x',3); + Key x3 = X(3); // Create the desired ordering ordering = Ordering::shared_ptr(new Ordering); @@ -339,7 +342,7 @@ // Create a nonlinear factor describing the motion model Point2 difference3(1,0); - SharedDiagonal Q3 = noiseModel::Diagonal::Sigmas((gtsam::Vector2() << 0.1, 0.1).finished()); + SharedDiagonal Q3 = noiseModel::Isotropic::Sigma(2, 0.1); BetweenFactor<Point2> factor10(x2, x3, difference3, Q3); // Linearize the factor and add it to the linear factor graph @@ -352,7 +355,7 @@ // Extract the current estimate of x3 from the Bayes Network VectorValues prediction3Result = predictionBayesNet3->optimize(); - Point2 x3_predict = linearizationPoints.at<Point2>(x3) + Point2(prediction3Result[x3]); + Point2 x3_predict = linearizationPoints.at<Point2>(x3) + prediction3Result[x3]; traits<Point2>::Print(x3_predict, "X3 Predict"); // Update the linearization point to the new estimate @@ -365,11 +368,11 @@ linearFactorGraph = GaussianFactorGraph::shared_ptr(new GaussianFactorGraph); // Convert the root conditional, P(x1) in this case, into a Prior for the next step - JacobianFactor::shared_ptr prior3(new JacobianFactor( - x3, - x3Conditional->R(), - x3Conditional->d() - x3Conditional->R() * prediction3Result[x3], - x3Conditional->get_model())); + auto prior3 = std::make_shared<JacobianFactor>( + x3, + x3Conditional->R(), + x3Conditional->d() - x3Conditional->R() * prediction3Result[x3], + x3Conditional->get_model()); linearFactorGraph->push_back(prior3); @@ -379,7 +382,7 @@ // And update using z3 ...
Point2 z3(3.0, 0.0); - SharedDiagonal R3 = noiseModel::Diagonal::Sigmas((gtsam::Vector2() << 0.25, 0.25).finished()); + SharedDiagonal R3 = noiseModel::Isotropic::Sigma(2, 0.25); PriorFactor<Point2> factor12(x3, z3, R3); // Linearize the factor and add it to the linear factor graph @@ -398,7 +401,7 @@ // Extract the current estimate of x2 from the Bayes Network VectorValues updatedResult3 = updatedBayesNet3->optimize(); - Point2 x3_update = linearizationPoints.at<Point2>(x3) + Point2(updatedResult3[x3]); + Point2 x3_update = linearizationPoints.at<Point2>(x3) + updatedResult3[x3]; traits<Point2>::Print(x3_update, "X3 Update"); // Update the linearization point to the new estimate diff --git a/gtsam/nonlinear/nonlinear.i b/gtsam/nonlinear/nonlinear.i index b684b1aad..38e07808d 100644 --- a/gtsam/nonlinear/nonlinear.i +++ b/gtsam/nonlinear/nonlinear.i @@ -725,5 +725,13 @@ virtual class BatchFixedLagSmoother : gtsam::FixedLagSmoother { VALUE calculateEstimate(size_t key) const; }; +#include +template +class ExtendedKalmanFilter { + ExtendedKalmanFilter(size_t key_initial, T x_initial, gtsam::noiseModel::Gaussian* P_initial); + T predict(const gtsam::NoiseModelFactor& motionFactor); + T update(const gtsam::NoiseModelFactor& measurementFactor); + gtsam::JacobianFactor* Density() const; +}; } // namespace gtsam From 2e194501d5bb1acd0b3aae008bed2facc04989c8 Mon Sep 17 00:00:00 2001 From: mkielo3 Date: Thu, 20 Feb 2025 22:17:55 +0000 Subject: [PATCH 3/4] Reverted unintended changes to nonlinear.i --- gtsam/nonlinear/nonlinear.i | 8 -------- 1 file changed, 8 deletions(-) diff --git a/gtsam/nonlinear/nonlinear.i b/gtsam/nonlinear/nonlinear.i index 38e07808d..b684b1aad 100644 --- a/gtsam/nonlinear/nonlinear.i +++ b/gtsam/nonlinear/nonlinear.i @@ -725,13 +725,5 @@ virtual class BatchFixedLagSmoother : gtsam::FixedLagSmoother { VALUE calculateEstimate(size_t key) const; }; -#include -template -class ExtendedKalmanFilter { - ExtendedKalmanFilter(size_t key_initial, T x_initial, gtsam::noiseModel::Gaussian* P_initial); - T predict(const gtsam::NoiseModelFactor& motionFactor); - T update(const gtsam::NoiseModelFactor& measurementFactor); - gtsam::JacobianFactor* Density() const; -}; } // namespace gtsam From b6a1af03e81bf7d067941dc573e423830231e0bf Mon Sep 17 00:00:00 2001 From: mkielo3 Date: Thu, 20 Feb 2025 23:25:00 +0000 Subject: [PATCH 4/4] Removed make_shared & x variables --- examples/elaboratePoint2KalmanFilter.cpp | 139 +++++++++-------------- 1 file changed, 51 insertions(+), 88 deletions(-) diff --git a/examples/elaboratePoint2KalmanFilter.cpp b/examples/elaboratePoint2KalmanFilter.cpp index 5fc73f38e..881048fe1 100644 --- a/examples/elaboratePoint2KalmanFilter.cpp +++ b/examples/elaboratePoint2KalmanFilter.cpp @@ -55,24 +55,19 @@ int main() { // i.e., we should get 0,0 0,1 0,2 if there is no noise // Create new state variable - Key x0 = X(0); - ordering->push_back(x0); + ordering->push_back(X(0)); // Initialize state x0 (2D point) at origin by adding a prior factor, i.e., Bayes net P(x0) // This is equivalent to x_0 and P_0 Point2 x_initial(0,0); SharedDiagonal P_initial = noiseModel::Isotropic::Sigma(2, 0.1); - // Create a JacobianFactor directly - this represents the prior constraint on x0 - auto factor1 = std::make_shared<JacobianFactor>( - x0, - P_initial->R(), - Vector::Zero(2), - noiseModel::Unit::Create(2)); - - // Linearize the factor and add it to the linear factor graph + // Linearize the factor and add it to the linear factor graph + linearizationPoints.insert(X(0), x_initial); + linearFactorGraph->add(X(0), P_initial->R(), Vector::Zero(2), noiseModel::Unit::Create(2)); -
linearizationPoints.insert(x0, x_initial); - linearFactorGraph->push_back(factor1); // Now predict the state at t=1, i.e. argmax_{x1} P(x1) = P(x1|x0) P(x0) // In Kalman Filter notation, this is x_{t+1|t} and P_{t+1|t} @@ -94,14 +89,13 @@ // so, difference = x_{t+1} - x_{t} = F*x_{t} + B*u_{t} - I*x_{t} // = (F - I)*x_{t} + B*u_{t} // = B*u_{t} (for our example) - Key x1 = X(1); - ordering->push_back(x1); + ordering->push_back(X(1)); Point2 difference(1,0); SharedDiagonal Q = noiseModel::Isotropic::Sigma(2, 0.1); - BetweenFactor<Point2> factor2(x0, x1, difference, Q); + BetweenFactor<Point2> factor2(X(0), X(1), difference, Q); // Linearize the factor and add it to the linear factor graph - linearizationPoints.insert(x1, x_initial); + linearizationPoints.insert(X(1), x_initial); linearFactorGraph->push_back(factor2.linearize(linearizationPoints)); // We have now made the small factor graph f1-(x0)-f2-(x1) @@ -122,11 +116,11 @@ // Extract the current estimate of x1,P1 from the Bayes Network VectorValues result = bayesNet->optimize(); - Point2 x1_predict = linearizationPoints.at<Point2>(x1) + result[x1]; + Point2 x1_predict = linearizationPoints.at<Point2>(X(1)) + result[X(1)]; traits<Point2>::Print(x1_predict, "X1 Predict"); // Update the new linearization point to the new estimate - linearizationPoints.update(x1, x1_predict); + linearizationPoints.update(X(1), x1_predict); @@ -146,24 +140,18 @@ // -> b'' = b' - F(dx1' - dx1'') // = || F*dx1'' - (b' - F(dx1' - dx1'')) ||^2 // = || F*dx1'' - (b' - F(x_predict - x_initial)) ||^2 - auto newPrior = std::make_shared<JacobianFactor>( - x1, - x1Conditional->R(), - x1Conditional->d() - x1Conditional->R() * result[x1], - x1Conditional->get_model()); - - // Ensure correct number of rows, that there is one variable, and that variable is x1 - assert(newPrior->rows() == x1Conditional->R().rows()); - assert(newPrior->size() == 1); - assert(*newPrior->begin() == x1); // Create a new, empty graph and add the new prior linearFactorGraph = GaussianFactorGraph::shared_ptr(new GaussianFactorGraph); - linearFactorGraph->push_back(newPrior); + linearFactorGraph->add( + X(1), + x1Conditional->R(), + x1Conditional->d() - x1Conditional->R() * result[X(1)], + x1Conditional->get_model()); // Reset ordering for the next step ordering = Ordering::shared_ptr(new Ordering); - ordering->push_back(x1); + ordering->push_back(X(1)); // Now, a measurement, z1, has been received, and the Kalman Filter should be "Updated"/"Corrected" // This is equivalent to saying P(x1|z1) ~ P(z1|x1)*P(x1) ~ f3(x1)*f4(x1;z1) @@ -187,7 +175,7 @@ // = (x_{t} - z_{t}) * R^-1 * (x_{t} - z_{t})^T // This can be modeled using the PriorFactor, where the mean is z_{t} and the covariance is R.
Point2 z1(1.0, 0.0); SharedDiagonal R1 = noiseModel::Isotropic::Sigma(2, 0.25); - PriorFactor<Point2> factor4(x1, z1, R1); + PriorFactor<Point2> factor4(X(1), z1, R1); // Linearize the factor and add it to the linear factor graph linearFactorGraph->push_back(factor4.linearize(linearizationPoints)); @@ -204,11 +192,11 @@ // Extract the current estimate of x1 from the Bayes Network VectorValues updatedResult = updatedBayesNet->optimize(); - Point2 x1_update = linearizationPoints.at<Point2>(x1) + updatedResult[x1]; + Point2 x1_update = linearizationPoints.at<Point2>(X(1)) + updatedResult[X(1)]; traits<Point2>::Print(x1_update, "X1 Update"); // Update the linearization point to the new estimate - linearizationPoints.update(x1, x1_update); + linearizationPoints.update(X(1), x1_update); @@ -222,34 +210,24 @@ // Convert the root conditional, P(x1) in this case, into a Prior for the next step // The linearization point of this prior must be moved to the new estimate of x, and the key/index needs to be reset to 0, // the first key in the next iteration - auto updatedPrior = std::make_shared<JacobianFactor>( - x1, + linearFactorGraph->add( + X(1), updatedConditional->R(), - updatedConditional->d() - updatedConditional->R() * updatedResult[x1], + updatedConditional->d() - updatedConditional->R() * updatedResult[X(1)], updatedConditional->get_model()); - - // Ensure correct number of rows, that there is one variable, and that variable is x1 - assert(updatedPrior->rows() == updatedConditional->R().rows()); - assert(updatedPrior->size() == 1); - assert(*updatedPrior->begin() == x1); - - linearFactorGraph->push_back(updatedPrior); - - // Create a key for the new state - Key x2 = X(2); // Create the desired ordering ordering = Ordering::shared_ptr(new Ordering); - ordering->push_back(x1); - ordering->push_back(x2); + ordering->push_back(X(1)); + ordering->push_back(X(2)); // Create a nonlinear factor describing the motion model (moving right again) Point2 difference2(1,0); SharedDiagonal Q2 = noiseModel::Isotropic::Sigma(2, 0.1); - BetweenFactor<Point2> factor6(x1, x2, difference2, Q2); + BetweenFactor<Point2> factor6(X(1), X(2), difference2, Q2); // Linearize the factor and add it to the linear factor graph - linearizationPoints.insert(x2, x1_update); + linearizationPoints.insert(X(2), x1_update); linearFactorGraph->push_back(factor6.linearize(linearizationPoints)); // Solve the linear factor graph, converting it into a linear Bayes Network ( P(x1,x2) = P(x1|x2)*P(x2) ) @@ -258,37 +236,31 @@ // Extract the predicted state VectorValues prediction2Result = predictionBayesNet2->optimize(); - Point2 x2_predict = linearizationPoints.at<Point2>(x2) + prediction2Result[x2]; + Point2 x2_predict = linearizationPoints.at<Point2>(X(2)) + prediction2Result[X(2)]; traits<Point2>::Print(x2_predict, "X2 Predict"); // Update the linearization point to the new estimate - linearizationPoints.update(x2, x2_predict); + linearizationPoints.update(X(2), x2_predict); // Now add the next measurement // Create a new, empty graph and add the prior from the previous step linearFactorGraph = GaussianFactorGraph::shared_ptr(new GaussianFactorGraph); // Convert the root conditional, P(x1) in this case, into a Prior for the next step - auto prior2 = std::make_shared<JacobianFactor>( - x2, + linearFactorGraph->add( + X(2), x2Conditional->R(), - x2Conditional->d() - x2Conditional->R() * prediction2Result[x2], + x2Conditional->d() - x2Conditional->R() * prediction2Result[X(2)], x2Conditional->get_model()); - assert(prior2->rows() == x2Conditional->R().rows()); - assert(prior2->size() == 1); -
assert(*prior2->begin() == x2); - - linearFactorGraph->push_back(prior2); - // Create the desired ordering ordering = Ordering::shared_ptr(new Ordering); - ordering->push_back(x2); + ordering->push_back(X(2)); // And update using z2 ... Point2 z2(2.0, 0.0); SharedDiagonal R2 = noiseModel::Diagonal::Sigmas((gtsam::Vector2() << 0.25, 0.25).finished()); - PriorFactor<Point2> factor8(x2, z2, R2); + PriorFactor<Point2> factor8(X(2), z2, R2); // Linearize the factor and add it to the linear factor graph linearFactorGraph->push_back(factor8.linearize(linearizationPoints)); @@ -306,11 +278,11 @@ // Extract the current estimate of x2 from the Bayes Network VectorValues updatedResult2 = updatedBayesNet2->optimize(); - Point2 x2_update = linearizationPoints.at<Point2>(x2) + updatedResult2[x2]; + Point2 x2_update = linearizationPoints.at<Point2>(X(2)) + updatedResult2[X(2)]; traits<Point2>::Print(x2_update, "X2 Update"); // Update the linearization point to the new estimate - linearizationPoints.update(x2, x2_update); + linearizationPoints.update(X(2), x2_update); @@ -322,31 +294,24 @@ linearFactorGraph = GaussianFactorGraph::shared_ptr(new GaussianFactorGraph); // Convert the root conditional, P(x1) in this case, into a Prior for the next step - Matrix updatedR2 = updatedConditional2->R(); - Vector updatedD2 = updatedConditional2->d() - updatedR2 * updatedResult2[x2]; - auto updatedPrior2 = std::make_shared<JacobianFactor>( - x2, - updatedR2, - updatedD2, + linearFactorGraph->add( + X(2), + updatedConditional2->R(), + updatedConditional2->d() - updatedConditional2->R() * updatedResult2[X(2)], updatedConditional2->get_model()); - linearFactorGraph->push_back(updatedPrior2); - - // Create a key for the new state - Key x3 = X(3); - // Create the desired ordering ordering = Ordering::shared_ptr(new Ordering); - ordering->push_back(x2); - ordering->push_back(x3); + ordering->push_back(X(2)); + ordering->push_back(X(3)); // Create a nonlinear factor describing the motion model Point2 difference3(1,0); SharedDiagonal Q3 = noiseModel::Isotropic::Sigma(2, 0.1); - BetweenFactor<Point2> factor10(x2, x3, difference3, Q3); + BetweenFactor<Point2> factor10(X(2), X(3), difference3, Q3); // Linearize the factor and add it to the linear factor graph - linearizationPoints.insert(x3, x2_update); + linearizationPoints.insert(X(3), x2_update); linearFactorGraph->push_back(factor10.linearize(linearizationPoints)); // Solve the linear factor graph, converting it into a linear Bayes Network ( P(x1,x2) = P(x1|x2)*P(x2) ) @@ -355,11 +320,11 @@ // Extract the current estimate of x3 from the Bayes Network VectorValues prediction3Result = predictionBayesNet3->optimize(); - Point2 x3_predict = linearizationPoints.at<Point2>(x3) + prediction3Result[x3]; + Point2 x3_predict = linearizationPoints.at<Point2>(X(3)) + prediction3Result[X(3)]; traits<Point2>::Print(x3_predict, "X3 Predict"); // Update the linearization point to the new estimate - linearizationPoints.update(x3, x3_predict); + linearizationPoints.update(X(3), x3_predict); @@ -368,22 +333,20 @@ linearFactorGraph = GaussianFactorGraph::shared_ptr(new GaussianFactorGraph); // Convert the root conditional, P(x1) in this case, into a Prior for the next step - auto prior3 = std::make_shared<JacobianFactor>( - x3, + linearFactorGraph->add( + X(3), x3Conditional->R(), - x3Conditional->d() - x3Conditional->R() * prediction3Result[x3], + x3Conditional->d() - x3Conditional->R() * prediction3Result[X(3)], x3Conditional->get_model()); - - linearFactorGraph->push_back(prior3); // Create the desired ordering ordering = Ordering::shared_ptr(new
Ordering); - ordering->push_back(x3); + ordering->push_back(X(3)); // And update using z3 ... Point2 z3(3.0, 0.0); SharedDiagonal R3 = noiseModel::Isotropic::Sigma(2, 0.25); - PriorFactor<Point2> factor12(x3, z3, R3); + PriorFactor<Point2> factor12(X(3), z3, R3); // Linearize the factor and add it to the linear factor graph linearFactorGraph->push_back(factor12.linearize(linearizationPoints)); @@ -401,7 +364,7 @@ // Extract the current estimate of x2 from the Bayes Network VectorValues updatedResult3 = updatedBayesNet3->optimize(); - Point2 x3_update = linearizationPoints.at<Point2>(x3) + updatedResult3[x3]; + Point2 x3_update = linearizationPoints.at<Point2>(X(3)) + updatedResult3[X(3)]; traits<Point2>::Print(x3_update, "X3 Update"); // Update the linearization point to the new estimate - linearizationPoints.update(x3, x3_update); + linearizationPoints.update(X(3), x3_update);
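For reviewers comparing the two examples: the predict/update cycle that elaboratePoint2KalmanFilter.cpp spells out three times by hand (push a prior, linearize a BetweenFactor, eliminateSequential, rebuild the prior from the resulting conditional, repeat) is exactly the loop that the ExtendedKalmanFilter class used by easyPoint2KalmanFilter.cpp encapsulates, per the predict/update API listed in the nonlinear.i block that patch 2 added and patch 3 reverted. Below is a minimal sketch of the same scenario driven through that wrapper; it is illustrative only and not part of this patch series, and the header path is an assumption.

// Sketch (not in the patches): the same three steps via ExtendedKalmanFilter<Point2>.
#include <gtsam/geometry/Point2.h>
#include <gtsam/inference/Symbol.h>
#include <gtsam/nonlinear/ExtendedKalmanFilter.h>  // assumed header location
#include <gtsam/nonlinear/PriorFactor.h>
#include <gtsam/slam/BetweenFactor.h>

using namespace gtsam;
using symbol_shorthand::X;

int main() {
  // Same initial state and noise models as the elaborate example
  Point2 x_initial(0.0, 0.0);
  SharedDiagonal P_initial = noiseModel::Isotropic::Sigma(2, 0.1);
  ExtendedKalmanFilter<Point2> ekf(X(0), x_initial, P_initial);

  SharedDiagonal Q = noiseModel::Isotropic::Sigma(2, 0.1);   // process noise
  SharedDiagonal R = noiseModel::Isotropic::Sigma(2, 0.25);  // measurement noise
  Point2 difference(1.0, 0.0);  // control input: move right one unit per step

  for (int t = 1; t <= 3; ++t) {
    // Predict: motion model x_t = x_{t-1} + u_t, expressed as a BetweenFactor
    BetweenFactor<Point2> motion(X(t - 1), X(t), difference, Q);
    Point2 x_predict = ekf.predict(motion);
    traits<Point2>::Print(x_predict, "Predict");

    // Update: measurement z_t = (t, 0), expressed as a PriorFactor on x_t
    PriorFactor<Point2> measurement(X(t), Point2(t, 0.0), R);
    Point2 x_update = ekf.update(measurement);
    traits<Point2>::Print(x_update, "Update");
  }
  return 0;
}

With noiseless inputs this should print the same sequence the elaborate example produces by hand, (1,0), (2,0), (3,0), which is a quick way to sanity-check the two code paths against each other.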