From 1565833c2c46b961680270b726cf0d5db85ed429 Mon Sep 17 00:00:00 2001
From: Richard Roberts
Date: Mon, 6 Aug 2012 21:42:26 +0000
Subject: [PATCH] More consistent order of private/protected/public - typedefs, private/protected variables, public interface, private/protected functions

---
 gtsam/base/DerivedValue.h                     |  7 +-
 gtsam/inference/BayesTree.h                   | 92 +++++++++----------
 gtsam/inference/EliminationTree.h             | 58 ++++++------
 gtsam/inference/Factor.h                      | 17 ++--
 gtsam/linear/JacobianFactor.h                 |  8 +-
 gtsam/nonlinear/DoglegOptimizer.h             |  9 +-
 gtsam/nonlinear/GaussNewtonOptimizer.h        |  9 +-
 gtsam/nonlinear/LevenbergMarquardtOptimizer.h | 53 +++++------
 gtsam/nonlinear/Marginals.h                   | 12 ++-
 gtsam/nonlinear/NonlinearOptimizer.h          | 28 +++---
 gtsam/nonlinear/Values.h                      |  3 -
 11 files changed, 139 insertions(+), 157 deletions(-)

diff --git a/gtsam/base/DerivedValue.h b/gtsam/base/DerivedValue.h
index 4f6dae9c7..52420ee93 100644
--- a/gtsam/base/DerivedValue.h
+++ b/gtsam/base/DerivedValue.h
@@ -24,9 +24,6 @@ namespace gtsam {
 template<class DERIVED>
 class DerivedValue : public Value {
-private:
-  /// Fake Tag struct for singleton pool allocator. In fact, it is never used!
-  struct PoolTag { };
 protected:

   DerivedValue() {}
@@ -119,6 +116,10 @@ protected:
     return *this;
   }

+private:
+  /// Fake Tag struct for singleton pool allocator. In fact, it is never used!
+  struct PoolTag { };
+
 };

 } /* namespace gtsam */

diff --git a/gtsam/inference/BayesTree.h b/gtsam/inference/BayesTree.h
index 34e5c0c45..8b6de3fe6 100644
--- a/gtsam/inference/BayesTree.h
+++ b/gtsam/inference/BayesTree.h
@@ -94,51 +94,6 @@ namespace gtsam {
     /** Map from keys to Clique */
     typedef std::deque<sharedClique> Nodes;

-  protected:
-
-    /** Map from keys to Clique */
-    Nodes nodes_;
-
-    /** private helper method for saving the Tree to a text file in GraphViz format */
-    void saveGraph(std::ostream &s, sharedClique clique, const IndexFormatter& indexFormatter,
-        int parentnum = 0) const;
-
-    /** Gather data on a single clique */
-    void getCliqueData(CliqueData& stats, sharedClique clique) const;
-
-    /** Root clique */
-    sharedClique root_;
-
-    /** remove a clique: warning, can result in a forest */
-    void removeClique(sharedClique clique);
-
-    /** add a clique (top down) */
-    sharedClique addClique(const sharedConditional& conditional, const sharedClique& parent_clique = sharedClique());
-
-    /** add a clique (top down) */
-    void addClique(const sharedClique& clique, const sharedClique& parent_clique = sharedClique());
-
-    /** add a clique (bottom up) */
-    sharedClique addClique(const sharedConditional& conditional, std::list<sharedClique>& child_cliques);
-
-    /**
-     * Add a conditional to the front of a clique, i.e. a conditional whose
-     * parents are already in the clique or its separators. This function does
-     * not check for this condition, it just updates the data structures.
-     */
-    static void addToCliqueFront(BayesTree& bayesTree,
-        const sharedConditional& conditional, const sharedClique& clique);
-
-    /** Fill the nodes index for a subtree */
-    void fillNodesIndex(const sharedClique& subtree);
-
-    /** Helper function to build a non-symbolic tree (e.g. Gaussian) using a
-     * symbolic tree, used in the BT(BN) constructor.
-     */
-    void recursiveTreeBuild(const boost::shared_ptr >& symbolic,
-        const std::vector >& conditionals,
-        const typename BayesTree::sharedClique& parent);
-
   public:

     /// @name Standard Constructors
     /// @{
@@ -184,8 +139,6 @@ namespace gtsam {
     /// @name Standard Interface
     /// @{

-  public:
-
     /**
     * Find parent clique of a conditional.
    * It will look at all parents and return the one with the lowest index in the ordering.
@@ -284,6 +237,51 @@ namespace gtsam {
     sharedClique insert(const sharedConditional& clique, std::list<sharedClique>& children, bool isRootClique = false);
+
+  protected:
+
+    /** Map from keys to Clique */
+    Nodes nodes_;
+
+    /** private helper method for saving the Tree to a text file in GraphViz format */
+    void saveGraph(std::ostream &s, sharedClique clique, const IndexFormatter& indexFormatter,
+        int parentnum = 0) const;
+
+    /** Gather data on a single clique */
+    void getCliqueData(CliqueData& stats, sharedClique clique) const;
+
+    /** Root clique */
+    sharedClique root_;
+
+    /** remove a clique: warning, can result in a forest */
+    void removeClique(sharedClique clique);
+
+    /** add a clique (top down) */
+    sharedClique addClique(const sharedConditional& conditional, const sharedClique& parent_clique = sharedClique());
+
+    /** add a clique (top down) */
+    void addClique(const sharedClique& clique, const sharedClique& parent_clique = sharedClique());
+
+    /** add a clique (bottom up) */
+    sharedClique addClique(const sharedConditional& conditional, std::list<sharedClique>& child_cliques);
+
+    /**
+     * Add a conditional to the front of a clique, i.e. a conditional whose
+     * parents are already in the clique or its separators. This function does
+     * not check for this condition, it just updates the data structures.
+     */
+    static void addToCliqueFront(BayesTree& bayesTree,
+        const sharedConditional& conditional, const sharedClique& clique);
+
+    /** Fill the nodes index for a subtree */
+    void fillNodesIndex(const sharedClique& subtree);
+
+    /** Helper function to build a non-symbolic tree (e.g. Gaussian) using a
+     * symbolic tree, used in the BT(BN) constructor.
+     */
+    void recursiveTreeBuild(const boost::shared_ptr >& symbolic,
+        const std::vector >& conditionals,
+        const typename BayesTree::sharedClique& parent);
+
   private:

diff --git a/gtsam/inference/EliminationTree.h b/gtsam/inference/EliminationTree.h
index 495fb40c8..286f85268 100644
--- a/gtsam/inference/EliminationTree.h
+++ b/gtsam/inference/EliminationTree.h
@@ -73,39 +73,11 @@ private:
   Factors factors_;   ///< factors associated with root
   SubTrees subTrees_; ///< sub-trees

+public:
+
   /// @name Standard Constructors
   /// @{

-  /** default constructor, private, as you should use Create below */
-  EliminationTree(Index key = 0) : key_(key) {}
-
-  /// @}
-  /// @name Advanced Interface
-  /// @{
-
-  /**
-   * Static internal function to build a vector of parent pointers using the
-   * algorithm of Gilbert et al., 2001, BIT.
-   */
-  static std::vector<Index> ComputeParents(const VariableIndex& structure);
-
-  /** add a factor, for Create use only */
-  void add(const sharedFactor& factor) { factors_.push_back(factor); }
-
-  /** add a subtree, for Create use only */
-  void add(const shared_ptr& child) { subTrees_.push_back(child); }
-
-  /**
-   * Recursive routine that eliminates the factors arranged in an elimination tree
-   * @param Conditionals is a vector of shared pointers that will be modified in place
-   */
-  sharedFactor eliminate_(Eliminate function, Conditionals& conditionals) const;
-
-  /// Allow access to constructor and add methods for testing purposes
-  friend class ::EliminationTreeTester;
-
-public:
-
   /**
   * Named constructor to build the elimination tree of a factor graph using
   * pre-computed column structure.
@@ -151,6 +123,32 @@ public:
   /// @}

+private:
+
+  /** default constructor, private, as you should use Create below */
+  EliminationTree(Index key = 0) : key_(key) {}
+
+  /**
+   * Static internal function to build a vector of parent pointers using the
+   * algorithm of Gilbert et al., 2001, BIT.
+   */
+  static std::vector<Index> ComputeParents(const VariableIndex& structure);
+
+  /** add a factor, for Create use only */
+  void add(const sharedFactor& factor) { factors_.push_back(factor); }
+
+  /** add a subtree, for Create use only */
+  void add(const shared_ptr& child) { subTrees_.push_back(child); }
+
+  /**
+   * Recursive routine that eliminates the factors arranged in an elimination tree
+   * @param Conditionals is a vector of shared pointers that will be modified in place
+   */
+  sharedFactor eliminate_(Eliminate function, Conditionals& conditionals) const;
+
+  /// Allow access to constructor and add methods for testing purposes
+  friend class ::EliminationTreeTester;
+
 };

diff --git a/gtsam/inference/Factor.h b/gtsam/inference/Factor.h
index d81c911a3..950f6e65f 100644
--- a/gtsam/inference/Factor.h
+++ b/gtsam/inference/Factor.h
@@ -81,15 +81,6 @@ protected:
   /// The keys involved in this factor
   std::vector<KEY> keys_;

-  friend class JacobianFactor;
-  friend class HessianFactor;
-
-protected:
-
-  /// Internal consistency check that is run frequently when in debug mode.
-  /// If NDEBUG is defined, this is empty and optimized out.
-  void assertInvariants() const;
-
 public:

   /// @name Standard Constructors
@@ -212,9 +203,15 @@ public:
   iterator begin() { return keys_.begin(); }  ///TODO: comment
   iterator end() { return keys_.end(); }      ///TODO: comment

+protected:
+  friend class JacobianFactor;
+  friend class HessianFactor;
+
+  /// Internal consistency check that is run frequently when in debug mode.
+  /// If NDEBUG is defined, this is empty and optimized out.
+  void assertInvariants() const;

 private:
-
   /** Serialization function */
   friend class boost::serialization::access;
   template<class ARCHIVE>

diff --git a/gtsam/linear/JacobianFactor.h b/gtsam/linear/JacobianFactor.h
index c5f726252..43200d561 100644
--- a/gtsam/linear/JacobianFactor.h
+++ b/gtsam/linear/JacobianFactor.h
@@ -78,11 +78,9 @@ namespace gtsam {
    * \f[ E(x) = \frac{1}{2} (A_1 x_{j1} + A_2 x_{j2} - b)^T \Sigma^{-1} (A_1 x_{j1} + A_2 x_{j2} - b) . \f]
   */
  class JacobianFactor : public GaussianFactor {
-  public:
-    typedef Matrix AbMatrix;
-    typedef VerticalBlockView<AbMatrix> BlockAb;
-
  protected:
+    typedef Matrix AbMatrix;
+    typedef VerticalBlockView<AbMatrix> BlockAb;

    noiseModel::Diagonal::shared_ptr model_; // Gaussian noise model with diagonal covariance matrix
    std::vector<size_t> firstNonzeroBlocks_;
@@ -97,8 +95,6 @@ namespace gtsam {
    typedef BlockAb::Column BVector;
    typedef BlockAb::constColumn constBVector;

-  public:
-
    /** Copy constructor */
    JacobianFactor(const JacobianFactor& gf);

diff --git a/gtsam/nonlinear/DoglegOptimizer.h b/gtsam/nonlinear/DoglegOptimizer.h
index 70d55d192..71551dce1 100644
--- a/gtsam/nonlinear/DoglegOptimizer.h
+++ b/gtsam/nonlinear/DoglegOptimizer.h
@@ -67,7 +67,6 @@ private:
  */
 class DoglegState : public NonlinearOptimizerState {
 public:
-
   double Delta;

   DoglegState() {}
@@ -86,8 +85,11 @@ protected:
  */
 class DoglegOptimizer : public NonlinearOptimizer {

-public:
+protected:
+  DoglegParams params_;
+  DoglegState state_;

+public:
   typedef boost::shared_ptr<DoglegOptimizer> shared_ptr;

   /// @name Standard interface
@@ -143,9 +145,6 @@ public:
   /// @}

 protected:
-  DoglegParams params_;
-  DoglegState state_;
-
   /** Access the parameters (base class version) */
   virtual const NonlinearOptimizerParams& _params() const { return params_; }

diff --git a/gtsam/nonlinear/GaussNewtonOptimizer.h b/gtsam/nonlinear/GaussNewtonOptimizer.h
index 1a1984ffb..b68ac4fc5 100644
--- a/gtsam/nonlinear/GaussNewtonOptimizer.h
+++ b/gtsam/nonlinear/GaussNewtonOptimizer.h
@@ -43,8 +43,11 @@ protected:
  */
 class GaussNewtonOptimizer : public NonlinearOptimizer {

-public:
+protected:
+  GaussNewtonParams params_;
+  GaussNewtonState state_;

+public:
   /// @name Standard interface
   /// @{
@@ -94,10 +97,6 @@ public:
   /// @}

 protected:
-
-  GaussNewtonParams params_;
-  GaussNewtonState state_;
-
   /** Access the parameters (base class version) */
   virtual const NonlinearOptimizerParams& _params() const { return params_; }

diff --git a/gtsam/nonlinear/LevenbergMarquardtOptimizer.h b/gtsam/nonlinear/LevenbergMarquardtOptimizer.h
index 13927d39f..62d05839d 100644
--- a/gtsam/nonlinear/LevenbergMarquardtOptimizer.h
+++ b/gtsam/nonlinear/LevenbergMarquardtOptimizer.h
@@ -42,12 +42,6 @@ public:
     DAMPED
   };

-private:
-  VerbosityLM verbosityLMTranslator(const std::string &s) const;
-  std::string verbosityLMTranslator(VerbosityLM value) const;
-
-public:
-
   double lambdaInitial; ///< The initial Levenberg-Marquardt damping term (default: 1e-5)
   double lambdaFactor; ///< The amount by which to multiply or divide lambda when adjusting lambda (default: 10.0)
   double lambdaUpperBound; ///< The maximum lambda to try before assuming the optimization has failed (default: 1e5)
@@ -67,6 +61,10 @@ public:
   inline void setlambdaFactor(double value) { lambdaFactor = value; }
   inline void setlambdaUpperBound(double value) { lambdaUpperBound = value; }
   inline void setVerbosityLM(const std::string &s) { verbosityLM = verbosityLMTranslator(s); }
+
+private:
+  VerbosityLM verbosityLMTranslator(const std::string &s) const;
+  std::string verbosityLMTranslator(VerbosityLM value) const;
 };

 /**
@@ -74,20 +72,18 @@
  */
 class LevenbergMarquardtState : public NonlinearOptimizerState {

-protected:
-
-  LevenbergMarquardtState(const NonlinearFactorGraph& graph, const Values& initialValues, const LevenbergMarquardtParams& params, unsigned int iterations = 0) :
-      NonlinearOptimizerState(graph, initialValues, iterations), lambda(params.lambdaInitial) {}
-
-  friend class LevenbergMarquardtOptimizer;
-
 public:
-
   double lambda;
   LevenbergMarquardtState() {}

   virtual ~LevenbergMarquardtState() {}
+
+protected:
+  LevenbergMarquardtState(const NonlinearFactorGraph& graph, const Values& initialValues, const LevenbergMarquardtParams& params, unsigned int iterations = 0) :
+      NonlinearOptimizerState(graph, initialValues, iterations), lambda(params.lambdaInitial) {}
+
+  friend class LevenbergMarquardtOptimizer;
 };

 /**
@@ -96,26 +92,11 @@
  */
 class LevenbergMarquardtOptimizer : public NonlinearOptimizer {

 protected:
-
   LevenbergMarquardtParams params_;   ///< LM parameters
   LevenbergMarquardtState state_;     ///< optimization state
   std::vector<size_t> dimensions_;    ///< undocumented

-  /** Access the parameters (base class version) */
-  virtual const NonlinearOptimizerParams& _params() const { return params_; }
-
-  /** Access the state (base class version) */
-  virtual const NonlinearOptimizerState& _state() const { return state_; }
-
-  /** Internal function for computing a COLAMD ordering if no ordering is specified */
-  LevenbergMarquardtParams ensureHasOrdering(LevenbergMarquardtParams params, const NonlinearFactorGraph& graph, const Values& values) const {
-    if(!params.ordering)
-      params.ordering = *graph.orderingCOLAMD(values);
-    return params;
-  }
-
 public:
-
   typedef boost::shared_ptr<LevenbergMarquardtOptimizer> shared_ptr;

   /// @name Standard interface
@@ -176,6 +157,20 @@
   const LevenbergMarquardtState& state() const { return state_; }

   /// @}
+
+protected:
+  /** Access the parameters (base class version) */
+  virtual const NonlinearOptimizerParams& _params() const { return params_; }
+
+  /** Access the state (base class version) */
+  virtual const NonlinearOptimizerState& _state() const { return state_; }
+
+  /** Internal function for computing a COLAMD ordering if no ordering is specified */
+  LevenbergMarquardtParams ensureHasOrdering(LevenbergMarquardtParams params, const NonlinearFactorGraph& graph, const Values& values) const {
+    if(!params.ordering)
+      params.ordering = *graph.orderingCOLAMD(values);
+    return params;
+  }
 };

 }

diff --git a/gtsam/nonlinear/Marginals.h b/gtsam/nonlinear/Marginals.h
index c792167c5..588531830 100644
--- a/gtsam/nonlinear/Marginals.h
+++ b/gtsam/nonlinear/Marginals.h
@@ -88,11 +88,6 @@ protected:
   BlockView blockView_;
   Ordering indices_;

-  JointMarginal(const Matrix& fullMatrix, const std::vector<size_t>& dims, const Ordering& indices) :
-      fullMatrix_(fullMatrix), blockView_(fullMatrix_, dims.begin(), dims.end()), indices_(indices) {}
-
-  friend class Marginals;
-
 public:
   /** A block view of the joint marginal - this stores a reference to the
    * JointMarginal object, so the JointMarginal object must be kept in scope
@@ -129,6 +124,13 @@ public:
   /** Print */
   void print(const std::string& s = "", const KeyFormatter& formatter = DefaultKeyFormatter) const;
+
+protected:
+  JointMarginal(const Matrix& fullMatrix, const std::vector<size_t>& dims, const Ordering& indices) :
+      fullMatrix_(fullMatrix), blockView_(fullMatrix_, dims.begin(), dims.end()), indices_(indices) {}
+
+  friend class Marginals;
+
 };

 } /* namespace gtsam */

diff --git a/gtsam/nonlinear/NonlinearOptimizer.h b/gtsam/nonlinear/NonlinearOptimizer.h
index 03f3a5e34..488d3d832 100644
--- a/gtsam/nonlinear/NonlinearOptimizer.h
+++ b/gtsam/nonlinear/NonlinearOptimizer.h
@@ -177,23 +177,9 @@ Values::const_shared_ptr result = DoglegOptimizer(graph, initialValues, params).
 class NonlinearOptimizer {

 protected:
-
   NonlinearFactorGraph graph_;
-
-  /** A default implementation of the optimization loop, which calls iterate()
-   * until checkConvergence returns true.
- */ - void defaultOptimize(); - - virtual const NonlinearOptimizerState& _state() const = 0; - - virtual const NonlinearOptimizerParams& _params() const = 0; - - /** Constructor for initial construction of base classes. */ - NonlinearOptimizer(const NonlinearFactorGraph& graph) : graph_(graph) {} - public: - /** A shared pointer to this class */ typedef boost::shared_ptr shared_ptr; @@ -243,6 +229,20 @@ public: virtual void iterate() = 0; /// @} + +protected: + /** A default implementation of the optimization loop, which calls iterate() + * until checkConvergence returns true. + */ + void defaultOptimize(); + + virtual const NonlinearOptimizerState& _state() const = 0; + + virtual const NonlinearOptimizerParams& _params() const = 0; + + /** Constructor for initial construction of base classes. */ + NonlinearOptimizer(const NonlinearFactorGraph& graph) : graph_(graph) {} + }; /** Check whether the relative error decrease is less than relativeErrorTreshold, diff --git a/gtsam/nonlinear/Values.h b/gtsam/nonlinear/Values.h index a01718da4..c96831f05 100644 --- a/gtsam/nonlinear/Values.h +++ b/gtsam/nonlinear/Values.h @@ -123,8 +123,6 @@ namespace gtsam { typedef KeyValuePair value_type; - public: - /** A filtered view of a Values, returned from Values::filter. */ template class Filtered; @@ -196,7 +194,6 @@ namespace gtsam { /** Get a zero VectorValues of the correct structure */ VectorValues zeroVectors(const Ordering& ordering) const; - public: const_iterator begin() const { return boost::make_transform_iterator(values_.begin(), &make_const_deref_pair); } const_iterator end() const { return boost::make_transform_iterator(values_.end(), &make_const_deref_pair); } iterator begin() { return boost::make_transform_iterator(values_.begin(), &make_deref_pair); }