gtsam 4.0.0
NonlinearConjugateGradientOptimizer.h
/* ----------------------------------------------------------------------------

 * GTSAM Copyright 2010, Georgia Tech Research Corporation,
 * Atlanta, Georgia 30332-0415
 * All Rights Reserved
 * Authors: Frank Dellaert, et al. (see THANKS for the full author list)

 * See LICENSE for the license information

 * -------------------------------------------------------------------------- */

#pragma once

#include <gtsam/nonlinear/NonlinearOptimizer.h>
#include <gtsam/base/Manifold.h>
#include <boost/tuple/tuple.hpp>

namespace gtsam {

/** An implementation of the nonlinear CG method using the template below */
class GTSAM_EXPORT NonlinearConjugateGradientOptimizer : public NonlinearOptimizer {

  /* a class for the nonlinearConjugateGradient template */
  class System {
  public:
    typedef Values State;
    typedef VectorValues Gradient;
    typedef NonlinearOptimizerParams Parameters;

  protected:
    const NonlinearFactorGraph &graph_;

  public:
    System(const NonlinearFactorGraph &graph) :
        graph_(graph) {
    }
    double error(const State &state) const;
    Gradient gradient(const State &state) const;
    State advance(const State &current, const double alpha,
        const Gradient &g) const;
  };
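
  // error(), gradient() and advance() are defined in the accompanying .cpp.
  // As a rough sketch of the intended semantics (not the library's actual
  // implementation): error() evaluates graph_.error(state), gradient()
  // collects the gradient of the graph linearized at state, and advance()
  // retracts a scaled step back onto the manifold, roughly:
  //
  //   State System::advance(const State &current, const double alpha,
  //       const Gradient &g) const {
  //     Gradient step = g;
  //     step *= alpha;                 // scale the direction by the step size
  //     return current.retract(step); // Values::retract maps back to the manifold
  //   }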

public:

  typedef NonlinearOptimizer Base;
  typedef NonlinearOptimizerParams Parameters;
  typedef boost::shared_ptr<NonlinearConjugateGradientOptimizer> shared_ptr;

protected:
  Parameters params_;

  const NonlinearOptimizerParams& _params() const override {
    return params_;
  }

public:

  /// Constructor
  NonlinearConjugateGradientOptimizer(const NonlinearFactorGraph& graph,
      const Values& initialValues, const Parameters& params = Parameters());

  /// Destructor
  virtual ~NonlinearConjugateGradientOptimizer() {
  }

  /**
   * Perform a single iteration, returning a GaussianFactorGraph corresponding
   * to the linearized factor graph.
   */
  GaussianFactorGraph::shared_ptr iterate() override;

  /**
   * Optimize for the maximum-likelihood estimate, returning the final Values.
   */
  const Values& optimize() override;
};
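
/* A sketch of typical usage (the graph and initial values are assumed to be
 * built elsewhere; the names below are hypothetical):
 *
 *   NonlinearFactorGraph graph;   // populate with factors
 *   Values initial;               // populate with initial estimates
 *   NonlinearConjugateGradientOptimizer::Parameters params;
 *   params.maxIterations = 100;
 *   NonlinearConjugateGradientOptimizer optimizer(graph, initial, params);
 *   Values result = optimizer.optimize();
 */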

/** Implement the golden-section line search algorithm */
template<class S, class V, class W>
double lineSearch(const S &system, const V currentValues, const W &gradient) {

  /* normalize the step by the gradient norm, so the search effectively walks
   * along a unit vector */
  const double g = gradient.norm();

  // perform the golden-section search algorithm to decide the optimal step size;
  // for details refer to http://en.wikipedia.org/wiki/Golden_section_search
  const double phi = 0.5 * (1.0 + std::sqrt(5.0)), resphi = 2.0 - phi, tau =
      1e-5;
  double minStep = -1.0 / g, maxStep = 0, newStep = minStep
      + (maxStep - minStep) / (phi + 1.0);

  V newValues = system.advance(currentValues, newStep, gradient);
  double newError = system.error(newValues);

  while (true) {
    const bool flag = maxStep - newStep > newStep - minStep;
    const double testStep =
        flag ? newStep + resphi * (maxStep - newStep) :
               newStep - resphi * (newStep - minStep);

    if ((maxStep - minStep)
        < tau * (std::fabs(testStep) + std::fabs(newStep))) {
      return 0.5 * (minStep + maxStep);
    }

    const V testValues = system.advance(currentValues, testStep, gradient);
    const double testError = system.error(testValues);

    // update the working range
    if (testError >= newError) {
      if (flag)
        maxStep = testStep;
      else
        minStep = testStep;
    } else {
      if (flag) {
        minStep = newStep;
        newStep = testStep;
        newError = testError;
      } else {
        maxStep = newStep;
        newStep = testStep;
        newError = testError;
      }
    }
  }
  return 0.0; // unreachable: the loop only exits via the return above
}
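
/* lineSearch only assumes system.error(V), system.advance(V, double, W) and a
 * W supporting norm() and vector arithmetic, so it can be exercised outside
 * GTSAM. A hypothetical toy system minimizing 0.5*|x - c|^2 over Eigen
 * vectors:
 *
 *   #include <Eigen/Dense>
 *
 *   struct ToySystem {
 *     typedef Eigen::VectorXd Gradient;
 *     Eigen::VectorXd c;  // target point
 *     double error(const Eigen::VectorXd &x) const {
 *       return 0.5 * (x - c).squaredNorm();
 *     }
 *     Gradient gradient(const Eigen::VectorXd &x) const {
 *       return x - c;  // ascent gradient; lineSearch brackets negative steps
 *     }
 *     Eigen::VectorXd advance(const Eigen::VectorXd &x, double alpha,
 *         const Gradient &g) const {
 *       return x + alpha * g;
 *     }
 *   };
 *
 * Starting with |x - c| < 1, the bracket [-1/|g|, 0] contains the exact
 * minimizer alpha = -1, so lineSearch(sys, x, sys.gradient(x)) returns a step
 * close to -1 and advance(x, alpha, g) lands near c.
 */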

/**
 * Implement the nonlinear conjugate gradient method using the Polak-Ribiere
 * formula suggested in
 * http://en.wikipedia.org/wiki/Nonlinear_conjugate_gradient_method.
 *
 * The S (system) class requires three member functions: error(state),
 * gradient(state) and advance(state, step-size, direction). The V class
 * denotes the state or the solution.
 *
 * The last parameter is a switch between gradient descent and conjugate
 * gradient.
 */
template<class S, class V>
boost::tuple<V, int> nonlinearConjugateGradient(const S &system,
    const V &initial, const NonlinearOptimizerParams &params,
    const bool singleIteration, const bool gradientDescent = false) {

  // GTSAM_CONCEPT_MANIFOLD_TYPE(V);

  size_t iteration = 0;

  // check if we're already close enough
  double currentError = system.error(initial);
  if (currentError <= params.errorTol) {
    if (params.verbosity >= NonlinearOptimizerParams::ERROR) {
      std::cout << "Exiting, as error = " << currentError << " < "
          << params.errorTol << std::endl;
    }
    return boost::tie(initial, iteration);
  }

  V currentValues = initial;
  typename S::Gradient currentGradient = system.gradient(currentValues),
      prevGradient, direction = currentGradient;

  /* do one step of gradient descent */
  V prevValues = currentValues;
  double prevError = currentError;
  double alpha = lineSearch(system, currentValues, direction);
  currentValues = system.advance(prevValues, alpha, direction);
  currentError = system.error(currentValues);

  // Maybe show output
  if (params.verbosity >= NonlinearOptimizerParams::ERROR)
    std::cout << "Initial error: " << currentError << std::endl;

  // Iterative loop
  do {
    if (gradientDescent) {
      direction = system.gradient(currentValues);
    } else {
      prevGradient = currentGradient;
      currentGradient = system.gradient(currentValues);
      // Polak-Ribiere: beta = g_n' * (g_n - g_{n-1}) / (g_{n-1}' * g_{n-1})
      const double beta = std::max(0.0,
          currentGradient.dot(currentGradient - prevGradient)
              / prevGradient.dot(prevGradient));
      direction = currentGradient + (beta * direction);
    }

    alpha = lineSearch(system, currentValues, direction);

    prevValues = currentValues;
    prevError = currentError;

    currentValues = system.advance(prevValues, alpha, direction);
    currentError = system.error(currentValues);

    // Maybe show output
    if (params.verbosity >= NonlinearOptimizerParams::ERROR)
      std::cout << "iteration: " << iteration << ", currentError: " << currentError << std::endl;
  } while (++iteration < params.maxIterations && !singleIteration
      && !checkConvergence(params.relativeErrorTol, params.absoluteErrorTol,
          params.errorTol, prevError, currentError, params.verbosity));

  // Printing if verbose
  if (params.verbosity >= NonlinearOptimizerParams::ERROR
      && iteration >= params.maxIterations)
    std::cout
        << "nonlinearConjugateGradient: Terminating because reached maximum iterations"
        << std::endl;

  return boost::tie(currentValues, iteration);
}
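
/* Written out, each pass of the loop above computes (in the notation of the
 * Polak-Ribiere comment; subscripts added here for clarity):
 *
 *   beta_n  = max(0, g_n' * (g_n - g_{n-1}) / (g_{n-1}' * g_{n-1}))
 *   d_n     = g_n + beta_n * d_{n-1}
 *   x_{n+1} = advance(x_n, alpha_n, d_n),  with alpha_n from lineSearch
 *
 * Clamping beta at zero resets the direction to a plain gradient step whenever
 * the conjugacy term turns negative, a standard safeguard for nonlinear CG.
 */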

} // namespace gtsam