23#include <boost/tuple/tuple.hpp>
44 double error(
const State &state)
const;
45 Gradient gradient(
const State &state)
const;
46 State advance(
const State &current,
const double alpha,
47 const Gradient &g)
const;
54 typedef boost::shared_ptr<NonlinearConjugateGradientOptimizer> shared_ptr;
87template<
class S,
class V,
class W>
88double lineSearch(
const S &system,
const V currentValues,
const W &gradient) {
91 const double g = gradient.norm();
95 const double phi = 0.5 * (1.0 + std::sqrt(5.0)), resphi = 2.0 - phi, tau =
97 double minStep = -1.0 / g, maxStep = 0, newStep = minStep
98 + (maxStep - minStep) / (phi + 1.0);
100 V newValues = system.advance(currentValues, newStep, gradient);
101 double newError = system.error(newValues);
104 const bool flag = (maxStep - newStep > newStep - minStep) ?
true :
false;
105 const double testStep =
106 flag ? newStep + resphi * (maxStep - newStep) :
107 newStep - resphi * (newStep - minStep);
109 if ((maxStep - minStep)
110 < tau * (std::abs(testStep) + std::abs(newStep))) {
111 return 0.5 * (minStep + maxStep);
114 const V testValues = system.advance(currentValues, testStep, gradient);
115 const double testError = system.error(testValues);
118 if (testError >= newError) {
127 newError = testError;
131 newError = testError;
147template<
class S,
class V>
150 const bool singleIteration,
const bool gradientDescent =
false) {
154 size_t iteration = 0;
157 double currentError = system.error(initial);
158 if (currentError <= params.
errorTol) {
159 if (params.
verbosity >= NonlinearOptimizerParams::ERROR) {
160 std::cout <<
"Exiting, as error = " << currentError <<
" < "
163 return boost::tie(initial, iteration);
166 V currentValues = initial;
167 typename S::Gradient currentGradient = system.gradient(currentValues),
168 prevGradient, direction = currentGradient;
171 V prevValues = currentValues;
172 double prevError = currentError;
173 double alpha =
lineSearch(system, currentValues, direction);
174 currentValues = system.advance(prevValues, alpha, direction);
175 currentError = system.error(currentValues);
178 if (params.
verbosity >= NonlinearOptimizerParams::ERROR)
179 std::cout <<
"Initial error: " << currentError << std::endl;
183 if (gradientDescent ==
true) {
184 direction = system.gradient(currentValues);
186 prevGradient = currentGradient;
187 currentGradient = system.gradient(currentValues);
189 const double beta = std::max(0.0,
190 currentGradient.dot(currentGradient - prevGradient)
191 / prevGradient.dot(prevGradient));
192 direction = currentGradient + (beta * direction);
195 alpha =
lineSearch(system, currentValues, direction);
197 prevValues = currentValues;
198 prevError = currentError;
200 currentValues = system.advance(prevValues, alpha, direction);
201 currentError = system.error(currentValues);
208 if (params.
verbosity >= NonlinearOptimizerParams::ERROR)
209 std::cout <<
"iteration: " << iteration <<
", currentError: " << currentError << std::endl;
210 }
while (++iteration < params.
maxIterations && !singleIteration
215 if (params.
verbosity >= NonlinearOptimizerParams::ERROR
218 <<
"nonlinearConjugateGradient: Terminating because reached maximum iterations"
221 return boost::tie(currentValues, iteration);
Base class and basic functions for Manifold types.
Base class and parameters for nonlinear optimization algorithms.
Global functions in a separate testing namespace.
Definition: chartTesting.h:28
double lineSearch(const S &system, const V currentValues, const W &gradient)
Implement the golden-section line search algorithm.
Definition: NonlinearConjugateGradientOptimizer.h:88
bool checkConvergence(double relativeErrorTreshold, double absoluteErrorTreshold, double errorThreshold, double currentError, double newError, NonlinearOptimizerParams::Verbosity verbosity)
Check whether the relative error decrease is less than relativeErrorTreshold, the absolute error decr...
Definition: NonlinearOptimizer.cpp:185
Point3 optimize(const NonlinearFactorGraph &graph, const Values &values, Key landmarkKey)
Optimize for triangulation.
Definition: triangulation.cpp:73
boost::tuple< V, int > nonlinearConjugateGradient(const S &system, const V &initial, const NonlinearOptimizerParams &params, const bool singleIteration, const bool gradientDescent=false)
Implement the nonlinear conjugate gradient method using the Polak-Ribiere formula suggested in http:/...
Definition: NonlinearConjugateGradientOptimizer.h:148
boost::shared_ptr< This > shared_ptr
shared_ptr to this class
Definition: GaussianFactorGraph.h:75
This class represents a collection of vector-valued variables associated each with a unique integer i...
Definition: VectorValues.h:74
An implementation of the nonlinear CG method using the template below.
Definition: NonlinearConjugateGradientOptimizer.h:28
~NonlinearConjugateGradientOptimizer() override
Destructor.
Definition: NonlinearConjugateGradientOptimizer.h:70
A non-linear factor graph is a graph of non-Gaussian, i.e.
Definition: NonlinearFactorGraph.h:78
This is the abstract interface for classes that can optimize for the maximum-likelihood estimate of a...
Definition: NonlinearOptimizer.h:75
The common parameters for Nonlinear optimizers.
Definition: NonlinearOptimizerParams.h:34
double absoluteErrorTol
The maximum absolute error decrease to stop iterating (default 1e-5)
Definition: NonlinearOptimizerParams.h:43
IterationHook iterationHook
Optional user-provided iteration hook to be called after each optimization iteration (Default: none).
Definition: NonlinearOptimizerParams.h:94
size_t maxIterations
The maximum iterations to stop iterating (default 100)
Definition: NonlinearOptimizerParams.h:41
Verbosity verbosity
The printing verbosity during optimization (default SILENT)
Definition: NonlinearOptimizerParams.h:45
double relativeErrorTol
The maximum relative error decrease to stop iterating (default 1e-5)
Definition: NonlinearOptimizerParams.h:42
double errorTol
The maximum total error to stop iterating (default 0.0)
Definition: NonlinearOptimizerParams.h:44
A non-templated config holding any types of Manifold-group elements.
Definition: Values.h:63