File: itkGenericConjugateGradientOptimizer.h

Package: elastix 5.2.0-2
/*=========================================================================
 *
 *  Copyright UMC Utrecht and contributors
 *
 *  Licensed under the Apache License, Version 2.0 (the "License");
 *  you may not use this file except in compliance with the License.
 *  You may obtain a copy of the License at
 *
 *        http://www.apache.org/licenses/LICENSE-2.0.txt
 *
 *  Unless required by applicable law or agreed to in writing, software
 *  distributed under the License is distributed on an "AS IS" BASIS,
 *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *  See the License for the specific language governing permissions and
 *  limitations under the License.
 *
 *=========================================================================*/

#ifndef itkGenericConjugateGradientOptimizer_h
#define itkGenericConjugateGradientOptimizer_h

#include "itkScaledSingleValuedNonLinearOptimizer.h"
#include "itkLineSearchOptimizer.h"
#include <vector>
#include <map>

namespace itk
{
/**
 * \class GenericConjugateGradientOptimizer
 * \brief A set of conjugate gradient algorithms.
 *
 * The step length is determined at each iteration by means of a
 * line search routine; the itk::MoreThuenteLineSearchOptimizer works well.
 *
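 * A minimal, hypothetical usage sketch (the cost function, initial position,
 * beta definition and iteration count below are illustrative choices, not
 * defaults of this class):
 *
 * \code
 * auto optimizer = GenericConjugateGradientOptimizer::New();
 * auto lineSearch = MoreThuenteLineSearchOptimizer::New();
 * optimizer->SetCostFunction(costFunction);          // any single-valued cost function
 * optimizer->SetInitialPosition(initialParameters);
 * optimizer->SetLineSearchOptimizer(lineSearch);
 * optimizer->SetBetaDefinition("PolakRibiere");
 * optimizer->SetMaximumNumberOfIterations(200);
 * optimizer->StartOptimization();
 * \endcode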
 *
 * \ingroup Numerics Optimizers
 */

class GenericConjugateGradientOptimizer : public ScaledSingleValuedNonLinearOptimizer
{
public:
  ITK_DISALLOW_COPY_AND_MOVE(GenericConjugateGradientOptimizer);

  using Self = GenericConjugateGradientOptimizer;
  using Superclass = ScaledSingleValuedNonLinearOptimizer;
  using Pointer = SmartPointer<Self>;
  using ConstPointer = SmartPointer<const Self>;

  itkNewMacro(Self);
  itkTypeMacro(GenericConjugateGradientOptimizer, ScaledSingleValuedNonLinearOptimizer);

  using Superclass::ParametersType;
  using Superclass::DerivativeType;
  using Superclass::CostFunctionType;
  using Superclass::ScaledCostFunctionType;
  using Superclass::MeasureType;
  using Superclass::ScalesType;

  using LineSearchOptimizerType = LineSearchOptimizer;
  using LineSearchOptimizerPointer = LineSearchOptimizerType::Pointer;

  /** Typedef for a function that computes \f$\beta\f$, given the previous gradient,
   * the current gradient, and the previous search direction. */
  using ComputeBetaFunctionType = double (Self::*)(const DerivativeType &,
                                                   const DerivativeType &,
                                                   const ParametersType &);
  using BetaDefinitionType = std::string;
  using BetaDefinitionMapType = std::map<BetaDefinitionType, ComputeBetaFunctionType>;
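
  /* Illustrative sketch of how such a member-function pointer could be dispatched,
   * e.g. inside ComputeBeta(); the local variable names are hypothetical:
   *
   *   ComputeBetaFunctionType betaFunction = m_BetaDefinitionMap[m_BetaDefinition];
   *   double beta = (this->*betaFunction)(previousGradient, gradient, previousSearchDir);
   */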

  enum class StopConditionType : unsigned int
  {
    MetricError,
    LineSearchError,
    MaximumNumberOfIterations,
    GradientMagnitudeTolerance,
    ValueTolerance,
    InfiniteBeta,
    Unknown
  };

  void
  StartOptimization() override;

  virtual void
  ResumeOptimization();

  virtual void
  StopOptimization();
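
  /* A hedged sketch of one iteration of ResumeOptimization() (illustrative only;
   * the local variable names are not part of the API):
   *
   *   // evaluate the cost function value f and derivative g at the current position x
   *   this->ComputeSearchDirection(previousGradient, g, searchDir);
   *   this->LineSearch(searchDir, step, x, f, g);   // updates step, x, f and g
   *   if (this->TestConvergence(true)) { this->StopOptimization(); }
   */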

  /** Get information about the optimization process: */
  itkGetConstMacro(CurrentIteration, unsigned long);
  itkGetConstMacro(CurrentValue, MeasureType);
  itkGetConstReferenceMacro(CurrentGradient, DerivativeType);
  itkGetConstMacro(InLineSearch, bool);
  itkGetConstReferenceMacro(StopCondition, StopConditionType);
  itkGetConstMacro(CurrentStepLength, double);

  /** Setting: the line search optimizer */
  itkSetObjectMacro(LineSearchOptimizer, LineSearchOptimizerType);
  itkGetModifiableObjectMacro(LineSearchOptimizer, LineSearchOptimizerType);

  /** Setting: the maximum number of iterations */
  itkGetConstMacro(MaximumNumberOfIterations, unsigned long);
  itkSetClampMacro(MaximumNumberOfIterations, unsigned long, 1, NumericTraits<unsigned long>::max());

  /** Setting: the minimum gradient magnitude. By default 1e-5.
   *
   * The optimizer stops when:
   * \f$ \|CurrentGradient\| <
   *   GradientMagnitudeTolerance * \max(1, \|CurrentPosition\| ) \f$
   */
  itkGetConstMacro(GradientMagnitudeTolerance, double);
  itkSetMacro(GradientMagnitudeTolerance, double);

  /** Setting: a stopping criterion, the value tolerance. By default 1e-5.
   *
   * The optimizer stops when
   * \f[ 2.0 * | f_k - f_{k-1} | \le
   *   ValueTolerance * ( |f_k| + |f_{k-1}| + 1e-20 ) \f]
   * is satisfied MaxNrOfItWithoutImprovement times in a row.
   */
  itkGetConstMacro(ValueTolerance, double);
  itkSetMacro(ValueTolerance, double);

  /** Setting: the maximum number of iterations in a row that
   * satisfy the value tolerance criterion. By default (if never set)
   * equal to the number of parameters. */
  virtual void
  SetMaxNrOfItWithoutImprovement(unsigned long arg);

  itkGetConstMacro(MaxNrOfItWithoutImprovement, unsigned long);
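
  /* A hedged sketch of how these stopping criteria might be combined in
   * TestConvergence() (local names are illustrative, not the actual implementation):
   *
   *   const double gradTol =
   *     m_GradientMagnitudeTolerance * std::max(1.0, positionMagnitude);
   *   if (gradientMagnitude < gradTol)
   *     return true;  // GradientMagnitudeTolerance criterion
   *
   *   if (2.0 * std::abs(f_k - f_km1) <=
   *       m_ValueTolerance * (std::abs(f_k) + std::abs(f_km1) + 1e-20))
   *     ++iterationsWithoutImprovement;
   *   else
   *     iterationsWithoutImprovement = 0;
   *   return iterationsWithoutImprovement >= m_MaxNrOfItWithoutImprovement;
   */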

  /** Setting: the definition of \f$\beta\f$, by default "DaiYuanHestenesStiefel" */
  void
  SetBetaDefinition(const BetaDefinitionType & arg);

  itkGetConstReferenceMacro(BetaDefinition, BetaDefinitionType);

protected:
  GenericConjugateGradientOptimizer();
  ~GenericConjugateGradientOptimizer() override = default;

  void
  PrintSelf(std::ostream & os, Indent indent) const override;

  DerivativeType    m_CurrentGradient{};
  MeasureType       m_CurrentValue{ 0.0 };
  unsigned long     m_CurrentIteration{ 0 };
  StopConditionType m_StopCondition{ StopConditionType::Unknown };
  bool              m_Stop{ false };
  double            m_CurrentStepLength{ 0.0 };

  /** Flag that is true as long as the method
   * SetMaxNrOfItWithoutImprovement has never been called */
  bool m_UseDefaultMaxNrOfItWithoutImprovement{ true };

  /** Is true when the LineSearchOptimizer has been started. */
  bool m_InLineSearch{ false };
  itkSetMacro(InLineSearch, bool);

  /** Flag that says if the previous gradient and search direction are known.
   * Typically 'false' at the start of optimization, or when a stopped optimization
   * is resumed (in the latter case the previous gradient and search direction
   * may of course still be valid, but to be safe it is assumed that they are not). */
  bool m_PreviousGradientAndSearchDirValid{ false };

  /** The name of the BetaDefinition */
  BetaDefinitionType m_BetaDefinition{};

  /** A mapping that links the names of the BetaDefinitions to functions that
   * compute \f$\beta\f$. */
  BetaDefinitionMapType m_BetaDefinitionMap{};

  /** Function to add a new beta definition. The first argument should be a name
   * via which a user can select this \f$\beta\f$ definition. The second argument is a
   * pointer to a method that computes \f$\beta\f$.
   * Called in the constructor of this class, and possibly by subclasses.
   */
  void
  AddBetaDefinition(const BetaDefinitionType & name, ComputeBetaFunctionType function);
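
  /* Illustrative example of such a registration, as it could appear in a constructor
   * (the built-in definitions are registered in a similar way; the name string is
   * what users later pass to SetBetaDefinition()):
   *
   *   this->AddBetaDefinition("FletcherReeves", &Self::ComputeBetaFR);
   */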

  /**
   * Compute the search direction:
   *    \f[ d_{k} = - g_{k} + \beta_{k} d_{k-1} \f]
   *
   * In the first iteration the search direction is computed as:
   *    \f[ d_{0} = - g_{0} \f]
   *
   * On calling, searchDir should equal \f$d_{k-1}\f$. On return searchDir
   * equals \f$d_{k}\f$.
   */
  virtual void
  ComputeSearchDirection(const DerivativeType & previousGradient,
                         const DerivativeType & gradient,
                         ParametersType &       searchDir);
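
  /* A minimal sketch of the intended update (assuming searchDir holds \f$d_{k-1}\f$
   * on entry, as described above):
   *
   *   const double beta = this->ComputeBeta(previousGradient, gradient, searchDir);
   *   for (unsigned int i = 0; i < searchDir.GetSize(); ++i)
   *     searchDir[i] = -gradient[i] + beta * searchDir[i];
   */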

  /** Perform a line search along the search direction. On calling, \f$x, f\f$, and \f$g\f$ should
   * contain the current position, the cost function value at this position, and
   * the derivative. On return the step, \f$x\f$ (new position), \f$f\f$ (value at \f$x\f$), and \f$g\f$
   * (derivative at \f$x\f$) are updated. */
  virtual void
  LineSearch(const ParametersType searchDir, double & step, ParametersType & x, MeasureType & f, DerivativeType & g);

  /** Check if convergence has occurred.
   * The firstLineSearchDone bool allows the implementation of TestConvergence to
   * decide to skip a few convergence checks when no line search has been performed yet
   * (so, before the actual optimization begins). */
  virtual bool
  TestConvergence(bool firstLineSearchDone);

  /** Compute \f$\beta\f$ according to the user set \f$\beta\f$-definition */
  virtual double
  ComputeBeta(const DerivativeType & previousGradient,
              const DerivativeType & gradient,
              const ParametersType & previousSearchDir);

  /** Different definitions of \f$\beta\f$ */
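  /* For reference, the standard literature definitions are roughly as follows
   * (with \f$ y_k = g_k - g_{k-1} \f$ and \f$ d_{k-1} \f$ the previous search
   * direction; the exact variants used by the implementation may differ in details):
   *   FletcherReeves:  \f$ \beta = g_k^T g_k / (g_{k-1}^T g_{k-1}) \f$
   *   PolakRibiere:    \f$ \beta = g_k^T y_k / (g_{k-1}^T g_{k-1}) \f$
   *   DaiYuan:         \f$ \beta = g_k^T g_k / (d_{k-1}^T y_k) \f$
   *   HestenesStiefel: \f$ \beta = g_k^T y_k / (d_{k-1}^T y_k) \f$
   *   DaiYuanHestenesStiefel: a hybrid, commonly \f$ \max(0, \min(\beta^{HS}, \beta^{DY})) \f$
   */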

  /** "SteepestDescent: beta=0 */
  double
  ComputeBetaSD(const DerivativeType & previousGradient,
                const DerivativeType & gradient,
                const ParametersType & previousSearchDir);

  /** "FletcherReeves" */
  double
  ComputeBetaFR(const DerivativeType & previousGradient,
                const DerivativeType & gradient,
                const ParametersType & previousSearchDir);

  /** "PolakRibiere" */
  double
  ComputeBetaPR(const DerivativeType & previousGradient,
                const DerivativeType & gradient,
                const ParametersType & previousSearchDir);

  /** "DaiYuan" */
  double
  ComputeBetaDY(const DerivativeType & previousGradient,
                const DerivativeType & gradient,
                const ParametersType & previousSearchDir);

  /** "HestenesStiefel" */
  double
  ComputeBetaHS(const DerivativeType & previousGradient,
                const DerivativeType & gradient,
                const ParametersType & previousSearchDir);

  /** "DaiYuanHestenesStiefel" */
  double
  ComputeBetaDYHS(const DerivativeType & previousGradient,
                  const DerivativeType & gradient,
                  const ParametersType & previousSearchDir);

private:
  unsigned long m_MaximumNumberOfIterations{ 100 };
  double        m_ValueTolerance{ 1e-5 };
  double        m_GradientMagnitudeTolerance{ 1e-5 };
  unsigned long m_MaxNrOfItWithoutImprovement{ 10 };

  LineSearchOptimizerPointer m_LineSearchOptimizer{ nullptr };
};

} // end namespace itk

#endif // itkGenericConjugateGradientOptimizer_h