File: itkGradientDescentOptimizer.h

/*=========================================================================
 *
 *  Copyright NumFOCUS
 *
 *  Licensed under the Apache License, Version 2.0 (the "License");
 *  you may not use this file except in compliance with the License.
 *  You may obtain a copy of the License at
 *
 *         https://www.apache.org/licenses/LICENSE-2.0.txt
 *
 *  Unless required by applicable law or agreed to in writing, software
 *  distributed under the License is distributed on an "AS IS" BASIS,
 *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *  See the License for the specific language governing permissions and
 *  limitations under the License.
 *
 *=========================================================================*/
#ifndef itkGradientDescentOptimizer_h
#define itkGradientDescentOptimizer_h

#include "itkIntTypes.h"
#include "itkSingleValuedNonLinearOptimizer.h"
#include "ITKOptimizersExport.h"
#include <string>

namespace itk
{
/** \class GradientDescentOptimizerEnums
 * \brief Contains all enum classes in the GradientDescentOptimizer class.
 * \ingroup ITKOptimizers
 */
class GradientDescentOptimizerEnums
{
public:
  /** \class StopConditionGradientDescentOptimizer
   * \ingroup ITKOptimizers
   * Codes of stopping conditions */
  enum class StopConditionGradientDescentOptimizer : uint8_t
  {
    MaximumNumberOfIterations,
    MetricError
  };
};
// Define how to print enumeration
extern ITKOptimizers_EXPORT std::ostream &
                            operator<<(std::ostream & out, const GradientDescentOptimizerEnums::StopConditionGradientDescentOptimizer value);
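// Usage sketch: the overload above allows a stop condition to be streamed
// directly, e.g.
//   std::cout << GradientDescentOptimizerEnums::StopConditionGradientDescentOptimizer::MetricError;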

/** \class GradientDescentOptimizer
 * \brief Implement a gradient descent optimizer
 *
 * GradientDescentOptimizer implements a simple gradient descent optimizer.
 * At each iteration the current position is updated according to
 *
 * \f[
 *        p_{n+1} = p_n
 *                + \mbox{learningRate}
 *                  \, \frac{\partial f(p_n) }{\partial p_n}
 * \f]
 *
 * The learning rate is a fixed scalar defined via SetLearningRate().
 * The optimizer steps through a user-defined number of iterations;
 * no convergence checking is done.
 *
 * Additionally, the user can scale each component of
 * \f$ \partial f / \partial p \f$
 * by setting a scaling vector via the SetScales() method.
 *
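 * A minimal usage sketch (assuming a user-supplied
 * itk::SingleValuedCostFunction subclass named MyCostFunction and a
 * pre-filled initial ParametersType; both are hypothetical and not part
 * of this file):
 *
 * \code
 * auto metric = MyCostFunction::New();
 * auto optimizer = itk::GradientDescentOptimizer::New();
 * optimizer->SetCostFunction(metric);
 * optimizer->MinimizeOn();               // minimize rather than maximize
 * optimizer->SetLearningRate(0.01);      // fixed step scale
 * optimizer->SetNumberOfIterations(200); // no convergence test is applied
 * optimizer->SetInitialPosition(initialPosition);
 * optimizer->StartOptimization();
 * std::cout << optimizer->GetStopConditionDescription() << std::endl;
 * \endcode
 *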
 * \sa RegularStepGradientDescentOptimizer
 *
 * \ingroup Numerics Optimizers
 * \ingroup ITKOptimizers
 */
class ITKOptimizers_EXPORT GradientDescentOptimizer : public SingleValuedNonLinearOptimizer
{
public:
  ITK_DISALLOW_COPY_AND_MOVE(GradientDescentOptimizer);

  /** Standard class type aliases. */
  using Self = GradientDescentOptimizer;
  using Superclass = SingleValuedNonLinearOptimizer;
  using Pointer = SmartPointer<Self>;
  using ConstPointer = SmartPointer<const Self>;

  /** Method for creation through the object factory. */
  itkNewMacro(Self);

  /** \see LightObject::GetNameOfClass() */
  itkOverrideGetNameOfClassMacro(GradientDescentOptimizer);

  using StopConditionGradientDescentOptimizerEnum =
    GradientDescentOptimizerEnums::StopConditionGradientDescentOptimizer;
#if !defined(ITK_LEGACY_REMOVE)
  // We need to expose the enum values at the class level
  // for backwards compatibility
  static constexpr StopConditionGradientDescentOptimizerEnum MaximumNumberOfIterations =
    StopConditionGradientDescentOptimizerEnum::MaximumNumberOfIterations;
  static constexpr StopConditionGradientDescentOptimizerEnum MetricError =
    StopConditionGradientDescentOptimizerEnum::MetricError;
#endif

  /** Methods to configure whether the cost function is maximized or minimized. */
  itkGetConstReferenceMacro(Maximize, bool);
  itkSetMacro(Maximize, bool);
  itkBooleanMacro(Maximize);
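  /* The convenience wrappers below express minimization as the negation of
   * the Maximize flag, so only one boolean state (m_Maximize) is stored. */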
  bool
  GetMinimize() const
  {
    return !m_Maximize;
  }
  void
  SetMinimize(bool v)
  {
    this->SetMaximize(!v);
  }
  void
  MinimizeOn()
  {
    this->MaximizeOff();
  }
  void
  MinimizeOff()
  {
    this->MaximizeOn();
  }

  /** Advance one step following the gradient direction: each parameter is
   * updated by the learning rate times the corresponding (scale-adjusted)
   * gradient component, with the sign determined by the Maximize flag. */
  virtual void
  AdvanceOneStep();

  /** Start optimization. */
  void
  StartOptimization() override;

  /** Resume previously stopped optimization with current parameters.
   * \sa StopOptimization. */
  void
  ResumeOptimization();

  /** Stop optimization.
   * \sa ResumeOptimization */
  void
  StopOptimization();

  /** Set the learning rate. */
  itkSetMacro(LearningRate, double);

  /** Get the learning rate. */
  itkGetConstReferenceMacro(LearningRate, double);

  /** Set the number of iterations. */
  itkSetMacro(NumberOfIterations, SizeValueType);

  /** Get the number of iterations. */
  itkGetConstReferenceMacro(NumberOfIterations, SizeValueType);

  /** Get the current iteration number. */
  itkGetConstMacro(CurrentIteration, SizeValueType);

  /** Get the current value. */
  itkGetConstReferenceMacro(Value, double);

  /** Get the stop condition. */
  itkGetConstReferenceMacro(StopCondition, StopConditionGradientDescentOptimizerEnum);
  const std::string
  GetStopConditionDescription() const override;

  /** Get the current gradient. */
  itkGetConstReferenceMacro(Gradient, DerivativeType);

protected:
  GradientDescentOptimizer();
  ~GradientDescentOptimizer() override = default;
  void
  PrintSelf(std::ostream & os, Indent indent) const override;

  // Made protected so that subclasses can access it.
  DerivativeType m_Gradient{};

  bool m_Maximize{ false };

  double m_LearningRate{ 1.0 };

private:
  bool                                      m_Stop{ false };
  double                                    m_Value{ 0.0 };
  StopConditionGradientDescentOptimizerEnum m_StopCondition{
    StopConditionGradientDescentOptimizerEnum::MaximumNumberOfIterations
  };
  SizeValueType      m_NumberOfIterations{ 100 };
  SizeValueType      m_CurrentIteration{ 0 };
  std::ostringstream m_StopConditionDescription{};
};

// Define how to print enumeration
extern ITKOptimizers_EXPORT std::ostream &
                            operator<<(std::ostream & out, const GradientDescentOptimizer::StopConditionGradientDescentOptimizerEnum value);

} // end namespace itk

#endif