File: itkRegularStepGradientDescentOptimizerv4.h

Package: insighttoolkit5 5.4.3-5
/*=========================================================================
 *
 *  Copyright NumFOCUS
 *
 *  Licensed under the Apache License, Version 2.0 (the "License");
 *  you may not use this file except in compliance with the License.
 *  You may obtain a copy of the License at
 *
 *         https://www.apache.org/licenses/LICENSE-2.0.txt
 *
 *  Unless required by applicable law or agreed to in writing, software
 *  distributed under the License is distributed on an "AS IS" BASIS,
 *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *  See the License for the specific language governing permissions and
 *  limitations under the License.
 *
 *=========================================================================*/
#ifndef itkRegularStepGradientDescentOptimizerv4_h
#define itkRegularStepGradientDescentOptimizerv4_h

#include "itkGradientDescentOptimizerv4.h"
#include "itkCompensatedSummation.h"

namespace itk
{
/**
 * \class RegularStepGradientDescentOptimizerv4
 *  \brief Regular step gradient descent optimizer.
 *
 *   This optimizer is a variant of gradient descent that attempts to avoid
 *   taking steps that are too large. At each iteration, it takes a step
 *   along the direction of the metric derivative. Each time the direction
 *   of the derivative changes abruptly, the optimizer assumes that a local
 *   extremum has been passed and reacts by reducing the step length by a
 *   relaxation factor, which is set to 0.5 by default.
 *   The default initial step length is 1, and it can only be changed
 *   manually via SetLearningRate(), since this optimizer does not use the
 *   ScalesEstimator to estimate the learning rate automatically.
 *   Also note that, unlike the previous RegularStepGradientDescentOptimizer,
 *   the ITKv4 version has no "maximize/minimize" option to modify the effect
 *   of the metric derivative. The assigned metric is assumed to return a
 *   parameter derivative result that "improves" the optimization.
 *
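 *   A minimal configuration sketch (values other than the documented
 *   defaults, such as the iteration count, are illustrative only; the
 *   lambda observer assumes the std::function AddObserver() overload
 *   available since ITK 5):
 *
 *   \code
 *   using OptimizerType = itk::RegularStepGradientDescentOptimizerv4<double>;
 *   auto optimizer = OptimizerType::New();
 *   optimizer->SetLearningRate(1.0);           // initial step length
 *   optimizer->SetRelaxationFactor(0.5);       // step reduction on direction change
 *   optimizer->SetMinimumStepLength(1e-4);     // convergence threshold on step length
 *   optimizer->SetGradientMagnitudeTolerance(1e-8);
 *   optimizer->SetNumberOfIterations(200);     // illustrative cap
 *
 *   // Watch the step length shrink as the relaxation factor is applied.
 *   optimizer->AddObserver(itk::IterationEvent(), [&](const itk::EventObject &) {
 *     std::cout << "step length: " << optimizer->GetCurrentStepLength() << '\n';
 *   });
 *   \endcode
 *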
 * \ingroup ITKOptimizersv4
 */
template <typename TInternalComputationValueType = double>
class ITK_TEMPLATE_EXPORT RegularStepGradientDescentOptimizerv4
  : public GradientDescentOptimizerv4Template<TInternalComputationValueType>
{
public:
  ITK_DISALLOW_COPY_AND_MOVE(RegularStepGradientDescentOptimizerv4);

  /** Standard class type aliases. */
  using Self = RegularStepGradientDescentOptimizerv4;
  using Superclass = GradientDescentOptimizerv4Template<TInternalComputationValueType>;
  using Pointer = SmartPointer<Self>;
  using ConstPointer = SmartPointer<const Self>;

  /** \see LightObject::GetNameOfClass() */
  itkOverrideGetNameOfClassMacro(RegularStepGradientDescentOptimizerv4);

  /** New macro for creation of the object through a SmartPointer. */
  itkNewMacro(Self);


  /** It should be possible to derive the internal computation type from the class object. */
  using InternalComputationValueType = TInternalComputationValueType;

  /** Derivative type. */
  using typename Superclass::DerivativeType;

  /** Metric type over which this class is templated. */
  using typename Superclass::MeasureType;
  using typename Superclass::IndexRangeType;
  using typename Superclass::ScalesType;
  using typename Superclass::ParametersType;

  /** Compensated summation type. */
  using CompensatedSummationType = CompensatedSummation<InternalComputationValueType>;

  /** Minimum step length (learning rate) value for convergence checking.
   *  When a local minimum is passed by taking a large step, the step
   *  length is reduced by the relaxation factor, so that smaller steps
   *  are taken towards the minimum point (convergence).
   *  When the step length falls below this value, the optimization is
   *  treated as converged.
   *
   *  The default value is set to 1e-4 to pass all tests.
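   *
   *  For example, with the default relaxation factor of 0.5 and an initial
   *  step length of 1, the step length after k reductions is 0.5^k, so the
   *  default 1e-4 threshold is crossed after 14 reductions (0.5^14 ~ 6.1e-5).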
   */
  itkSetMacro(MinimumStepLength, TInternalComputationValueType);
  itkGetConstReferenceMacro(MinimumStepLength, TInternalComputationValueType);

  /** Set/Get relaxation factor value. */
  itkSetMacro(RelaxationFactor, TInternalComputationValueType);
  itkGetConstReferenceMacro(RelaxationFactor, TInternalComputationValueType);

  /** Set/Get gradient magnitude tolerance value for convergence checking. */
  itkSetMacro(GradientMagnitudeTolerance, TInternalComputationValueType);
  itkGetConstReferenceMacro(GradientMagnitudeTolerance, TInternalComputationValueType);

  /** Set/Get the current relaxation scale applied to the learning rate. */
  itkSetMacro(CurrentLearningRateRelaxation, MeasureType);
  itkGetConstReferenceMacro(CurrentLearningRateRelaxation, MeasureType);

  /** Start and run the optimization. */
  void
  StartOptimization(bool doOnlyInitialization = false) override;

  /** Estimate the learning rate based on the current gradient. */
  void
  EstimateLearningRate() override;

  /** Get current gradient step value. */
  double
  GetCurrentStepLength() const;

protected:
  /** Advance one step along the gradient direction.
   * Includes the transform update. */
  void
  AdvanceOneStep() override;

  /** Modify the input gradient over a given index range. */
  void
  ModifyGradientByScalesOverSubRange(const IndexRangeType & subrange) override;
  void
  ModifyGradientByLearningRateOverSubRange(const IndexRangeType & subrange) override;


  /** Default constructor. */
  RegularStepGradientDescentOptimizerv4();

  /** Destructor. */
  ~RegularStepGradientDescentOptimizerv4() override = default;

  void
  PrintSelf(std::ostream & os, Indent indent) const override;


private:
  TInternalComputationValueType m_RelaxationFactor{};

  TInternalComputationValueType m_MinimumStepLength{};

  TInternalComputationValueType m_GradientMagnitudeTolerance{};

  MeasureType m_CurrentLearningRateRelaxation{};
};

} // end namespace itk

#ifndef ITK_MANUAL_INSTANTIATION
#  include "itkRegularStepGradientDescentOptimizerv4.hxx"
#endif

#endif