File: itkGradientDescentOptimizerBasev4Test.cxx

/*=========================================================================
 *
 *  Copyright NumFOCUS
 *
 *  Licensed under the Apache License, Version 2.0 (the "License");
 *  you may not use this file except in compliance with the License.
 *  You may obtain a copy of the License at
 *
 *         https://www.apache.org/licenses/LICENSE-2.0.txt
 *
 *  Unless required by applicable law or agreed to in writing, software
 *  distributed under the License is distributed on an "AS IS" BASIS,
 *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *  See the License for the specific language governing permissions and
 *  limitations under the License.
 *
 *=========================================================================*/
#include "itkGradientDescentOptimizerBasev4.h"
#include "itkImage.h"
#include "itkTestingMacros.h"

/* Create a simple metric for exercising the optimizer base class in this test. */
template <typename TFixedObject, typename TMovingObject>
class GradientDescentOptimizerBasev4TestMetric : public itk::ObjectToObjectMetricBase
{
public:
  ITK_DISALLOW_COPY_AND_MOVE(GradientDescentOptimizerBasev4TestMetric);

  /** Standard class type aliases. */
  using Self = GradientDescentOptimizerBasev4TestMetric;
  using Superclass = itk::ObjectToObjectMetricBase;
  using Pointer = itk::SmartPointer<Self>;
  using ConstPointer = itk::SmartPointer<const Self>;

  using typename Superclass::MeasureType;
  using typename Superclass::DerivativeType;
  using typename Superclass::ParametersType;
  using typename Superclass::ParametersValueType;

  itkOverrideGetNameOfClassMacro(GradientDescentOptimizerBasev4TestMetric);

  itkNewMacro(Self);

  // Pure virtual functions that all Metrics must provide
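  // The stubs below return fixed values (metric value 1, zero derivative,
  // 5 global and 3 local parameters) so the optimizer can be exercised
  // without a real registration problem.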
  unsigned int
  GetNumberOfParameters() const override
  {
    return 5;
  }

  MeasureType
  GetValue() const override
  {
    return itk::NumericTraits<MeasureType>::OneValue();
  }

  void
  GetDerivative(DerivativeType & derivative) const override
  {
    derivative.Fill(ParametersValueType{});
  }

  void
  GetValueAndDerivative(MeasureType & value, DerivativeType & derivative) const override
  {
    value = itk::NumericTraits<MeasureType>::OneValue();
    derivative.Fill(ParametersValueType{});
  }

  unsigned int
  GetNumberOfLocalParameters() const override
  {
    return 3;
  }

  void
  UpdateTransformParameters(const DerivativeType &, ParametersValueType) override
  {}

  const ParametersType &
  GetParameters() const override
  {
    return m_Parameters;
  }

  void
  SetParameters(ParametersType &) override
  {}

  bool
  HasLocalSupport() const override
  {
    return false;
  }

  void
  Initialize() override
  {}

  void
  PrintSelf(std::ostream & os, itk::Indent indent) const override
  {
    Superclass::PrintSelf(os, indent);
  }

protected:
  ~GradientDescentOptimizerBasev4TestMetric() override = default;

private:
  GradientDescentOptimizerBasev4TestMetric() = default;
  ParametersType m_Parameters;
};

/* Define a simple derived optimizer class.
 * \class GradientDescentOptimizerBasev4TestOptimizer */
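/* The StartOptimization override forwards to the base class and then logs;
 * the remaining overrides only log their invocation, so no actual
 * optimization work is performed by this test optimizer. */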
class GradientDescentOptimizerBasev4TestOptimizer : public itk::GradientDescentOptimizerBasev4
{
public:
  ITK_DISALLOW_COPY_AND_MOVE(GradientDescentOptimizerBasev4TestOptimizer);

  /** Standard "Self" type alias. */
  using Self = GradientDescentOptimizerBasev4TestOptimizer;
  using Superclass = itk::GradientDescentOptimizerBasev4;
  using Pointer = itk::SmartPointer<Self>;
  using ConstPointer = itk::SmartPointer<const Self>;

  /** Method for creation through the object factory. */
  itkNewMacro(Self);

  /** \see LightObject::GetNameOfClass() */
  itkOverrideGetNameOfClassMacro(GradientDescentOptimizerBasev4TestOptimizer);

  /* Provide an override for the pure virtual StartOptimization */
  void
  StartOptimization(bool doOnlyInitialization = false) override
  {
    Superclass::StartOptimization(doOnlyInitialization);
    std::cout << "StartOptimization called. doOnlyInitialization: " << doOnlyInitialization << std::endl;
  }

  void
  ResumeOptimization() override
  {
    std::cout << "ResumeOptimization called." << std::endl;
  }

  void
  ModifyGradientByScalesOverSubRange(const IndexRangeType & index) override
  {
    std::cout << "ModifyGradientByScalesOverSubRange called with index:" << index << std::endl;
  }

  void
  ModifyGradientByLearningRateOverSubRange(const IndexRangeType & index) override
  {
    std::cout << "ModifyGradientByLearningRateOverSubRange called with index:" << index << std::endl;
  }

protected:
  GradientDescentOptimizerBasev4TestOptimizer() = default;
  ~GradientDescentOptimizerBasev4TestOptimizer() override = default;
};


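/* Exercise the itk::GradientDescentOptimizerBasev4 interface through the
 * minimal metric and optimizer defined above: Set/Get methods, work-unit
 * setup, and a StartOptimization call that must complete without throwing. */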
int
itkGradientDescentOptimizerBasev4Test(int, char *[])
{
  constexpr int ImageDimension = 2;
  using ImageType = itk::Image<double, ImageDimension>;

  using MetricType = GradientDescentOptimizerBasev4TestMetric<ImageType, ImageType>;

  auto metric = MetricType::New();
  auto optimizer = GradientDescentOptimizerBasev4TestOptimizer::New();

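  // Exercise the Set/Get/On/Off methods of the DoEstimateScales flag.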
  bool doEstimateScales = true;
  ITK_TEST_SET_GET_BOOLEAN(optimizer, DoEstimateScales, doEstimateScales);

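  // Assign the test metric and verify that GetMetric() returns the same object.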
  optimizer->SetMetric(metric);
  ITK_TEST_SET_GET_VALUE(metric, optimizer->GetMetric());

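  // Before optimization starts, the current metric value should still be its initial 0.0.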
  ITK_TEST_SET_GET_VALUE(0.0, optimizer->GetCurrentMetricValue());

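  // Use two work units when the base class splits gradient modification across threads.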
  optimizer->SetNumberOfWorkUnits(2);

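  // StartOptimization() should run through the overridden hooks without throwing.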
  ITK_TRY_EXPECT_NO_EXCEPTION(optimizer->StartOptimization());

  std::cout << "Printing self.." << std::endl;
  std::cout << optimizer << std::endl;

  std::cout << "Test passed." << std::endl;
  return EXIT_SUCCESS;
}