File: ImageMutualInformation1.cxx

/*=========================================================================
 *
 *  Copyright Insight Software Consortium
 *
 *  Licensed under the Apache License, Version 2.0 (the "License");
 *  you may not use this file except in compliance with the License.
 *  You may obtain a copy of the License at
 *
 *         http://www.apache.org/licenses/LICENSE-2.0.txt
 *
 *  Unless required by applicable law or agreed to in writing, software
 *  distributed under the License is distributed on an "AS IS" BASIS,
 *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *  See the License for the specific language governing permissions and
 *  limitations under the License.
 *
 *=========================================================================*/

// Software Guide : BeginLatex
//
// This example illustrates how to compute the Mutual Information between two
// images using classes from the Statistics framework. Note that the image
// metrics designed for the image registration framework could also be used
// for this purpose.
//
// For example, you could use:
//
// \begin{itemize}
// \item \doxygen{MutualInformationImageToImageMetric}
// \item \doxygen{MattesMutualInformationImageToImageMetric}
// \item \doxygen{MutualInformationHistogramImageToImageMetric}
// \item \doxygen{NormalizedMutualInformationHistogramImageToImageMetric}
// \item \doxygen{KullbackLeiblerCompareHistogramImageToImageMetric}
// \end{itemize}
//
// Mutual Information, as computed in this example and as commonly used in
// the context of image registration, provides a measure of how much the
// uncertainty about the value of a pixel in one image is reduced by
// measuring the homologous pixel in the other image. Note that Mutual
// Information as used here does not measure the amount of information that
// one image provides about the other image; that would require us to take
// into account the spatial structures in the images as well as the
// semantics of the image content in terms of an observer.
//
// This implies that there is still an enormous unexploited potential in the
// use of the Mutual Information concept in the domain of medical images,
// among the most interesting applications being the semantic description of
// images in terms of anatomical structures.
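//
// In terms of the marginal entropies $H(A)$ and $H(B)$ of the two images,
// and of their joint entropy $H(A,B)$, the quantity computed at the end of
// this example is
//
// \begin{equation}
// I(A,B) = H(A) + H(B) - H(A,B)
// \end{equation}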
//
// \index{Mutual Information!Statistics}
// \index{Statistics!Mutual Information}
// \index{Joint Entropy!Statistics}
// \index{Statistics!Joint Entropy}
// \index{Joint Histogram!Statistics}
// \index{Statistics!Joint Histogram}
//
// Software Guide : EndLatex


// Software Guide : BeginLatex
//
// In this particular example we make use of classes from the Statistics
// framework in order to compute the Mutual Information between two images.
// We assume that both images have the same number of pixels along every
// dimension, and that they have the same origin and spacing. Therefore the
// pixels from one image are perfectly aligned with those of the other
// image.
//
// We must start by including the header files of the image, the histogram
// filter, the reader and the Join image filter. We will read both images
// and use the Join image filter to compose a two-component image, placing
// each one of the input images in one of the components. This is the
// natural way of using the Statistics framework in ITK, given that the
// fundamental statistical classes expect to receive multi-valued measures.
//
// Software Guide : EndLatex

// Software Guide : BeginCodeSnippet
#include "itkImage.h"
#include "itkImageFileReader.h"
#include "itkJoinImageFilter.h"
#include "itkImageToHistogramFilter.h"
// Software Guide : EndCodeSnippet


int main( int argc, char * argv [] )
{

  if( argc < 3 )
    {
    std::cerr << "Missing command line arguments" << std::endl;
    std::cerr << "Usage: ImageMutualInformation1 inputImage1 inputImage2"
              << std::endl;
    return EXIT_FAILURE;
    }


  // Software Guide : BeginLatex
  //
  // We define the pixel type and dimension of the images to be read.
  //
  // Software Guide : EndLatex

  // Software Guide : BeginCodeSnippet
  typedef unsigned char                                 PixelComponentType;
  const unsigned int                                    Dimension = 2;

  typedef itk::Image< PixelComponentType, Dimension >   ImageType;
  // Software Guide : EndCodeSnippet


  // Software Guide : BeginLatex
  //
  // Using the image type we proceed to instantiate the readers for both input
  // images. Then, we take their filenames from the command line arguments.
  //
  // Software Guide : EndLatex

  // Software Guide : BeginCodeSnippet
  typedef itk::ImageFileReader< ImageType >             ReaderType;

  ReaderType::Pointer reader1 = ReaderType::New();
  ReaderType::Pointer reader2 = ReaderType::New();

  reader1->SetFileName( argv[1] );
  reader2->SetFileName( argv[2] );
  // Software Guide : EndCodeSnippet


  // Software Guide : BeginLatex
  //
  // Using the \doxygen{JoinImageFilter} we combine the two input images
  // into a single image of two components.
  //
  // Software Guide : EndLatex

  // Software Guide : BeginCodeSnippet
  typedef itk::JoinImageFilter< ImageType, ImageType >  JoinFilterType;

  JoinFilterType::Pointer joinFilter = JoinFilterType::New();

  joinFilter->SetInput1( reader1->GetOutput() );
  joinFilter->SetInput2( reader2->GetOutput() );
  // Software Guide : EndCodeSnippet


  // Software Guide : BeginLatex
  //
  // At this point we trigger the execution of the pipeline by invoking the
  // \code{Update()} method on the Join filter. We must put the call inside a
  // try/catch block because the \code{Update()} call may throw exceptions.
  //
  // Software Guide : EndLatex

  // Software Guide : BeginCodeSnippet
  try
    {
    joinFilter->Update();
    }
  catch( itk::ExceptionObject & excp )
    {
    std::cerr << excp << std::endl;
    return EXIT_FAILURE;
    }
  // Software Guide : EndCodeSnippet


  // Software Guide : BeginLatex
  //
  // We now prepare the types to be used for the computation of the joint
  // histogram. For this purpose, we take the type of the image resulting from
  // the JoinImageFilter and use it as the template argument of the
  // \doxygen{ImageToHistogramFilter}. We then construct one by invoking the
  // \code{New()} method.
  //
  // Software Guide : EndLatex

  // Software Guide : BeginCodeSnippet
  typedef JoinFilterType::OutputImageType               VectorImageType;

  typedef itk::Statistics::ImageToHistogramFilter<
                                       VectorImageType >  HistogramFilterType;

  HistogramFilterType::Pointer histogramFilter = HistogramFilterType::New();
  // Software Guide : EndCodeSnippet


  // Software Guide : BeginLatex
  //
  // We pass the multiple-component image as input to the histogram filter,
  // and set the marginal scale value that defines the precision to be used
  // when classifying values into the histogram bins.
  //
  // Software Guide : EndLatex

  // Software Guide : BeginCodeSnippet
  histogramFilter->SetInput(  joinFilter->GetOutput()  );

  histogramFilter->SetMarginalScale( 10.0 );
  // Software Guide : EndCodeSnippet


  // Software Guide : BeginLatex
  //
  // We must now define the number of bins to use for each one of the components
  // in the joint image. For this purpose we take the \code{HistogramSizeType} from the
  // traits of the histogram filter type.
  //
  // Software Guide : EndLatex

  // Software Guide : BeginCodeSnippet
  typedef HistogramFilterType::HistogramSizeType   HistogramSizeType;

  HistogramSizeType size( 2 );

  size[0] = 255;  // number of bins for the first  channel
  size[1] = 255;  // number of bins for the second channel

  histogramFilter->SetHistogramSize( size );
  // Software Guide : EndCodeSnippet


  // Software Guide : BeginLatex
  //
  // Finally, we must specify the upper and lower bounds for the histogram
  // using the \code{SetHistogramBinMinimum()} and
  // \code{SetHistogramBinMaximum()} methods. The \code{Update()} method is then
  // called in order to trigger the computation of the histogram.
  //
  // Software Guide : EndLatex

  // Software Guide : BeginCodeSnippet
  typedef HistogramFilterType::HistogramMeasurementVectorType
    HistogramMeasurementVectorType;

  // The joined image has two components, so the bound vectors need one
  // entry per component.
  HistogramMeasurementVectorType binMinimum( 2 );
  HistogramMeasurementVectorType binMaximum( 2 );

  binMinimum[0] = -0.5;
  binMinimum[1] = -0.5;

  binMaximum[0] = 255.5;
  binMaximum[1] = 255.5;

  histogramFilter->SetHistogramBinMinimum( binMinimum );
  histogramFilter->SetHistogramBinMaximum( binMaximum );

  histogramFilter->Update();
  // Software Guide : EndCodeSnippet


  // Software Guide : BeginLatex
  //
  // The histogram can now be recovered from the filter by invoking the
  // \code{GetOutput()} method, and assigning the result to a variable of
  // the histogram type taken from the filter traits.
  //
  // Software Guide : EndLatex

  // Software Guide : BeginCodeSnippet
  typedef HistogramFilterType::HistogramType  HistogramType;

  const HistogramType * histogram = histogramFilter->GetOutput();
  // Software Guide : EndCodeSnippet


  // Software Guide : BeginLatex
  //
  // We now walk over all the bins of the joint histogram and compute their
  // contribution to the value of the joint entropy. For this purpose we use
  // histogram iterators, and the \code{Begin()} and \code{End()} methods.
  // Since the values returned from the histogram are frequencies, we must
  // convert them to estimates of probability by dividing them by the total
  // sum of frequencies returned by the \code{GetTotalFrequency()} method.
  //
  // Software Guide : EndLatex

  // Software Guide : BeginCodeSnippet
  HistogramType::ConstIterator itr = histogram->Begin();
  HistogramType::ConstIterator end = histogram->End();

  const double Sum = histogram->GetTotalFrequency();
  // Software Guide : EndCodeSnippet


  // Software Guide : BeginLatex
  //
  // We initialize to zero the variable used for accumulating the value of
  // the joint entropy, and then use the iterator to visit all the bins of
  // the joint histogram. For every bin we compute its contribution to the
  // reduction of uncertainty. Note that in order to avoid taking the
  // logarithm of zero, we skip over those bins that have a zero count. The
  // entropy contribution must be computed using logarithms in base two in
  // order to express entropy in \textbf{bits}.
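  //
  // In symbols, the loop below accumulates the estimate
  //
  // \begin{equation}
  // H(A,B) = - \sum_{i} p_i \log_2 p_i
  // \end{equation}
  //
  // where $p_i$ is the probability associated with the $i$-th bin of the
  // joint histogram. For example, a joint histogram with four equally
  // populated bins would yield a joint entropy of exactly two bits.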
  //
  // Software Guide : EndLatex

  // Software Guide : BeginCodeSnippet
  double JointEntropy = 0.0;

  while( itr != end )
    {
    const double count = itr.GetFrequency();
    if( count > 0.0 )
      {
      const double probability = count / Sum;
      JointEntropy +=
        - probability * std::log( probability ) / std::log( 2.0 );
      }
    ++itr;
    }
  // Software Guide : EndCodeSnippet

  std::cout << "Joint Entropy      = " << JointEntropy << " bits " << std::endl;


  // Software Guide : BeginLatex
  //
  // Now that we have the value of the joint entropy we can proceed to
  // estimate the values of the entropies for each image independently.
  // This can be done by simply changing the number of bins and then
  // recomputing the histogram: using a single bin for the second channel
  // marginalizes the joint histogram along that channel, so the first
  // channel behaves as the histogram of the first image alone.
  //
  // Software Guide : EndLatex

  // Software Guide : BeginCodeSnippet
  size[0] = 255;  // number of bins for the first  channel
  size[1] =   1;  // number of bins for the second channel

  histogramFilter->SetHistogramSize( size );
  histogramFilter->Update();
  // Software Guide : EndCodeSnippet


  // Software Guide : BeginLatex
  //
  // We reset the iterators to the recomputed histogram, and initialize
  // another variable to zero in order to start accumulating the entropy
  // contributions from every bin.
  //
  // Software Guide : EndLatex

  // Software Guide : BeginCodeSnippet
  itr = histogram->Begin();
  end = histogram->End();

  double Entropy1 = 0.0;

  while( itr != end )
    {
    const double count = itr.GetFrequency();
    if( count > 0.0 )
      {
      const double probability = count / Sum;
      Entropy1 += - probability * std::log( probability ) / std::log( 2.0 );
      }
    ++itr;
    }
  // Software Guide : EndCodeSnippet

  std::cout << "Image1 Entropy   = " << Entropy1 << " bits " << std::endl;


  // Software Guide : BeginLatex
  //
  // The same process is used for computing the entropy of the other
  // component, simply by swapping the bin counts between the two channels
  // of the histogram.
  //
  // Software Guide : EndLatex

  // Software Guide : BeginCodeSnippet
  size[0] =   1;  // number of bins for the first channel
  size[1] = 255;  // number of bins for the second channel

  histogramFilter->SetHistogramSize( size );
  histogramFilter->Update();
  // Software Guide : EndCodeSnippet


  // Software Guide : BeginLatex
  //
  // The entropy is computed in a similar manner, just by visiting all the
  // bins of the histogram and accumulating their entropy contributions.
  //
  // Software Guide : EndLatex

  // Software Guide : BeginCodeSnippet
  itr = histogram->Begin();
  end = histogram->End();

  double Entropy2 = 0.0;

  while( itr != end )
    {
    const double count = itr.GetFrequency();
    if( count > 0.0 )
      {
      const double probability = count / Sum;
      Entropy2 += - probability * std::log( probability ) / std::log( 2.0 );
      }
    ++itr;
    }
  // Software Guide : EndCodeSnippet

  std::cout << "Image2 Entropy   = " << Entropy2 << " bits " << std::endl;


  // Software Guide : BeginLatex
  //
  // At this point we can compute any of the popular measures of Mutual
  // Information. For example:
  //
  // Software Guide : EndLatex

  // Software Guide : BeginCodeSnippet
  double MutualInformation = Entropy1 + Entropy2 - JointEntropy;
  // Software Guide : EndCodeSnippet

  std::cout << "Mutual Information = " << MutualInformation << " bits " << std::endl;


  // Software Guide : BeginLatex
  //
  // or Normalized Mutual Information, where the value of Mutual Information is
  // divided by the mean entropy of the input images.
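  //
  // In symbols:
  //
  // \begin{equation}
  // NMI_1 = \frac{2 \, I(A,B)}{H(A) + H(B)}
  // \end{equation}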
  //
  // Software Guide : EndLatex

  // Software Guide : BeginCodeSnippet
  double NormalizedMutualInformation1 =
                     2.0 * MutualInformation / ( Entropy1 + Entropy2 );
  // Software Guide : EndCodeSnippet

  std::cout << "Normalized Mutual Information 1 = " << NormalizedMutualInformation1 <<  std::endl;


  // Software Guide : BeginLatex
  //
  // A second form of Normalized Mutual Information has been defined as the
  // sum of the two marginal entropies divided by their joint entropy.
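  //
  // In symbols:
  //
  // \begin{equation}
  // NMI_2 = \frac{H(A) + H(B)}{H(A,B)}
  // \end{equation}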
  //
  // Software Guide : EndLatex

  // Software Guide : BeginCodeSnippet
  double NormalizedMutualInformation2 = ( Entropy1 + Entropy2 ) / JointEntropy;
  // Software Guide : EndCodeSnippet


  std::cout << "Normalized Mutual Information 2 = " << NormalizedMutualInformation2 <<  std::endl;


  // Software Guide : BeginLatex
  //
  // You will probably find it very interesting to see how strongly the
  // value of Mutual Information depends on the number of bins over which
  // the histogram is defined.
  //
  // Software Guide : EndLatex
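
  // Software Guide : BeginLatex
  //
  // One way to observe this dependency is to recompute the joint entropy
  // for a few different histogram resolutions. The following fragment is a
  // minimal sketch of such an experiment; the bin counts used here are
  // arbitrary choices.
  //
  // Software Guide : EndLatex

  // Software Guide : BeginCodeSnippet
  const unsigned int trialBins[] = { 16, 64, 256 };  // arbitrary bin counts

  for( unsigned int t = 0; t < 3; t++ )
    {
    // Recompute the joint histogram using trialBins[t] bins per channel.
    size[0] = trialBins[t];
    size[1] = trialBins[t];
    histogramFilter->SetHistogramSize( size );
    histogramFilter->Update();

    // Accumulate the joint entropy exactly as before.
    double trialJointEntropy = 0.0;
    HistogramType::ConstIterator titr = histogram->Begin();
    HistogramType::ConstIterator tend = histogram->End();
    while( titr != tend )
      {
      const double count = titr.GetFrequency();
      if( count > 0.0 )
        {
        const double probability = count / histogram->GetTotalFrequency();
        trialJointEntropy -=
          probability * std::log( probability ) / std::log( 2.0 );
        }
      ++titr;
      }

    std::cout << trialBins[t] << " bins per channel : Joint Entropy = "
              << trialJointEntropy << " bits " << std::endl;
    }
  // Software Guide : EndCodeSnippet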


  return EXIT_SUCCESS;

}