File: BayesClassifierMachine.cc

// Copyright (C) 2002 Samy Bengio (bengio@idiap.ch)
//                and Bison Ravi (francois.belisle@idiap.ch)
//                
//
// This file is part of Torch. Release II.
// [The Ultimate Machine Learning Library]
//
// Torch is free software; you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation; either version 2 of the License, or
// (at your option) any later version.
//
// Torch is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with Torch; if not, write to the Free Software
// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA


#include "BayesClassifierMachine.h"

namespace Torch {
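
// A BayesClassifierMachine holds one trained machine per class together with
// the class log priors. forward() adds each class log prior to the
// corresponding machine's log likelihood to obtain an unnormalized log
// posterior, and converts the resulting one-hot vector of scores to the
// given ClassFormat.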

BayesClassifierMachine::BayesClassifierMachine(Trainer** trainers_, int n_trainers_, List** trainers_measurers_, ClassFormat* class_format_, real* log_priors_)
{
  allocated_log_priors = false;

  trainers = trainers_;
  n_trainers = n_trainers_;
  trainers_measurers = trainers_measurers_;
  class_format = class_format_;

  log_posteriors = (real*) xalloc (n_trainers * sizeof(real));
  list_log_posteriors = NULL;
  addToList(&list_log_posteriors, n_trainers, log_posteriors);

  //if we are not given any log_prior class probabilities,
  //then we will assume training set proportions;
  //initialize to uniform priors in the meantime.
  
  if(log_priors_ != NULL)
    log_priors = log_priors_;
  else {
    allocated_log_priors = true;
    log_priors = (real*) xalloc (n_trainers * sizeof(real));
    //as a first approximation
    for(int i = 0;i < n_trainers;i++)
      log_priors[i] = -log((real)n_trainers);
  }

  n_outputs = class_format->getOutputSize();
  addToList(&outputs, n_outputs, (real*) xalloc(n_outputs * sizeof(real)));
}
 
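// Free only the buffers allocated by this machine; the trainers, the class
// format and any externally supplied log priors are owned by the caller.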
BayesClassifierMachine::~BayesClassifierMachine()
{
  free(log_posteriors);
  if(allocated_log_priors)
    free(log_priors);
  free(outputs->ptr);
  freeList(&outputs);
  freeList(&list_log_posteriors);
}

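// Compute, for each class, an unnormalized log posterior by running the class
// machine on the inputs and adding the class log prior to its log likelihood,
// then map the one-hot vector of scores to the class format.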
void BayesClassifierMachine::forward(List* inputs)
{

  for(int trainer = 0;trainer < n_trainers;trainer++) {
    trainers[trainer]->machine->forward(inputs);
    
    real* trainer_output = (real*) trainers[trainer]->machine->outputs->ptr;
    // *trainer_output contains the negative log probability, so the class
    // log prior minus it is the unnormalized log posterior
    log_posteriors[trainer] = log_priors[trainer] - *trainer_output;
  }
  
  //transform the output from one_hot representation to class_format
  class_format->fromOneHot(outputs,list_log_posteriors);
}

void BayesClassifierMachine::reset()
{
/* Probably not needed: each machine is already reset by its trainer's
   train method.
  for(int i = 0;i < n_trainers;i++)
    trainers[i]->machine->reset();
*/
}

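// Loading and saving simply delegate to each trainer in turn.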
void BayesClassifierMachine::loadFILE(FILE* file)
{
  for(int i = 0;i < n_trainers;i++)
    trainers[i]->loadFILE(file);
}

void BayesClassifierMachine::saveFILE(FILE* file)
{
  for(int i = 0;i < n_trainers;i++)
    trainers[i]->saveFILE(file);
}


}