File: HMMExample.cpp

/* ----------------------------------------------------------------------------

 * GTSAM Copyright 2010-2020, Georgia Tech Research Corporation,
 * Atlanta, Georgia 30332-0415
 * All Rights Reserved
 * Authors: Frank Dellaert, et al. (see THANKS for the full author list)

 * See LICENSE for the license information

 * -------------------------------------------------------------------------- */

/**
 * @file  HMMExample.cpp
 * @brief   Hidden Markov Model example, discrete.
 * @author  Frank Dellaert
 * @date  July 12, 2020
 */

#include <gtsam/discrete/DiscreteFactorGraph.h>
#include <gtsam/discrete/DiscreteMarginals.h>
#include <gtsam/inference/BayesNet.h>

#include <iomanip>
#include <sstream>

using namespace std;
using namespace gtsam;

int main(int argc, char **argv) {
  const int nrNodes = 4;
  const size_t nrStates = 3;

  // Define variables as well as ordering
  Ordering ordering;
  vector<DiscreteKey> keys;
  for (int k = 0; k < nrNodes; k++) {
    DiscreteKey key_i(k, nrStates);
    keys.push_back(key_i);
    ordering.emplace_back(k);
  }

  // Create HMM as a DiscreteBayesNet
  DiscreteBayesNet hmm;

  // Define backbone
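  // Each space-separated block below gives the relative weights of the three
  // states of X_k for one value of X_{k-1}; they form the (unnormalized) rows
  // of the transition matrix P(X_k | X_{k-1}).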
  const string transition = "8/1/1 1/8/1 1/1/8";
  for (int k = 1; k < nrNodes; k++) {
    hmm.add(keys[k] | keys[k - 1] = transition);
  }

  // Add some measurements, not needed for all time steps!
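  // The '%' syntax creates a parent-less conditional (a prior) on a single
  // variable; its unnormalized weights here play the role of a measurement.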
  hmm.add(keys[0] % "7/2/1");
  hmm.add(keys[1] % "1/9/0");
  hmm.add(keys.back() % "5/4/1");

  // print
  hmm.print("HMM");

  // Convert to factor graph
  DiscreteFactorGraph factorGraph(hmm);

  // Do max-product
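  // optimize() runs max-product on the factor graph and returns the most
  // probable explanation (MPE), i.e. the single most likely joint assignment.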
  auto mpe = factorGraph.optimize();
  GTSAM_PRINT(mpe);

  // Create solver and eliminate
  // This will create a DAG ordered with arrow of time reversed
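  // Eliminating in the order X0, X1, ... yields conditionals P(X_k | X_{k+1}),
  // so the resulting chain points from the last time step back to the first.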
  DiscreteBayesNet::shared_ptr chordal =
      factorGraph.eliminateSequential(ordering);
  chordal->print("Eliminated");

  // We can also sample from it
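  // sample() draws the root variable first and then each remaining variable
  // conditioned on its already-sampled parents (ancestral sampling).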
  cout << "\n10 samples:" << endl;
  for (size_t k = 0; k < 10; k++) {
    auto sample = chordal->sample();
    GTSAM_PRINT(sample);
  }

  // Or compute the marginals. This re-eliminates the FG into a Bayes tree
  cout << "\nComputing Node Marginals .." << endl;
  DiscreteMarginals marginals(factorGraph);
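  // marginalProbabilities() returns a normalized probability vector with one
  // entry per state, computed by sum-product rather than max-product.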
  for (int k = 0; k < nrNodes; k++) {
    Vector margProbs = marginals.marginalProbabilities(keys[k]);
    stringstream ss;
    ss << "marginal " << k;
    print(margProbs, ss.str());
  }

  // TODO(frank): put in the glue to have DiscreteMarginals produce *arbitrary*
  // joints efficiently, by the Bayes tree shortcut magic. All the code is there
  // but it's not yet connected.

  return 0;
}