File: t_Gibbs_mixture.py

#! /usr/bin/env python

# We sample from the posterior distribution of the parameters of a mixture model.
# This example is drawn from Example 9.2 of *Monte Carlo Statistical Methods*
# by Robert and Casella (2004).

import openturns as ot
import openturns.testing as ott
import numpy as np

ot.TESTPREAMBLE()

# Fix the seed: the reference values asserted at the end depend on it.
ot.RandomGenerator.SetSeed(100)

# Sample data from a mixture of two unit-variance normal distributions with
# means :math:`\mu_0 = 0` and :math:`\mu_1 = 2.7`; the second component has
# weight :math:`p = 0.3`.
N = 500
p = 0.3

mu0 = 0.0
mu1 = 2.7
nor0 = ot.Normal(mu0, 1.0)
nor1 = ot.Normal(mu1, 1.0)
true_distribution = ot.Mixture([nor0, nor1], [1 - p, p])
observations = np.array(true_distribution.getSample(N))


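# The Gibbs state is the vector [mu0, mu1, z_1, ..., z_N], where z_i is the
# latent allocation of observation i (0 for the first mixture component, 1 for
# the second). Each function below maps the full state to the parameter vector
# of one full conditional distribution.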
def nor0post(pt):
    # Full conditional of mu0 given the allocations z: conjugate normal update
    # (consistent with a normal prior of mean 0 and precision 0.1).
    z = np.array(pt)[2:]
    x0 = observations[z == 0]  # observations currently assigned to component 0
    mu0 = x0.sum() / (0.1 + len(x0))
    sigma0 = 1.0 / (0.1 + len(x0))
    return [mu0, sigma0]


def nor1post(pt):
    # Full conditional of mu1 given the allocations z: same conjugate update.
    z = np.array(pt)[2:]
    x1 = observations[z == 1]  # observations currently assigned to component 1
    mu1 = x1.sum() / (0.1 + len(x1))
    sigma1 = 1.0 / (0.1 + len(x1))
    return [mu1, sigma1]


def zpost(pt):
    # Probability that each observation belongs to component 1, given mu0 and mu1.
    mu0 = pt[0]
    mu1 = pt[1]
    # Unnormalized component densities; the 1/sqrt(2*pi) factors cancel in the ratio.
    term1 = p * np.exp(-((observations - mu1) ** 2) / 2)
    term0 = (1.0 - p) * np.exp(-((observations - mu0) ** 2) / 2)
    res = term1 / (term1 + term0)  # P(z_i = 1 | mu0, mu1, x_i)
    # output must be a 1d list or array in order to create a PythonFunction
    return res.reshape(-1)


# Expose each conditional as a PythonFunction over the full (2 + N)-dimensional state.
nor0posterior = ot.PythonFunction(2 + N, 2, nor0post)
nor1posterior = ot.PythonFunction(2 + N, 2, nor1post)
zposterior = ot.PythonFunction(2 + N, N, zpost)

# We can now construct the Gibbs algorithm: one Metropolis-Hastings sampler per
# block of the state, each drawing directly from its full conditional.
initialState = [0.0] * (N + 2)
# Updates mu0 (state component 0) from its conditional normal distribution.
sampler0 = ot.RandomVectorMetropolisHastings(
    ot.RandomVector(ot.Normal()), initialState, [0], nor0posterior
)
# Updates mu1 (state component 1) likewise.
sampler1 = ot.RandomVectorMetropolisHastings(
    ot.RandomVector(ot.Normal()), initialState, [1], nor1posterior
)
# Updates the N allocations (state components 2 .. N+1) as independent Bernoullis.
big_bernoulli = ot.JointDistribution([ot.Bernoulli()] * N)
sampler2 = ot.RandomVectorMetropolisHastings(
    ot.RandomVector(big_bernoulli), initialState, range(2, N + 2), zposterior
)
gibbs = ot.Gibbs([sampler0, sampler1, sampler2])
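# Assumption: getSample cycles through sampler0, sampler1 and sampler2 in the
# order given (deterministic updating), producing one state per full sweep.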

# Run the Gibbs algorithm for 1000 iterations
s = gibbs.getSample(1000)

# Extract the relevant marginals: the first (:math:`\mu_0`) and the second (:math:`\mu_1`).
posterior_sample = s[:, 0:2]
mean = posterior_sample.computeMean()
stddev = posterior_sample.computeStandardDeviation()
print(mean, stddev)
# Reference values obtained with seed 100
ott.assert_almost_equal(mean, [-0.078428, 2.80587])
ott.assert_almost_equal(stddev, [0.0463082, 0.108863])
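
# Loose sanity check with illustrative tolerances: the posterior means should
# land near the true values mu0 = 0 and mu1 = 2.7 used to generate the data.
assert abs(mean[0] - mu0) < 0.2
assert abs(mean[1] - mu1) < 0.2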