File: t_RandomWalkMetropolisHastings_std.py

package: openturns 1.7-3
#! /usr/bin/env python

from __future__ import print_function
from openturns import *

TESTPREAMBLE()
RandomGenerator.SetSeed(0)

try:

    # This analytical example is taken from "Bayesian Modeling Using WinBUGS" - Ioannis Ntzoufras
    # 1.5.3: Inference for the mean of normal data with known variance

    # Variable of interest: Y ~ N(mu, sigma)
    # Prior for mu: Normal(mu0, sigma0), sigma is known
    # Posterior for mu: E(mu|y) = w*y_mean + (1-w)*mu0, and Var(mu|y) = w*(sigmay^2)/n
    # => weighted average of the prior mean and the sample mean,
    # with w = n*sigma0^2 / (n*sigma0^2 + sigma^2)
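
    # Illustrative sketch (not part of the original test): the conjugate
    # posterior stated above, written out as a plain Python helper. The name
    # `normal_posterior` is hypothetical and the helper is not used by the
    # test below; it only makes the formula executable.
    def normal_posterior(y_mean, n, mu0, sigma0, sigma):
        # weight given to the data versus the prior
        w = n * sigma0 ** 2. / (n * sigma0 ** 2. + sigma ** 2.)
        post_mean = w * y_mean + (1. - w) * mu0
        post_std = (w * sigma ** 2. / n) ** 0.5
        return post_mean, post_std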

    # Log::Show(Log::ALL)
    # observations
    size = 10
    realDist = Normal(31., 1.2)

    data = realDist.getSample(size)

    # calibration parameters
    calibrationColl = [CalibrationStrategy()] * 2

    # proposal distribution
    proposalColl = DistributionCollection()
    mean_proposal = Uniform(-2.0, 2.0)
    std_proposal = Uniform(-2.0, 2.0)
    proposalColl.add(mean_proposal)
    proposalColl.add(std_proposal)

    # prior distribution
    mu0 = 25.

    sigma0s = [0.1, 1.0]
    # sigma0s.append(2.0)

    # play with the variance of the prior:
    # if the prior variance is low (the information concerning the mu parameter is strong),
    # then the posterior mean will be close to the prior mean;
    # if it is large, then the posterior distribution is equivalent to the
    # distribution of the sample mean
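    #
    # Worked instance of the weight formula (illustrative values, not part of
    # the original test): with n = 10 observations and the known standard
    # deviation sigma = 2 fixed by the Dirac prior below,
    #   sigma0 = 0.1  ->  w = 10*0.01 / (10*0.01 + 4) ~ 0.024  (posterior mean ~ mu0)
    #   sigma0 = 1.0  ->  w = 10*1.0  / (10*1.0  + 4) ~ 0.714  (posterior mean pulled toward y_mean)
    # The loop below takes sigmay from ConditionalDistribution(Normal(), prior)
    # instead of the fixed sigma, so the printed w may differ slightly from
    # these numbers.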
    for sigma0 in sigma0s:

        mean_prior = Normal(mu0, sigma0)
        std_prior = Dirac(2.0)  # standard dev is known
        prior = ComposedDistribution([mean_prior, std_prior])

        # choose the initial state within the prior
        initialState = prior.getRealization()

        # conditional distribution
        conditional = Normal()

        # create a metropolis-hastings sampler
        sampler = RandomWalkMetropolisHastings(
            prior, conditional, data, initialState, proposalColl)
        sampler.setVerbose(True)
        sampler.setThinning(2)
        sampler.setBurnIn(500)
        sampler.setCalibrationStrategyPerComponent(calibrationColl)
        realization = sampler.getRealization()

        sigmay = ConditionalDistribution(
            Normal(), prior).getStandardDeviation()[0]
        w = size * sigma0 ** 2. / (size * sigma0 ** 2. + sigmay ** 2.0)

        print("prior variance= %.12g" % (sigma0 ** 2.))
        print("  realization=", realization)

        print("  w= %.12g" % w)

        # the posterior for mu is analytical
        print("  expected posterior ~N( %.12g" % (w * data.computeMean()
                                                  [0] + (1. - w) * mu0), ",  %.12g" % ((w * sigmay ** 2.0 / size) ** 0.5), ")")

        # try to generate a sample
        sample = sampler.getSample(50)

        print("  obtained posterior ~N( %.12g" % sample.computeMean()[
              0], ",  %.12g" % sample.computeStandardDeviationPerComponent()[0], ")")

        print("  acceptance rate=", sampler.getAcceptanceRate())


except:
    import sys
    import traceback
    traceback.print_exc(file=sys.stdout)