File: MonteCarlo.inp

/*
In this script we give an example of how HIP can be used
for computationally heavy jobs such as a Monte Carlo simulation.
We investigate the properties of the ML estimate of \beta_2 for
varying degrees of over-identification.
*/
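
/*
Sketch of the data generating process implemented below:
    reduced form:   y_i   = X*Pi[,i] + u_i,   i = 1, ..., p
    latent index:   ystar = X1*beta1 + Y*beta2 + epsilon
    observed:       y     = 1(ystar > 0)
where (epsilon, u')' is jointly normal with covariance Omega,
built from Sigma (covariance of u) and lambda (covariance between
epsilon and u, which is what makes Y endogenous).
*/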

set echo off 
set messages off
include HIP.gfn

set seed 12345
nulldata 256

# ----- set up the experiment ----------------------

scalar p = 1
scalar k1 = 1
scalar k2 = 4
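# p  = number of endogenous regressors
# k1 = number of included exogenous regressors (the constant counts)
# k2 = number of excluded exogenous variables (instruments)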

scalar k = k1 + k2
matrix Sigma = 0.5 .* (I(p) + ones(p,p)./p)
matrix Pi    = 0.1 .* mnormal(k,p)
matrix beta1 = 0.25 .* ones(k1,1)
matrix beta2 = ones(p,1)
matrix lambda = 0.1 .* ones(p,1)
matrix Omega = ((1 ~ lambda') | (lambda ~ Sigma))
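# Omega is the (p+1) x (p+1) joint covariance matrix of (epsilon, u')':
# unit variance for epsilon, Sigma for u, and lambda as the covariance
# between epsilon and u, which induces the endogeneity of Y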

# build exogenous vars

list X1 = const
loop i=1..(k1-1) --quiet
    series x_$i = normal()
    list X1 += x_$i
endloop

list X2 = null
loop i=1..k2 --quiet
    series z_$i = normal()
    list X2 += z_$i
endloop

list X = X1 X2
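# X collects all the exogenous variables; it serves as the regressor
# set in the reduced form for the endogenous variables below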

# now run the Monte Carlo experiment proper
scalar Repli = 400

string ToSave = ""
loop i=1..k2 --quiet
    ToSave += "b$i "
    scalar b$i = NA
endloop
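# the scalars named in ToSave (b1, b2, ...) will hold, in each
# replication, the estimate of beta_2 obtained with the first i
# instruments; in the progressive loop below, "store" records their
# values once per replication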

matrix K = cholesky(Omega)'
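# K is the transposed Cholesky factor of Omega: post-multiplying iid
# standard normal draws by K yields disturbances with covariance Omega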
iter = 0

printf "Be patient; this might take a while.\n"
flush

loop h = 1 .. Repli --quiet --progressive
    # build disturbances

    if (h%10 == 0)
	printf "experiment %d of %d\n", h, Repli
	flush
    endif
    
    matrix E = mnormal($nobs, p+1) * K
    series epsilon = E[,1]
    loop i=1..p --quiet
	series u_$i = E[,i+1]
    endloop

    # build observables

    list Y = null

    loop i=1..p --quiet
	series y_$i = lincomb(X, Pi[,i]) + u_$i
	list Y += y_$i
    endloop

    series ystar = lincomb(X1, beta1) + lincomb(Y, beta2) + epsilon
    series y = (ystar > 0)
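    # ystar is the latent index; y applies the probit observation rule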

    list Inst = null
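    # the inner loop below enlarges the instrument set one variable at a
    # time: at pass i the model uses z_1, ..., z_i, that is, (i - p)
    # degrees of over-identification, and b$i records the corresponding
    # estimate of beta_2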

    loop i=1..k2 --quiet
	list Inst += z_$i

	# estimate only; we don't need the printout
	# so we _don't_ call HIP_printout()

	bundle MyModel = HIP_setup(y, X1, Y, Inst)
	scalar err = HIP_estimate(&MyModel)
	if err==0
	    matrix param = MyModel["theta"]
	    scalar b$i = param[1+k1]
	else
	    scalar b$i = NA
	endif
    endloop

    store foo.gdt @ToSave
endloop
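# foo.gdt now contains one observation per replication for the stored
# scalars; the summary statistics describe the Monte Carlo distribution
# of the beta_2 estimates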

open foo.gdt
summary