File: trust_region.m (Dynare 4.6.3)
function [x,check,info] = trust_region(fcn,x0,j1,j2,jacobian_flag,gstep,tolf,tolx,maxiter,debug,varargin)
% Solves systems of nonlinear equations in several variables, using a
% trust-region method.
%
% INPUTS
%    fcn:             name of (or handle to) the function to be solved
%    x0:              guess values
%    j1:              equations index for which the model is solved
%    j2:              unknown variables index
%    jacobian_flag=true: Jacobian given by the 'fcn' function
%    jacobian_flag=false: Jacobian obtained numerically
%    gstep            increment multiplier in numerical derivative
%                     computation
%    tolf             tolerance for residuals
%    tolx             tolerance for solution variation
%    maxiter          maximum number of iterations
%    debug            debug flag
%    varargin:        additional arguments passed to fcn
%
% OUTPUTS
%    x:               solution
%    check=0:         solution found
%    check=1:         the model cannot be solved
%    info:            detailed exit code
% SPECIAL REQUIREMENTS
%    none
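%
% EXAMPLE
%    An illustrative sketch: solve the 2x2 system x(1)^2+x(2)^2-2 = 0,
%    x(1)-x(2) = 0 with a numerically approximated Jacobian (the residual
%    handle, starting point and tolerances below are only assumptions chosen
%    for illustration):
%
%       myres = @(x) [x(1)^2 + x(2)^2 - 2; x(1) - x(2)];
%       [x, check, info] = trust_region(myres, [3; 1], 1:2, 1:2, false, ...
%                                       1e-2, 1e-7, 1e-7, 50, false);
%
%    With jacobian_flag set to false, only residual values are needed from
%    the handle; with jacobian_flag set to true, fcn must also return the
%    Jacobian as a second output.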

% Copyright (C) 2008-2012 VZLU Prague, a.s.
% Copyright (C) 2014-2019 Dynare Team
%
% This file is part of Dynare.
%
% Dynare is free software: you can redistribute it and/or modify
% it under the terms of the GNU General Public License as published by
% the Free Software Foundation, either version 3 of the License, or
% (at your option) any later version.
%
% Dynare is distributed in the hope that it will be useful,
% but WITHOUT ANY WARRANTY; without even the implied warranty of
% MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
% GNU General Public License for more details.
%
% You should have received a copy of the GNU General Public License
% along with Dynare.  If not, see <http://www.gnu.org/licenses/>.
%
% Initial author: Jaroslav Hajek <highegg@gmail.com>, for GNU Octave

if (ischar (fcn))
    fcn = str2func (fcn);
end

n = length(j1);

% These defaults are rather stringent; normally, the user prefers accuracy
% to performance.

macheps = eps (class (x0));

niter = 1;

x = x0;
info = 0;

% Initial evaluation of the residuals at the starting point.
fvec = fcn (x, varargin{:});
fvec = fvec(j1);
fn = norm (fvec);
recompute_jacobian = true;

% Outer loop.
while (niter < maxiter && ~info)

    % Calculate Jacobian (possibly via FD).
    if recompute_jacobian
        if jacobian_flag
            [~, fjac] = fcn (x, varargin{:});
            fjac = fjac(j1,j2);
        else
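            % Approximate the Jacobian by forward differences: perturb each
            % unknown in turn by dh(j) and form the one-sided difference
            % quotient of the selected equations, column by column.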
            dh = max(abs(x(j2)), gstep(1)*ones(n,1))*eps^(1/3);
            fjac = zeros(n, n); % preallocate the (j1,j2) block of the Jacobian

            for j = 1:n
                xdh = x ;
                xdh(j2(j)) = xdh(j2(j))+dh(j) ;
                t = fcn(xdh,varargin{:});
                fjac(:,j) = (t(j1) - fvec)./dh(j) ;
            end
        end
        recompute_jacobian = false;
    end

    % Get column norms, use them as scaling factors.
    jcn = sqrt(sum(fjac.*fjac))';
    if (niter == 1)
        dg = jcn;
        dg(dg == 0) = 1;
    else
        % Rescale adaptively.
        % FIXME: the original minpack used the following rescaling strategy:
        %   dg = max (dg, jcn);
        % but this seems ill-suited when we start from a bad guess whose Jacobian
        % columns have large norms that later decrease, because the corresponding
        % variables would remain overscaled. So instead, we only give the old
        % scaling a small momentum rather than honoring it fully.

        dg = max (0.1*dg, jcn);
    end

    if (niter == 1)
        xn = norm (dg .* x(j2));
        % FIXME: something better?
        delta = max (xn, 1);
    end

    % Get trust-region model (dogleg) minimizer.
    s = - dogleg (fjac, fvec, dg, delta);
    w = fvec + fjac * s;
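    % w is the residual predicted by the local linear (Gauss-Newton) model at
    % the trial point; its norm gives the predicted reduction used below.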

    sn = norm (dg .* s);
    if (niter == 1)
        delta = min (delta, sn);
    end

    x2 = x;
    x2(j2) = x2(j2) + s;
    fvec1 = fcn (x2, varargin{:});
    fvec1 = fvec1(j1);
    fn1 = norm (fvec1);

    if (fn1 < fn)
        % Scaled actual reduction.
        actred = 1 - (fn1/fn)^2;
    else
        actred = -1;
    end

    % Scaled predicted reduction, and ratio.
    t = norm (w);
    if (t < fn)
        prered = 1 - (t/fn)^2;
        ratio = actred / prered;
    else
        prered = 0;
        ratio = 0;
    end

    % Update delta.
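    % Shrink the radius when the linear model predicted the reduction poorly
    % (ratio < 0.1); when the prediction is accurate, reset or enlarge the
    % radius towards sqrt(2) times the scaled step length sn.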
    if (ratio < 0.1)
        delta = 0.5*delta;
        if (delta <= 1e1*macheps*xn)
            % Trust region became uselessly small.
            if (fn1 <= tolf)
                info = 1;
            else
                info = -3;
            end
            break
        end
    elseif (abs (1-ratio) <= 0.1)
        delta = 1.4142*sn;
    elseif (ratio >= 0.5)
        delta = max (delta, 1.4142*sn);
    end

    if (ratio >= 1e-4)
        % Successful iteration.
        x(j2) = x(j2) + s;
        xn = norm (dg .* x(j2));
        fvec = fvec1;
        fn = fn1;
        recompute_jacobian = true;
    end

    niter = niter + 1;


    % Test for the termination condition.
    if (fn <= tolf)
        info = 1;
    end
end
if info==1
    check = 0;
else
    check = 1;
end
end


% Solve the double dogleg trust-region least-squares problem:
% minimize norm(r*x-b) subject to the constraint norm(d.*x) <= delta,
% x being a convex combination of the Gauss-Newton and scaled gradient steps.
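%
% If the Gauss-Newton step already satisfies the constraint it is returned
% unchanged; otherwise the step is taken along the piecewise-linear path from
% the origin through the scaled-gradient (Cauchy) minimizer towards the
% Gauss-Newton point, at the point where that path meets the boundary
% norm(d.*x) = delta.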

% TODO: error checks
% TODO: handle singularity, or leave it up to mldivide?

function x = dogleg (r, b, d, delta)
% Get Gauss-Newton direction.
x = r \ b;
xn = norm (d .* x);
if (xn > delta)
    % GN is too big, get scaled gradient.
    s = (r' * b) ./ d;
    sn = norm (s);
    if (sn > 0)
        % Normalize and rescale.
        s = (s / sn) ./ d;
        % Get the line minimizer in s direction.
        tn = norm (r*s);
        snm = (sn / tn) / tn;
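        % snm is the distance (in the scaled variables) to the minimizer of
        % the linear model along the scaled-gradient direction, i.e. the
        % Cauchy point.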
        if (snm < delta)
            % Get the dogleg path minimizer.
            bn = norm (b);
            dxn = delta/xn; snmd = snm/delta;
            t = (bn/sn) * (bn/xn) * snmd;
            t = t - dxn * snmd^2 + sqrt ((t-dxn)^2 + (1-dxn^2)*(1-snmd^2));
            alpha = dxn*(1-snmd^2) / t;
        else
            alpha = 0;
        end
    else
        alpha = delta / xn;
        snm = 0;
    end
    % Form the appropriate convex combination.
    x = alpha * x + ((1-alpha) * min (snm, delta)) * s;
end
end