File: LaplacianLinearKernelRegression.cpp

// Copyright (C) 2018 EDF
// All Rights Reserved
// This code is published under the GNU Lesser General Public License (GNU LGPL)
#include <Eigen/SVD>
#include <Eigen/Cholesky>
#include "StOpt/regression/LaplacianLinearKernelRegression.h"
#include "StOpt/regression/nDDominanceKernel.h"

using namespace std ;
using namespace Eigen ;

namespace StOpt
{

LaplacianLinearKernelRegression::LaplacianLinearKernelRegression(const bool &p_bZeroDate,
        const ArrayXXd  &p_particles,
        const ArrayXd   &p_h):
    BaseRegression(p_bZeroDate, p_particles, false), m_h(p_h), m_tree(p_particles)
{
}

LaplacianLinearKernelRegression::LaplacianLinearKernelRegression(const ArrayXd   &p_h):
    BaseRegression(false), m_h(p_h)
{
}

LaplacianLinearKernelRegression::LaplacianLinearKernelRegression(const bool &p_bZeroDate,
        const ArrayXXd  &p_particles):
    BaseRegression(p_bZeroDate, p_particles, false), m_tree(p_particles)
{
}


void LaplacianLinearKernelRegression::updateSimulations(const bool &p_bZeroDate, const ArrayXXd &p_particles)
{
    BaseRegression::updateSimulationsBase(p_bZeroDate, p_particles);
    m_tree = KDTree(p_particles);

}

ArrayXXd LaplacianLinearKernelRegression::regressFunction(const ArrayXXd &p_fToRegress) const
{
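    // For every particle x_is, regress each function of p_fToRegress by a local affine
    // fit with Laplacian kernel weights
    //      K_h(x, y) = exp( - sum_d |x_d - y_d| / h_d ).
    // On each of the 2^d orthants { y : sign(x_d - y_d) fixed for every d } the kernel
    // factorizes into a product of a function of x and a function of y, so the
    // kernel-weighted moments needed below can be accumulated by the nD dominance-kernel
    // sweep instead of an O(N^2) pairwise evaluation.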
    // dimension of the state (number of rows of the particle array)
    int nD = m_particles.rows();
    // number of distinct entries of the symmetric (nD + 1) x (nD + 1) moment matrix
    int nbFuncReg = (nD + 1) * (nD + 2) / 2;
    // number of second-member terms: (nD + 1) per function to regress
    int nbFuncSecMem = (nD + 1) * p_fToRegress.rows();
    // creation of the 2^d terms
    int nbSum = pow(2, nD);
    vector< shared_ptr<ArrayXXd> > vecToAdd(nbSum);
    // calculate exp values
    Eigen::ArrayXi iCoord(nD) ;
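    // each bit of i selects the sign used in one dimension: bit id equal to 0 gives
    // iCoord(id) = +1, bit id equal to 1 gives iCoord(id) = -1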
    for (int i = 0; i < nbSum; ++i)
    {
        int ires = i;
        for (int id = nD - 1 ; id >= 0  ; --id)
        {
            unsigned int idec = (ires >> id) ;
            iCoord(id) = -(2 * idec - 1);
            ires -= (idec << id);
        }
        vecToAdd[i] = make_shared<ArrayXXd>(nbFuncReg + nbFuncSecMem, m_particles.cols());
        for (int is = 0; is < m_particles.cols(); ++is)
        {
            double ssum = 0;
            for (int id = 0; id < nD; ++id)
                ssum += iCoord(id) * m_particles(id, is) / m_h(id);
            double expSum =  exp(ssum);
            int iloc = 0;
            // exp-weighted monomials, stored in lower triangular order (constant, linear, quadratic)
            (*vecToAdd[i])(iloc++, is) = expSum;
            for (int id = 0; id < nD; ++id)
            {
                (*vecToAdd[i])(iloc++, is) = expSum * m_particles(id, is);
                for (int idd = 0; idd <= id; ++idd)
                    (*vecToAdd[i])(iloc++, is) = expSum * m_particles(id, is) * m_particles(idd, is)  ;
            }
            for (int ifunc = 0; ifunc < p_fToRegress.rows(); ++ifunc)
            {
                (*vecToAdd[i])(iloc++, is) =  expSum * p_fToRegress(ifunc, is);
                for (int id = 0; id < nD ; ++id)
                    (*vecToAdd[i])(iloc++, is) =  expSum * p_fToRegress(ifunc, is) * m_particles(id, is) ;
            }
        }
    }

    vector< shared_ptr<ArrayXXd> > fDomin(nbSum);


    // fast kernel summation
    nDDominanceKernel(m_particles, vecToAdd, fDomin);
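    // fDomin[i] then holds, for every particle, the cumulated sums over the particles
    // dominated in the orientation encoded by i of the exp-weighted quantities stored in
    // vecToAdd; the complementary exp factor is applied during the recombination below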


    // recombination of the 2^d dominance sums for each matrix and second-member entry
    ArrayXd forMatrix(nbFuncReg);
    ArrayXd secMem(nbFuncSecMem);
    MatrixXd  matA(1 + nD, 1 + nD);
    VectorXd  vecB(1 + nD);
    ArrayXXd ret(p_fToRegress.rows(), p_fToRegress.cols());
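    // for each particle: rebuild the (nD + 1) x (nD + 1) moment matrix and the second
    // members from the 2^d dominance sums (the factor (*vecToAdd[nbSum - 1 - iSum])(0, is)
    // carries the opposite signs and completes the kernel), solve the normal equations by
    // Cholesky factorization and evaluate the local affine fit at the particle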
    for (int is = 0; is < m_particles.cols(); ++is)
    {
        // accumulate the matrix coefficients
        int iPosLoc = 0;
        forMatrix(iPosLoc) = 1.;
        for (int iSum = 0; iSum < nbSum; ++iSum)
        {
            forMatrix(iPosLoc) += (*fDomin[iSum])(0, is) * (*vecToAdd[nbSum - 1 - iSum])(0, is);
        }
        iPosLoc += 1;
        for (int id = 0; id < nD; ++id)
        {
            forMatrix(iPosLoc) = m_particles(id, is);
            for (int iSum = 0; iSum < nbSum; ++iSum)
                forMatrix(iPosLoc) += (*fDomin[iSum])(iPosLoc, is) * (*vecToAdd[nbSum - 1 - iSum])(0, is);
            iPosLoc += 1;
            for (int idd = 0; idd <= id; ++idd)
            {
                forMatrix(iPosLoc) = m_particles(id, is) * m_particles(idd, is)  ;
                for (int iSum = 0; iSum < nbSum; ++iSum)
                    forMatrix(iPosLoc) += (*fDomin[iSum])(iPosLoc, is) * (*vecToAdd[nbSum - 1 - iSum])(0, is);
                iPosLoc += 1;
            }
        }
        // assemble the second member (right-hand side)
        int iSecMem = 0 ;
        for (int ifunc = 0 ; ifunc < p_fToRegress.rows(); ++ifunc)
        {
            secMem(iSecMem)  = p_fToRegress(ifunc, is);
            for (int iSum = 0; iSum < nbSum; ++iSum)
            {
                secMem(iSecMem) += (*fDomin[iSum])(nbFuncReg + iSecMem, is) * (*vecToAdd[nbSum - 1 - iSum])(0, is);
            }
            iSecMem += 1;
            for (int id = 0; id < nD; ++id)
            {
                secMem(iSecMem)  = p_fToRegress(ifunc, is) * m_particles(id, is) ;
                for (int iSum = 0; iSum < nbSum; ++iSum)
                {
                    secMem(iSecMem) += (*fDomin[iSum])(nbFuncReg + iSecMem, is) * (*vecToAdd[nbSum - 1 - iSum])(0, is);
                }
                iSecMem += 1;
            }
        }
        // fill the symmetric regression matrix from its lower triangular coefficients
        int iloc = 0;
        for (int id = 0; id <= nD; ++id)
            for (int idd = 0; idd <= id; ++idd)
                matA(id, idd) = forMatrix(iloc++);
        for (int id = 0; id <= nD; ++id)
            for (int idd = id + 1; idd <= nD; ++idd)
                matA(id, idd) =  matA(idd, id);
        // index into the second members
        int iloc1 = 0;
        // Cholesky factorization used to solve the regression systems
        LLT<MatrixXd>  lltA(matA);
        for (int ifunc = 0 ; ifunc < p_fToRegress.rows(); ++ifunc)
        {
            for (int id = 0; id <= nD; ++id)
                vecB(id) = secMem(iloc1++);
            VectorXd coeff = lltA.solve(vecB);
            ret(ifunc, is) = coeff(0);
            for (int id  = 0; id < nD; ++id)
                ret(ifunc, is)  += coeff(id + 1) * m_particles(id, is);
        }
    }
    return ret;
}



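// For this kernel regression there is no global functional basis: the coefficients
// returned below are simply the regressed values at the particles (one per simulation),
// or a single average when the regression date is zero.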
ArrayXd  LaplacianLinearKernelRegression::getCoordBasisFunction(const ArrayXd &p_fToRegress) const
{
    if (!BaseRegression::m_bZeroDate)
    {
        const ArrayXXd  fToRegress = Map<const ArrayXXd>(p_fToRegress.data(), 1, p_fToRegress.size()) ;
        ArrayXXd  regressed = regressFunction(fToRegress);
        ArrayXd toReturn = Map<ArrayXd>(regressed.data(), regressed.size());
        return toReturn;
    }
    else
    {
        ArrayXd retAverage(1);
        retAverage(0) = p_fToRegress.mean();
        return retAverage;
    }
}

ArrayXXd  LaplacianLinearKernelRegression::getCoordBasisFunctionMultiple(const ArrayXXd &p_fToRegress) const
{
    if (!BaseRegression::m_bZeroDate)
    {
        return regressFunction(p_fToRegress);
    }
    else
    {
        ArrayXXd retAverage(p_fToRegress.rows(), 1);
        for (int nsm = 0; nsm <  p_fToRegress.rows(); ++nsm)
            retAverage.row(nsm).setConstant(p_fToRegress.row(nsm).mean());
        return retAverage;
    }
}

ArrayXd  LaplacianLinearKernelRegression::getAllSimulations(const ArrayXd &p_fToRegress) const
{
    if (!BaseRegression::m_bZeroDate)
    {
        const ArrayXXd  fToRegress = Map<const ArrayXXd>(p_fToRegress.data(), 1, p_fToRegress.size()) ;
        ArrayXXd  regressed = regressFunction(fToRegress);
        ArrayXd toReturn = Map<ArrayXd>(regressed.data(), regressed.size());
        return toReturn;
    }
    else
    {
        return ArrayXd::Constant(p_fToRegress.size(), p_fToRegress.mean());
    }
}

ArrayXXd  LaplacianLinearKernelRegression::getAllSimulationsMultiple(const ArrayXXd &p_fToRegress) const
{
    if (!BaseRegression::m_bZeroDate)
    {
        return regressFunction(p_fToRegress);
    }
    else
    {
        ArrayXXd ret(p_fToRegress.rows(), p_fToRegress.cols());
        for (int ism = 0; ism < p_fToRegress.rows(); ++ism)
            ret.row(ism).setConstant(p_fToRegress.row(ism).mean());
        return ret;
    }
}


ArrayXd LaplacianLinearKernelRegression::reconstruction(const ArrayXd   &p_basisCoefficients) const
{
    if (!BaseRegression::m_bZeroDate)
        return p_basisCoefficients; // the basis coefficients are the regressed values themselves
    else
    {
        return ArrayXd::Constant(m_particles.cols(), p_basisCoefficients(0));
    }
}


ArrayXXd LaplacianLinearKernelRegression::reconstructionMultiple(const ArrayXXd   &p_basisCoefficients) const
{
    if (!BaseRegression::m_bZeroDate)
    {
        return p_basisCoefficients; // the basis coefficients are the regressed values themselves
    }
    else
    {
        ArrayXXd retValue(p_basisCoefficients.rows(), m_particles.cols());
        for (int nsm = 0; nsm < p_basisCoefficients.rows(); ++nsm)
            retValue.row(nsm).setConstant(p_basisCoefficients(nsm, 0));
        return retValue ;
    }
}

double  LaplacianLinearKernelRegression::reconstructionASim(const int &p_isim, const ArrayXd   &p_basisCoefficients) const
{
    double ret ;
    if (!BaseRegression::m_bZeroDate)
    {
        ret = p_basisCoefficients(p_isim);
    }
    else
    {
        ret = p_basisCoefficients(0);
    }
    return ret ;
}

double LaplacianLinearKernelRegression::getValue(const ArrayXd   &p_coordinates,
        const ArrayXd   &p_coordBasisFunction)  const
{
    double ret  ;
    if (!BaseRegression::m_bZeroDate)
    {
        // use the KD-tree to find the particle nearest to the given point and return its
        // regressed value
        return p_coordBasisFunction(m_tree.nearestIndex(p_coordinates));
    }
    else
        ret =  p_coordBasisFunction(0);
    return ret ;
}


double LaplacianLinearKernelRegression::getAValue(const ArrayXd &p_coordinates,  const ArrayXd &p_ptOfStock,
        const vector< shared_ptr<InterpolatorSpectral> > &p_interpFuncBasis) const
{
    if (!BaseRegression::m_bZeroDate)
    {
        return p_interpFuncBasis[m_tree.nearestIndex(p_coordinates)]->apply(p_ptOfStock);
    }
    else
    {
        return p_interpFuncBasis[0]->apply(p_ptOfStock);
    }
}
}
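
// Illustrative usage sketch, assuming a two-dimensional state and arbitrary bandwidth
// values (the array contents below are assumptions made for the example):
//
//     const int nbSimul = 10000;               // number of Monte Carlo simulations
//     Eigen::ArrayXXd particles(2, nbSimul);   // 2 state dimensions, nbSimul particles
//     Eigen::ArrayXd  h(2);                    // one bandwidth per dimension
//     h << 0.5, 0.5;
//     Eigen::ArrayXd  payoff(nbSimul);         // values to regress on the particles
//     StOpt::LaplacianLinearKernelRegression regressor(false, particles, h);
//     Eigen::ArrayXd  conditionalExpectation = regressor.getAllSimulations(payoff);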