#! /usr/bin/env python
import openturns as ot
ot.TESTPREAMBLE()
# Set Numerical precision to 4
ot.PlatformInfo.SetNumericalPrecision(4)
sampleSize = 40
inputDimension = 1
# Create the function to estimate
model = ot.SymbolicFunction(["x0"], ["x0"])
X = ot.Sample(sampleSize, inputDimension)
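# Fill X with a regular grid of sampleSize points from 3.0 to 10.8 (step 0.2)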
for i in range(sampleSize):
    X[i, 0] = 3.0 + (8.0 * i) / sampleSize
Y = model(X)
# Add a small noise to data
Y += (
    ot.GaussianProcess(ot.AbsoluteExponential([0.1], [0.2]), ot.Mesh(X))
    .getRealization()
    .getValues()
)
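# Y is now the identity trend plus one realization of a zero-mean Gaussian
# noise with AbsoluteExponential covariance (scale 0.1, amplitude 0.2)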
basis = ot.LinearBasisFactory(inputDimension).build()
# Case of a misspecified covariance model
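# A Dirac covariance model assumes independent (white-noise) residuals, whereas
# the data were generated with correlated AbsoluteExponential noise; the trend
# fit then amounts to an ordinary least-squares regression on the linear basis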
covarianceModel = ot.DiracCovarianceModel(inputDimension)
print("===================================================\n")
algo = ot.GeneralLinearModelAlgorithm(X, Y, covarianceModel, basis)
algo.run()
result = algo.getResult()
print("\ncovariance (dirac, optimized)=", result.getCovarianceModel())
print("trend (dirac, optimized)=", result.getTrendCoefficients())
print("===================================================\n")
# Now without estimating covariance parameters
basis = ot.LinearBasisFactory(inputDimension).build()
covarianceModel = ot.DiracCovarianceModel(inputDimension)
algo = ot.GeneralLinearModelAlgorithm(X, Y, covarianceModel, basis, True)
algo.setOptimizeParameters(False)
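# setOptimizeParameters(False) freezes the covariance parameters at their
# initial values, so no likelihood maximization is performed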
algo.run()
result = algo.getResult()
print("\ncovariance (dirac, not optimized)=", result.getCovarianceModel())
print("trend (dirac, not optimized)=", result.getTrendCoefficients())
print("===================================================\n")
# Case of a well specified covariance model
# Test the optimization when the amplitude is deduced analytically from
# the scale
covarianceModel = ot.AbsoluteExponential(inputDimension)
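# The AbsoluteExponential family matches the process used to generate the noise;
# by default the amplitude is deduced analytically from the scale, so only the
# scale is optimized numerically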
algo = ot.GeneralLinearModelAlgorithm(X, Y, covarianceModel, basis)
algo.run()
result = algo.getResult()
print("\ncovariance (reduced, unbiased)=", result.getCovarianceModel())
print("trend (reduced, unbiased)=", result.getTrendCoefficients())
print("===================================================\n")
ot.ResourceMap.SetAsBool("GeneralLinearModelAlgorithm-UnbiasedVariance", False)
algo = ot.GeneralLinearModelAlgorithm(X, Y, covarianceModel, basis)
algo.run()
result = algo.getResult()
print("\ncovariance (reduced, biased)=", result.getCovarianceModel())
print("trend (reduced, biased)=", result.getTrendCoefficients())
print("===================================================\n")
ot.ResourceMap.SetAsBool(
"GeneralLinearModelAlgorithm-UseAnalyticalAmplitudeEstimate", False
)
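# With the analytical amplitude estimate disabled, both the scale and the
# amplitude are free parameters of the likelihood optimization, hence the
# 2-dimensional bounds below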
algo = ot.GeneralLinearModelAlgorithm(X, Y, covarianceModel, basis)
# Define interval
bounds = ot.Interval([1e-2] * 2, [100] * 2)
algo.setOptimizationBounds(bounds)
algo.run()
result = algo.getResult()
print("\ncovariance (full optim)=", result.getCovarianceModel())
print("trend (full optim)=", result.getTrendCoefficients())
print("===================================================\n")