#! /usr/bin/env python
import openturns as ot
from math import pi
ot.TESTPREAMBLE()
ot.PlatformInfo.SetNumericalPrecision(3)
# Problem parameters
dimension = 3
a = 7.0
b = 0.1
# Create the Ishigami function
inputVariables = ["xi1", "xi2", "xi3"]
formula = [
    "sin(xi1) + (" + str(a) + ") * (sin(xi2)) ^ 2 + (" + str(b) + ") * xi3^4 * sin(xi1)"
]
model = ot.SymbolicFunction(inputVariables, formula)
# Create the input distribution
distribution = ot.JointDistribution([ot.Uniform(-pi, pi)] * dimension)
# Fix sampling size
samplingSize = 100
# Get input & output sample
lhs = ot.LHSExperiment(distribution, samplingSize)
inputSample = lhs.generate()
outputSample = model(inputSample)
# Validation of results on independent samples
validationSize = 10
inputValidation = distribution.getSample(validationSize)
outputValidation = model(inputValidation)
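# Illustrative check (not part of the original test output): the validation points
# are drawn directly from the input distribution, independently of the LHS design
# used to build the training sample.
print("training size = ", inputSample.getSize(), " validation size = ", inputValidation.getSize())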
# 1) SPC (sparse polynomial chaos) algorithm
# Create the orthogonal basis
polynomialCollection = [ot.LegendreFactory()] * dimension
enumerateFunction = ot.LinearEnumerateFunction(dimension)
productBasis = ot.OrthogonalProductPolynomialFactory(
    polynomialCollection, enumerateFunction
)
# Create the adaptive strategy
degree = 8
basisSize = enumerateFunction.getStrataCumulatedCardinal(degree)
adaptiveStrategy = ot.FixedStrategy(productBasis, basisSize)
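# Illustrative addition: with a linear enumeration in dimension 3, keeping all
# strata up to total degree 8 gives binomial(8 + 3, 3) = 165 candidate
# polynomials; the LARS/KFold selection below retains only a sparse subset.
print("candidate basis size = ", basisSize)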
# Select the fitting algorithm
fittingAlgorithm = ot.KFold()
leastSquaresFactory = ot.LeastSquaresMetaModelSelectionFactory(
    ot.LARS(), fittingAlgorithm
)
# Projection strategy
projectionStrategy = ot.LeastSquaresStrategy(
    inputSample, outputSample, leastSquaresFactory
)
algo = ot.FunctionalChaosAlgorithm(
    inputSample, outputSample, distribution, adaptiveStrategy, projectionStrategy
)
# Reinitialize the RandomGenerator to see the effect of the sampling
# method only
ot.RandomGenerator.SetSeed(0)
algo.run()
# Get the results
result = algo.getResult()
# MetaModelValidation - SPC
metamodel = result.getMetaModel()
metamodelPredictions = metamodel(inputValidation)
metaModelValidationSPC = ot.MetaModelValidation(outputValidation, metamodelPredictions)
print("")
print("Sparse chaos scoring")
print("R2 = ", metaModelValidationSPC.computeR2Score())
print("Residual sample = ", repr(metaModelValidationSPC.getResidualSample()))
# 2) Kriging algorithm
# KrigingAlgorithm
basis = ot.QuadraticBasisFactory(dimension).build()
# Covariance model parameters were estimated separately, so optimization is disabled below
covarianceModel = ot.GeneralizedExponential([3.52, 2.15, 2.99], [11.41], 2.0)
algo2 = ot.KrigingAlgorithm(inputSample, outputSample, covarianceModel, basis)
algo2.setOptimizeParameters(False)
algo2.run()
result2 = algo2.getResult()
# MetaModelValidation - KG
metamodel = result2.getMetaModel()
metamodelPredictions = metamodel(inputValidation)
metaModelValidationKG = ot.MetaModelValidation(outputValidation, metamodelPredictions)
print("")
print("Kriging scoring")
print("R2 = ", metaModelValidationKG.computeR2Score())
ot.PlatformInfo.SetNumericalPrecision(2)
print("Residual sample = ", repr(metaModelValidationKG.getResidualSample()))
# 3) 2-d case: validation of a model with two outputs
dist = ot.Uniform(-pi / 2, pi / 2)
model = ot.SymbolicFunction(["x"], ["sin(x)", "cos(x)"])
metaModel = ot.SymbolicFunction(["x"], ["x - x^3/6.0 + x^5/120.0", "cos(1.2*x)"])
x = dist.getSample(1000)
y = model(x)
metamodelPredictions = metaModel(x)
val = ot.MetaModelValidation(y, metamodelPredictions)
r2 = val.computeR2Score()
residual = val.getResidualSample()
residual_dist_smooth = val.getResidualDistribution()
residual_dist = val.getResidualDistribution(False)
graph = val.drawValidation()
print(r2)
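# Illustrative addition: the validation quantities above are computed per output
# component, so each marginal of the residual sample can be inspected separately.
for i in range(y.getDimension()):
    print("output", i, "residual mean =", residual.getMarginal(i).computeMean())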