#! /usr/bin/env python
"""Tabulate StrongMaximumTest design parameters.

For several input dimensions, build a StandardEvent on a trivial limit
state (constant-zero function of a standard normal vector) and
cross-tabulate the StrongMaximumTest quantities:

* TABLE 1: impose (beta, importance level, accuracy level, confidence
  level) and report the resulting deltaEpsilon and point number N.
* TABLE 2: impose (beta, importance level, accuracy level, point number
  N) and report the resulting deltaEpsilon and confidence level.
"""
import openturns as ot

ot.TESTPREAMBLE()

for dim in [2, 5, 10]:
    print("dimension = ", dim)
    # We create a numerical math function
    inputVar = ["X" + str(i) for i in range(dim)]
    myFunction = ot.SymbolicFunction(inputVar, ["0"])
    # We create a normal distribution point of dimension 1
    mean = [0.0] * dim
    sigma = [1.0] * dim
    R = ot.IdentityMatrix(dim)
    myDistribution = ot.Normal(mean, sigma, R)
    # We create a 'usual' RandomVector from the Distribution
    vect = ot.RandomVector(myDistribution)
    # We create a composite random vector
    output = ot.CompositeRandomVector(myFunction, vect)
    # We create a StandardEvent from this RandomVector
    myStandardEvent = ot.StandardEvent(output, ot.Less(), 2.0)

    std = ot.Normal()
    # Reliability indices matching tail probabilities 1e-3, 1e-5, 1e-7
    beta = ot.Point(3)
    beta[0] = round(-std.computeQuantile(1e-3)[0])
    beta[1] = round(-std.computeQuantile(1e-5)[0])
    beta[2] = round(-std.computeQuantile(1e-7)[0])
    importanceLevel = ot.Point(3)
    importanceLevel[0] = 0.01
    importanceLevel[1] = 0.05
    importanceLevel[2] = 0.10
    accuracyLevel = ot.Point(3)
    accuracyLevel[0] = 1.5
    accuracyLevel[1] = 2.0
    accuracyLevel[2] = 4.0
    confidenceLevel = ot.Point(3)
    confidenceLevel[0] = 0.90
    confidenceLevel[1] = 0.95
    confidenceLevel[2] = 0.99
    pointNumber = ot.UnsignedIntegerCollection(3)
    pointNumber[0] = 10
    pointNumber[1] = 100
    pointNumber[2] = 1000

    # TABLE 1 : we impose beta, the importance level, the accuracy level,
    # the confidence level and we calculate the corresponding deltaEpsilon
    # and pointNumber N
    print(
        "beta ",
        "importanceLevel ",
        "accuracyLevel ",
        "confidenceLevel ",
        "deltaEpsilon ",
        "pointNumber",
    )
    # loop on beta
    for indexBeta in range(beta.getDimension()):
        # We create the design point
        designPoint = ot.Point(dim, 0.0)
        designPoint[0] = beta[indexBeta]
        # loop on the importance level epsilon
        for indexImportanceLevel in range(importanceLevel.getDimension()):
            # loop on the accuracy level tau
            for indexAccuracyLevel in range(accuracyLevel.getDimension()):
                # loop on the confidence level (1-q)
                for indexConfidenceLevel in range(confidenceLevel.getDimension()):
                    # we calculate the corresponding deltaEpsilon and
                    # pointNumber N
                    myTest = ot.StrongMaximumTest(
                        myStandardEvent,
                        designPoint,
                        importanceLevel[indexImportanceLevel],
                        accuracyLevel[indexAccuracyLevel],
                        confidenceLevel[indexConfidenceLevel],
                    )
                    print(
                        "%.6f" % beta[indexBeta],
                        " %.6f" % importanceLevel[indexImportanceLevel],
                        " %.6f" % accuracyLevel[indexAccuracyLevel],
                        " %.6f" % confidenceLevel[indexConfidenceLevel],
                        " %.6f" % myTest.getDeltaEpsilon(),
                        " %.6f" % myTest.getPointNumber(),
                    )

    # TABLE 2 : we impose beta, the importance level, the accuracy level,
    # the pointNumber N and we calculate the corresponding deltaEpsilon
    # and confidence level
    print(
        "beta ",
        "importanceLevel ",
        "accuracyLevel ",
        "pointNumber",
        "deltaEpsilon ",
        "confidenceLevel",
    )
    # loop on beta
    for indexBeta in range(beta.getDimension()):
        # We create the design point
        designPoint = ot.Point(dim, 0.0)
        designPoint[0] = beta[indexBeta]
        # loop on the importance level epsilon
        for indexImportanceLevel in range(importanceLevel.getDimension()):
            # loop on the accuracy level tau
            for indexAccuracyLevel in range(accuracyLevel.getDimension()):
                # loop on the pointNumber N
                for indexPointNumber in range(pointNumber.getSize()):
                    # we calculate the corresponding deltaEpsilon and
                    # confidenceLevel
                    myTest = ot.StrongMaximumTest(
                        myStandardEvent,
                        designPoint,
                        importanceLevel[indexImportanceLevel],
                        accuracyLevel[indexAccuracyLevel],
                        pointNumber[indexPointNumber],
                    )
                    print(
                        "%.6f" % beta[indexBeta],
                        " %.6f" % importanceLevel[indexImportanceLevel],
                        " %.6f" % accuracyLevel[indexAccuracyLevel],
                        " %.6f" % pointNumber[indexPointNumber],
                        " %.6f" % myTest.getDeltaEpsilon(),
                        " %.6f" % myTest.getConfidenceLevel(),
                    )