"""
EfficientGlobalOptimization examples
====================================
"""
# %%
# The EGO algorithm [jones1998]_ is an adaptive optimization method based on
# a Gaussian Process metamodel.
#
# An initial design of experiments is used to build a first metamodel.
# At each iteration, a new point that maximizes a criterion is chosen as
# the candidate optimizer.
#
# The criterion represents a tradeoff between the metamodel value and the
# conditional variance.
#
# The new point is then evaluated with the original model, and the metamodel
# is re-trained on the extended design of experiments.
# %%
from openturns.usecases import branin_function
from openturns.usecases import ackley_function
import openturns as ot
import openturns.experimental as otexp
import openturns.viewer as otv
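# %%
# Before looking at an example, let us illustrate the kind of criterion EGO
# maximizes. The following sketch of the Expected Improvement for a
# minimization problem is for illustration only and is not used by the
# algorithm itself; the names `m`, `s` and `fMin` (predictive mean, predictive
# standard deviation and current best observed value) are our own.


def expectedImprovement(m, s, fMin):
    # EI of a Gaussian prediction N(m, s^2) with respect to the best value fMin
    if s <= 0.0:
        return 0.0
    z = (fMin - m) / s
    standardNormal = ot.Normal()
    return (fMin - m) * standardNormal.computeCDF(z) + s * standardNormal.computePDF(z)


# A point with a low predicted mean or a large variance gets a high criterion value.
expectedImprovement(0.5, 0.2, 1.0)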
# %%
# Ackley test-case
# ----------------
#
# We first apply the EGO algorithm on the :ref:`Ackley function<use-case-ackley>`.
# %%
# Define the problem
# ^^^^^^^^^^^^^^^^^^
# %%
# The Ackley model is defined in the usecases module in the data class `AckleyModel`:
am = ackley_function.AckleyModel()
# %%
# We get the Ackley function:
model = am.model
# %%
# We specify the domain of the model:
dim = am.dim
lowerbound = [-4.0] * dim
upperbound = [4.0] * dim
# %%
# We know that the global minimum is at the center of the domain. It is stored in the `AckleyModel` data class.
xexact = am.x0
# %%
# The minimum value attained `fexact` is:
fexact = model(xexact)
fexact
# %%
graph = model.draw(lowerbound, upperbound, [100] * dim)
graph.setTitle("Ackley function")
view = otv.View(graph)
# %%
# We see that the Ackley function has many local minima. The global minimum, however, is unique and located at the center of the domain.
# %%
# Create the initial GP
# ^^^^^^^^^^^^^^^^^^^^^
#
# Before using the EGO algorithm, we must create an initial GP metamodel.
# To do so, we need a design of experiments which fills the space.
# In this situation, the :class:`~openturns.LHSExperiment` is a good place to start
# (but other designs of experiments may fill the space even better).
# We use a uniform distribution to create the LHS design.
# The size of this initial LHS is set to ten times the problem dimension, as recommended in [jones1998]_.
# %%
listUniformDistributions = [
ot.Uniform(lowerbound[i], upperbound[i]) for i in range(dim)
]
distribution = ot.JointDistribution(listUniformDistributions)
sampleSize = 10 * dim
experiment = ot.LHSExperiment(distribution, sampleSize)
inputSample = experiment.generate()
outputSample = model(inputSample)
# %%
graph = ot.Graph(
"Initial LHS design of experiment - n=%d" % (sampleSize), "$x_0$", "$x_1$", True
)
cloud = ot.Cloud(inputSample)
graph.add(cloud)
view = otv.View(graph)
# %%
# We now create the GP metamodel.
# We select the :class:`~openturns.MaternModel` covariance model with a constant basis, as recommended in [leriche2021]_.
# %%
covarianceModel = ot.MaternModel([1.0] * dim, [0.5], 2.5)
basis = ot.ConstantBasisFactory(dim).build()
fitter = otexp.GaussianProcessFitter(inputSample, outputSample, covarianceModel, basis)
fitter.run()
gpr = otexp.GaussianProcessRegression(fitter.getResult())
gpr.run()
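# %%
# As an optional sanity check (not required by the algorithm), we can compare
# the initial metamodel prediction at the known optimum with the exact value.
# Since the metamodel is only trained on the LHS design, some discrepancy is
# expected.
metamodel = gpr.getResult().getMetaModel()
print("metamodel:", metamodel(xexact), "exact:", fexact)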
# %%
# Create the optimization problem
# ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
#
# We finally create the :class:`~openturns.OptimizationProblem` and solve it with :class:`~openturns.experimental.EfficientGlobalOptimization`.
# %%
problem = ot.OptimizationProblem()
problem.setObjective(model)
bounds = ot.Interval(lowerbound, upperbound)
problem.setBounds(bounds)
# %%
# The algorithm has several options; most of them can be left to their default
# settings in this case.
#
# The most important one is :meth:`~openturns.experimental.EfficientGlobalOptimization.setMaximumCallsNumber`,
# which sets the maximum number of points the algorithm may evaluate on the
# original model. Since EGO evaluates one new point per iteration, this amounts
# to 30 iterations of the algorithm in the Ackley example.
# %%
algo = otexp.EfficientGlobalOptimization(problem, gpr.getResult())
algo.setMaximumCallsNumber(30)
algo.run()
result = algo.getResult()
# %%
result.getIterationNumber()
# %%
result.getOptimalPoint()
# %%
result.getOptimalValue()
# %%
fexact
# %%
# Compared to the exact minimum value, we see that the EGO algorithm
# provides an accurate solution.
# Indeed, the optimal point is in the neighbourhood of the exact solution,
# which is quite impressive given the limited number of function
# evaluations: 20 evaluations for the initial DOE plus 30 iterations of
# the EGO algorithm, for a total of 50 function evaluations.
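# %%
# We can count the points evaluated during the EGO iterations; the initial
# design of experiments is not included in this history.
result.getInputSample().getSize()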
# %%
graph = result.drawOptimalValueHistory()
optimum_curve = ot.Curve(ot.Sample([[0, fexact[0]], [29, fexact[0]]]))
graph.add(optimum_curve)
view = otv.View(graph)
# %%
inputHistory = result.getInputSample()
# %%
graph = model.draw(lowerbound, upperbound, [100] * dim)
graph.setTitle("Ackley function")
cloud = ot.Cloud(inputSample, "initial")
cloud.setPointStyle("bullet")
cloud.setColor("black")
graph.add(cloud)
cloud = ot.Cloud(inputHistory, "solution")
cloud.setPointStyle("diamond")
cloud.setColor("forestgreen")
graph.add(cloud)
view = otv.View(graph)
# %%
# We see that the initial (black) points are spread over the whole domain,
# while the points evaluated during the EGO iterations concentrate near the
# global minimum.
# %%
# Branin test-case
# ----------------
#
# We now take a look at the :ref:`Branin-Hoo<use-case-branin>` function.
# %%
# Define the problem
# ^^^^^^^^^^^^^^^^^^
# %%
# The Branin model is defined in the usecases module in the data class `BraninModel`:
bm = branin_function.BraninModel()
# %%
# We load the dimension,
dim = bm.dim
# %%
# the domain boundaries,
lowerbound = bm.lowerbound
upperbound = bm.upperbound
# %%
# and we load the model function and its noise:
objectiveFunction = bm.model
noise = bm.trueNoiseFunction
# %%
# We build a sample out of the three minima:
xexact = ot.Sample([bm.xexact1, bm.xexact2, bm.xexact3])
# %%
# The minimum value attained `fexact` is:
fexact = objectiveFunction(xexact)
fexact
# %%
graph = objectiveFunction.draw(lowerbound, upperbound, [100] * dim)
graph.setTitle("Branin function")
view = otv.View(graph)
# %%
# The Branin function has three local minima.
# %%
# Create the initial GP
# ^^^^^^^^^^^^^^^^^^^^^
# %%
distribution = ot.JointDistribution([ot.Uniform(0.0, 1.0)] * dim)
sampleSize = 10 * dim
experiment = ot.LHSExperiment(distribution, sampleSize)
inputSample = experiment.generate()
outputSample = objectiveFunction(inputSample)
# %%
graph = ot.Graph(
"Initial LHS design of experiment - n=%d" % (sampleSize), "$x_0$", "$x_1$", True
)
cloud = ot.Cloud(inputSample)
graph.add(cloud)
view = otv.View(graph)
# %%
# Configure the covariance model with the noise
covarianceModel = ot.MaternModel([1.0] * dim, [0.5], 2.5)
covarianceModel.setNuggetFactor(noise)
# %%
# Build the initial GP
basis = ot.ConstantBasisFactory(dim).build()
fitter = otexp.GaussianProcessFitter(inputSample, outputSample, covarianceModel, basis)
fitter.run()
gpr = otexp.GaussianProcessRegression(fitter.getResult())
gpr.run()
# %%
# Create and solve the problem
# ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
# %%
# We define the problem:
problem = ot.OptimizationProblem()
problem.setObjective(objectiveFunction)
bounds = ot.Interval(lowerbound, upperbound)
problem.setBounds(bounds)
# %%
# We configure the algorithm.
# The nugget factor set in the covariance model enables the Augmented Expected
# Improvement (AEI) formulation, which accounts for the observation noise.
algo = otexp.EfficientGlobalOptimization(problem, gpr.getResult())
algo.setMaximumCallsNumber(30)
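# %%
# For illustration only, here is a simplified sketch of the AEI penalization
# term introduced in (Huang et al., 2006): the Expected Improvement is
# multiplied by a factor which decreases when the prediction uncertainty is
# dominated by the observation noise. The exact formulation also replaces the
# current best value by an "effective best" point; the `expectedImprovement`
# helper defined earlier in this example is reused here, and the
# `noiseVariance` parameter is our own name for the assumed noise variance.
import math


def augmentedExpectedImprovement(m, s, fMin, noiseVariance):
    # Penalize candidate points whose variance is mostly observation noise
    penalization = 1.0 - math.sqrt(noiseVariance) / math.sqrt(s * s + noiseVariance)
    return expectedImprovement(m, s, fMin) * penalization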
# %%
# We run the algorithm and get the result:
algo.run()
result = algo.getResult()
# %%
result.getIterationNumber()
# %%
result.getOptimalPoint()
# %%
result.getOptimalValue()
# %%
fexact
# %%
inputHistory = result.getInputSample()
# %%
graph = objectiveFunction.draw(lowerbound, upperbound, [100] * dim)
graph.setTitle("Branin function")
cloud = ot.Cloud(inputSample, "initial")
cloud.setPointStyle("bullet")
cloud.setColor("black")
graph.add(cloud)
cloud = ot.Cloud(inputHistory, "solution")
cloud.setPointStyle("diamond")
cloud.setColor("forestgreen")
graph.add(cloud)
view = otv.View(graph)
# %%
# We see that the EGO algorithm reached the three different optima locations.
# %%
graph = result.drawOptimalValueHistory()
optimum_curve = ot.Curve(ot.Sample([[0, fexact[0][0]], [29, fexact[0][0]]]))
graph.add(optimum_curve)
view = otv.View(graph, axes_kw={"xticks": range(0, result.getIterationNumber(), 5)})
# %%
otv.View.ShowAll()