File: plot_optimization_dlib.py

"""
Optimization using dlib
=======================
"""

# %%
# In this example we are going to explore optimization using the interface to the `dlib <http://dlib.net/>`_ library.

# %%
import openturns as ot
import openturns.viewer as otv


# %%
# List available algorithms
for algo in ot.Dlib.GetAlgorithmNames():
    print(algo)

# %%
# More details on dlib algorithms are available `here <http://dlib.net/optimization.html>`_ .

# %%
# Solving an unconstrained problem with conjugate gradient algorithm
# ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
#
# The following example demonstrates the use of the dlib conjugate gradient algorithm to find the minimum of the `Rosenbrock function <https://en.wikipedia.org/wiki/Rosenbrock_function>`_.
# The optimal point can be computed analytically: it is [1.0, 1.0].

# %%
# Define the problem based on the Rosenbrock function
rosenbrock = ot.SymbolicFunction(["x1", "x2"], ["(1-x1)^2+(x2-x1^2)^2"])
problem = ot.OptimizationProblem(rosenbrock)
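
# %%
# As a quick sanity check (added here for illustration, not part of the original
# example), the analytical optimum [1.0, 1.0] should yield a zero objective value.
print("f([1.0, 1.0]) = ", rosenbrock([1.0, 1.0]))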

# %%
# The optimization algorithm is instantiated from the problem to solve and the name of the algorithm.
algo = ot.Dlib(problem, "cg")
print("Dlib algorithm, type ", algo.getAlgorithmName())
print("Maximum iteration number: ", algo.getMaximumIterationNumber())
print("Maximum evaluation number: ", algo.getMaximumCallsNumber())
print("Maximum absolute error: ", algo.getMaximumAbsoluteError())
print("Maximum relative error: ", algo.getMaximumRelativeError())
print("Maximum residual error: ", algo.getMaximumResidualError())
print("Maximum constraint error: ", algo.getMaximumConstraintError())

# %%
# When using conjugate gradient, BFGS/LBFGS, Newton, least squares or trust region methods, optimization proceeds until one of the following criteria is met:
#
# - the errors (absolute, relative, residual, constraint) are all below the limits set by the user;
# - the process reaches the maximum number of iterations or function evaluations.

# %%
# Adjust the maximum number of iterations/evaluations and the error limits
algo.setMaximumIterationNumber(1000)
algo.setMaximumCallsNumber(10000)
algo.setMaximumAbsoluteError(1e-3)
algo.setMaximumRelativeError(1e-3)
algo.setMaximumResidualError(1e-3)

# %%
# Solve the problem
startingPoint = [1.5, 0.5]
algo.setStartingPoint(startingPoint)

algo.run()

# %%
# Retrieve results
result = algo.getResult()
print("x^ = ", result.getOptimalPoint())
print("f(x^) = ", result.getOptimalValue())
print("Iteration number: ", result.getIterationNumber())
print("Evaluation number: ", result.getCallsNumber())
print("Absolute error: ", result.getAbsoluteError())
print("Relative error: ", result.getRelativeError())
print("Residual error: ", result.getResidualError())
print("Constraint error: ", result.getConstraintError())

# %%
# Solving problem with bounds, using LBFGS strategy
# ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
#
# In the following example, the input variables are bounded so that the global optimum of the function is not included in the search interval.
#
# The problem is solved using the LBFGS strategy, which allows the user to limit the amount of memory used by the optimization process.

# %%
# Define the bounds and the problem
bounds = ot.Interval([0.0, 0.0], [0.8, 2.0])
boundedProblem = ot.OptimizationProblem(
    rosenbrock, ot.Function(), ot.Function(), bounds
)

# %%
# Define the Dlib algorithm
boundedAlgo = ot.Dlib(boundedProblem, "lbfgs")
boundedAlgo.setMaxSize(15)  # Default value for LBFGS' maxSize parameter is 10

startingPoint = [0.5, 1.5]
boundedAlgo.setStartingPoint(startingPoint)

boundedAlgo.run()

# %%
# Retrieve results
result = boundedAlgo.getResult()
print("x^ = ", result.getOptimalPoint())
print("f(x^) = ", result.getOptimalValue())
print("Iteration number: ", result.getIterationNumber())
print("Evaluation number: ", result.getCallsNumber())
print("Absolute error: ", result.getAbsoluteError())
print("Relative error: ", result.getRelativeError())
print("Residual error: ", result.getResidualError())
print("Constraint error: ", result.getConstraintError())

# %%
# **Remark:**
# The bounds defined for input variables are always strictly respected when using dlib algorithms. Consequently, the constraint error is always 0.
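
# %%
# We can verify this by checking that the optimal point indeed lies inside the
# bounds (a small check added for illustration).
print("Optimal point inside the bounds: ", bounds.contains(result.getOptimalPoint()))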

# %%
# Draw optimal value history
graph = result.drawOptimalValueHistory()
view = otv.View(graph)

# %%
# Solving least squares problem
# ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
#
# In a least squares problem, the user provides the residual function to minimize.
# Here the underlying OptimizationProblem is defined as a LeastSquaresProblem.
#
# dlib least squares algorithms use the same stopping criteria as the CG, BFGS/LBFGS and Newton algorithms.
# However, optimization will stop earlier if no significant improvement can be achieved during the process.

# %%
# Define residual function
n = 3
m = 20
x = [[0.5 + 0.1 * i] for i in range(m)]

model = ot.SymbolicFunction(["a", "b", "c", "x"], ["a + b * exp(-c * x^2)"])
p_ref = [2.8, 1.2, 0.5]  # Reference a, b, c
modelx = ot.ParametricFunction(model, [0, 1, 2], p_ref)

# Generate the reference sample (with multiplicative normal noise)
y = [modelx(x[i])[0] * ot.Normal(1.0, 0.05).getRealization()[0] for i in range(m)]


def residualFunction(params):
    modelx = ot.ParametricFunction(model, [0, 1, 2], params)
    return [modelx(x[i])[0] - y[i] for i in range(m)]


# %%
# Define the residual as an ot.PythonFunction and build the optimization problem
residual = ot.PythonFunction(n, m, residualFunction)
lsqProblem = ot.LeastSquaresProblem(residual)
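
# %%
# A quick check (added for illustration): evaluated at the reference parameters,
# the residuals should only reflect the simulated noise.
print("Residual at p_ref: ", residual(p_ref))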

# %%
# Define the Dlib solver and set the starting point
lsqAlgo = ot.Dlib(lsqProblem, "least_squares")
lsqAlgo.setStartingPoint([0.0, 0.0, 0.0])

lsqAlgo.run()

# %%
# Retrieve results
result = lsqAlgo.getResult()
print("x^ = ", result.getOptimalPoint())
print("f(x^) = ", result.getOptimalValue())
print("Iteration number: ", result.getIterationNumber())
print("Evaluation number: ", result.getCallsNumber())
print("Absolute error: ", result.getAbsoluteError())
print("Relative error: ", result.getRelativeError())
print("Residual error: ", result.getResidualError())
print("Constraint error: ", result.getConstraintError())

# %%
# Draw the error history
graph = result.drawErrorHistory()
view = otv.View(graph)

# %%
# Draw optimal value history
graph = result.drawOptimalValueHistory()
view = otv.View(graph)

otv.View.ShowAll()