File: optimizer-with-different-base-estimator.py

"""
==============================================
Use different base estimators for optimization
==============================================

Sigurd Carlen, September 2019.
Reformatted by Holger Nahrstaedt 2020

.. currentmodule:: skopt


To use a different base estimator, or to configure the regressor with
non-default parameters, we can create a regressor object and pass it to
:class:`Optimizer` as ``base_estimator``.

This example uses :class:`plots.plot_gaussian_process` which is available
since version 0.8.
"""

print(__doc__)

import matplotlib.pyplot as plt
import numpy as np

from skopt import Optimizer
from skopt.plots import plot_gaussian_process

np.random.seed(1234)
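
#############################################################################
# A minimal sketch of the pattern described above (illustrative only; the
# sections below do this with real kernels): any scikit-learn style
# regressor can be passed as the surrogate, e.g.::
#
#     from skopt.learning import GaussianProcessRegressor
#     opt = Optimizer([(-2.0, 2.0)], base_estimator=GaussianProcessRegressor())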

#############################################################################
# Toy example
# -----------
#
# Let us assume the following noisy function :math:`f`:

noise_level = 0.1

# Our 1D toy problem: the function we are trying to minimize


def objective(x, noise_level=noise_level):
    return np.sin(5 * x[0]) * (1 - np.tanh(x[0] ** 2)) + np.random.randn() * noise_level


def objective_wo_noise(x):
    return objective(x, noise_level=0)
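

# Quick sanity check (illustrative): compare one noisy and one noise-free
# evaluation at x = 0; they should differ by roughly ``noise_level``.
print("objective([0.0]) with noise   :", objective([0.0]))
print("objective([0.0]) without noise:", objective_wo_noise([0.0]))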


#############################################################################
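# ``acq_optimizer="sampling"`` optimizes the acquisition function by scoring
# randomly sampled candidate points, and ``n_initial_points=5`` makes the
# first five evaluations random before the surrogate model is consulted.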

opt_gp = Optimizer(
    [(-2.0, 2.0)],
    base_estimator="GP",
    n_initial_points=5,
    acq_optimizer="sampling",
    random_state=42,
)

#############################################################################


def plot_optimizer(res, n_iter, max_iters=5):
    # Plot the surrogate model (left column) and the acquisition function
    # (right column) for one row of the iteration grid.
    show_legend = n_iter == 0  # only draw the legend on the first row
    ax = plt.subplot(max_iters, 2, 2 * n_iter + 1)
    # Plot GP(x) + contours
    ax = plot_gaussian_process(
        res,
        ax=ax,
        objective=objective_wo_noise,
        noise_level=noise_level,
        show_legend=show_legend,
        show_title=True,
        show_next_point=False,
        show_acq_func=False,
    )
    ax.set_ylabel("")
    ax.set_xlabel("")
    if n_iter < max_iters - 1:
        ax.get_xaxis().set_ticklabels([])
    # Plot EI(x)
    ax = plt.subplot(max_iters, 2, 2 * n_iter + 2)
    ax = plot_gaussian_process(
        res,
        ax=ax,
        noise_level=noise_level,
        show_legend=show_legend,
        show_title=False,
        show_next_point=True,
        show_acq_func=True,
        show_observations=False,
        show_mu=False,
    )
    ax.set_ylabel("")
    ax.set_xlabel("")
    if n_iter < max_iters - 1:
        ax.get_xaxis().set_ticklabels([])


#############################################################################
# GP kernel
# ---------

fig = plt.figure()
fig.suptitle("Standard GP kernel")
for i in range(10):
    next_x = opt_gp.ask()
    f_val = objective(next_x)
    res = opt_gp.tell(next_x, f_val)
    if i >= 5:
        plot_optimizer(res, n_iter=i - 5, max_iters=5)
plt.tight_layout(rect=[0, 0.03, 1, 0.95])
plt.show()

#############################################################################
# Test different kernels
# ----------------------

from sklearn.gaussian_process.kernels import (
    RBF,
    DotProduct,
    ExpSineSquared,
    RationalQuadratic,
)

from skopt.learning import GaussianProcessRegressor
from skopt.learning.gaussian_process.kernels import ConstantKernel, Matern

# Candidate kernels to compare as the covariance function of the Gaussian
# process surrogate model


kernels = [
    (1.0 * RBF(length_scale=1.0, length_scale_bounds=(1e-1, 10.0)), "RBF"),
    (1.0 * RationalQuadratic(length_scale=1.0, alpha=0.1), "RationalQuadratic"),
    (
        1.0
        * ExpSineSquared(
            length_scale=1.0,
            periodicity=3.0,
            length_scale_bounds=(0.1, 10.0),
            periodicity_bounds=(1.0, 10.0),
        ),
        "ExpSineSquared",
    ),
    # (ConstantKernel(0.1, (0.01, 10.0))
    # * (DotProduct(sigma_0=1.0, sigma_0_bounds=(0.1, 10.0)) ** 2), "ConstantKernel"),
    (1.0 * Matern(length_scale=1.0, length_scale_bounds=(1e-1, 10.0), nu=2.5), "Matern"),
]
#############################################################################

for kernel, label in kernels:
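    # ``alpha`` adds the known observation noise variance to the kernel
    # diagonal; ``noise="gaussian"`` additionally lets skopt model the noise
    # level with a WhiteKernel term.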
    gpr = GaussianProcessRegressor(
        kernel=kernel,
        alpha=noise_level**2,
        normalize_y=True,
        noise="gaussian",
        n_restarts_optimizer=2,
    )
    opt = Optimizer(
        [(-2.0, 2.0)],
        base_estimator=gpr,
        n_initial_points=5,
        acq_optimizer="sampling",
        random_state=42,
    )
    fig = plt.figure()
    fig.suptitle(label)
    for i in range(10):
        next_x = opt.ask()
        f_val = objective(next_x)
        res = opt.tell(next_x, f_val)
        if i >= 5:
            plot_optimizer(res, n_iter=i - 5, max_iters=5)
    plt.tight_layout(rect=[0, 0.03, 1, 0.95])
    plt.show()
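
#############################################################################
# Tree-based surrogates
# ---------------------
#
# A short sketch to close (settings mirror the GP runs above and are
# illustrative): ``Optimizer`` also accepts the strings "RF", "ET" and
# "GBRT" as ``base_estimator``, selecting random forest, extra-trees and
# gradient-boosted-tree surrogates respectively.

opt_et = Optimizer(
    [(-2.0, 2.0)],
    base_estimator="ET",
    n_initial_points=5,
    acq_optimizer="sampling",
    random_state=42,
)
for _ in range(10):
    next_x = opt_et.ask()
    opt_et.tell(next_x, objective(next_x))
print("Best value found with the ET surrogate:", min(opt_et.yi))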