File: plot_smolyak_merge.py

"""
Merge nodes in Smolyak quadrature
=================================
"""

# %%
# The goal of this example is to see the effect of the merge algorithm in
# Smolyak's quadrature implemented in :class:`openturns.SmolyakExperiment`.
# We first analyse the sensitivity of the number of nodes to the relative
# and absolute tolerances.
# Then we analyse the effect of enabling or disabling the merge algorithm
# on the number of nodes.

# %%
import numpy as np
import openturns as ot
import openturns.viewer as otv
import matplotlib.pyplot as plt
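
# %%
# Duplicated nodes appear in Smolyak's quadrature because the univariate
# quadrature rules which are combined share some nodes.
# For instance, with the :class:`openturns.Uniform` distribution, the
# Gauss-Legendre rules of odd sizes all contain the midpoint of the interval.
# The following quick check is a minimal illustration of this fact; the
# marginal sizes 3 and 5 are chosen arbitrarily here.

nodes_size_3 = ot.GaussProductExperiment(ot.Uniform(0.0, 1.0), [3]).generate()
nodes_size_5 = ot.GaussProductExperiment(ot.Uniform(0.0, 1.0), [5]).generate()
print("Nodes of the size 3 rule:", nodes_size_3)
print("Nodes of the size 5 rule:", nodes_size_5)
# Both rules contain the node 0.5: when such rules are tensorized and the
# resulting grids are combined, the same multivariate node can appear
# several times, which is why a merge step is useful.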

# %%
# The following example shows how to get the relative and absolute tolerances.
# These tolerances are used by the merge algorithm to identify potentially
# duplicated nodes, taking rounding errors into account.

epsilon_r = ot.ResourceMap.GetAsScalar("SmolyakExperiment-MergeRelativeEpsilon")
epsilon_a = ot.ResourceMap.GetAsScalar("SmolyakExperiment-MergeAbsoluteEpsilon")
print("Default epsilon_r = ", epsilon_r)
print("Default epsilon_a = ", epsilon_a)

# %%
# The following example shows how to set the relative and absolute tolerances.
epsilon_r = 1.0e-14
epsilon_a = 1.0e-14
ot.ResourceMap.SetAsScalar("SmolyakExperiment-MergeRelativeEpsilon", epsilon_r)
ot.ResourceMap.SetAsScalar("SmolyakExperiment-MergeAbsoluteEpsilon", epsilon_a)


# %%
# We are interested in the sensitivity of the number of nodes to the tolerances
# of the merging algorithm.
# The next function takes the level and tolerances as input arguments,
# and returns the size of Smolyak's quadrature using Gauss-Legendre nodes.


def computeNumberOfSmolyakNodes(level, epsilon_a, epsilon_r):
    ot.ResourceMap.SetAsScalar("SmolyakExperiment-MergeRelativeEpsilon", epsilon_r)
    ot.ResourceMap.SetAsScalar("SmolyakExperiment-MergeAbsoluteEpsilon", epsilon_a)
    uniform = ot.GaussProductExperiment(ot.Uniform(0.0, 1.0))
    collection = [uniform] * 2
    experiment = ot.SmolyakExperiment(collection, level)
    nodes, weights = experiment.generateWithWeights()
    size = nodes.getSize()
    return size
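

# %%
# As a quick check, we call this helper once with the tolerances defined
# above; the printed size is the number of distinct nodes kept after the
# merge step.
size = computeNumberOfSmolyakNodes(3, epsilon_a, epsilon_r)
print("level = 3, size = ", size)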


# %%
# In the following experiments, we use
# :math:`\epsilon = \epsilon_a = \epsilon_r` i.e.
# the relative and absolute tolerances are set to be equal.


# %%
# In general, we want the tolerance to be as small as possible.
# This is because we want the merge algorithm to detect only candidate nodes
# which truly overlap.
# Indeed, our goal is to make the algorithm insensitive to rounding errors,
# without merging two nodes which are significantly different.
# We first look at what happens when the tolerance is set to a value that
# is too small.

level = 9
epsilon = 0.0
size = computeNumberOfSmolyakNodes(level, epsilon, epsilon)
print("epsilon = %.2e, level = %d, size = %d" % (epsilon, level, size))
epsilon = 1.0e-20
size = computeNumberOfSmolyakNodes(level, epsilon, epsilon)
print("epsilon = %.2e, level = %d, size = %d" % (epsilon, level, size))


# %%
# Hence, 283 nodes are produced with a tolerance set to zero,
# or to a very small value.

epsilon = 1.0e-8
size = computeNumberOfSmolyakNodes(level, epsilon, epsilon)
print("epsilon = %.2e, level = %d, size = %d" % (epsilon, level, size))

# %%
# If the tolerance is set to this larger value, then 2 nodes are detected
# as duplicates, so that the total number of nodes is equal to 281.

# We conclude that the tolerance must not be set to a value that is too
# small, otherwise truly duplicated nodes are not detected.
#
# In the next `for` loops, for each value of the tolerance from :math:`10^{-1}`
# down to :math:`10^{-20}`, we increase the `level` and see how this changes
# the number of nodes.

graph = ot.Graph("Sensitivity to tolerance", "Level", "Size", True)
point_styles = ["circle", "fdiamond", "fsquare", "ftriangleup", "triangledown"]
number_of_epsilons = 5
epsilon_array = np.logspace(-1, -20, number_of_epsilons)
index = 0
for epsilon in epsilon_array:
    size_list = []
    level_list = list(range(1, 10))
    for level in level_list:
        size = computeNumberOfSmolyakNodes(level, epsilon, epsilon)
        size_list.append(size)
    cloud = ot.Cloud(level_list, size_list)
    cloud.setLegend("epsilon = %.2e" % (epsilon))
    cloud.setPointStyle(point_styles[index])
    graph.add(cloud)
    index += 1
graph.setIntegerXTick(True)
graph.setLegendPosition("upper left")
graph.setLegendCorner([1.0, 1.0])
view = otv.View(graph, figure_kw={"figsize": (6.0, 3.0)})

# %%
# We see that changing the tolerance from :math:`10^{-6}` down
# to :math:`10^{-20}` does not change the size of Smolyak's
# quadrature much.
# Using :math:`\epsilon = 10^{-1}`, however, reduces the number of nodes by
# too large an amount.
# This is because a relatively large tolerance considers many candidate
# nodes to be close to each other, so that these nodes are merged.
# In this case, the quadrature is not accurate enough.

# %%
# In the next `for` loop, for each value of the `level` parameter,
# we decrease the tolerance from :math:`10^{-1}`
# down to :math:`10^{-5}` and see how this changes the number of nodes.

graph = ot.Graph("Sensitivity to tolerance", "Epsilon", "Size", True)
point_styles = ["circle", "fdiamond", "fsquare", "ftriangleup", "triangledown"]
number_of_epsilons = 12
level_list = list(range(1, 10, 2))
index = 0
for level in level_list:
    size_list = []
    epsilon_array = np.logspace(0, -5, number_of_epsilons)
    for epsilon in epsilon_array:
        size = computeNumberOfSmolyakNodes(level, epsilon, epsilon)
        size_list.append(size)
    cloud = ot.Cloud(epsilon_array, size_list)
    cloud.setLegend("Level = %d" % (level))
    cloud.setPointStyle(point_styles[index])
    graph.add(cloud)
    index += 1
graph.setLegendPosition("upper left")
graph.setLogScale(ot.GraphImplementation.LOGXY)
graph.setLegendCorner([1.0, 1.0])
view = otv.View(graph, figure_kw={"figsize": (4.0, 3.0)})

# %%
# We see that changing the tolerance from :math:`1` down to :math:`10^{-3}`
# changes the number of nodes.
# Below this threshold, the number of nodes does not change much.

# %%
# The `SmolyakExperiment-MergeQuadrature` key of the `ResourceMap` allows
# to disable or enable the merge algorithm.
# In order to see its effect, the following script performs a loop over
# the levels and compute the size without and with the merge algorithm.
# Then we plot the number of nodes which have been detected as duplicated
# and plot it versus the level.

# Restore the tolerances to their default values
epsilon = 1.0e-8
ot.ResourceMap.SetAsScalar("SmolyakExperiment-MergeRelativeEpsilon", epsilon)
ot.ResourceMap.SetAsScalar("SmolyakExperiment-MergeAbsoluteEpsilon", epsilon)
level_list = list(range(1, 10))
size_decrease_list = []
for level in level_list:
    ot.ResourceMap.SetAsBool("SmolyakExperiment-MergeQuadrature", False)
    sizeNoMerge = computeNumberOfSmolyakNodes(level, epsilon, epsilon)
    ot.ResourceMap.SetAsBool("SmolyakExperiment-MergeQuadrature", True)
    sizeWithMerge = computeNumberOfSmolyakNodes(level, epsilon, epsilon)
    sizeDecrease = sizeNoMerge - sizeWithMerge
    size_decrease_list.append(sizeDecrease)

graph = ot.Graph("Sensitivity to merge", "Level", "Size decrease", True)
cloud = ot.Cloud(level_list, size_decrease_list)
graph.add(cloud)
graph.setLegendCorner([1.0, 1.0])
graph.setLegendPosition("upper left")
view = otv.View(graph, figure_kw={"figsize": (4.0, 3.0)})
plt.tight_layout()

# %%
# We see that the number of nodes is reduced when the merge algorithm is
# enabled.
# Moreover, the number of duplicated nodes increases with the level.

# %%
# Reset default settings
ot.ResourceMap.Reload()