File: t_Distribution_roughness.py

package info (click to toggle)
openturns 1.24-4
  • links: PTS, VCS
  • area: main
  • in suites: forky, sid, trixie
  • size: 66,204 kB
  • sloc: cpp: 256,662; python: 63,381; ansic: 4,414; javascript: 406; sh: 180; xml: 164; yacc: 123; makefile: 98; lex: 55
file content (102 lines) | stat: -rwxr-xr-x 3,260 bytes parent folder | download | duplicates (2)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
#! /usr/bin/env python
import openturns as ot
import math as m
import openturns.testing as ott

# Limit the numerical precision used in comparisons/printing so the
# reference-value checks below are stable across platforms.
ot.PlatformInfo.SetNumericalPrecision(5)
ot.TESTPREAMBLE()
# Fix the random seed so the sampling-based checks are reproducible.
ot.RandomGenerator.SetSeed(0)

def compute_roughness_sampling(distribution, size=500000):
    """
    Estimate the roughness of a distribution by quasi-Monte Carlo sampling.

    Roughness is the integral of the squared PDF, i.e. E[p(X)] for X ~ p,
    so the mean of the PDF evaluated on a sample drawn from the distribution
    is a consistent estimator.  This allows comparing the sampling estimate
    against the analytic/integration-based getRoughness() implementation.

    Parameters
    ----------
    distribution : ot.Distribution
        Distribution whose roughness is estimated.
    size : int, optional
        Quasi-Monte Carlo sample size (default 500000).

    Returns
    -------
    float
        Sampling-based roughness estimate.
    """
    dimension = distribution.getDimension()
    # Low-discrepancy (Sobol) sampling gives faster convergence than plain MC.
    # NOTE: the original code also built an ot.JointDistribution of uniforms
    # here and immediately discarded it — dead code, removed.
    sequence = ot.SobolSequence(dimension)
    experiment = ot.LowDiscrepancyExperiment(sequence, distribution, size, False)
    sample = experiment.generate()
    pdf = distribution.computePDF(sample)
    return pdf.computeMean()[0]


class Quartic(ot.PythonDistribution):
    """Quartic (biweight) kernel on [-1, 1]: pdf(u) = 15/16 * (1 - u^2)^2."""

    def __init__(self):
        super(Quartic, self).__init__(1)
        # Normalization constant of the quartic kernel.
        self.c = 15.0 / 16

    def computeCDF(self, x):
        """Return P(X <= x[0]), clamped to [0, 1] outside the support."""
        u = x[0]
        if u <= -1:
            return 0.0
        if u >= 1:
            return 1.0
        # Antiderivative of the PDF, shifted so that F(-1) = 0.
        return 0.5 + 15.0 / 16.0 * u - 5.0 / 8.0 * pow(u, 3) + 3.0 / 16.0 * pow(u, 5)

    def computePDF(self, x):
        """Return the density at x[0]; zero outside [-1, 1]."""
        u = x[0]
        if u < -1 or u > 1:
            return 0.0
        return self.c * (1 - u**2) ** 2

    def getRange(self):
        """Support of the distribution."""
        return ot.Interval(-1.0, 1.0)


# Using some reference values
# See https://en.wikipedia.org/wiki/Kernel_(statistics)#Kernel_functions_in_common_use
# First Normal dist with default ctor; analytic roughness is 1/(2*sqrt(pi)).
distribution = ot.Normal()
ott.assert_almost_equal(distribution.getRoughness(), 0.5 / m.sqrt(m.pi))

# Dimension 2 (Fix https://github.com/openturns/openturns/issues/1485)
# Indep copula : product of integrales
distribution = ot.Normal(2)
ott.assert_almost_equal(
    distribution.getRoughness(), compute_roughness_sampling(distribution)
)

# 2D Normal with scale & correlation
# This allows checking that Normal::getRoughness is well implemented
corr = ot.CorrelationMatrix(2)
corr[1, 0] = 0.3
distribution = ot.Normal([0, 0], [1, 2], corr)
ott.assert_almost_equal(
    distribution.getRoughness(), compute_roughness_sampling(distribution)
)

# 1D kernels with known closed-form roughness values.
distribution = ot.Epanechnikov()
ott.assert_almost_equal(distribution.getRoughness(), 3 / 5)

distribution = ot.Triangular()
ott.assert_almost_equal(distribution.getRoughness(), 2 / 3)

distribution = ot.Distribution(Quartic())
ott.assert_almost_equal(distribution.getRoughness(), 5 / 7)

# Testing Histogram ==> getSingularities
distribution = ot.HistogramFactory().buildAsHistogram(
    ot.Uniform(0, 1).getSample(100000)
)
ott.assert_almost_equal(distribution.getRoughness(), 1.0, 5e-4, 1e-5)
# Compute the roughness directly as sum_i w_i * h_i^2 over the histogram bins
# (idiomatic zip instead of index-based iteration; same summation order).
width = distribution.getWidth()
height = distribution.getHeight()
roughness = sum(w * h**2 for w, h in zip(width, height))
ott.assert_almost_equal(distribution.getRoughness(), roughness)

# Large dimension with independent copula
# With small rho value, we should have results similar to
# independent copula. But here we use the sampling method
# This allows the validation of this sampling method
corr = ot.CorrelationMatrix(5)
corr[1, 0] = 0.001
distribution = ot.Normal([0] * 5, [1] * 5, corr)

ott.assert_almost_equal(distribution.getRoughness(), pow(0.5 / m.sqrt(m.pi), 5))