File: bench_glm.py

Package: scikit-learn 0.18-5 (Debian stretch)
"""
A comparison of fit times for different linear (GLM) models in scikit-learn:
Ridge, ordinary least squares and LassoLars.

Data comes from random square matrices of increasing size.

"""
from datetime import datetime
import numpy as np
from sklearn import linear_model


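# A minimal, optional sketch (not part of the original benchmark): datetime
# deltas are coarse for the small matrices timed below, so a best-of-n_repeats
# measurement with time.perf_counter() is usually less noisy. The helper name
# `bench_fit` is illustrative, not a scikit-learn API.
import time


def bench_fit(estimator, X, Y, n_repeats=3):
    """Return the best-of-`n_repeats` wall-clock fit time, in seconds."""
    best = float('inf')
    for _ in range(n_repeats):
        tic = time.perf_counter()
        estimator.fit(X, Y)
        best = min(best, time.perf_counter() - tic)
    return best
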
if __name__ == '__main__':

    import matplotlib.pyplot as plt

    n_iter = 40

    # One fit-time slot per iteration for each estimator.
    time_ridge = np.empty(n_iter)
    time_ols = np.empty(n_iter)
    time_lasso = np.empty(n_iter)

    # Problem sizes actually timed below: n = 10 * i + 3 for i = 0..n_iter - 1.
    dimensions = 10 * np.arange(n_iter) + 3

    for i in range(n_iter):

        print('Iteration %s of %s' % (i + 1, n_iter))

        # Square problem: as many samples as features.
        n_samples, n_features = dimensions[i], dimensions[i]

        X = np.random.randn(n_samples, n_features)
        Y = np.random.randn(n_samples)

        # Time a Ridge fit.
        start = datetime.now()
        ridge = linear_model.Ridge(alpha=1.)
        ridge.fit(X, Y)
        time_ridge[i] = (datetime.now() - start).total_seconds()

        # Time an ordinary least squares fit.
        start = datetime.now()
        ols = linear_model.LinearRegression()
        ols.fit(X, Y)
        time_ols[i] = (datetime.now() - start).total_seconds()

        # Time a LassoLars fit (default regularization).
        start = datetime.now()
        lasso = linear_model.LassoLars()
        lasso.fit(X, Y)
        time_lasso[i] = (datetime.now() - start).total_seconds()

    plt.figure('scikit-learn GLM benchmark results')
    plt.xlabel('Dimensions')
    plt.ylabel('Time (s)')
    plt.plot(dimensions, time_ridge, color='r')
    plt.plot(dimensions, time_ols, color='g')
    plt.plot(dimensions, time_lasso, color='b')

    plt.legend(['Ridge', 'OLS', 'LassoLars'], loc='upper left')
    plt.axis('tight')
    plt.show()