File: test_base.py

package info (click to toggle)
scikit-learn 1.7.2+dfsg-3
  • links: PTS, VCS
  • area: main
  • in suites: forky, sid
  • size: 25,752 kB
  • sloc: python: 219,120; cpp: 5,790; ansic: 846; makefile: 191; javascript: 110
file content (52 lines) | stat: -rw-r--r-- 1,566 bytes parent folder | download
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
import numpy as np
import pytest

from sklearn._loss import HalfPoissonLoss
from sklearn.neural_network._base import binary_log_loss, log_loss, poisson_loss


def test_binary_log_loss_1_prob_finite():
    """A predicted probability of exactly 1.0 must yield a finite log loss.

    Naive log loss would evaluate log(0) for the (y=0, p=1) entry; the
    implementation is expected to clip and return a finite value instead.
    """
    # Column vectors: one target and one predicted probability per row.
    targets = np.array([[0], [0], [1]])
    probabilities = np.array([[0.9], [1.0], [1.0]])

    result = binary_log_loss(targets, probabilities)
    assert np.isfinite(result)


@pytest.mark.parametrize(
    "y_true, y_prob",
    [
        # Multiclass case: second column of the first row is exactly 1.0.
        (
            np.array([[1, 0, 0], [0, 1, 0]]),
            np.array([[0.0, 1.0, 0.0], [0.9, 0.05, 0.05]]),
        ),
        # Binary column-vector case with probabilities saturated at 1.0.
        (np.array([[0], [0], [1]]), np.array([[0.9], [1.0], [1.0]])),
    ],
)
def test_log_loss_1_prob_finite(y_true, y_prob):
    """Probabilities equal to 1.0 must not make the log loss blow up."""
    result = log_loss(y_true, y_prob)
    assert np.isfinite(result)


def test_poisson_loss(global_random_seed):
    """Check poisson_loss against the well-tested HalfPoissonLoss.

    HalfPoissonLoss omits the constant term that makes the loss zero at the
    optimum, so that term (normalized by the mean sample weight) is added
    back before comparing.
    """
    n_samples = 1000
    rng = np.random.default_rng(global_random_seed)
    y_true = rng.integers(low=0, high=10, size=n_samples).astype(float)
    raw_prediction = rng.standard_normal(n_samples)
    sample_weight = rng.uniform(low=0.1, high=10, size=n_samples)

    # Make sure the y == 0 edge case is exercised.
    assert 0 in y_true

    loss = poisson_loss(
        y_true=y_true, y_pred=np.exp(raw_prediction), sample_weight=sample_weight
    )

    half_poisson = HalfPoissonLoss()
    reference = half_poisson(
        y_true=y_true, raw_prediction=raw_prediction, sample_weight=sample_weight
    )
    reference += (
        half_poisson.constant_to_optimal_zero(
            y_true=y_true, sample_weight=sample_weight
        ).mean()
        / sample_weight.mean()
    )

    assert loss == pytest.approx(reference, rel=1e-12)