File: test_validation.py

import pytest
from thinc.api import chain, Relu, reduce_max, Softmax, with_ragged
from thinc.api import ParametricAttention, list2ragged, reduce_sum
from thinc.util import DataValidationError, data_validation


def test_validation():
    model = chain(Relu(10), Relu(10), with_ragged(reduce_max()), Softmax())
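    # With data validation enabled, each of these X/Y combinations mismatches
    # what at least one layer in the chain expects, so initialize() should
    # raise DataValidationError.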
    with data_validation(True):
        with pytest.raises(DataValidationError):
            model.initialize(X=model.ops.alloc2f(1, 10), Y=model.ops.alloc2f(1, 10))
        with pytest.raises(DataValidationError):
            model.initialize(X=model.ops.alloc3f(1, 10, 1), Y=model.ops.alloc2f(1, 10))
        with pytest.raises(DataValidationError):
            model.initialize(X=[model.ops.alloc2f(1, 10)], Y=model.ops.alloc2f(1, 10))


def test_validation_complex():
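    # A type-consistent pipeline: list2ragged produces Ragged data, reduce_sum
    # collapses each sequence into a single row (a 2d array), and the Relu
    # layers consume 2d arrays.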
    good_model = chain(list2ragged(), reduce_sum(), Relu(12, dropout=0.5), Relu(1))
    X = [good_model.ops.xp.zeros((4, 75), dtype="f")]
    Y = good_model.ops.xp.zeros((1,), dtype="f")
    good_model.initialize(X, Y)
    good_model.predict(X)

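    # In contrast, ParametricAttention expects Ragged input, but reduce_sum has
    # already collapsed the data to a 2d array, so validation should reject
    # this chain at initialization.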
    bad_model = chain(
        list2ragged(),
        reduce_sum(),
        Relu(12, dropout=0.5),
        # ERROR: Why can't I attach a Relu to an attention layer?
        ParametricAttention(12),
        Relu(1),
    )
    with data_validation(True):
        with pytest.raises(DataValidationError):
            bad_model.initialize(X, Y)
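

def _fixed_attention_model_sketch():
    # Not part of the original tests: a hedged sketch of one way the "bad"
    # chain above could be made type-consistent, assuming ParametricAttention
    # operates on Ragged data. Moving the attention layer before reduce_sum
    # means it still receives Ragged input; the width argument (75) is chosen
    # to match the example data above. No "test_" prefix, so pytest does not
    # collect it; shown for illustration only.
    model = chain(
        list2ragged(),
        ParametricAttention(75),
        reduce_sum(),
        Relu(12, dropout=0.5),
        Relu(1),
    )
    X = [model.ops.xp.zeros((4, 75), dtype="f")]
    Y = model.ops.xp.zeros((1,), dtype="f")
    model.initialize(X, Y)
    model.predict(X)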