import pytest
from thinc.api import (
ParametricAttention,
Relu,
Softmax,
chain,
list2ragged,
reduce_max,
reduce_sum,
with_ragged,
)
from thinc.util import DataValidationError, data_validation
def test_validation():
    """initialize() must raise DataValidationError for shape-incompatible data."""
    model = chain(Relu(10), Relu(10), with_ragged(reduce_max()), Softmax())
    target = model.ops.alloc2f(1, 10)
    # Inputs that don't fit the pipeline: plain 2d array, 3d array, and a
    # list of arrays — none matches what with_ragged(reduce_max()) expects.
    bad_inputs = [
        model.ops.alloc2f(1, 10),
        model.ops.alloc3f(1, 10, 1),
        [model.ops.alloc2f(1, 10)],
    ]
    with data_validation(True):
        for bad_X in bad_inputs:
            with pytest.raises(DataValidationError):
                model.initialize(X=bad_X, Y=target)
def test_validation_complex():
    """A compatible chain initializes and predicts; an incompatible one fails."""
    good_model = chain(list2ragged(), reduce_sum(), Relu(12, dropout=0.5), Relu(1))
    X = [good_model.ops.xp.zeros((4, 75), dtype="f")]
    Y = good_model.ops.xp.zeros((1,), dtype="f")
    # Sanity check: this pipeline accepts the data end to end.
    good_model.initialize(X, Y)
    good_model.predict(X)

    bad_model = chain(
        list2ragged(),
        reduce_sum(),
        Relu(12, dropout=0.5),
        # Deliberately broken: ParametricAttention can't follow a Relu here,
        # so validation should reject this chain at initialize time.
        ParametricAttention(12),
        Relu(1),
    )
    with data_validation(True), pytest.raises(DataValidationError):
        bad_model.initialize(X, Y)