File: test_deadline.py

package info (click to toggle)
python-hypothesis 6.138.0-1
  • links: PTS, VCS
  • area: main
  • in suites: sid
  • size: 15,272 kB
  • sloc: python: 62,853; ruby: 1,107; sh: 253; makefile: 41; javascript: 6
file content (173 lines) | stat: -rw-r--r-- 4,637 bytes parent folder | download
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
# This file is part of Hypothesis, which may be found at
# https://github.com/HypothesisWorks/hypothesis/
#
# Copyright the Hypothesis Authors.
# Individual contributors are listed in AUTHORS.rst and the git log.
#
# This Source Code Form is subject to the terms of the Mozilla Public License,
# v. 2.0. If a copy of the MPL was not distributed with this file, You can
# obtain one at https://mozilla.org/MPL/2.0/.

import gc
import re
import time

import pytest

from hypothesis import given, settings, strategies as st
from hypothesis.errors import DeadlineExceeded, FlakyFailure, InvalidArgument

from tests.common.utils import assert_falsifying_output, fails_with

# Skip this whole module under the "threading" settings profile: these tests
# rely on real wall-clock sleeps (time is not monkeypatched), so running them
# there would be very slow.
pytestmark = pytest.mark.skipif(
    settings._current_profile == "threading",
    reason="takes a long time because we don't monkeypatch time",
)


def test_raises_deadline_on_slow_test():
    """A body that always sleeps past the configured deadline raises DeadlineExceeded."""

    @settings(deadline=500)
    @given(st.integers())
    def always_too_slow(_n):
        # 1s sleep vs a 500ms deadline: guaranteed to exceed it.
        time.sleep(1)

    with pytest.raises(DeadlineExceeded):
        always_too_slow()


@pytest.mark.skipif(
    settings.get_profile(settings._current_profile).deadline is None,
    reason="not expected to fail if deadline is disabled",
)
@fails_with(DeadlineExceeded)
@given(st.integers())
def test_slow_tests_are_errors_by_default(i):
    """With no explicit deadline setting, a one-second sleep still fails the
    (default) deadline — unless the active profile disables deadlines."""
    time.sleep(1)


def test_non_numeric_deadline_is_an_error():
    """deadline must be numeric (or None); a string is rejected at settings time."""
    with pytest.raises(InvalidArgument):
        settings(deadline="3 seconds")


@given(st.integers())
@settings(deadline=None)
def test_slow_with_none_deadline(i):
    """deadline=None disables the check entirely, so a slow body passes."""
    time.sleep(1)


def test_raises_flaky_if_a_test_becomes_fast_on_rerun():
    """A deadline failure that disappears on replay is reported as FlakyFailure."""
    seen_first_call = []

    @settings(deadline=500, backend="hypothesis")
    @given(st.integers())
    def sometimes_slow(n):
        # Only the very first execution sleeps; the replay is then fast,
        # which makes the original failure unreproducible.
        if not seen_first_call:
            seen_first_call.append(True)
            time.sleep(1)

    with pytest.raises(FlakyFailure):
        sometimes_slow()


def test_deadlines_participate_in_shrinking():
    """A DeadlineExceeded failure shrinks like any other: down to the minimal
    slow input, i=1000."""

    # NOTE: the parameter must stay named `i` — assert_falsifying_output
    # matches the falsifying example's argument by that name below.
    @settings(deadline=500, max_examples=1000, database=None)
    @given(st.integers(min_value=0))
    def sleeps_when_large(i):
        if i >= 1000:
            time.sleep(1)

    assert_falsifying_output(
        sleeps_when_large,
        expected_exception=DeadlineExceeded,
        i=1000,
    )


def test_keeps_you_well_above_the_deadline():
    """A genuine deadline failure should be reported as DeadlineExceeded (not
    flaky), even when replays of the same input run somewhat faster."""
    seen = set()
    failed_once = False

    @settings(deadline=100, backend="hypothesis")
    @given(st.integers(0, 2000))
    def slow(i):
        nonlocal failed_once
        # Make sure our initial failure isn't something that immediately goes flaky.
        if not failed_once:
            if i * 0.9 <= 100:
                return
            else:
                failed_once = True

        # Sleep proportionally to i (i=1000 -> 1s).  Replays of an
        # already-seen input sleep only 90% as long, simulating mild timing
        # variation between runs — still over the deadline, so the failure
        # must be reported as DeadlineExceeded rather than flaky.
        t = i / 1000
        if i in seen:
            time.sleep(0.9 * t)
        else:
            seen.add(i)
            time.sleep(t)

    with pytest.raises(DeadlineExceeded):
        slow()


def test_gives_a_deadline_specific_flaky_error_message():
    """The flaky report for unstable timing names the cause and the measured time."""
    first_run = [True]

    @settings(deadline=100, backend="hypothesis")
    @given(st.integers())
    def slow_once(i):
        # Exceed the deadline exactly once, so the replay is fast and flaky.
        if first_run[0]:
            first_run[0] = False
            time.sleep(0.2)

    with pytest.raises(FlakyFailure) as err:
        slow_once()
    notes = "\n".join(err.value.__notes__)
    assert "Unreliable test timing" in notes
    # this used to be "took 2", but we saw that flake (on pypy, though unsure if
    # that means anything) with "took 199.59ms". It's possible our gc accounting
    # is incorrect, or we could just be running into rare non-guarantees of
    # time.sleep.
    assert re.search(r"took \d", notes)


@pytest.mark.parametrize("slow_strategy", [False, True])
@pytest.mark.parametrize("slow_test", [False, True])
def test_should_only_fail_a_deadline_if_the_test_is_slow(slow_strategy, slow_test):
    """Only time spent in the test body counts against the deadline — time
    spent generating data (inside the strategy) must be excluded."""
    strategy = st.integers()
    if slow_strategy:
        # Sleeping inside .map charges data *generation*, not the test body.
        strategy = strategy.map(lambda x: time.sleep(0.08))

    @settings(deadline=50)
    @given(st.data())
    def run(data):
        data.draw(strategy)
        if slow_test:
            time.sleep(0.1)

    if not slow_test:
        # A slow strategy alone must not trip the 50ms deadline.
        run()
    else:
        with pytest.raises(DeadlineExceeded):
            run()


@pytest.mark.skipif(not hasattr(gc, "callbacks"), reason="CPython specific gc delay")
def test_should_not_fail_deadline_due_to_gc():
    """Time spent inside the garbage collector is excluded from the deadline."""

    @settings(max_examples=1, deadline=50)
    @given(st.integers())
    def check(i):
        start = time.perf_counter()
        gc.collect()
        elapsed = time.perf_counter() - start
        assert elapsed >= 0.1  # verify that we're slow

    def delay(phase, _info):
        # Registered gc callback: stall 100ms at the start of each collection,
        # blowing well past the 50ms deadline if gc time were counted.
        if phase == "start":
            time.sleep(0.1)

    gc.callbacks.append(delay)
    try:
        check()
    finally:
        # Always unregister, or the stall would leak into every later test.
        gc.callbacks.remove(delay)