File: conftest.py

package info (click to toggle)
python-noseofyeti 2.4.9-1
  • links: PTS, VCS
  • area: main
  • in suites: forky, sid, trixie
  • size: 360 kB
  • sloc: python: 2,581; sh: 31; makefile: 12
file content (157 lines) | stat: -rw-r--r-- 4,616 bytes parent folder | download
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
import os
import re
import shutil
import tempfile
from contextlib import contextmanager
from io import StringIO
from textwrap import dedent
from tokenize import untokenize

import pytest

from noseOfYeti.tokeniser.tokeniser import WITH_IT_RETURN_TYPE_ENV_NAME


@pytest.fixture(autouse=True)
def remove_with_it_return_type_env(monkeypatch):
    """Ensure every test starts with the with-it-return-type env var unset.

    Autouse so no test inherits the variable from the developer's shell;
    ``raising=False`` makes deletion a no-op when it was never set.
    """
    monkeypatch.delenv(WITH_IT_RETURN_TYPE_ENV_NAME, raising=False)


@pytest.fixture()
def a_temp_file():
    """Fixture providing a context manager that materialises a temp file.

    The returned context manager writes ``dedent(contents)`` to a fresh
    temporary file, yields the file's path, and removes the file on exit.
    """

    @contextmanager
    def a_temp_file(contents):
        tempfle = None
        try:
            tempfle = tempfile.NamedTemporaryFile(delete=False)
            # Close the creation handle straight away: we only need the name.
            # Leaving it open leaked the descriptor and prevents reopening
            # the file by name on Windows.
            tempfle.close()
            with open(tempfle.name, "w") as fle:
                fle.write(dedent(contents))

            yield tempfle.name
        finally:
            # Best-effort cleanup even if the body raised.
            if tempfle and os.path.exists(tempfle.name):
                os.remove(tempfle.name)

    return a_temp_file


@pytest.fixture()
def a_temp_dir():
    """Fixture providing a context manager that yields a fresh temp directory.

    The directory (and anything created inside it) is removed on exit.
    """

    @contextmanager
    def a_temp_dir():
        made = None
        try:
            made = tempfile.mkdtemp()
            yield made
        finally:
            # Remove the tree even when the body raised; skip if something
            # else already deleted it.
            if made and os.path.exists(made):
                shutil.rmtree(made)

    return a_temp_dir


def pytest_configure():
    """Register shared assertion helpers on the ``pytest.helpers`` namespace.

    Relies on the pytest-helpers-namespace plugin, which supplies
    ``pytest.helpers.register`` and exposes each helper as
    ``pytest.helpers.<name>`` inside tests.
    """

    def _normalise(got_lines, want_lines, lstrip, rstrip):
        # Shared preamble for the line-comparison helpers: dedent both blocks
        # (so tests can use indented triple-quoted literals), optionally strip
        # surrounding whitespace, and echo both for easier failure debugging.
        got_lines = dedent(got_lines)
        want_lines = dedent(want_lines)

        if lstrip:
            got_lines = got_lines.lstrip()
            want_lines = want_lines.lstrip()

        # BUG FIX: this condition previously re-tested ``lstrip``, so the
        # ``rstrip`` parameter was silently ignored.
        if rstrip:
            got_lines = got_lines.rstrip()
            want_lines = want_lines.rstrip()

        print("GOT LINES\n=========")
        print(got_lines)

        print("\n\n\nWANT LINES\n==========")
        print(want_lines)

        return got_lines, want_lines

    @pytest.helpers.register
    def assert_regex_lines(got_lines, want_lines, lstrip=True, rstrip=False):
        """Assert each line of ``got_lines`` matches the corresponding line of
        ``want_lines``, where every wanted line is treated as a regex pattern.
        """
        __tracebackhide__ = True

        got_lines, want_lines = _normalise(got_lines, want_lines, lstrip, rstrip)

        for i, (wl, gl) in enumerate(zip(want_lines.split("\n"), got_lines.split("\n"))):
            try:
                m = re.match(wl, gl)
            except re.error as error:
                pytest.fail(f"Failed to turn line into a regex\nCONVERTING: {wl}\nERROR: {error}")

            if not m:
                pytest.fail(f"line {i} does not match. Wanted:\n{wl}\n\nGot:\n{gl}")

    @pytest.helpers.register
    def assert_lines(got_lines, want_lines, lstrip=True, rstrip=False, rstrip_got_lines=True):
        """Assert each line of ``got_lines`` equals the corresponding line of
        ``want_lines`` exactly (optionally right-stripping the got lines first).
        """
        __tracebackhide__ = True

        got_lines, want_lines = _normalise(got_lines, want_lines, lstrip, rstrip)

        for i, (wl, gl) in enumerate(zip(want_lines.split("\n"), got_lines.split("\n"))):
            if rstrip_got_lines:
                gl = gl.rstrip()
            if wl != gl:
                pytest.fail(f"line {i} does not match. Wanted:\n{wl}\n\nGot:\n{gl}")

    @pytest.helpers.register
    def assert_conversion(
        original, want_lines, *, tokeniser=None, regex=False, lstrip=True, rstrip=False
    ):
        """Tokenise ``original`` twice — with and without ``it`` return types —
        and compare the result against ``want_lines`` each time.

        ``$RET`` in either text is replaced with the return annotation
        ("->None " or "") matching the current pass. ``tokeniser`` supplies
        extra keyword arguments for the ``Tokeniser``; ``regex`` selects
        regex-based line matching instead of exact comparison.
        """
        __tracebackhide__ = True

        from noseOfYeti.tokeniser import Tokeniser

        original = dedent(original)
        if lstrip:
            original = original.lstrip()
        if rstrip:
            original = original.rstrip()

        if tokeniser is None:
            tokeniser = {}

        for give_return_types, ret in ((False, ""), (True, "->None ")):
            orig = original.replace("$RET", ret)
            want = want_lines.replace("$RET", ret)

            # ``with_it_return_type`` always mirrors the current pass.
            kwargs = {**tokeniser, "with_it_return_type": give_return_types}

            s = StringIO(orig)
            tok = Tokeniser(**kwargs)
            got_lines = untokenize(tok.translate(s.readline))

            if regex:
                pytest.helpers.assert_regex_lines(got_lines, want, lstrip=lstrip, rstrip=rstrip)
            else:
                pytest.helpers.assert_lines(got_lines, want, lstrip=lstrip, rstrip=rstrip)

    @pytest.helpers.register
    def assert_example(example, convert_to_tabs=False, **kwargs):
        """Run ``assert_conversion`` on an ``(original, desired)`` pair,
        optionally converting 4-space indentation to tabs first.
        """
        __tracebackhide__ = True

        original, desired = example
        if convert_to_tabs:
            original = original.replace("    ", "\t")
            desired = desired.replace("    ", "\t")

        pytest.helpers.assert_conversion(original, desired, tokeniser=kwargs)