File: test_unsupported_syntax.py

package info (click to toggle)
python-pegen 0.3.0-1
  • links: PTS, VCS
  • area: main
  • in suites: forky, sid
  • size: 10,980 kB
  • sloc: python: 15,064; makefile: 89
file content (199 lines) | stat: -rw-r--r-- 6,914 bytes parent folder | download
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
"""Test identifying unsupported syntax constructions in older Python versions.

Note that we can request the parser to apply stricter bounds on the parsing but
not broader since we would not be able to generate the proper ast nodes.

"""
import io
import tokenize

import pytest
from pegen.tokenizer import Tokenizer


# matrix mul 3.5
@pytest.mark.parametrize("source", ["a @ b", "a @= b"])
def test_mat_mult(python_parser_cls, source):
    """The '@'/'@=' matrix-multiplication operators (3.5+) must be rejected under a 3.4 cap."""
    token_stream = tokenize.generate_tokens(io.StringIO(source).readline)
    parser = python_parser_cls(Tokenizer(token_stream, verbose=False), py_version=(3, 4))
    with pytest.raises(SyntaxError) as excinfo:
        parser.parse("file")
    assert "The '@' operator is" in excinfo.exconly()


# await 3.5
def test_await(python_parser_cls):
    """Await expressions (3.5+) must raise a SyntaxError under a 3.4 version cap."""
    buffer = io.StringIO("await b")
    tok = Tokenizer(tokenize.generate_tokens(buffer.readline), verbose=False)
    parser = python_parser_cls(tok, py_version=(3, 4))
    with pytest.raises(SyntaxError) as excinfo:
        parser.parse("file")
    assert "Await expressions are" in excinfo.exconly()


# async 3.5
@pytest.mark.parametrize(
    "source, message",
    [
        ("async def f():\n    pass", "Async functions are"),
        ("async with a:\n    pass", "Async with statements are"),
        ("async for a in b:\n    pass", "Async for loops are"),
    ],
)
def test_async(python_parser_cls, source, message):
    """Each async statement form (3.5+) must be rejected under a 3.4 cap with its own message."""
    token_stream = tokenize.generate_tokens(io.StringIO(source).readline)
    parser = python_parser_cls(Tokenizer(token_stream, verbose=False), py_version=(3, 4))
    with pytest.raises(SyntaxError) as excinfo:
        parser.parse("file")
    assert message in excinfo.exconly()


# async comprehension 3.6
def test_async_comprehension(python_parser_cls):
    """Async comprehensions (3.6+) must raise a SyntaxError under a 3.5 version cap."""
    buffer = io.StringIO("""[a async for a in b if c]""")
    tok = Tokenizer(tokenize.generate_tokens(buffer.readline), verbose=False)
    parser = python_parser_cls(tok, py_version=(3, 5))
    with pytest.raises(SyntaxError) as excinfo:
        parser.parse("file")
    assert "Async comprehensions are" in excinfo.exconly()


# variable annotation 3.6
@pytest.mark.parametrize("source", ["a: int = 1", "(a): int "])
def test_variable_annotation(python_parser_cls, source):
    """Variable annotations (3.6+) must be rejected under a 3.5 version cap."""
    token_stream = tokenize.generate_tokens(io.StringIO(source).readline)
    parser = python_parser_cls(Tokenizer(token_stream, verbose=False), py_version=(3, 5))
    with pytest.raises(SyntaxError) as excinfo:
        parser.parse("file")
    assert "Variable annotation syntax is" in excinfo.exconly()


# pos only args 3.8
@pytest.mark.parametrize("source", ["def f(a,/):\n\tpass", "def f(a=1,/):\n\tpass"])
def test_pos_only_args(python_parser_cls, source):
    """Positional-only parameters (3.8+) must be rejected under a 3.7 version cap."""
    token_stream = tokenize.generate_tokens(io.StringIO(source).readline)
    parser = python_parser_cls(Tokenizer(token_stream, verbose=False), py_version=(3, 7))
    with pytest.raises(SyntaxError) as excinfo:
        parser.parse("file")
    assert "Positional only arguments are" in excinfo.exconly()


# assignment operator 3.8
@pytest.mark.parametrize("source", ["a := 1"])
def test_assignment_operator(python_parser_cls, source):
    """The walrus operator ':=' (3.8+) must be rejected under a 3.7 version cap."""
    token_stream = tokenize.generate_tokens(io.StringIO(source).readline)
    parser = python_parser_cls(Tokenizer(token_stream, verbose=False), py_version=(3, 7))
    with pytest.raises(SyntaxError) as excinfo:
        parser.parse("file")
    assert "The ':=' operator is" in excinfo.exconly()


# generic decorators 3.9
@pytest.mark.parametrize("source", ["@f[1]\ndef f():\n\tpass"])
def test_generic_decorators(python_parser_cls, source):
    """Relaxed decorator grammar (arbitrary expressions, 3.9+) must be rejected under a 3.8 cap."""
    token_stream = tokenize.generate_tokens(io.StringIO(source).readline)
    parser = python_parser_cls(Tokenizer(token_stream, verbose=False), py_version=(3, 8))
    with pytest.raises(SyntaxError) as excinfo:
        parser.parse("file")
    # Message text matches the parser's wording verbatim (including its grammar).
    assert "Generic decorator are" in excinfo.exconly()


# parenthesized with items 3.9
@pytest.mark.parametrize("source", ["with (a, b):\n\tpass"])
def test_parenthesized_with_items(python_parser_cls, source):
    """Parenthesized with-items (3.9+) must be rejected under a 3.8 version cap."""
    token_stream = tokenize.generate_tokens(io.StringIO(source).readline)
    parser = python_parser_cls(Tokenizer(token_stream, verbose=False), py_version=(3, 8))
    with pytest.raises(SyntaxError) as excinfo:
        parser.parse("file")
    assert "Parenthesized with items" in excinfo.exconly()


# match 3.10
@pytest.mark.parametrize(
    "source", ["match a:\n\tcase 1:\n\t\tpass", "match a", "match a:\ncase b"]
)
def test_match_statement(python_parser_cls, source):
    """match statements (3.10+), including malformed ones, must be rejected under a 3.9 cap."""
    token_stream = tokenize.generate_tokens(io.StringIO(source).readline)
    parser = python_parser_cls(Tokenizer(token_stream, verbose=False), py_version=(3, 9))
    with pytest.raises(SyntaxError) as excinfo:
        parser.parse("file")
    assert "Pattern matching is" in excinfo.exconly()


# try except * 3.11
@pytest.mark.parametrize("source", ["try:\n\ta = 1\nexcept *ValueError:\n\tpass"])
def test_exceptgroup_statement(python_parser_cls, source):
    """except* clauses (3.11+) must be rejected under a 3.10 version cap."""
    token_stream = tokenize.generate_tokens(io.StringIO(source).readline)
    parser = python_parser_cls(Tokenizer(token_stream, verbose=False), py_version=(3, 10))
    with pytest.raises(SyntaxError) as excinfo:
        parser.parse("file")
    assert "Exception groups are" in excinfo.exconly()


# type alias and type vars 3.12
@pytest.mark.parametrize("source", ["type T = int", "type T[U] = tuple[U]"])
def test_type_params_statement(python_parser_cls, source):
    """type-alias statements and their parameter lists (3.12+) must be rejected under a 3.11 cap."""
    token_stream = tokenize.generate_tokens(io.StringIO(source).readline)
    parser = python_parser_cls(Tokenizer(token_stream, verbose=False), py_version=(3, 11))
    with pytest.raises(SyntaxError) as excinfo:
        parser.parse("file")
    # Either message is acceptable: the bare alias trips the first, the
    # parameterized alias may trip the second.
    error_text = excinfo.exconly()
    assert "Type statement is" in error_text or "Type parameter lists are" in error_text


# type alias and type vars 3.12
@pytest.mark.parametrize("source", ["def f[T]():\n\tpass", "async def f[T]():\n\tpass"])
def test_generic_function_statement(python_parser_cls, source):
    """Generic function syntax (type parameter lists, 3.12+) must be rejected under a 3.11 cap."""
    token_stream = tokenize.generate_tokens(io.StringIO(source).readline)
    parser = python_parser_cls(Tokenizer(token_stream, verbose=False), py_version=(3, 11))
    with pytest.raises(SyntaxError) as excinfo:
        parser.parse("file")
    assert "Type parameter lists are" in excinfo.exconly()


# generic classes 3.12
@pytest.mark.parametrize("source", ["class A[T]:\n\tpass"])
def test_generic_class_statement(python_parser_cls, source):
    """Generic class syntax (type parameter lists, 3.12+) must be rejected under a 3.11 cap."""
    token_stream = tokenize.generate_tokens(io.StringIO(source).readline)
    parser = python_parser_cls(Tokenizer(token_stream, verbose=False), py_version=(3, 11))
    with pytest.raises(SyntaxError) as excinfo:
        parser.parse("file")
    assert "Type parameter lists are" in excinfo.exconly()