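"""Tests for pegen.tokenizer.Tokenizer.

Tokenizer wraps a stdlib ``tokenize`` token stream and adds the lookahead
and backtracking operations exercised below (as inferred from the
assertions in this file): ``peek()`` and ``getnext()``, ``mark()`` and
``reset()`` for saving and restoring a position,
``get_last_non_whitespace_token()``, and ``get_lines()`` for mapping line
numbers back to source lines.
"""
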
import io
import sys
from tokenize import NEWLINE, NUMBER, ENDMARKER, TokenInfo, generate_tokens

from pegen.tokenizer import Tokenizer


def test_peek_getnext() -> None:
    source = io.StringIO("# test\n1")
    t = Tokenizer(generate_tokens(source.readline))
    # peek() returns the next significant token without consuming it; the
    # comment line is skipped, so the first token is the NUMBER on line 2.
    assert t.peek() == TokenInfo(NUMBER, "1", (2, 0), (2, 1), "1")
    assert t.getnext() == TokenInfo(NUMBER, "1", (2, 0), (2, 1), "1")
    # Python 3.12's tokenize fills in the source line for the implicit
    # NEWLINE added at end of input; older versions leave it empty.
    assert t.peek() == TokenInfo(
        NEWLINE, "", (2, 1), (2, 2), "1" if sys.version_info >= (3, 12) else ""
    )
    assert t.getnext() == TokenInfo(
        NEWLINE, "", (2, 1), (2, 2), "1" if sys.version_info >= (3, 12) else ""
    )


def test_mark_reset() -> None:
    source = io.StringIO("\n1 2")
    t = Tokenizer(generate_tokens(source.readline))
    # mark() records the current position and reset() rewinds to it, so
    # the same NUMBER token is observed a second time after the reset.
    index = t.mark()
    assert t.peek() == TokenInfo(NUMBER, "1", (2, 0), (2, 1), "1 2")
    assert t.getnext() == TokenInfo(NUMBER, "1", (2, 0), (2, 1), "1 2")
    t.reset(index)
    assert t.peek() == TokenInfo(NUMBER, "1", (2, 0), (2, 1), "1 2")
    assert t.getnext() == TokenInfo(NUMBER, "1", (2, 0), (2, 1), "1 2")


def test_last_non_whitespace() -> None:
    source = io.StringIO("\n1\n2")
    t = Tokenizer(generate_tokens(source.readline))
    assert t.peek() == TokenInfo(NUMBER, "1", (2, 0), (2, 1), "1\n")
    assert t.getnext() == TokenInfo(NUMBER, "1", (2, 0), (2, 1), "1\n")
    assert t.getnext() == TokenInfo(NEWLINE, "\n", (2, 1), (2, 2), "1\n")
    # After consuming the NEWLINE, the last non-whitespace token seen is
    # still the NUMBER that preceded it.
    assert t.get_last_non_whitespace_token() == TokenInfo(NUMBER, "1", (2, 0), (2, 1), "1\n")


def test_get_lines() -> None:
    source = io.StringIO("1\n2\n3")
    t = Tokenizer(generate_tokens(source.readline))
    # Drain the token stream so the tokenizer has seen every source line.
    while True:
        if t.getnext().type == ENDMARKER:
            break
    assert t.get_lines([1, 2, 3]) == ["1\n", "2\n", "3"]