# Copyright (c) 2017-2026 Juancarlo AƱez (apalala@gmail.com)
# SPDX-License-Identifier: BSD-4-Clause
from __future__ import annotations
from itertools import starmap
from typing import Any, NamedTuple, Protocol, runtime_checkable
class LineIndexInfo(NamedTuple):
    """A (filename, line-number) pair identifying one line of a source file."""

    filename: str
    line: int

    @staticmethod
    def block_index(name, n) -> list[LineIndexInfo]:
        """Return index entries for lines ``0 .. n - 1`` of the file *name*."""
        return [LineIndexInfo(name, lineno) for lineno in range(n)]
class LineInfo(NamedTuple):
    """Positional information for one line of tokenizer input.

    Returned by ``Tokenizer.lineinfo()``.
    """

    filename: str  # name of the source file the line belongs to
    line: int      # line number
    col: int       # column within the line
    start: int     # presumably the absolute offset where the line starts in the text — confirm against the tokenizer implementation
    end: int       # presumably the absolute offset where the line ends — confirm against the tokenizer implementation
    text: str      # the text of the line itself
@runtime_checkable
class Tokenizer(Protocol):
    """Structural (duck-typed) interface expected of tokenizers.

    All members are abstract declarations except ``token``, which has a
    concrete default that simply mirrors ``current``.  Because the class is
    ``runtime_checkable``, ``isinstance`` checks verify member presence only.
    """

    # Fixed typo: `*ags` -> `*args`.  Varargs are positional-only, so the
    # rename is backward compatible for every caller.
    def __init__(self, *args: Any, **kwargs: Any): ...

    @property
    def text(self) -> str: ...

    @property
    def filename(self) -> str: ...

    @property
    def ignorecase(self) -> bool: ...

    @property
    def pos(self) -> int: ...

    @property
    def line(self) -> int: ...

    def goto(self, pos) -> None: ...

    def atend(self) -> bool: ...

    def ateol(self) -> bool: ...

    @property
    def current(self) -> str | None: ...

    @property
    def token(self):
        # Concrete convenience default: the token is whatever `current` reports.
        return self.current

    def next(self) -> str | None: ...

    def next_token(self) -> None: ...

    def match(self, token: str) -> str | None: ...

    def matchre(self, pattern: str) -> str | None: ...

    def posline(self, pos: int | None = None) -> int: ...

    def lineinfo(self, pos: int | None = None) -> LineInfo: ...

    def get_line(self, n: int | None = None) -> str: ...

    def get_lines(
        self,
        start: int | None = None,
        end: int | None = None,
    ) -> list[str]: ...

    def line_index(
        self,
        start: int = 0,
        end: int | None = None,
    ) -> list[LineIndexInfo]: ...

    def lookahead(self) -> str: ...

    def lookahead_pos(self) -> str: ...
class NullTokenizer(Tokenizer):
    """A do-nothing ``Tokenizer`` (Null Object pattern).

    Every query returns an empty/neutral value, so instances can stand in
    where a tokenizer is required but there is no input to tokenize.
    """

    # Fixed typo: `*ags` -> `*args`.  Varargs are positional-only, so the
    # rename is backward compatible for every caller.
    def __init__(self, *args: Any, **kwargs: Any):
        # Accept (and ignore) any construction arguments.
        pass

    @property
    def text(self) -> str:
        return ''

    @property
    def filename(self) -> str:
        return ''

    @property
    def ignorecase(self) -> bool:
        return False

    @property
    def pos(self) -> int:
        return 0

    @property
    def line(self) -> int:
        return 0

    def goto(self, pos) -> None:
        return

    def atend(self) -> bool:
        # NOTE(review): reports *not* at end; callers that loop until
        # atend() must not rely on this class to terminate the loop.
        return False

    def ateol(self) -> bool:
        return False

    @property
    def current(self) -> str | None:
        return None

    @property
    def token(self):
        return self.current

    def next(self) -> str | None:
        return None

    def next_token(self) -> None:
        return None

    def match(self, token: str) -> str | None:
        return None

    def matchre(self, pattern: str) -> str | None:
        return None

    def posline(self, pos: int | None = None) -> int:
        return 0

    def lineinfo(self, pos: int | None = None) -> LineInfo:
        return LineInfo('', 0, 0, 0, 0, '')

    def get_line(self, n: int | None = None) -> str:
        return ''

    def get_lines(self, start: int | None = None, end: int | None = None) -> list[str]:
        return []

    def line_index(self, start: int = 0, end: int | None = None) -> list[LineIndexInfo]:
        return []

    def lookahead(self) -> str:
        return ''

    def lookahead_pos(self) -> str:
        return ''