from typing import Tuple, Optional, Callable, Iterable, Text, Sequence
_Place = Tuple[int, int]
_Spec = Tuple[Text, Tuple]
class Token:
    """A single token produced by the tokenizer.

    Stub declaration: all method bodies are ``...``.
    """

    # Token category (e.g. the spec name this token matched).
    type: Text
    # The matched source text.
    value: Text
    # (line, column) where the token starts; None when position is unknown.
    start: Optional[_Place]
    # (line, column) where the token ends; None when position is unknown.
    end: Optional[_Place]
    # NOTE(review): presumably a backward-compatibility alias of `type` —
    # SOURCE does not show its semantics; confirm against the implementation.
    name: Text

    def __init__(
        self,
        type: Text,
        value: Text,
        start: Optional[_Place] = ...,
        end: Optional[_Place] = ...,
    ) -> None: ...
    def pformat(self) -> Text: ...
class TokenSpec:
    """Specification of one token type: a name plus a regex pattern.

    Stub declaration: the method body is ``...``.
    """

    # Token type name assigned to tokens matching this spec.
    name: Text
    # Regular-expression pattern as an uncompiled source string.
    pattern: Text
    # `re` module flags used when the pattern is compiled.
    flags: int

    def __init__(self, name: Text, pattern: Text, flags: int = ...) -> None: ...
def make_tokenizer(
    specs: Sequence[TokenSpec | _Spec],
) -> Callable[[Text], Iterable[Token]]:
    """Build a tokenizer from token specifications.

    Accepts either ``TokenSpec`` objects or legacy ``(name, (pattern, ...))``
    tuples (see ``_Spec``) and returns a callable that maps source text to an
    iterable of ``Token`` objects.
    """
    ...
class LexerError(Exception):
    """Raised when the tokenizer cannot match any token spec.

    Attributes:
        place: (line, column) position at which lexing failed.
        msg: Human-readable description of the error.
    """

    # Consistency fix: spell the position type with the module's `_Place`
    # alias (== Tuple[int, int]), matching Token.start/end and the `place`
    # parameter of __init__ below.  The declared type is unchanged.
    place: _Place
    msg: Text

    def __init__(self, place: _Place, msg: Text) -> None: ...