File: tokenize.pyi

package info (click to toggle)
mypy 0.470-complete-1
  • links: PTS, VCS
  • area: main
  • in suites: stretch
  • size: 5,864 kB
  • ctags: 3,264
  • sloc: python: 21,838; makefile: 18
file content (143 lines) | stat: -rw-r--r-- 4,377 bytes parent folder | download
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
# Automatically generated by pytype, manually fixed up. May still contain errors.

import types

from typing import Any, Callable, Dict, Generator, Iterator, List, Pattern, Tuple, Union, Iterable

# Module metadata of the real Python 2 `tokenize` module.
__all__ = ...  # type: List[str]
__author__ = ...  # type: str
__credits__ = ...  # type: str

# Two families of names are interleaved alphabetically below:
#  * ALL-CAPS ints: token type codes, re-exported from the `token` module.
#  * CapWords strs: regular-expression source fragments (Binnumber, Double3,
#    ...) that the tokenizer compiles into its matching patterns.
AMPER = ...  # type: int
AMPEREQUAL = ...  # type: int
AT = ...  # type: int
BACKQUOTE = ...  # type: int
Binnumber = ...  # type: str
Bracket = ...  # type: str
CIRCUMFLEX = ...  # type: int
CIRCUMFLEXEQUAL = ...  # type: int
COLON = ...  # type: int
COMMA = ...  # type: int
COMMENT = ...  # type: int
Comment = ...  # type: str
ContStr = ...  # type: str
DEDENT = ...  # type: int
DOT = ...  # type: int
DOUBLESLASH = ...  # type: int
DOUBLESLASHEQUAL = ...  # type: int
DOUBLESTAR = ...  # type: int
DOUBLESTAREQUAL = ...  # type: int
Decnumber = ...  # type: str
Double = ...  # type: str
Double3 = ...  # type: str
ENDMARKER = ...  # type: int
EQEQUAL = ...  # type: int
EQUAL = ...  # type: int
ERRORTOKEN = ...  # type: int
Expfloat = ...  # type: str
Exponent = ...  # type: str
Floatnumber = ...  # type: str
Funny = ...  # type: str
GREATER = ...  # type: int
GREATEREQUAL = ...  # type: int
Hexnumber = ...  # type: str
INDENT = ...  # type: int

# True iff *x* is the end-of-input token code (ENDMARKER); re-exported from `token`.
def ISEOF(x: int) -> bool: ...
# True iff *x* is a grammar non-terminal code (x >= NT_OFFSET); re-exported from `token`.
def ISNONTERMINAL(x: int) -> bool: ...
# True iff *x* is a terminal token code (x < NT_OFFSET); re-exported from `token`.
def ISTERMINAL(x: int) -> bool: ...

# Same interleaving as above: ALL-CAPS ints are token codes from `token`,
# CapWords strs are regex source fragments used to build the tokenizer.
Ignore = ...  # type: str
Imagnumber = ...  # type: str
Intnumber = ...  # type: str
LBRACE = ...  # type: int
LEFTSHIFT = ...  # type: int
LEFTSHIFTEQUAL = ...  # type: int
LESS = ...  # type: int
LESSEQUAL = ...  # type: int
LPAR = ...  # type: int
LSQB = ...  # type: int
MINEQUAL = ...  # type: int
MINUS = ...  # type: int
NAME = ...  # type: int
NEWLINE = ...  # type: int
NL = ...  # type: int
NOTEQUAL = ...  # type: int
NT_OFFSET = ...  # type: int
NUMBER = ...  # type: int
N_TOKENS = ...  # type: int
Name = ...  # type: str
Number = ...  # type: str
OP = ...  # type: int
Octnumber = ...  # type: str
Operator = ...  # type: str
PERCENT = ...  # type: int
PERCENTEQUAL = ...  # type: int
PLUS = ...  # type: int
PLUSEQUAL = ...  # type: int
PlainToken = ...  # type: str
Pointfloat = ...  # type: str
PseudoExtras = ...  # type: str
PseudoToken = ...  # type: str
RBRACE = ...  # type: int
RIGHTSHIFT = ...  # type: int
RIGHTSHIFTEQUAL = ...  # type: int
RPAR = ...  # type: int
RSQB = ...  # type: int
SEMI = ...  # type: int
SLASH = ...  # type: int
SLASHEQUAL = ...  # type: int
STAR = ...  # type: int
STAREQUAL = ...  # type: int
STRING = ...  # type: int
Single = ...  # type: str
Single3 = ...  # type: str
Special = ...  # type: str
String = ...  # type: str
TILDE = ...  # type: int
Token = ...  # type: str
Triple = ...  # type: str
VBAR = ...  # type: int
VBAREQUAL = ...  # type: int
Whitespace = ...  # type: str
# Implementation details that leak out of the module's top level.
# NOTE(review): `module` is not a valid annotation -- corrected to
# `types.ModuleType`; the `*prog` names are compiled regexes (the real module
# does `map(re.compile, (Token, PseudoToken, Single3, Double3))`), so `type`
# was wrong there too.
chain = ...  # type: type   # presumably itertools.chain -- TODO confirm against runtime
double3prog = ...  # type: Pattern[str]  # compiled from Double3
endprogs = ...  # type: Dict[str, Any]   # quote prefix -> continuation pattern (or None)
pseudoprog = ...  # type: Pattern[str]   # compiled from PseudoToken
re = ...  # type: types.ModuleType
single3prog = ...  # type: Pattern[str]  # compiled from Single3
single_quoted = ...  # type: Dict[str, str]   # recognized single-quote string openers
string = ...  # type: types.ModuleType
sys = ...  # type: types.ModuleType
t = ...  # type: str   # loop variable leaked from the module's top level
tabsize = ...  # type: int   # tab width used when expanding indentation
tok_name = ...  # type: Dict[int, str]   # token code -> name, from `token`
token = ...  # type: types.ModuleType
tokenprog = ...  # type: Pattern[str]    # compiled from Token
triple_quoted = ...  # type: Dict[str, str]   # recognized triple-quote string openers
x = ...  # type: str   # loop variable leaked from the module's top level

# (row, col) source position, and the 5-tuple emitted for each token:
# (type, string, start, end, logical_line).
_Pos = Tuple[int, int]
_TokenType = Tuple[int, str, _Pos, _Pos, str]

# Regex helper (shadows the builtin, as in the real module): joins the
# alternatives and appends '*'.  The real implementation is
# `def any(*choices): return group(*choices) + '*'` -- it accepts no keyword
# arguments, so the pytype-generated `**kwargs` was wrong and inconsistent
# with group()/maybe() below.
def any(*args: str) -> str: ...
# Yields one 5-tuple (type, string, (srow, scol), (erow, ecol), line) per
# token, pulling source lines from the `readline` callable.
def generate_tokens(readline: Callable[[], str]) -> Generator[_TokenType, None, None]: ...
# Regex helper: wraps the '|'-joined alternatives in a group: '(a|b|...)'.
def group(*args: str) -> str: ...
# Regex helper: an optional group -- group(*args) followed by '?'.
def maybe(*args: str) -> str: ...
# Default tokeneater used by `tokenize`: prints one formatted token per line.
# (Parameter `type` shadows the builtin, mirroring the real module's signature.)
def printtoken(type: int, token: str, srow_scol: _Pos, erow_ecol: _Pos, line: str) -> None: ...
# The Python 2 `tokenize.tokenize`: pulls lines from `readline` and invokes
# `tokeneater` once per token with FIVE positional arguments
# (type, string, start, end, line) -- not a single 5-tuple, which is what the
# pytype-generated `Callable[[Tuple[...]], None]` wrongly said.  The real
# signature is `tokenize(readline, tokeneater=printtoken)`, so the callback
# also gets a default here.
def tokenize(readline: Callable[[], str], tokeneater: Callable[[int, str, _Pos, _Pos, str], None] = ...) -> None: ...
# Worker behind `tokenize`; same contract: `tokeneater` receives five
# positional arguments per token (type, string, start, end, line), not a
# single tuple as the generated annotation claimed.
def tokenize_loop(readline: Callable[[], str], tokeneater: Callable[[int, str, _Pos, _Pos, str], None]) -> None: ...
# Reconstructs source text from an iterable of token 5-tuples (delegates to
# the Untokenizer class below).
def untokenize(iterable: Iterable[_TokenType]) -> str: ...

class StopTokenizing(Exception):
    """Control-flow exception defined by the `tokenize` module.

    NOTE(review): not raised anywhere in this stub; presumably a tokeneater
    raises it to abort tokenization early -- confirm against the runtime module.
    """
    pass

class TokenError(Exception):
    """Raised when tokenization fails, e.g. EOF inside a multi-line string or
    statement."""
    pass

class Untokenizer:
    """Rebuilds source text from a stream of token 5-tuples (see `untokenize`)."""
    prev_col = ...  # type: int       # column reached so far in the rebuilt output
    prev_row = ...  # type: int       # row reached so far in the rebuilt output
    tokens = ...  # type: List[str]   # accumulated output fragments, joined at the end
    def __init__(self) -> None: ...
    # BUG FIX: pytype emitted the *type* `_Pos` as the parameter name, leaving
    # the parameter unannotated.  The real method is
    # `def add_whitespace(self, start)` where `start` is a (row, col) position.
    def add_whitespace(self, start: _Pos) -> None: ...
    # Fallback path for 2-tuple (type, string) token streams.
    def compat(self, token: Tuple[int, Any], iterable: Iterator[_TokenType]) -> None: ...
    def untokenize(self, iterable: Iterable[_TokenType]) -> str: ...